Compare commits

..

2 Commits

Author SHA1 Message Date
f1d512b2e5 Merge pull request 'dev' (#61) from dev into main
All checks were successful
Build and Push Docker Images / build-backend (push) Successful in 17s
Build and Push Docker Images / build-frontend (push) Successful in 16s
Reviewed-on: #61
2026-01-15 11:29:15 +00:00
db21b43120 Merge pull request 'dev' (#40) from dev into main
All checks were successful
Build and Push Docker Images / build-backend (push) Successful in 18s
Build and Push Docker Images / build-frontend (push) Successful in 16s
Reviewed-on: #40
2026-01-08 22:21:06 +00:00
248 changed files with 5440 additions and 36901 deletions

View File

@@ -1,20 +0,0 @@
---
name: ✨ Feature Request
about: Schlage eine Idee für dieses Projekt vor.
title: '[FEATURE] '
labels: enhancement
assignees: ''
---
**Ist dein Feature-Wunsch mit einem Problem verbunden?**
Eine klare Beschreibung des Problems (z.B. "Ich bin immer genervt, wenn...").
**Lösungsvorschlag**
Eine klare Beschreibung dessen, was du dir wünschst und wie es funktionieren soll.
**Alternativen**
Hast du über alternative Lösungen oder Workarounds nachgedacht?
**Zusätzlicher Kontext**
Hier ist Platz für weitere Informationen, Skizzen oder Beispiele von anderen Tools.

View File

@@ -1,3 +0,0 @@
{
"rules": []
}

View File

@@ -1,27 +1,13 @@
-// src/db/index.ts
-import { drizzle } from "drizzle-orm/node-postgres";
-import { Pool } from "pg";
-import * as schema from "./schema";
+import { drizzle } from "drizzle-orm/node-postgres"
+import { Pool } from "pg"
 import {secrets} from "../src/utils/secrets";
+import * as schema from "./schema"
-console.log("[DB INIT] 1. Suche Connection String...");
-// Checken woher die URL kommt
-let connectionString = process.env.DATABASE_URL || secrets.DATABASE_URL;
-if (connectionString) {
-console.log("[DB INIT] -> Gefunden in process.env.DATABASE_URL");
-} else {
-console.error("[DB INIT] ❌ KEIN CONNECTION STRING GEFUNDEN! .env nicht geladen?");
-}
 export const pool = new Pool({
-connectionString,
+connectionString: secrets.DATABASE_URL,
-max: 10,
+max: 10, // je nach Last
-});
+})
-// TEST: Ist die DB wirklich da?
-pool.query('SELECT NOW()')
-.then(res => console.log(`[DB INIT] ✅ VERBINDUNG ERFOLGREICH! Zeit auf DB: ${res.rows[0].now}`))
-.catch(err => console.error(`[DB INIT] ❌ VERBINDUNGSFEHLER:`, err.message));
-export const db = drizzle(pool, { schema });
+export const db = drizzle(pool , {schema})

View File

@@ -1,2 +0,0 @@
-- No-op migration: Datei war im Journal referenziert, aber fehlte im Repository.
SELECT 1;

View File

@@ -1,2 +0,0 @@
-- No-op migration: Datei war im Journal referenziert, aber fehlte im Repository.
SELECT 1;

View File

@@ -1,123 +0,0 @@
CREATE TABLE "m2m_api_keys" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"updated_at" timestamp with time zone DEFAULT now() NOT NULL,
"tenant_id" bigint NOT NULL,
"user_id" uuid NOT NULL,
"created_by" uuid,
"name" text NOT NULL,
"key_prefix" text NOT NULL,
"key_hash" text NOT NULL,
"active" boolean DEFAULT true NOT NULL,
"last_used_at" timestamp with time zone,
"expires_at" timestamp with time zone,
CONSTRAINT "m2m_api_keys_key_hash_unique" UNIQUE("key_hash")
);
--> statement-breakpoint
CREATE TABLE "staff_time_events" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"tenant_id" bigint NOT NULL,
"user_id" uuid NOT NULL,
"actor_type" text NOT NULL,
"actor_user_id" uuid,
"event_time" timestamp with time zone NOT NULL,
"event_type" text NOT NULL,
"source" text NOT NULL,
"invalidates_event_id" uuid,
"related_event_id" uuid,
"metadata" jsonb,
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
CONSTRAINT "time_events_actor_user_check" CHECK (
(actor_type = 'system' AND actor_user_id IS NULL)
OR
(actor_type = 'user' AND actor_user_id IS NOT NULL)
)
);
--> statement-breakpoint
CREATE TABLE "serialtypes" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "serialtypes_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"name" text NOT NULL,
"intervall" text,
"icon" text,
"tenant" bigint NOT NULL,
"archived" boolean DEFAULT false NOT NULL,
"updated_at" timestamp with time zone,
"updated_by" uuid
);
--> statement-breakpoint
CREATE TABLE "serial_executions" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"tenant" bigint NOT NULL,
"execution_date" timestamp NOT NULL,
"status" text DEFAULT 'draft',
"created_by" text,
"created_at" timestamp DEFAULT now(),
"summary" text
);
--> statement-breakpoint
CREATE TABLE "public_links" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"token" text NOT NULL,
"tenant" integer NOT NULL,
"default_profile" uuid,
"is_protected" boolean DEFAULT false NOT NULL,
"pin_hash" text,
"config" jsonb DEFAULT '{}'::jsonb,
"name" text NOT NULL,
"description" text,
"active" boolean DEFAULT true NOT NULL,
"created_at" timestamp DEFAULT now(),
"updated_at" timestamp DEFAULT now(),
CONSTRAINT "public_links_token_unique" UNIQUE("token")
);
--> statement-breakpoint
CREATE TABLE "wiki_pages" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"tenant_id" bigint NOT NULL,
"parent_id" uuid,
"title" text NOT NULL,
"content" jsonb,
"is_folder" boolean DEFAULT false NOT NULL,
"sort_order" integer DEFAULT 0 NOT NULL,
"entity_type" text,
"entity_id" bigint,
"entity_uuid" uuid,
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"updated_at" timestamp with time zone,
"created_by" uuid,
"updated_by" uuid
);
--> statement-breakpoint
ALTER TABLE "time_events" DISABLE ROW LEVEL SECURITY;--> statement-breakpoint
DROP TABLE "time_events" CASCADE;--> statement-breakpoint
ALTER TABLE "projects" ALTER COLUMN "active_phase" SET DEFAULT 'Erstkontakt';--> statement-breakpoint
ALTER TABLE "createddocuments" ADD COLUMN "serialexecution" uuid;--> statement-breakpoint
ALTER TABLE "devices" ADD COLUMN "last_seen" timestamp with time zone;--> statement-breakpoint
ALTER TABLE "devices" ADD COLUMN "last_debug_info" jsonb;--> statement-breakpoint
ALTER TABLE "files" ADD COLUMN "size" bigint;--> statement-breakpoint
ALTER TABLE "m2m_api_keys" ADD CONSTRAINT "m2m_api_keys_tenant_id_tenants_id_fk" FOREIGN KEY ("tenant_id") REFERENCES "public"."tenants"("id") ON DELETE cascade ON UPDATE cascade;--> statement-breakpoint
ALTER TABLE "m2m_api_keys" ADD CONSTRAINT "m2m_api_keys_user_id_auth_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."auth_users"("id") ON DELETE cascade ON UPDATE cascade;--> statement-breakpoint
ALTER TABLE "m2m_api_keys" ADD CONSTRAINT "m2m_api_keys_created_by_auth_users_id_fk" FOREIGN KEY ("created_by") REFERENCES "public"."auth_users"("id") ON DELETE set null ON UPDATE cascade;--> statement-breakpoint
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_tenant_id_tenants_id_fk" FOREIGN KEY ("tenant_id") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_user_id_auth_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_actor_user_id_auth_users_id_fk" FOREIGN KEY ("actor_user_id") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_invalidates_event_id_staff_time_events_id_fk" FOREIGN KEY ("invalidates_event_id") REFERENCES "public"."staff_time_events"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_related_event_id_staff_time_events_id_fk" FOREIGN KEY ("related_event_id") REFERENCES "public"."staff_time_events"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "serialtypes" ADD CONSTRAINT "serialtypes_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "serialtypes" ADD CONSTRAINT "serialtypes_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "serial_executions" ADD CONSTRAINT "serial_executions_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "public_links" ADD CONSTRAINT "public_links_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "public_links" ADD CONSTRAINT "public_links_default_profile_auth_profiles_id_fk" FOREIGN KEY ("default_profile") REFERENCES "public"."auth_profiles"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_tenant_id_tenants_id_fk" FOREIGN KEY ("tenant_id") REFERENCES "public"."tenants"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_parent_id_wiki_pages_id_fk" FOREIGN KEY ("parent_id") REFERENCES "public"."wiki_pages"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_created_by_auth_users_id_fk" FOREIGN KEY ("created_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "idx_time_events_tenant_user_time" ON "staff_time_events" USING btree ("tenant_id","user_id","event_time");--> statement-breakpoint
CREATE INDEX "idx_time_events_created_at" ON "staff_time_events" USING btree ("created_at");--> statement-breakpoint
CREATE INDEX "idx_time_events_invalidates" ON "staff_time_events" USING btree ("invalidates_event_id");--> statement-breakpoint
CREATE INDEX "wiki_pages_tenant_idx" ON "wiki_pages" USING btree ("tenant_id");--> statement-breakpoint
CREATE INDEX "wiki_pages_parent_idx" ON "wiki_pages" USING btree ("parent_id");--> statement-breakpoint
CREATE INDEX "wiki_pages_entity_int_idx" ON "wiki_pages" USING btree ("tenant_id","entity_type","entity_id");--> statement-breakpoint
CREATE INDEX "wiki_pages_entity_uuid_idx" ON "wiki_pages" USING btree ("tenant_id","entity_type","entity_uuid");--> statement-breakpoint
ALTER TABLE "createddocuments" ADD CONSTRAINT "createddocuments_serialexecution_serial_executions_id_fk" FOREIGN KEY ("serialexecution") REFERENCES "public"."serial_executions"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -1 +0,0 @@
ALTER TABLE "services" ADD COLUMN "priceUpdateLocked" boolean DEFAULT false NOT NULL;

View File

@@ -1 +0,0 @@
ALTER TABLE "customers" ADD COLUMN "customTaxType" text;

View File

@@ -1,16 +0,0 @@
CREATE TABLE "contracttypes" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "contracttypes_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"tenant" bigint NOT NULL,
"name" text NOT NULL,
"description" text,
"paymentType" text,
"recurring" boolean DEFAULT false NOT NULL,
"archived" boolean DEFAULT false NOT NULL,
"updated_at" timestamp with time zone,
"updated_by" uuid
);
--> statement-breakpoint
ALTER TABLE "contracttypes" ADD CONSTRAINT "contracttypes_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "contracttypes" ADD CONSTRAINT "contracttypes_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -1,3 +0,0 @@
ALTER TABLE "contracts" ADD COLUMN "contracttype" bigint;
--> statement-breakpoint
ALTER TABLE "contracts" ADD CONSTRAINT "contracts_contracttype_contracttypes_id_fk" FOREIGN KEY ("contracttype") REFERENCES "public"."contracttypes"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -1,3 +0,0 @@
ALTER TABLE "contracttypes" ADD COLUMN "billingInterval" text;
--> statement-breakpoint
ALTER TABLE "contracts" ADD COLUMN "billingInterval" text;

View File

@@ -1,16 +0,0 @@
CREATE TABLE "entitybankaccounts" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "entitybankaccounts_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"tenant" bigint NOT NULL,
"iban_encrypted" jsonb NOT NULL,
"bic_encrypted" jsonb NOT NULL,
"bank_name_encrypted" jsonb NOT NULL,
"description" text,
"updated_at" timestamp with time zone,
"updated_by" uuid,
"archived" boolean DEFAULT false NOT NULL
);
--> statement-breakpoint
ALTER TABLE "entitybankaccounts" ADD CONSTRAINT "entitybankaccounts_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "entitybankaccounts" ADD CONSTRAINT "entitybankaccounts_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -1,73 +0,0 @@
CREATE TABLE "customerspaces" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "customerspaces_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"name" text NOT NULL,
"type" text NOT NULL,
"tenant" bigint NOT NULL,
"customer" bigint NOT NULL,
"spaceNumber" text NOT NULL,
"parentSpace" bigint,
"infoData" jsonb DEFAULT '{"zip":"","city":"","streetNumber":""}'::jsonb NOT NULL,
"description" text,
"archived" boolean DEFAULT false NOT NULL,
"updated_at" timestamp with time zone,
"updated_by" uuid
);
--> statement-breakpoint
CREATE TABLE "customerinventoryitems" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "customerinventoryitems_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"name" text NOT NULL,
"description" text,
"tenant" bigint NOT NULL,
"customer" bigint NOT NULL,
"customerspace" bigint,
"customerInventoryId" text NOT NULL,
"serialNumber" text,
"quantity" bigint DEFAULT 0 NOT NULL,
"manufacturer" text,
"manufacturerNumber" text,
"purchaseDate" date,
"purchasePrice" double precision DEFAULT 0,
"currentValue" double precision,
"product" bigint,
"archived" boolean DEFAULT false NOT NULL,
"updated_at" timestamp with time zone,
"updated_by" uuid
);
--> statement-breakpoint
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_customer_customers_id_fk" FOREIGN KEY ("customer") REFERENCES "public"."customers"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_parentSpace_customerspaces_id_fk" FOREIGN KEY ("parentSpace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_customer_customers_id_fk" FOREIGN KEY ("customer") REFERENCES "public"."customers"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_customerspace_customerspaces_id_fk" FOREIGN KEY ("customerspace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_product_products_id_fk" FOREIGN KEY ("product") REFERENCES "public"."products"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
CREATE UNIQUE INDEX "customerinventoryitems_tenant_customerInventoryId_idx" ON "customerinventoryitems" USING btree ("tenant","customerInventoryId");
--> statement-breakpoint
ALTER TABLE "historyitems" ADD COLUMN "customerspace" bigint;
--> statement-breakpoint
ALTER TABLE "historyitems" ADD COLUMN "customerinventoryitem" bigint;
--> statement-breakpoint
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_customerspace_customerspaces_id_fk" FOREIGN KEY ("customerspace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_customerinventoryitem_customerinventoryitems_id_fk" FOREIGN KEY ("customerinventoryitem") REFERENCES "public"."customerinventoryitems"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "tenants" ALTER COLUMN "numberRanges" SET DEFAULT '{"vendors":{"prefix":"","suffix":"","nextNumber":10000},"customers":{"prefix":"","suffix":"","nextNumber":10000},"products":{"prefix":"AT-","suffix":"","nextNumber":1000},"quotes":{"prefix":"AN-","suffix":"","nextNumber":1000},"confirmationOrders":{"prefix":"AB-","suffix":"","nextNumber":1000},"invoices":{"prefix":"RE-","suffix":"","nextNumber":1000},"spaces":{"prefix":"LP-","suffix":"","nextNumber":1000},"customerspaces":{"prefix":"KLP-","suffix":"","nextNumber":1000},"inventoryitems":{"prefix":"IA-","suffix":"","nextNumber":1000},"customerinventoryitems":{"prefix":"KIA-","suffix":"","nextNumber":1000},"projects":{"prefix":"PRJ-","suffix":"","nextNumber":1000},"costcentres":{"prefix":"KST-","suffix":"","nextNumber":1000}}'::jsonb;
--> statement-breakpoint
UPDATE "tenants"
SET "numberRanges" = COALESCE("numberRanges", '{}'::jsonb) || jsonb_build_object(
'customerspaces', COALESCE("numberRanges"->'customerspaces', '{"prefix":"KLP-","suffix":"","nextNumber":1000}'::jsonb),
'customerinventoryitems', COALESCE("numberRanges"->'customerinventoryitems', '{"prefix":"KIA-","suffix":"","nextNumber":1000}'::jsonb)
);

View File

@@ -1,3 +0,0 @@
ALTER TABLE "customerinventoryitems" ADD COLUMN "vendor" bigint;
--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_vendor_vendors_id_fk" FOREIGN KEY ("vendor") REFERENCES "public"."vendors"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -1,20 +0,0 @@
CREATE TABLE "memberrelations" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "memberrelations_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"tenant" bigint NOT NULL,
"type" text NOT NULL,
"billingInterval" text NOT NULL,
"billingAmount" double precision DEFAULT 0 NOT NULL,
"archived" boolean DEFAULT false NOT NULL,
"updated_at" timestamp with time zone,
"updated_by" uuid
);
--> statement-breakpoint
ALTER TABLE "customers" ADD COLUMN "memberrelation" bigint;
--> statement-breakpoint
ALTER TABLE "memberrelations" ADD CONSTRAINT "memberrelations_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "memberrelations" ADD CONSTRAINT "memberrelations_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
--> statement-breakpoint
ALTER TABLE "customers" ADD CONSTRAINT "customers_memberrelation_memberrelations_id_fk" FOREIGN KEY ("memberrelation") REFERENCES "public"."memberrelations"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -1,4 +0,0 @@
ALTER TABLE "historyitems" ADD COLUMN "memberrelation" bigint;
--> statement-breakpoint
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_memberrelation_memberrelations_id_fk" FOREIGN KEY ("memberrelation") REFERENCES "public"."memberrelations"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -1,33 +0,0 @@
ALTER TABLE "customers" ADD COLUMN IF NOT EXISTS "memberrelation" bigint;
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1
FROM pg_constraint
WHERE conname = 'customers_memberrelation_memberrelations_id_fk'
) THEN
ALTER TABLE "customers"
ADD CONSTRAINT "customers_memberrelation_memberrelations_id_fk"
FOREIGN KEY ("memberrelation")
REFERENCES "public"."memberrelations"("id")
ON DELETE no action
ON UPDATE no action;
END IF;
END $$;
UPDATE "customers"
SET "memberrelation" = ("infoData"->>'memberrelation')::bigint
WHERE
"memberrelation" IS NULL
AND "type" = 'Mitglied'
AND jsonb_typeof(COALESCE("infoData", '{}'::jsonb)) = 'object'
AND COALESCE("infoData", '{}'::jsonb) ? 'memberrelation'
AND ("infoData"->>'memberrelation') ~ '^[0-9]+$';
UPDATE "customers"
SET "infoData" = COALESCE("infoData", '{}'::jsonb) - 'memberrelation'
WHERE
"type" = 'Mitglied'
AND jsonb_typeof(COALESCE("infoData", '{}'::jsonb)) = 'object'
AND COALESCE("infoData", '{}'::jsonb) ? 'memberrelation';

View File

@@ -1,108 +0,0 @@
CREATE TABLE "contracttypes" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "contracttypes_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"tenant" bigint NOT NULL,
"name" text NOT NULL,
"description" text,
"paymentType" text,
"recurring" boolean DEFAULT false NOT NULL,
"billingInterval" text,
"archived" boolean DEFAULT false NOT NULL,
"updated_at" timestamp with time zone,
"updated_by" uuid
);
--> statement-breakpoint
CREATE TABLE "customerinventoryitems" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "customerinventoryitems_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"name" text NOT NULL,
"description" text,
"tenant" bigint NOT NULL,
"customer" bigint NOT NULL,
"customerspace" bigint,
"customerInventoryId" text NOT NULL,
"serialNumber" text,
"quantity" bigint DEFAULT 0 NOT NULL,
"manufacturer" text,
"manufacturerNumber" text,
"purchaseDate" date,
"purchasePrice" double precision DEFAULT 0,
"currentValue" double precision,
"product" bigint,
"vendor" bigint,
"archived" boolean DEFAULT false NOT NULL,
"updated_at" timestamp with time zone,
"updated_by" uuid
);
--> statement-breakpoint
CREATE TABLE "customerspaces" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "customerspaces_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"name" text NOT NULL,
"type" text NOT NULL,
"tenant" bigint NOT NULL,
"customer" bigint NOT NULL,
"spaceNumber" text NOT NULL,
"parentSpace" bigint,
"infoData" jsonb DEFAULT '{"zip":"","city":"","streetNumber":""}'::jsonb NOT NULL,
"description" text,
"archived" boolean DEFAULT false NOT NULL,
"updated_at" timestamp with time zone,
"updated_by" uuid
);
--> statement-breakpoint
CREATE TABLE "entitybankaccounts" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "entitybankaccounts_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"tenant" bigint NOT NULL,
"iban_encrypted" jsonb NOT NULL,
"bic_encrypted" jsonb NOT NULL,
"bank_name_encrypted" jsonb NOT NULL,
"description" text,
"updated_at" timestamp with time zone,
"updated_by" uuid,
"archived" boolean DEFAULT false NOT NULL
);
--> statement-breakpoint
CREATE TABLE "memberrelations" (
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "memberrelations_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"tenant" bigint NOT NULL,
"type" text NOT NULL,
"billingInterval" text NOT NULL,
"billingAmount" double precision DEFAULT 0 NOT NULL,
"archived" boolean DEFAULT false NOT NULL,
"updated_at" timestamp with time zone,
"updated_by" uuid
);
--> statement-breakpoint
ALTER TABLE "tenants" ALTER COLUMN "numberRanges" SET DEFAULT '{"vendors":{"prefix":"","suffix":"","nextNumber":10000},"customers":{"prefix":"","suffix":"","nextNumber":10000},"products":{"prefix":"AT-","suffix":"","nextNumber":1000},"quotes":{"prefix":"AN-","suffix":"","nextNumber":1000},"confirmationOrders":{"prefix":"AB-","suffix":"","nextNumber":1000},"invoices":{"prefix":"RE-","suffix":"","nextNumber":1000},"spaces":{"prefix":"LP-","suffix":"","nextNumber":1000},"customerspaces":{"prefix":"KLP-","suffix":"","nextNumber":1000},"inventoryitems":{"prefix":"IA-","suffix":"","nextNumber":1000},"customerinventoryitems":{"prefix":"KIA-","suffix":"","nextNumber":1000},"projects":{"prefix":"PRJ-","suffix":"","nextNumber":1000},"costcentres":{"prefix":"KST-","suffix":"","nextNumber":1000}}'::jsonb;--> statement-breakpoint
ALTER TABLE "contracts" ADD COLUMN "contracttype" bigint;--> statement-breakpoint
ALTER TABLE "contracts" ADD COLUMN "billingInterval" text;--> statement-breakpoint
ALTER TABLE "customers" ADD COLUMN "customTaxType" text;--> statement-breakpoint
ALTER TABLE "customers" ADD COLUMN "memberrelation" bigint;--> statement-breakpoint
ALTER TABLE "historyitems" ADD COLUMN "customerspace" bigint;--> statement-breakpoint
ALTER TABLE "historyitems" ADD COLUMN "customerinventoryitem" bigint;--> statement-breakpoint
ALTER TABLE "historyitems" ADD COLUMN "memberrelation" bigint;--> statement-breakpoint
ALTER TABLE "services" ADD COLUMN "priceUpdateLocked" boolean DEFAULT false NOT NULL;--> statement-breakpoint
ALTER TABLE "contracttypes" ADD CONSTRAINT "contracttypes_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "contracttypes" ADD CONSTRAINT "contracttypes_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_customer_customers_id_fk" FOREIGN KEY ("customer") REFERENCES "public"."customers"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_customerspace_customerspaces_id_fk" FOREIGN KEY ("customerspace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_product_products_id_fk" FOREIGN KEY ("product") REFERENCES "public"."products"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_vendor_vendors_id_fk" FOREIGN KEY ("vendor") REFERENCES "public"."vendors"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_customer_customers_id_fk" FOREIGN KEY ("customer") REFERENCES "public"."customers"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_parentSpace_customerspaces_id_fk" FOREIGN KEY ("parentSpace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "entitybankaccounts" ADD CONSTRAINT "entitybankaccounts_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "entitybankaccounts" ADD CONSTRAINT "entitybankaccounts_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "memberrelations" ADD CONSTRAINT "memberrelations_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "memberrelations" ADD CONSTRAINT "memberrelations_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "contracts" ADD CONSTRAINT "contracts_contracttype_contracttypes_id_fk" FOREIGN KEY ("contracttype") REFERENCES "public"."contracttypes"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "customers" ADD CONSTRAINT "customers_memberrelation_memberrelations_id_fk" FOREIGN KEY ("memberrelation") REFERENCES "public"."memberrelations"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_customerspace_customerspaces_id_fk" FOREIGN KEY ("customerspace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_customerinventoryitem_customerinventoryitems_id_fk" FOREIGN KEY ("customerinventoryitem") REFERENCES "public"."customerinventoryitems"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_memberrelation_memberrelations_id_fk" FOREIGN KEY ("memberrelation") REFERENCES "public"."memberrelations"("id") ON DELETE no action ON UPDATE no action;

View File

@@ -1,3 +0,0 @@
ALTER TABLE "accounts" ADD COLUMN "accountChart" text DEFAULT 'skr03' NOT NULL;
--> statement-breakpoint
ALTER TABLE "tenants" ADD COLUMN "accountChart" text DEFAULT 'skr03' NOT NULL;

View File

@@ -1,3 +0,0 @@
ALTER TABLE "createddocuments"
ALTER COLUMN "customSurchargePercentage" TYPE double precision
USING "customSurchargePercentage"::double precision;

View File

@@ -36,104 +36,6 @@
"when": 1765716877146, "when": 1765716877146,
"tag": "0004_stormy_onslaught", "tag": "0004_stormy_onslaught",
"breakpoints": true "breakpoints": true
},
{
"idx": 5,
"version": "7",
"when": 1771096926109,
"tag": "0005_green_shinobi_shaw",
"breakpoints": true
},
{
"idx": 6,
"version": "7",
"when": 1772000000000,
"tag": "0006_nifty_price_lock",
"breakpoints": true
},
{
"idx": 7,
"version": "7",
"when": 1772000100000,
"tag": "0007_bright_default_tax_type",
"breakpoints": true
},
{
"idx": 8,
"version": "7",
"when": 1773000000000,
"tag": "0008_quick_contracttypes",
"breakpoints": true
},
{
"idx": 9,
"version": "7",
"when": 1773000100000,
"tag": "0009_heavy_contract_contracttype",
"breakpoints": true
},
{
"idx": 10,
"version": "7",
"when": 1773000200000,
"tag": "0010_sudden_billing_interval",
"breakpoints": true
},
{
"idx": 11,
"version": "7",
"when": 1773000300000,
"tag": "0011_mighty_member_bankaccounts",
"breakpoints": true
},
{
"idx": 12,
"version": "7",
"when": 1773000400000,
"tag": "0012_shiny_customer_inventory",
"breakpoints": true
},
{
"idx": 13,
"version": "7",
"when": 1773000500000,
"tag": "0013_brisk_customer_inventory_vendor",
"breakpoints": true
},
{
"idx": 14,
"version": "7",
"when": 1773000600000,
"tag": "0014_smart_memberrelations",
"breakpoints": true
},
{
"idx": 15,
"version": "7",
"when": 1773000700000,
"tag": "0015_wise_memberrelation_history",
"breakpoints": true
},
{
"idx": 16,
"version": "7",
"when": 1773000800000,
"tag": "0016_fix_memberrelation_column_usage",
"breakpoints": true
},
{
"idx": 17,
"version": "7",
"when": 1771704862789,
"tag": "0017_slow_the_hood",
"breakpoints": true
},
{
"idx": 18,
"version": "7",
"when": 1773000900000,
"tag": "0018_account_chart",
"breakpoints": true
} }
] ]
} }

View File

@@ -16,7 +16,6 @@ export const accounts = pgTable("accounts", {
number: text("number").notNull(), number: text("number").notNull(),
label: text("label").notNull(), label: text("label").notNull(),
accountChart: text("accountChart").notNull().default("skr03"),
description: text("description"), description: text("description"),
}) })

View File

@@ -11,7 +11,6 @@ import {
import { tenants } from "./tenants" import { tenants } from "./tenants"
import { customers } from "./customers" import { customers } from "./customers"
import { contacts } from "./contacts" import { contacts } from "./contacts"
import { contracttypes } from "./contracttypes"
import { authUsers } from "./auth_users" import { authUsers } from "./auth_users"
export const contracts = pgTable( export const contracts = pgTable(
@@ -49,9 +48,6 @@ export const contracts = pgTable(
contact: bigint("contact", { mode: "number" }).references( contact: bigint("contact", { mode: "number" }).references(
() => contacts.id () => contacts.id
), ),
contracttype: bigint("contracttype", { mode: "number" }).references(
() => contracttypes.id
),
bankingIban: text("bankingIban"), bankingIban: text("bankingIban"),
bankingBIC: text("bankingBIC"), bankingBIC: text("bankingBIC"),
@@ -61,7 +57,6 @@ export const contracts = pgTable(
sepaDate: timestamp("sepaDate", { withTimezone: true }), sepaDate: timestamp("sepaDate", { withTimezone: true }),
paymentType: text("paymentType"), paymentType: text("paymentType"),
billingInterval: text("billingInterval"),
invoiceDispatch: text("invoiceDispatch"), invoiceDispatch: text("invoiceDispatch"),
ownFields: jsonb("ownFields").notNull().default({}), ownFields: jsonb("ownFields").notNull().default({}),

View File

@@ -1,40 +0,0 @@
import {
pgTable,
bigint,
timestamp,
text,
boolean,
uuid,
} from "drizzle-orm/pg-core"
import { tenants } from "./tenants"
import { authUsers } from "./auth_users"
export const contracttypes = pgTable("contracttypes", {
id: bigint("id", { mode: "number" })
.primaryKey()
.generatedByDefaultAsIdentity(),
createdAt: timestamp("created_at", { withTimezone: true })
.notNull()
.defaultNow(),
tenant: bigint("tenant", { mode: "number" })
.notNull()
.references(() => tenants.id),
name: text("name").notNull(),
description: text("description"),
paymentType: text("paymentType"),
recurring: boolean("recurring").notNull().default(false),
billingInterval: text("billingInterval"),
archived: boolean("archived").notNull().default(false),
updatedAt: timestamp("updated_at", { withTimezone: true }),
updatedBy: uuid("updated_by").references(() => authUsers.id),
})
export type ContractType = typeof contracttypes.$inferSelect
export type NewContractType = typeof contracttypes.$inferInsert

View File

@@ -6,7 +6,6 @@ import {
jsonb, jsonb,
boolean, boolean,
smallint, smallint,
doublePrecision,
uuid, uuid,
} from "drizzle-orm/pg-core" } from "drizzle-orm/pg-core"
@@ -97,7 +96,7 @@ export const createddocuments = pgTable("createddocuments", {
taxType: text("taxType"), taxType: text("taxType"),
customSurchargePercentage: doublePrecision("customSurchargePercentage") customSurchargePercentage: smallint("customSurchargePercentage")
.notNull() .notNull()
.default(0), .default(0),

View File

@@ -1,66 +0,0 @@
import {
pgTable,
bigint,
timestamp,
text,
boolean,
doublePrecision,
uuid,
date,
} from "drizzle-orm/pg-core"
import { tenants } from "./tenants"
import { customers } from "./customers"
import { customerspaces } from "./customerspaces"
import { products } from "./products"
import { vendors } from "./vendors"
import { authUsers } from "./auth_users"
export const customerinventoryitems = pgTable("customerinventoryitems", {
id: bigint("id", { mode: "number" })
.primaryKey()
.generatedByDefaultAsIdentity(),
createdAt: timestamp("created_at", { withTimezone: true })
.notNull()
.defaultNow(),
name: text("name").notNull(),
description: text("description"),
tenant: bigint("tenant", { mode: "number" })
.notNull()
.references(() => tenants.id),
customer: bigint("customer", { mode: "number" })
.notNull()
.references(() => customers.id),
customerspace: bigint("customerspace", { mode: "number" }).references(
() => customerspaces.id
),
customerInventoryId: text("customerInventoryId").notNull(),
serialNumber: text("serialNumber"),
quantity: bigint("quantity", { mode: "number" }).notNull().default(0),
manufacturer: text("manufacturer"),
manufacturerNumber: text("manufacturerNumber"),
purchaseDate: date("purchaseDate"),
purchasePrice: doublePrecision("purchasePrice").default(0),
currentValue: doublePrecision("currentValue"),
product: bigint("product", { mode: "number" }).references(() => products.id),
vendor: bigint("vendor", { mode: "number" }).references(() => vendors.id),
archived: boolean("archived").notNull().default(false),
updatedAt: timestamp("updated_at", { withTimezone: true }),
updatedBy: uuid("updated_by").references(() => authUsers.id),
})
export type CustomerInventoryItem = typeof customerinventoryitems.$inferSelect
export type NewCustomerInventoryItem = typeof customerinventoryitems.$inferInsert

View File

@@ -10,7 +10,6 @@ import {
} from "drizzle-orm/pg-core" } from "drizzle-orm/pg-core"
import { tenants } from "./tenants" import { tenants } from "./tenants"
import { authUsers } from "./auth_users" import { authUsers } from "./auth_users"
import { memberrelations } from "./memberrelations"
export const customers = pgTable( export const customers = pgTable(
"customers", "customers",
@@ -63,8 +62,6 @@ export const customers = pgTable(
updatedBy: uuid("updated_by").references(() => authUsers.id), updatedBy: uuid("updated_by").references(() => authUsers.id),
customPaymentType: text("custom_payment_type"), // ENUM payment_types separat? customPaymentType: text("custom_payment_type"), // ENUM payment_types separat?
customTaxType: text("customTaxType"),
memberrelation: bigint("memberrelation", { mode: "number" }).references(() => memberrelations.id),
} }
) )

View File

@@ -1,54 +0,0 @@
import {
pgTable,
bigint,
timestamp,
text,
boolean,
jsonb,
uuid,
} from "drizzle-orm/pg-core"
import { tenants } from "./tenants"
import { customers } from "./customers"
import { authUsers } from "./auth_users"
export const customerspaces = pgTable("customerspaces", {
id: bigint("id", { mode: "number" })
.primaryKey()
.generatedByDefaultAsIdentity(),
createdAt: timestamp("created_at", { withTimezone: true })
.notNull()
.defaultNow(),
name: text("name").notNull(),
type: text("type").notNull(),
tenant: bigint("tenant", { mode: "number" })
.notNull()
.references(() => tenants.id),
customer: bigint("customer", { mode: "number" })
.notNull()
.references(() => customers.id),
space_number: text("spaceNumber").notNull(),
parentSpace: bigint("parentSpace", { mode: "number" }).references(
() => customerspaces.id
),
info_data: jsonb("infoData")
.notNull()
.default({ zip: "", city: "", streetNumber: "" }),
description: text("description"),
archived: boolean("archived").notNull().default(false),
updatedAt: timestamp("updated_at", { withTimezone: true }),
updatedBy: uuid("updated_by").references(() => authUsers.id),
})
export type CustomerSpace = typeof customerspaces.$inferSelect
export type NewCustomerSpace = typeof customerspaces.$inferInsert

View File

@@ -3,7 +3,7 @@ import {
uuid, uuid,
timestamp, timestamp,
text, text,
bigint, jsonb, bigint,
} from "drizzle-orm/pg-core" } from "drizzle-orm/pg-core"
import { tenants } from "./tenants" import { tenants } from "./tenants"
@@ -23,11 +23,6 @@ export const devices = pgTable("devices", {
password: text("password"), password: text("password"),
externalId: text("externalId"), externalId: text("externalId"),
lastSeen: timestamp("last_seen", { withTimezone: true }),
// Hier speichern wir den ganzen Payload (RSSI, Heap, IP, etc.)
lastDebugInfo: jsonb("last_debug_info"),
}) })
export type Device = typeof devices.$inferSelect export type Device = typeof devices.$inferSelect

View File

@@ -1,39 +0,0 @@
import {
pgTable,
bigint,
timestamp,
text,
boolean,
jsonb,
uuid,
} from "drizzle-orm/pg-core"
import { tenants } from "./tenants"
import { authUsers } from "./auth_users"
export const entitybankaccounts = pgTable("entitybankaccounts", {
id: bigint("id", { mode: "number" })
.primaryKey()
.generatedByDefaultAsIdentity(),
createdAt: timestamp("created_at", { withTimezone: true })
.notNull()
.defaultNow(),
tenant: bigint("tenant", { mode: "number" })
.notNull()
.references(() => tenants.id),
ibanEncrypted: jsonb("iban_encrypted").notNull(),
bicEncrypted: jsonb("bic_encrypted").notNull(),
bankNameEncrypted: jsonb("bank_name_encrypted").notNull(),
description: text("description"),
updatedAt: timestamp("updated_at", { withTimezone: true }),
updatedBy: uuid("updated_by").references(() => authUsers.id),
archived: boolean("archived").notNull().default(false),
})
export type EntityBankAccount = typeof entitybankaccounts.$inferSelect
export type NewEntityBankAccount = typeof entitybankaccounts.$inferInsert

View File

@@ -73,7 +73,6 @@ export const files = pgTable("files", {
createdBy: uuid("created_by").references(() => authUsers.id), createdBy: uuid("created_by").references(() => authUsers.id),
authProfile: uuid("auth_profile").references(() => authProfiles.id), authProfile: uuid("auth_profile").references(() => authProfiles.id),
size: bigint("size", { mode: "number" }),
}) })
export type File = typeof files.$inferSelect export type File = typeof files.$inferSelect

View File

@@ -20,8 +20,6 @@ import { tasks } from "./tasks"
import { vehicles } from "./vehicles" import { vehicles } from "./vehicles"
import { bankstatements } from "./bankstatements" import { bankstatements } from "./bankstatements"
import { spaces } from "./spaces" import { spaces } from "./spaces"
import { customerspaces } from "./customerspaces"
import { customerinventoryitems } from "./customerinventoryitems"
import { costcentres } from "./costcentres" import { costcentres } from "./costcentres"
import { ownaccounts } from "./ownaccounts" import { ownaccounts } from "./ownaccounts"
import { createddocuments } from "./createddocuments" import { createddocuments } from "./createddocuments"
@@ -34,7 +32,6 @@ import { events } from "./events"
import { inventoryitemgroups } from "./inventoryitemgroups" import { inventoryitemgroups } from "./inventoryitemgroups"
import { authUsers } from "./auth_users" import { authUsers } from "./auth_users"
import {files} from "./files"; import {files} from "./files";
import { memberrelations } from "./memberrelations";
export const historyitems = pgTable("historyitems", { export const historyitems = pgTable("historyitems", {
id: bigint("id", { mode: "number" }) id: bigint("id", { mode: "number" })
@@ -102,12 +99,6 @@ export const historyitems = pgTable("historyitems", {
space: bigint("space", { mode: "number" }).references(() => spaces.id), space: bigint("space", { mode: "number" }).references(() => spaces.id),
customerspace: bigint("customerspace", { mode: "number" }).references(() => customerspaces.id),
customerinventoryitem: bigint("customerinventoryitem", { mode: "number" }).references(() => customerinventoryitems.id),
memberrelation: bigint("memberrelation", { mode: "number" }).references(() => memberrelations.id),
config: jsonb("config"), config: jsonb("config"),
projecttype: bigint("projecttype", { mode: "number" }).references( projecttype: bigint("projecttype", { mode: "number" }).references(

View File

@@ -14,7 +14,7 @@ export const hourrates = pgTable("hourrates", {
name: text("name").notNull(), name: text("name").notNull(),
purchase_price: doublePrecision("purchasePrice").notNull(), purchasePrice: doublePrecision("purchasePrice").notNull(),
sellingPrice: doublePrecision("sellingPrice").notNull(), sellingPrice: doublePrecision("sellingPrice").notNull(),
archived: boolean("archived").notNull().default(false), archived: boolean("archived").notNull().default(false),

View File

@@ -13,19 +13,15 @@ export * from "./checks"
export * from "./citys" export * from "./citys"
export * from "./contacts" export * from "./contacts"
export * from "./contracts" export * from "./contracts"
export * from "./contracttypes"
export * from "./costcentres" export * from "./costcentres"
export * from "./countrys" export * from "./countrys"
export * from "./createddocuments" export * from "./createddocuments"
export * from "./createdletters" export * from "./createdletters"
export * from "./customers" export * from "./customers"
export * from "./customerspaces"
export * from "./customerinventoryitems"
export * from "./devices" export * from "./devices"
export * from "./documentboxes" export * from "./documentboxes"
export * from "./enums" export * from "./enums"
export * from "./events" export * from "./events"
export * from "./entitybankaccounts"
export * from "./files" export * from "./files"
export * from "./filetags" export * from "./filetags"
export * from "./folders" export * from "./folders"
@@ -46,9 +42,7 @@ export * from "./incominginvoices"
export * from "./inventoryitemgroups" export * from "./inventoryitemgroups"
export * from "./inventoryitems" export * from "./inventoryitems"
export * from "./letterheads" export * from "./letterheads"
export * from "./memberrelations"
export * from "./movements" export * from "./movements"
export * from "./m2m_api_keys"
export * from "./notifications_event_types" export * from "./notifications_event_types"
export * from "./notifications_items" export * from "./notifications_items"
export * from "./notifications_preferences" export * from "./notifications_preferences"
@@ -78,4 +72,3 @@ export * from "./staff_time_events"
export * from "./serialtypes" export * from "./serialtypes"
export * from "./serialexecutions" export * from "./serialexecutions"
export * from "./public_links" export * from "./public_links"
export * from "./wikipages"

View File

@@ -1,48 +0,0 @@
import {
pgTable,
uuid,
bigint,
text,
timestamp,
boolean,
} from "drizzle-orm/pg-core"
import { tenants } from "./tenants"
import { authUsers } from "./auth_users"
export const m2mApiKeys = pgTable("m2m_api_keys", {
id: uuid("id").primaryKey().defaultRandom(),
createdAt: timestamp("created_at", { withTimezone: true })
.notNull()
.defaultNow(),
updatedAt: timestamp("updated_at", { withTimezone: true })
.notNull()
.defaultNow(),
tenantId: bigint("tenant_id", { mode: "number" })
.notNull()
.references(() => tenants.id, { onDelete: "cascade", onUpdate: "cascade" }),
userId: uuid("user_id")
.notNull()
.references(() => authUsers.id, { onDelete: "cascade", onUpdate: "cascade" }),
createdBy: uuid("created_by").references(() => authUsers.id, {
onDelete: "set null",
onUpdate: "cascade",
}),
name: text("name").notNull(),
keyPrefix: text("key_prefix").notNull(),
keyHash: text("key_hash").notNull().unique(),
active: boolean("active").notNull().default(true),
lastUsedAt: timestamp("last_used_at", { withTimezone: true }),
expiresAt: timestamp("expires_at", { withTimezone: true }),
})
export type M2mApiKey = typeof m2mApiKeys.$inferSelect
export type NewM2mApiKey = typeof m2mApiKeys.$inferInsert

View File

@@ -1,39 +0,0 @@
import {
pgTable,
bigint,
timestamp,
text,
boolean,
uuid,
doublePrecision,
} from "drizzle-orm/pg-core"
import { tenants } from "./tenants"
import { authUsers } from "./auth_users"
export const memberrelations = pgTable("memberrelations", {
id: bigint("id", { mode: "number" })
.primaryKey()
.generatedByDefaultAsIdentity(),
createdAt: timestamp("created_at", { withTimezone: true })
.notNull()
.defaultNow(),
tenant: bigint("tenant", { mode: "number" })
.notNull()
.references(() => tenants.id),
type: text("type").notNull(),
billingInterval: text("billingInterval").notNull(),
billingAmount: doublePrecision("billingAmount").notNull().default(0),
archived: boolean("archived").notNull().default(false),
updatedAt: timestamp("updated_at", { withTimezone: true }),
updatedBy: uuid("updated_by").references(() => authUsers.id),
})
export type MemberRelation = typeof memberrelations.$inferSelect
export type NewMemberRelation = typeof memberrelations.$inferInsert

View File

@@ -54,7 +54,6 @@ export const services = pgTable("services", {
materialComposition: jsonb("materialComposition").notNull().default([]), materialComposition: jsonb("materialComposition").notNull().default([]),
personalComposition: jsonb("personalComposition").notNull().default([]), personalComposition: jsonb("personalComposition").notNull().default([]),
priceUpdateLocked: boolean("priceUpdateLocked").notNull().default(false),
updatedAt: timestamp("updated_at", { withTimezone: true }), updatedAt: timestamp("updated_at", { withTimezone: true }),
updatedBy: uuid("updated_by").references(() => authUsers.id), updatedBy: uuid("updated_by").references(() => authUsers.id),

View File

@@ -74,48 +74,6 @@ export const tenants = pgTable(
timeTracking: true, timeTracking: true,
planningBoard: true, planningBoard: true,
workingTimeTracking: true, workingTimeTracking: true,
dashboard: true,
historyitems: true,
tasks: true,
wiki: true,
files: true,
createdletters: true,
documentboxes: true,
helpdesk: true,
email: true,
members: true,
customers: true,
vendors: true,
contactsList: true,
staffTime: true,
createDocument: true,
serialInvoice: true,
incomingInvoices: true,
costcentres: true,
accounts: true,
ownaccounts: true,
banking: true,
spaces: true,
customerspaces: true,
customerinventoryitems: true,
inventoryitems: true,
inventoryitemgroups: true,
products: true,
productcategories: true,
services: true,
servicecategories: true,
memberrelations: true,
staffProfiles: true,
hourrates: true,
projecttypes: true,
contracttypes: true,
plants: true,
settingsNumberRanges: true,
settingsEmailAccounts: true,
settingsBanking: true,
settingsTexttemplates: true,
settingsTenant: true,
export: true,
}), }),
ownFields: jsonb("ownFields"), ownFields: jsonb("ownFields"),
@@ -130,13 +88,10 @@ export const tenants = pgTable(
confirmationOrders: { prefix: "AB-", suffix: "", nextNumber: 1000 }, confirmationOrders: { prefix: "AB-", suffix: "", nextNumber: 1000 },
invoices: { prefix: "RE-", suffix: "", nextNumber: 1000 }, invoices: { prefix: "RE-", suffix: "", nextNumber: 1000 },
spaces: { prefix: "LP-", suffix: "", nextNumber: 1000 }, spaces: { prefix: "LP-", suffix: "", nextNumber: 1000 },
customerspaces: { prefix: "KLP-", suffix: "", nextNumber: 1000 },
inventoryitems: { prefix: "IA-", suffix: "", nextNumber: 1000 }, inventoryitems: { prefix: "IA-", suffix: "", nextNumber: 1000 },
customerinventoryitems: { prefix: "KIA-", suffix: "", nextNumber: 1000 },
projects: { prefix: "PRJ-", suffix: "", nextNumber: 1000 }, projects: { prefix: "PRJ-", suffix: "", nextNumber: 1000 },
costcentres: { prefix: "KST-", suffix: "", nextNumber: 1000 }, costcentres: { prefix: "KST-", suffix: "", nextNumber: 1000 },
}), }),
accountChart: text("accountChart").notNull().default("skr03"),
standardEmailForInvoices: text("standardEmailForInvoices"), standardEmailForInvoices: text("standardEmailForInvoices"),

View File

@@ -1,99 +0,0 @@
import {
pgTable,
bigint,
text,
timestamp,
boolean,
jsonb,
integer,
index,
uuid,
AnyPgColumn
} from "drizzle-orm/pg-core"
import { relations } from "drizzle-orm"
import { tenants } from "./tenants"
import { authUsers } from "./auth_users"
export const wikiPages = pgTable(
"wiki_pages",
{
// ID des Wiki-Eintrags selbst (neu = UUID)
id: uuid("id")
.primaryKey()
.defaultRandom(),
tenantId: bigint("tenant_id", { mode: "number" })
.notNull()
.references(() => tenants.id, { onDelete: "cascade" }),
parentId: uuid("parent_id")
.references((): AnyPgColumn => wikiPages.id, { onDelete: "cascade" }),
title: text("title").notNull(),
content: jsonb("content"),
isFolder: boolean("is_folder").notNull().default(false),
sortOrder: integer("sort_order").notNull().default(0),
// --- POLYMORPHE BEZIEHUNG (Split) ---
// Art der Entität (z.B. 'customer', 'invoice', 'iot_device')
entityType: text("entity_type"),
// SPALTE 1: Für Legacy-Tabellen (BigInt)
// Nutzung: Wenn entityType='customer', wird hier die ID 1050 gespeichert
entityId: bigint("entity_id", { mode: "number" }),
// SPALTE 2: Für neue Tabellen (UUID)
// Nutzung: Wenn entityType='iot_device', wird hier die UUID gespeichert
entityUuid: uuid("entity_uuid"),
// ------------------------------------
createdAt: timestamp("created_at", { withTimezone: true })
.notNull()
.defaultNow(),
updatedAt: timestamp("updated_at", { withTimezone: true }),
createdBy: uuid("created_by").references(() => authUsers.id),
updatedBy: uuid("updated_by").references(() => authUsers.id),
},
(table) => ({
tenantIdx: index("wiki_pages_tenant_idx").on(table.tenantId),
parentIdx: index("wiki_pages_parent_idx").on(table.parentId),
// ZWEI separate Indexe für schnelle Lookups, je nachdem welche ID genutzt wird
// Fall 1: Suche nach Notizen für Kunde 1050
entityIntIdx: index("wiki_pages_entity_int_idx")
.on(table.tenantId, table.entityType, table.entityId),
// Fall 2: Suche nach Notizen für IoT-Device 550e84...
entityUuidIdx: index("wiki_pages_entity_uuid_idx")
.on(table.tenantId, table.entityType, table.entityUuid),
})
)
export const wikiPagesRelations = relations(wikiPages, ({ one, many }) => ({
tenant: one(tenants, {
fields: [wikiPages.tenantId],
references: [tenants.id],
}),
parent: one(wikiPages, {
fields: [wikiPages.parentId],
references: [wikiPages.id],
relationName: "parent_child",
}),
children: many(wikiPages, {
relationName: "parent_child",
}),
author: one(authUsers, {
fields: [wikiPages.createdBy],
references: [authUsers.id],
}),
}))
export type WikiPage = typeof wikiPages.$inferSelect
export type NewWikiPage = typeof wikiPages.$inferInsert

View File

@@ -6,6 +6,6 @@ export default defineConfig({
schema: "./db/schema", schema: "./db/schema",
out: "./db/migrations", out: "./db/migrations",
dbCredentials: { dbCredentials: {
url: secrets.DATABASE_URL || "postgres://postgres:wJw7aNpEBJdcxgoct6GXNpvY4Cn6ECqu@fedeo-db-001.vpn.internal:5432/fedeo", url: secrets.DATABASE_URL,
}, },
}) })

View File

@@ -5,14 +5,9 @@
"main": "index.js", "main": "index.js",
"scripts": { "scripts": {
"dev": "tsx watch src/index.ts", "dev": "tsx watch src/index.ts",
"fill": "ts-node src/webdav/fill-file-sizes.ts",
"dev:dav": "tsx watch src/webdav/server.ts",
"build": "tsc", "build": "tsc",
"start": "node dist/src/index.js", "start": "node dist/src/index.js",
"schema:index": "ts-node scripts/generate-schema-index.ts", "schema:index": "ts-node scripts/generate-schema-index.ts"
"bankcodes:update": "tsx scripts/generate-de-bank-codes.ts",
"members:import:csv": "tsx scripts/import-members-csv.ts",
"accounts:import:skr42": "ts-node scripts/import-skr42-accounts.ts"
}, },
"repository": { "repository": {
"type": "git", "type": "git",
@@ -32,6 +27,7 @@
"@infisical/sdk": "^4.0.6", "@infisical/sdk": "^4.0.6",
"@mmote/niimbluelib": "^0.0.1-alpha.29", "@mmote/niimbluelib": "^0.0.1-alpha.29",
"@prisma/client": "^6.15.0", "@prisma/client": "^6.15.0",
"@supabase/supabase-js": "^2.56.1",
"@zip.js/zip.js": "^2.7.73", "@zip.js/zip.js": "^2.7.73",
"archiver": "^7.0.1", "archiver": "^7.0.1",
"axios": "^1.12.1", "axios": "^1.12.1",
@@ -52,7 +48,6 @@
"pg": "^8.16.3", "pg": "^8.16.3",
"pngjs": "^7.0.0", "pngjs": "^7.0.0",
"sharp": "^0.34.5", "sharp": "^0.34.5",
"webdav-server": "^2.6.2",
"xmlbuilder": "^15.1.1", "xmlbuilder": "^15.1.1",
"zpl-image": "^0.2.0", "zpl-image": "^0.2.0",
"zpl-renderer-js": "^2.0.2" "zpl-renderer-js": "^2.0.2"

View File

@@ -1,95 +0,0 @@
import fs from "node:fs/promises"
import path from "node:path"
import https from "node:https"
const DEFAULT_SOURCE_URL =
"https://www.bundesbank.de/resource/blob/602632/bec25ca5df1eb62fefadd8325dafe67c/472B63F073F071307366337C94F8C870/blz-aktuell-txt-data.txt"
const OUTPUT_NAME_FILE = path.resolve("src/utils/deBankCodes.ts")
const OUTPUT_BIC_FILE = path.resolve("src/utils/deBankBics.ts")
function fetchBuffer(url: string): Promise<Buffer> {
return new Promise((resolve, reject) => {
https
.get(url, (res) => {
if (res.statusCode && res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
return resolve(fetchBuffer(res.headers.location))
}
if (res.statusCode !== 200) {
return reject(new Error(`Download failed with status ${res.statusCode}`))
}
const chunks: Buffer[] = []
res.on("data", (chunk) => chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)))
res.on("end", () => resolve(Buffer.concat(chunks)))
res.on("error", reject)
})
.on("error", reject)
})
}
function escapeTsString(value: string) {
return value.replace(/\\/g, "\\\\").replace(/"/g, '\\"')
}
async function main() {
const source = process.env.BLZ_SOURCE_URL || DEFAULT_SOURCE_URL
const sourceFile = process.env.BLZ_SOURCE_FILE
let raw: Buffer
if (sourceFile) {
console.log(`Reading BLZ source file: ${sourceFile}`)
raw = await fs.readFile(sourceFile)
} else {
console.log(`Downloading BLZ source: ${source}`)
raw = await fetchBuffer(source)
}
const content = raw.toString("latin1")
const lines = content.split(/\r?\n/)
const nameMap = new Map<string, string>()
const bicMap = new Map<string, string>()
for (const line of lines) {
if (!line || line.length < 150) continue
const blz = line.slice(0, 8).trim()
const name = line.slice(9, 67).trim()
const bic = line.slice(139, 150).trim()
if (!/^\d{8}$/.test(blz) || !name) continue
if (!nameMap.has(blz)) nameMap.set(blz, name)
if (bic && !bicMap.has(blz)) bicMap.set(blz, bic)
}
const sortedNames = [...nameMap.entries()].sort(([a], [b]) => a.localeCompare(b))
const sortedBics = [...bicMap.entries()].sort(([a], [b]) => a.localeCompare(b))
const nameOutputLines = [
"// Lokale Bankleitzahl-zu-Institut Zuordnung (DE).",
"// Quelle: Deutsche Bundesbank, BLZ-Datei (vollstaendig).",
"export const DE_BANK_CODE_TO_NAME: Record<string, string> = {",
...sortedNames.map(([blz, name]) => ` "${blz}": "${escapeTsString(name)}",`),
"}",
"",
]
const bicOutputLines = [
"// Lokale Bankleitzahl-zu-BIC Zuordnung (DE).",
"// Quelle: Deutsche Bundesbank, BLZ-Datei (vollstaendig).",
"export const DE_BANK_CODE_TO_BIC: Record<string, string> = {",
...sortedBics.map(([blz, bic]) => ` "${blz}": "${escapeTsString(bic)}",`),
"}",
"",
]
await fs.writeFile(OUTPUT_NAME_FILE, nameOutputLines.join("\n"), "utf8")
await fs.writeFile(OUTPUT_BIC_FILE, bicOutputLines.join("\n"), "utf8")
console.log(`Wrote ${sortedNames.length} bank names to ${OUTPUT_NAME_FILE}`)
console.log(`Wrote ${sortedBics.length} bank BICs to ${OUTPUT_BIC_FILE}`)
}
main().catch((err) => {
console.error(err)
process.exit(1)
})

View File

@@ -1,5 +1,6 @@
import Fastify from "fastify"; import Fastify from "fastify";
import swaggerPlugin from "./plugins/swagger" import swaggerPlugin from "./plugins/swagger"
import supabasePlugin from "./plugins/supabase";
import dayjsPlugin from "./plugins/dayjs"; import dayjsPlugin from "./plugins/dayjs";
import healthRoutes from "./routes/health"; import healthRoutes from "./routes/health";
import meRoutes from "./routes/auth/me"; import meRoutes from "./routes/auth/me";
@@ -28,7 +29,6 @@ import staffTimeRoutes from "./routes/staff/time";
import staffTimeConnectRoutes from "./routes/staff/timeconnects"; import staffTimeConnectRoutes from "./routes/staff/timeconnects";
import userRoutes from "./routes/auth/user"; import userRoutes from "./routes/auth/user";
import publiclinksAuthenticatedRoutes from "./routes/publiclinks/publiclinks-authenticated"; import publiclinksAuthenticatedRoutes from "./routes/publiclinks/publiclinks-authenticated";
import wikiRoutes from "./routes/wiki";
//Public Links //Public Links
import publiclinksNonAuthenticatedRoutes from "./routes/publiclinks/publiclinks-non-authenticated"; import publiclinksNonAuthenticatedRoutes from "./routes/publiclinks/publiclinks-non-authenticated";
@@ -42,11 +42,9 @@ import helpdeskInboundEmailRoutes from "./routes/helpdesk.inbound.email";
import deviceRoutes from "./routes/internal/devices"; import deviceRoutes from "./routes/internal/devices";
import tenantRoutesInternal from "./routes/internal/tenant"; import tenantRoutesInternal from "./routes/internal/tenant";
import staffTimeRoutesInternal from "./routes/internal/time"; import staffTimeRoutesInternal from "./routes/internal/time";
import authM2mInternalRoutes from "./routes/internal/auth.m2m";
//Devices //Devices
import devicesRFIDRoutes from "./routes/devices/rfid"; import devicesRFIDRoutes from "./routes/devices/rfid";
import devicesManagementRoutes from "./routes/devices/management";
import {sendMail} from "./utils/mailer"; import {sendMail} from "./utils/mailer";
@@ -54,7 +52,6 @@ import {loadSecrets, secrets} from "./utils/secrets";
import {initMailer} from "./utils/mailer" import {initMailer} from "./utils/mailer"
import {initS3} from "./utils/s3"; import {initS3} from "./utils/s3";
//Services //Services
import servicesPlugin from "./plugins/services"; import servicesPlugin from "./plugins/services";
@@ -73,6 +70,8 @@ async function main() {
// Plugins Global verfügbar // Plugins Global verfügbar
await app.register(swaggerPlugin); await app.register(swaggerPlugin);
await app.register(corsPlugin);
await app.register(supabasePlugin);
await app.register(tenantPlugin); await app.register(tenantPlugin);
await app.register(dayjsPlugin); await app.register(dayjsPlugin);
await app.register(dbPlugin); await app.register(dbPlugin);
@@ -108,7 +107,6 @@ async function main() {
await app.register(async (m2mApp) => { await app.register(async (m2mApp) => {
await m2mApp.register(authM2m) await m2mApp.register(authM2m)
await m2mApp.register(authM2mInternalRoutes)
await m2mApp.register(helpdeskInboundEmailRoutes) await m2mApp.register(helpdeskInboundEmailRoutes)
await m2mApp.register(deviceRoutes) await m2mApp.register(deviceRoutes)
await m2mApp.register(tenantRoutesInternal) await m2mApp.register(tenantRoutesInternal)
@@ -117,10 +115,8 @@ async function main() {
await app.register(async (devicesApp) => { await app.register(async (devicesApp) => {
await devicesApp.register(devicesRFIDRoutes) await devicesApp.register(devicesRFIDRoutes)
await devicesApp.register(devicesManagementRoutes)
},{prefix: "/devices"}) },{prefix: "/devices"})
await app.register(corsPlugin);
//Geschützte Routes //Geschützte Routes
@@ -145,7 +141,6 @@ async function main() {
await subApp.register(userRoutes); await subApp.register(userRoutes);
await subApp.register(publiclinksAuthenticatedRoutes); await subApp.register(publiclinksAuthenticatedRoutes);
await subApp.register(resourceRoutes); await subApp.register(resourceRoutes);
await subApp.register(wikiRoutes);
},{prefix: "/api"}) },{prefix: "/api"})

View File

@@ -19,14 +19,15 @@ import {
and, and,
} from "drizzle-orm" } from "drizzle-orm"
let badMessageDetected = false
let badMessageMessageSent = false
export function syncDokuboxService (server: FastifyInstance) { let client: ImapFlow | null = null
let badMessageDetected = false
let badMessageMessageSent = false
let client: ImapFlow | null = null // -------------------------------------------------------------
// IMAP CLIENT INITIALIZEN
async function initDokuboxClient() { // -------------------------------------------------------------
export async function initDokuboxClient() {
client = new ImapFlow({ client = new ImapFlow({
host: secrets.DOKUBOX_IMAP_HOST, host: secrets.DOKUBOX_IMAP_HOST,
port: secrets.DOKUBOX_IMAP_PORT, port: secrets.DOKUBOX_IMAP_PORT,
@@ -41,9 +42,15 @@ export function syncDokuboxService (server: FastifyInstance) {
console.log("Dokubox E-Mail Client Initialized") console.log("Dokubox E-Mail Client Initialized")
await client.connect() await client.connect()
} }
const syncDokubox = async () => {
// -------------------------------------------------------------
// MAIN SYNC FUNCTION (DRIZZLE VERSION)
// -------------------------------------------------------------
export const syncDokubox = (server: FastifyInstance) =>
async () => {
console.log("Perform Dokubox Sync") console.log("Perform Dokubox Sync")
@@ -123,11 +130,16 @@ export function syncDokuboxService (server: FastifyInstance) {
} }
} }
const getMessageConfigDrizzle = async (
// -------------------------------------------------------------
// TENANT ERKENNEN + FOLDER/FILETYPES (DRIZZLE VERSION)
// -------------------------------------------------------------
const getMessageConfigDrizzle = async (
server: FastifyInstance, server: FastifyInstance,
message, message,
tenantsList: any[] tenantsList: any[]
) => { ) => {
let possibleKeys: string[] = [] let possibleKeys: string[] = []
@@ -244,13 +256,4 @@ export function syncDokuboxService (server: FastifyInstance) {
folder: folderId, folder: folderId,
filetype: filetypeId filetype: filetypeId
} }
}
return {
run: async () => {
await initDokuboxClient()
await syncDokubox()
console.log("Service: Dokubox sync finished")
}
}
} }

View File

@@ -8,108 +8,9 @@ import {
files, files,
filetags, filetags,
incominginvoices, incominginvoices,
vendors,
} from "../../../db/schema" } from "../../../db/schema"
import { eq, and, isNull, not, desc } from "drizzle-orm" import { eq, and, isNull, not } from "drizzle-orm"
type InvoiceAccount = {
account?: number | null
description?: string | null
taxType?: string | number | null
}
const normalizeAccounts = (accounts: unknown): InvoiceAccount[] => {
if (!Array.isArray(accounts)) return []
return accounts
.map((entry: any) => ({
account: typeof entry?.account === "number" ? entry.account : null,
description: typeof entry?.description === "string" ? entry.description : null,
taxType: entry?.taxType ?? null,
}))
.filter((entry) => entry.account !== null || entry.description || entry.taxType !== null)
}
const buildLearningContext = (historicalInvoices: any[]) => {
if (!historicalInvoices.length) return null
const vendorProfiles = new Map<number, {
vendorName: string
paymentTypes: Map<string, number>
accountUsage: Map<number, number>
sampleDescriptions: string[]
}>()
const recentExamples: any[] = []
for (const invoice of historicalInvoices) {
const accounts = normalizeAccounts(invoice.accounts)
const vendorId = typeof invoice.vendorId === "number" ? invoice.vendorId : null
const vendorName = typeof invoice.vendorName === "string" ? invoice.vendorName : "Unknown"
if (vendorId) {
if (!vendorProfiles.has(vendorId)) {
vendorProfiles.set(vendorId, {
vendorName,
paymentTypes: new Map(),
accountUsage: new Map(),
sampleDescriptions: [],
})
}
const profile = vendorProfiles.get(vendorId)!
if (invoice.paymentType) {
const key = String(invoice.paymentType)
profile.paymentTypes.set(key, (profile.paymentTypes.get(key) ?? 0) + 1)
}
for (const account of accounts) {
if (typeof account.account === "number") {
profile.accountUsage.set(account.account, (profile.accountUsage.get(account.account) ?? 0) + 1)
}
}
if (invoice.description && profile.sampleDescriptions.length < 3) {
profile.sampleDescriptions.push(String(invoice.description).slice(0, 120))
}
}
if (recentExamples.length < 20) {
recentExamples.push({
vendorId,
vendorName,
paymentType: invoice.paymentType ?? null,
accounts: accounts.map((entry) => ({
account: entry.account,
description: entry.description ?? null,
taxType: entry.taxType ?? null,
})),
})
}
}
const vendorPatterns = Array.from(vendorProfiles.entries())
.map(([vendorId, profile]) => {
const commonPaymentType = Array.from(profile.paymentTypes.entries())
.sort((a, b) => b[1] - a[1])[0]?.[0] ?? null
const topAccounts = Array.from(profile.accountUsage.entries())
.sort((a, b) => b[1] - a[1])
.slice(0, 4)
.map(([accountId, count]) => ({ accountId, count }))
return {
vendorId,
vendorName: profile.vendorName,
commonPaymentType,
topAccounts,
sampleDescriptions: profile.sampleDescriptions,
}
})
.slice(0, 50)
return JSON.stringify({
vendorPatterns,
recentExamples,
})
}
export function prepareIncomingInvoices(server: FastifyInstance) { export function prepareIncomingInvoices(server: FastifyInstance) {
const processInvoices = async (tenantId:number) => { const processInvoices = async (tenantId:number) => {
@@ -171,34 +72,13 @@ export function prepareIncomingInvoices(server: FastifyInstance) {
continue continue
} }
const historicalInvoices = await server.db
.select({
vendorId: incominginvoices.vendor,
vendorName: vendors.name,
paymentType: incominginvoices.paymentType,
description: incominginvoices.description,
accounts: incominginvoices.accounts,
})
.from(incominginvoices)
.leftJoin(vendors, eq(incominginvoices.vendor, vendors.id))
.where(
and(
eq(incominginvoices.tenant, tenantId),
eq(incominginvoices.archived, false)
)
)
.orderBy(desc(incominginvoices.createdAt))
.limit(120)
const learningContext = buildLearningContext(historicalInvoices)
// ------------------------------------------------------------- // -------------------------------------------------------------
// 3⃣ Jede Datei einzeln durch GPT jagen & IncomingInvoice erzeugen // 3⃣ Jede Datei einzeln durch GPT jagen & IncomingInvoice erzeugen
// ------------------------------------------------------------- // -------------------------------------------------------------
for (const file of filesRes) { for (const file of filesRes) {
console.log(`Processing file ${file.id} for tenant ${tenantId}`) console.log(`Processing file ${file.id} for tenant ${tenantId}`)
const data = await getInvoiceDataFromGPT(server,file, tenantId, learningContext ?? undefined) const data = await getInvoiceDataFromGPT(server,file, tenantId)
if (!data) { if (!data) {
server.log.warn(`GPT returned no data for file ${file.id}`) server.log.warn(`GPT returned no data for file ${file.id}`)

View File

@@ -1,7 +1,5 @@
// modules/helpdesk/helpdesk.contact.service.ts // modules/helpdesk/helpdesk.contact.service.ts
import { FastifyInstance } from 'fastify' import { FastifyInstance } from 'fastify'
import { and, eq, or } from "drizzle-orm";
import { helpdesk_contacts } from "../../../db/schema";
export async function getOrCreateContact( export async function getOrCreateContact(
server: FastifyInstance, server: FastifyInstance,
@@ -11,35 +9,30 @@ export async function getOrCreateContact(
if (!email && !phone) throw new Error('Contact must have at least an email or phone') if (!email && !phone) throw new Error('Contact must have at least an email or phone')
// Bestehenden Kontakt prüfen // Bestehenden Kontakt prüfen
const matchConditions = [] const { data: existing, error: findError } = await server.supabase
if (email) matchConditions.push(eq(helpdesk_contacts.email, email)) .from('helpdesk_contacts')
if (phone) matchConditions.push(eq(helpdesk_contacts.phone, phone)) .select('*')
.eq('tenant_id', tenant_id)
.or(`email.eq.${email || ''},phone.eq.${phone || ''}`)
.maybeSingle()
const existing = await server.db if (findError) throw findError
.select() if (existing) return existing
.from(helpdesk_contacts)
.where(
and(
eq(helpdesk_contacts.tenantId, tenant_id),
or(...matchConditions)
)
)
.limit(1)
if (existing[0]) return existing[0]
// Anlegen // Anlegen
const created = await server.db const { data: created, error: insertError } = await server.supabase
.insert(helpdesk_contacts) .from('helpdesk_contacts')
.values({ .insert({
tenantId: tenant_id, tenant_id,
email, email,
phone, phone,
displayName: display_name, display_name,
customerId: customer_id, customer_id,
contactId: contact_id contact_id
}) })
.returning() .select()
.single()
return created[0] if (insertError) throw insertError
return created
} }

View File

@@ -2,8 +2,6 @@
import { FastifyInstance } from 'fastify' import { FastifyInstance } from 'fastify'
import { getOrCreateContact } from './helpdesk.contact.service.js' import { getOrCreateContact } from './helpdesk.contact.service.js'
import {useNextNumberRangeNumber} from "../../utils/functions"; import {useNextNumberRangeNumber} from "../../utils/functions";
import { and, desc, eq } from "drizzle-orm";
import { customers, helpdesk_contacts, helpdesk_conversations } from "../../../db/schema";
export async function createConversation( export async function createConversation(
server: FastifyInstance, server: FastifyInstance,
@@ -27,34 +25,24 @@ export async function createConversation(
const {usedNumber } = await useNextNumberRangeNumber(server, tenant_id, "tickets") const {usedNumber } = await useNextNumberRangeNumber(server, tenant_id, "tickets")
const inserted = await server.db const { data, error } = await server.supabase
.insert(helpdesk_conversations) .from('helpdesk_conversations')
.values({ .insert({
tenantId: tenant_id, tenant_id,
contactId: contactRecord.id, contact_id: contactRecord.id,
channelInstanceId: channel_instance_id, channel_instance_id,
subject: subject || null, subject: subject || null,
status: 'open', status: 'open',
createdAt: new Date(), created_at: new Date().toISOString(),
customerId: customer_id, customer_id,
contactPersonId: contact_person_id, contact_person_id,
ticketNumber: usedNumber ticket_number: usedNumber
}) })
.returning() .select()
.single()
const data = inserted[0] if (error) throw error
return data
return {
...data,
channel_instance_id: data.channelInstanceId,
contact_id: data.contactId,
contact_person_id: data.contactPersonId,
created_at: data.createdAt,
customer_id: data.customerId,
last_message_at: data.lastMessageAt,
tenant_id: data.tenantId,
ticket_number: data.ticketNumber,
}
} }
export async function getConversations( export async function getConversations(
@@ -64,34 +52,22 @@ export async function getConversations(
) { ) {
const { status, limit = 50 } = opts || {} const { status, limit = 50 } = opts || {}
const filters = [eq(helpdesk_conversations.tenantId, tenant_id)] let query = server.supabase.from('helpdesk_conversations').select('*, customer_id(*)').eq('tenant_id', tenant_id)
if (status) filters.push(eq(helpdesk_conversations.status, status))
const data = await server.db if (status) query = query.eq('status', status)
.select({ query = query.order('last_message_at', { ascending: false }).limit(limit)
conversation: helpdesk_conversations,
contact: helpdesk_contacts, const { data, error } = await query
customer: customers, if (error) throw error
const mappedData = data.map(entry => {
return {
...entry,
customer: entry.customer_id
}
}) })
.from(helpdesk_conversations)
.leftJoin(helpdesk_contacts, eq(helpdesk_contacts.id, helpdesk_conversations.contactId))
.leftJoin(customers, eq(customers.id, helpdesk_conversations.customerId))
.where(and(...filters))
.orderBy(desc(helpdesk_conversations.lastMessageAt))
.limit(limit)
return data.map((entry) => ({ return mappedData
...entry.conversation,
helpdesk_contacts: entry.contact,
channel_instance_id: entry.conversation.channelInstanceId,
contact_id: entry.conversation.contactId,
contact_person_id: entry.conversation.contactPersonId,
created_at: entry.conversation.createdAt,
customer_id: entry.customer,
last_message_at: entry.conversation.lastMessageAt,
tenant_id: entry.conversation.tenantId,
ticket_number: entry.conversation.ticketNumber,
}))
} }
export async function updateConversationStatus( export async function updateConversationStatus(
@@ -102,22 +78,13 @@ export async function updateConversationStatus(
const valid = ['open', 'in_progress', 'waiting_for_customer', 'answered', 'closed'] const valid = ['open', 'in_progress', 'waiting_for_customer', 'answered', 'closed']
if (!valid.includes(status)) throw new Error('Invalid status') if (!valid.includes(status)) throw new Error('Invalid status')
const updated = await server.db const { data, error } = await server.supabase
.update(helpdesk_conversations) .from('helpdesk_conversations')
.set({ status }) .update({ status })
.where(eq(helpdesk_conversations.id, conversation_id)) .eq('id', conversation_id)
.returning() .select()
.single()
const data = updated[0] if (error) throw error
return { return data
...data,
channel_instance_id: data.channelInstanceId,
contact_id: data.contactId,
contact_person_id: data.contactPersonId,
created_at: data.createdAt,
customer_id: data.customerId,
last_message_at: data.lastMessageAt,
tenant_id: data.tenantId,
ticket_number: data.ticketNumber,
}
} }

View File

@@ -1,7 +1,5 @@
// modules/helpdesk/helpdesk.message.service.ts // modules/helpdesk/helpdesk.message.service.ts
import { FastifyInstance } from 'fastify' import { FastifyInstance } from 'fastify'
import { asc, eq } from "drizzle-orm";
import { helpdesk_conversations, helpdesk_messages } from "../../../db/schema";
export async function addMessage( export async function addMessage(
server: FastifyInstance, server: FastifyInstance,
@@ -25,53 +23,38 @@ export async function addMessage(
) { ) {
if (!payload?.text) throw new Error('Message payload requires text content') if (!payload?.text) throw new Error('Message payload requires text content')
const inserted = await server.db const { data: message, error } = await server.supabase
.insert(helpdesk_messages) .from('helpdesk_messages')
.values({ .insert({
tenantId: tenant_id, tenant_id,
conversationId: conversation_id, conversation_id,
authorUserId: author_user_id, author_user_id,
direction, direction,
payload, payload,
rawMeta: raw_meta, raw_meta,
externalMessageId: external_message_id, created_at: new Date().toISOString(),
receivedAt: new Date(),
}) })
.returning() .select()
.single()
const message = inserted[0] if (error) throw error
// Letzte Nachricht aktualisieren // Letzte Nachricht aktualisieren
await server.db await server.supabase
.update(helpdesk_conversations) .from('helpdesk_conversations')
.set({ lastMessageAt: new Date() }) .update({ last_message_at: new Date().toISOString() })
.where(eq(helpdesk_conversations.id, conversation_id)) .eq('id', conversation_id)
return { return message
...message,
author_user_id: message.authorUserId,
conversation_id: message.conversationId,
created_at: message.createdAt,
external_message_id: message.externalMessageId,
raw_meta: message.rawMeta,
tenant_id: message.tenantId,
}
} }
export async function getMessages(server: FastifyInstance, conversation_id: string) { export async function getMessages(server: FastifyInstance, conversation_id: string) {
const data = await server.db const { data, error } = await server.supabase
.select() .from('helpdesk_messages')
.from(helpdesk_messages) .select('*')
.where(eq(helpdesk_messages.conversationId, conversation_id)) .eq('conversation_id', conversation_id)
.orderBy(asc(helpdesk_messages.createdAt)) .order('created_at', { ascending: true })
return data.map((message) => ({ if (error) throw error
...message, return data
author_user_id: message.authorUserId,
conversation_id: message.conversationId,
created_at: message.createdAt,
external_message_id: message.externalMessageId,
raw_meta: message.rawMeta,
tenant_id: message.tenantId,
}))
} }

View File

@@ -1,8 +1,6 @@
// services/notification.service.ts // services/notification.service.ts
import type { FastifyInstance } from 'fastify'; import type { FastifyInstance } from 'fastify';
import {secrets} from "../utils/secrets"; import {secrets} from "../utils/secrets";
import { eq } from "drizzle-orm";
import { notificationsEventTypes, notificationsItems } from "../../db/schema";
export type NotificationStatus = 'queued' | 'sent' | 'failed'; export type NotificationStatus = 'queued' | 'sent' | 'failed';
@@ -36,16 +34,16 @@ export class NotificationService {
*/ */
async trigger(input: TriggerInput) { async trigger(input: TriggerInput) {
const { tenantId, userId, eventType, title, message, payload } = input; const { tenantId, userId, eventType, title, message, payload } = input;
const supabase = this.server.supabase;
// 1) Event-Typ prüfen (aktiv?) // 1) Event-Typ prüfen (aktiv?)
const eventTypeRows = await this.server.db const { data: eventTypeRow, error: etErr } = await supabase
.select() .from('notifications_event_types')
.from(notificationsEventTypes) .select('event_key,is_active')
.where(eq(notificationsEventTypes.eventKey, eventType)) .eq('event_key', eventType)
.limit(1) .maybeSingle();
const eventTypeRow = eventTypeRows[0]
if (!eventTypeRow || eventTypeRow.isActive !== true) { if (etErr || !eventTypeRow || eventTypeRow.is_active !== true) {
throw new Error(`Unbekannter oder inaktiver Event-Typ: ${eventType}`); throw new Error(`Unbekannter oder inaktiver Event-Typ: ${eventType}`);
} }
@@ -56,40 +54,40 @@ export class NotificationService {
} }
// 3) Notification anlegen (status: queued) // 3) Notification anlegen (status: queued)
const insertedRows = await this.server.db const { data: inserted, error: insErr } = await supabase
.insert(notificationsItems) .from('notifications_items')
.values({ .insert({
tenantId, tenant_id: tenantId,
userId, user_id: userId,
eventType, event_type: eventType,
title, title,
message, message,
payload: payload ?? null, payload: payload ?? null,
channel: 'email', channel: 'email',
status: 'queued' status: 'queued'
}) })
.returning({ id: notificationsItems.id }) .select('id')
const inserted = insertedRows[0] .single();
if (!inserted) { if (insErr || !inserted) {
throw new Error("Fehler beim Einfügen der Notification"); throw new Error(`Fehler beim Einfügen der Notification: ${insErr?.message}`);
} }
// 4) E-Mail versenden // 4) E-Mail versenden
try { try {
await this.sendEmail(user.email, title, message); await this.sendEmail(user.email, title, message);
await this.server.db await supabase
.update(notificationsItems) .from('notifications_items')
.set({ status: 'sent', sentAt: new Date() }) .update({ status: 'sent', sent_at: new Date().toISOString() })
.where(eq(notificationsItems.id, inserted.id)); .eq('id', inserted.id);
return { success: true, id: inserted.id }; return { success: true, id: inserted.id };
} catch (err: any) { } catch (err: any) {
await this.server.db await supabase
.update(notificationsItems) .from('notifications_items')
.set({ status: 'failed', error: String(err?.message || err) }) .update({ status: 'failed', error: String(err?.message || err) })
.where(eq(notificationsItems.id, inserted.id)); .eq('id', inserted.id);
this.server.log.error({ err, notificationId: inserted.id }, 'E-Mail Versand fehlgeschlagen'); this.server.log.error({ err, notificationId: inserted.id }, 'E-Mail Versand fehlgeschlagen');
return { success: false, error: err?.message || 'E-Mail Versand fehlgeschlagen' }; return { success: false, error: err?.message || 'E-Mail Versand fehlgeschlagen' };

View File

@@ -1,249 +0,0 @@
import { and, eq } from "drizzle-orm";
import * as schema from "../../db/schema";
import { FastifyInstance } from "fastify";
type CompositionRow = {
product?: number | string | null;
service?: number | string | null;
hourrate?: string | null;
quantity?: number | string | null;
price?: number | string | null;
purchasePrice?: number | string | null;
[key: string]: any;
};
function toNumber(value: any): number {
const num = Number(value ?? 0);
return Number.isFinite(num) ? num : 0;
}
function round2(value: number): number {
return Number(value.toFixed(2));
}
function getJsonNumber(source: unknown, key: string): number {
if (!source || typeof source !== "object") return 0;
return toNumber((source as Record<string, unknown>)[key]);
}
function normalizeId(value: unknown): number | null {
if (value === null || value === undefined || value === "") return null;
const num = Number(value);
return Number.isFinite(num) ? num : null;
}
function normalizeUuid(value: unknown): string | null {
if (typeof value !== "string") return null;
const trimmed = value.trim();
return trimmed.length ? trimmed : null;
}
function sanitizeCompositionRows(value: unknown): CompositionRow[] {
if (!Array.isArray(value)) return [];
return value.filter((entry): entry is CompositionRow => !!entry && typeof entry === "object");
}
export async function recalculateServicePricesForTenant(server: FastifyInstance, tenantId: number, updatedBy?: string | null) {
const [services, products, hourrates] = await Promise.all([
server.db.select().from(schema.services).where(eq(schema.services.tenant, tenantId)),
server.db.select().from(schema.products).where(eq(schema.products.tenant, tenantId)),
server.db.select().from(schema.hourrates).where(eq(schema.hourrates.tenant, tenantId)),
]);
const serviceMap = new Map(services.map((item) => [item.id, item]));
const productMap = new Map(products.map((item) => [item.id, item]));
const hourrateMap = new Map(hourrates.map((item) => [item.id, item]));
const memo = new Map<number, {
sellingTotal: number;
purchaseTotal: number;
materialTotal: number;
materialPurchaseTotal: number;
workerTotal: number;
workerPurchaseTotal: number;
materialComposition: CompositionRow[];
personalComposition: CompositionRow[];
}>();
const stack = new Set<number>();
const calculateService = (serviceId: number) => {
if (memo.has(serviceId)) return memo.get(serviceId)!;
const service = serviceMap.get(serviceId);
const emptyResult = {
sellingTotal: 0,
purchaseTotal: 0,
materialTotal: 0,
materialPurchaseTotal: 0,
workerTotal: 0,
workerPurchaseTotal: 0,
materialComposition: [],
personalComposition: [],
};
if (!service) return emptyResult;
if (stack.has(serviceId)) return emptyResult;
// Gesperrte Leistungen bleiben bei automatischen Preis-Updates unverändert.
if (service.priceUpdateLocked) {
const lockedResult = {
sellingTotal: getJsonNumber(service.sellingPriceComposed, "total") || toNumber(service.sellingPrice),
purchaseTotal: getJsonNumber(service.purchasePriceComposed, "total"),
materialTotal: getJsonNumber(service.sellingPriceComposed, "material"),
materialPurchaseTotal: getJsonNumber(service.purchasePriceComposed, "material"),
workerTotal: getJsonNumber(service.sellingPriceComposed, "worker"),
workerPurchaseTotal: getJsonNumber(service.purchasePriceComposed, "worker"),
materialComposition: sanitizeCompositionRows(service.materialComposition),
personalComposition: sanitizeCompositionRows(service.personalComposition),
};
memo.set(serviceId, lockedResult);
return lockedResult;
}
stack.add(serviceId);
try {
const materialComposition = sanitizeCompositionRows(service.materialComposition);
const personalComposition = sanitizeCompositionRows(service.personalComposition);
const hasMaterialComposition = materialComposition.length > 0;
const hasPersonalComposition = personalComposition.length > 0;
// Ohne Zusammensetzung keine automatische Überschreibung:
// manuell gepflegte Preise sollen erhalten bleiben.
if (!hasMaterialComposition && !hasPersonalComposition) {
const manualResult = {
sellingTotal: getJsonNumber(service.sellingPriceComposed, "total") || toNumber(service.sellingPrice),
purchaseTotal: getJsonNumber(service.purchasePriceComposed, "total"),
materialTotal: getJsonNumber(service.sellingPriceComposed, "material"),
materialPurchaseTotal: getJsonNumber(service.purchasePriceComposed, "material"),
workerTotal: getJsonNumber(service.sellingPriceComposed, "worker"),
workerPurchaseTotal: getJsonNumber(service.purchasePriceComposed, "worker"),
materialComposition,
personalComposition,
};
memo.set(serviceId, manualResult);
return manualResult;
}
let materialTotal = 0;
let materialPurchaseTotal = 0;
const normalizedMaterialComposition = materialComposition.map((entry) => {
const quantity = toNumber(entry.quantity);
const productId = normalizeId(entry.product);
const childServiceId = normalizeId(entry.service);
let sellingPrice = toNumber(entry.price);
let purchasePrice = toNumber(entry.purchasePrice);
if (productId) {
const product = productMap.get(productId);
sellingPrice = toNumber(product?.selling_price);
purchasePrice = toNumber(product?.purchase_price);
} else if (childServiceId) {
const child = calculateService(childServiceId);
sellingPrice = toNumber(child.sellingTotal);
purchasePrice = toNumber(child.purchaseTotal);
}
materialTotal += quantity * sellingPrice;
materialPurchaseTotal += quantity * purchasePrice;
return {
...entry,
price: round2(sellingPrice),
purchasePrice: round2(purchasePrice),
};
});
let workerTotal = 0;
let workerPurchaseTotal = 0;
const normalizedPersonalComposition = personalComposition.map((entry) => {
const quantity = toNumber(entry.quantity);
const hourrateId = normalizeUuid(entry.hourrate);
let sellingPrice = toNumber(entry.price);
let purchasePrice = toNumber(entry.purchasePrice);
if (hourrateId) {
const hourrate = hourrateMap.get(hourrateId);
if (hourrate) {
sellingPrice = toNumber(hourrate.sellingPrice);
purchasePrice = toNumber(hourrate.purchase_price);
}
}
workerTotal += quantity * sellingPrice;
workerPurchaseTotal += quantity * purchasePrice;
return {
...entry,
price: round2(sellingPrice),
purchasePrice: round2(purchasePrice),
};
});
const result = {
sellingTotal: round2(materialTotal + workerTotal),
purchaseTotal: round2(materialPurchaseTotal + workerPurchaseTotal),
materialTotal: round2(materialTotal),
materialPurchaseTotal: round2(materialPurchaseTotal),
workerTotal: round2(workerTotal),
workerPurchaseTotal: round2(workerPurchaseTotal),
materialComposition: normalizedMaterialComposition,
personalComposition: normalizedPersonalComposition,
};
memo.set(serviceId, result);
return result;
} finally {
stack.delete(serviceId);
}
};
for (const service of services) {
calculateService(service.id);
}
const updates = services
.filter((service) => !service.priceUpdateLocked)
.map(async (service) => {
const calc = memo.get(service.id);
if (!calc) return;
const sellingPriceComposed = {
worker: calc.workerTotal,
material: calc.materialTotal,
total: calc.sellingTotal,
};
const purchasePriceComposed = {
worker: calc.workerPurchaseTotal,
material: calc.materialPurchaseTotal,
total: calc.purchaseTotal,
};
const unchanged =
JSON.stringify(service.materialComposition ?? []) === JSON.stringify(calc.materialComposition) &&
JSON.stringify(service.personalComposition ?? []) === JSON.stringify(calc.personalComposition) &&
JSON.stringify(service.sellingPriceComposed ?? {}) === JSON.stringify(sellingPriceComposed) &&
JSON.stringify(service.purchasePriceComposed ?? {}) === JSON.stringify(purchasePriceComposed) &&
round2(toNumber(service.sellingPrice)) === calc.sellingTotal;
if (unchanged) return;
await server.db
.update(schema.services)
.set({
materialComposition: calc.materialComposition,
personalComposition: calc.personalComposition,
sellingPriceComposed,
purchasePriceComposed,
sellingPrice: calc.sellingTotal,
updatedAt: new Date(),
updatedBy: updatedBy ?? null,
})
.where(and(eq(schema.services.id, service.id), eq(schema.services.tenant, tenantId)));
});
await Promise.all(updates);
}

View File

@@ -1,9 +1,6 @@
import { FastifyInstance } from "fastify"; import { FastifyInstance } from "fastify";
import fp from "fastify-plugin"; import fp from "fastify-plugin";
import { secrets } from "../utils/secrets"; import { secrets } from "../utils/secrets";
import { and, eq } from "drizzle-orm";
import { authUsers, m2mApiKeys } from "../../db/schema";
import { createHash } from "node:crypto";
/** /**
* Fastify Plugin für Machine-to-Machine Authentifizierung. * Fastify Plugin für Machine-to-Machine Authentifizierung.
@@ -15,99 +12,26 @@ import { createHash } from "node:crypto";
* server.register(m2mAuthPlugin, { allowedPrefix: '/internal' }) * server.register(m2mAuthPlugin, { allowedPrefix: '/internal' })
*/ */
export default fp(async (server: FastifyInstance, opts: { allowedPrefix?: string } = {}) => { export default fp(async (server: FastifyInstance, opts: { allowedPrefix?: string } = {}) => {
const hashApiKey = (apiKey: string) => //const allowedPrefix = opts.allowedPrefix || "/internal";
createHash("sha256").update(apiKey, "utf8").digest("hex")
server.addHook("preHandler", async (req, reply) => { server.addHook("preHandler", async (req, reply) => {
try { try {
const apiKeyHeader = req.headers["x-api-key"]; // Nur prüfen, wenn Route unterhalb des Prefix liegt
const apiKey = Array.isArray(apiKeyHeader) ? apiKeyHeader[0] : apiKeyHeader; //if (!req.url.startsWith(allowedPrefix)) return;
if (!apiKey) { const apiKey = req.headers["x-api-key"];
if (!apiKey || apiKey !== secrets.M2M_API_KEY) {
server.log.warn(`[M2M Auth] Ungültiger oder fehlender API-Key bei ${req.url}`); server.log.warn(`[M2M Auth] Ungültiger oder fehlender API-Key bei ${req.url}`);
return reply.status(401).send({ error: "Unauthorized" }); return reply.status(401).send({ error: "Unauthorized" });
} }
const keyHash = hashApiKey(apiKey); // Zusatzinformationen im Request (z. B. interne Kennung)
const keyRows = await server.db
.select({
id: m2mApiKeys.id,
tenantId: m2mApiKeys.tenantId,
userId: m2mApiKeys.userId,
active: m2mApiKeys.active,
expiresAt: m2mApiKeys.expiresAt,
name: m2mApiKeys.name,
userEmail: authUsers.email,
})
.from(m2mApiKeys)
.innerJoin(authUsers, eq(authUsers.id, m2mApiKeys.userId))
.where(and(
eq(m2mApiKeys.keyHash, keyHash),
eq(m2mApiKeys.active, true)
))
.limit(1)
let key = keyRows[0]
if (!key) {
const fallbackValid = apiKey === secrets.M2M_API_KEY
if (!fallbackValid) {
server.log.warn(`[M2M Auth] Ungültiger API-Key bei ${req.url}`)
return reply.status(401).send({ error: "Unauthorized" })
}
// Backward compatibility mode for one global key.
// The caller must provide user/tenant identifiers in headers.
const tenantIdHeader = req.headers["x-tenant-id"]
const userIdHeader = req.headers["x-user-id"]
const tenantId = Number(Array.isArray(tenantIdHeader) ? tenantIdHeader[0] : tenantIdHeader)
const userId = Array.isArray(userIdHeader) ? userIdHeader[0] : userIdHeader
if (!tenantId || !userId) {
return reply.status(401).send({ error: "Missing x-tenant-id or x-user-id for legacy M2M key" })
}
const users = await server.db
.select({ email: authUsers.email })
.from(authUsers)
.where(eq(authUsers.id, userId))
.limit(1)
if (!users[0]) {
return reply.status(401).send({ error: "Unknown user for legacy M2M key" })
}
req.user = {
user_id: userId,
email: users[0].email,
tenant_id: tenantId
}
} else {
if (key.expiresAt && new Date(key.expiresAt).getTime() < Date.now()) {
return reply.status(401).send({ error: "Expired API key" })
}
req.user = {
user_id: key.userId,
email: key.userEmail,
tenant_id: key.tenantId
}
await server.db
.update(m2mApiKeys)
.set({ lastUsedAt: new Date(), updatedAt: new Date() })
.where(eq(m2mApiKeys.id, key.id))
}
(req as any).m2m = { (req as any).m2m = {
verified: true, verified: true,
type: "internal", type: "internal",
key: apiKey, key: apiKey,
}; };
req.role = "m2m"
req.permissions = []
req.hasPermission = () => false
} catch (err) { } catch (err) {
// @ts-ignore // @ts-ignore
server.log.error("[M2M Auth] Fehler beim Prüfen des API-Keys:", err); server.log.error("[M2M Auth] Fehler beim Prüfen des API-Keys:", err);

View File

@@ -9,15 +9,13 @@ export default fp(async (server: FastifyInstance) => {
"http://localhost:3001", // dein Nuxt-Frontend "http://localhost:3001", // dein Nuxt-Frontend
"http://127.0.0.1:3000", // dein Nuxt-Frontend "http://127.0.0.1:3000", // dein Nuxt-Frontend
"http://192.168.1.227:3001", // dein Nuxt-Frontend "http://192.168.1.227:3001", // dein Nuxt-Frontend
"http://192.168.1.234:3000", // dein Nuxt-Frontend
"http://192.168.1.113:3000", // dein Nuxt-Frontend "http://192.168.1.113:3000", // dein Nuxt-Frontend
"https://beta.fedeo.de", // dein Nuxt-Frontend "https://beta.fedeo.de", // dein Nuxt-Frontend
"https://app.fedeo.de", // dein Nuxt-Frontend "https://app.fedeo.de", // dein Nuxt-Frontend
"capacitor://localhost", // dein Nuxt-Frontend "capacitor://localhost", // dein Nuxt-Frontend
], ],
methods: ["GET", "POST", "PUT", "DELETE", "OPTIONS","PATCH", methods: ["GET", "POST", "PUT", "DELETE", "OPTIONS"],
"PROPFIND", "PROPPATCH", "MKCOL", "COPY", "MOVE", "LOCK", "UNLOCK"], allowedHeaders: ["Content-Type", "Authorization", "Context", "X-Public-Pin"],
allowedHeaders: ["Content-Type", "Authorization", "Context", "X-Public-Pin","Depth", "Overwrite", "Destination", "Lock-Token", "If"],
exposedHeaders: ["Authorization", "Content-Disposition", "Content-Type", "Content-Length"], // optional, falls du ihn auch auslesen willst exposedHeaders: ["Authorization", "Content-Disposition", "Content-Type", "Content-Length"], // optional, falls du ihn auch auslesen willst
credentials: true, // wichtig, falls du Cookies nutzt credentials: true, // wichtig, falls du Cookies nutzt
}); });

View File

@@ -1,25 +1,31 @@
// src/plugins/db.ts import fp from "fastify-plugin"
import fp from "fastify-plugin"; import {drizzle, NodePgDatabase} from "drizzle-orm/node-postgres"
import { NodePgDatabase } from "drizzle-orm/node-postgres"; import * as schema from "../../db/schema"
import * as schema from "../../db/schema"; import {secrets} from "../utils/secrets";
import { db, pool } from "../../db"; // <--- Importiert jetzt die globale Instanz import { Pool } from "pg"
export default fp(async (server, opts) => { export default fp(async (server, opts) => {
// Wir nutzen die db, die wir in src/db/index.ts erstellt haben const pool = new Pool({
server.decorate("db", db); connectionString: secrets.DATABASE_URL,
max: 10, // je nach Last
})
// Graceful Shutdown: Wenn Fastify ausgeht, schließen wir den Pool const db = drizzle(pool , {schema})
// Dekorieren -> überall server.db
server.decorate("db", db)
// Graceful Shutdown
server.addHook("onClose", async () => { server.addHook("onClose", async () => {
console.log("[DB] Closing connection pool..."); await pool.end()
await pool.end(); })
});
console.log("[Fastify] Database attached from shared instance"); console.log("Drizzle database connected")
}); })
declare module "fastify" { declare module "fastify" {
interface FastifyInstance { interface FastifyInstance {
db: NodePgDatabase<typeof schema> db:NodePgDatabase<typeof schema>
} }
} }

View File

@@ -58,6 +58,8 @@ const queryConfigPlugin: FastifyPluginAsync<QueryConfigPluginOptions> = async (
const query = req.query as Record<string, any> const query = req.query as Record<string, any>
console.log(query)
// Pagination deaktivieren? // Pagination deaktivieren?
const disablePagination = const disablePagination =
query.noPagination === 'true' || query.noPagination === 'true' ||

View File

@@ -1,7 +1,7 @@
// /plugins/services.ts // /plugins/services.ts
import fp from "fastify-plugin"; import fp from "fastify-plugin";
import { bankStatementService } from "../modules/cron/bankstatementsync.service"; import { bankStatementService } from "../modules/cron/bankstatementsync.service";
import {syncDokuboxService} from "../modules/cron/dokuboximport.service"; //import {initDokuboxClient, syncDokubox} from "../modules/cron/dokuboximport.service";
import { FastifyInstance } from "fastify"; import { FastifyInstance } from "fastify";
import {prepareIncomingInvoices} from "../modules/cron/prepareIncomingInvoices"; import {prepareIncomingInvoices} from "../modules/cron/prepareIncomingInvoices";
@@ -9,7 +9,7 @@ declare module "fastify" {
interface FastifyInstance { interface FastifyInstance {
services: { services: {
bankStatements: ReturnType<typeof bankStatementService>; bankStatements: ReturnType<typeof bankStatementService>;
dokuboxSync: ReturnType<typeof syncDokuboxService>; //dokuboxSync: ReturnType<typeof syncDokubox>;
prepareIncomingInvoices: ReturnType<typeof prepareIncomingInvoices>; prepareIncomingInvoices: ReturnType<typeof prepareIncomingInvoices>;
}; };
} }
@@ -18,7 +18,7 @@ declare module "fastify" {
export default fp(async function servicePlugin(server: FastifyInstance) { export default fp(async function servicePlugin(server: FastifyInstance) {
server.decorate("services", { server.decorate("services", {
bankStatements: bankStatementService(server), bankStatements: bankStatementService(server),
dokuboxSync: syncDokuboxService(server), //dokuboxSync: syncDokubox(server),
prepareIncomingInvoices: prepareIncomingInvoices(server), prepareIncomingInvoices: prepareIncomingInvoices(server),
}); });
}); });

View File

@@ -0,0 +1,19 @@
import { FastifyInstance } from "fastify";
import fp from "fastify-plugin";
import { createClient, SupabaseClient } from "@supabase/supabase-js";
import {secrets} from "../utils/secrets";
/**
 * Fastify plugin that builds a Supabase client from configured secrets and
 * exposes it on every route handler as `server.supabase`.
 */
export default fp(async (server: FastifyInstance) => {
  const supabaseUrl = secrets.SUPABASE_URL
  // NOTE(review): this looks like the service-role key (full-access, server-side
  // only) — confirm it is never leaked to the frontend.
  const supabaseServiceKey = secrets.SUPABASE_SERVICE_ROLE_KEY
  const supabase: SupabaseClient = createClient(supabaseUrl, supabaseServiceKey);
  // Extend the Fastify instance so the client is reachable as server.supabase
  server.decorate("supabase", supabase);
});

// Type augmentation: make the `supabase` decorator known to TypeScript.
declare module "fastify" {
  interface FastifyInstance {
    supabase: SupabaseClient;
  }
}

View File

@@ -5,33 +5,26 @@ import swaggerUi from "@fastify/swagger-ui";
export default fp(async (server: FastifyInstance) => { export default fp(async (server: FastifyInstance) => {
await server.register(swagger, { await server.register(swagger, {
mode: "dynamic", mode: "dynamic", // wichtig: generiert echtes OpenAPI JSON
openapi: { openapi: {
info: { info: {
title: "FEDEO Backend API", title: "Multi-Tenant API",
description: "OpenAPI specification for the FEDEO backend", description: "API Dokumentation für dein Backend",
version: "1.0.0", version: "1.0.0",
}, },
servers: [{ url: "/" }], servers: [{ url: "http://localhost:3000" }],
components: {
securitySchemes: {
bearerAuth: {
type: "http",
scheme: "bearer",
bearerFormat: "JWT"
}
}
}
}, },
}); });
// @ts-ignore // @ts-ignore
await server.register(swaggerUi, { await server.register(swaggerUi, {
routePrefix: "/docs", routePrefix: "/docs", // UI erreichbar unter http://localhost:3000/docs
}); swagger: {
info: {
// Stable raw spec path title: "Multi-Tenant API",
server.get("/openapi.json", async (_req, reply) => { version: "1.0.0",
return reply.send(server.swagger()); },
},
exposeRoute: true,
}); });
}); });

View File

@@ -1,7 +1,5 @@
import { FastifyInstance, FastifyRequest } from "fastify"; import { FastifyInstance, FastifyRequest } from "fastify";
import fp from "fastify-plugin"; import fp from "fastify-plugin";
import { eq } from "drizzle-orm";
import { tenants } from "../../db/schema";
export default fp(async (server: FastifyInstance) => { export default fp(async (server: FastifyInstance) => {
server.addHook("preHandler", async (req, reply) => { server.addHook("preHandler", async (req, reply) => {
@@ -11,12 +9,11 @@ export default fp(async (server: FastifyInstance) => {
return; return;
} }
// Tenant aus DB laden // Tenant aus DB laden
const rows = await server.db const { data: tenant } = await server.supabase
.select() .from("tenants")
.from(tenants) .select("*")
.where(eq(tenants.portalDomain, host)) .eq("portalDomain", host)
.limit(1); .single();
const tenant = rows[0];
if(!tenant) { if(!tenant) {

View File

@@ -94,7 +94,6 @@ export default async function adminRoutes(server: FastifyInstance) {
short: tenants.short, short: tenants.short,
locked: tenants.locked, locked: tenants.locked,
numberRanges: tenants.numberRanges, numberRanges: tenants.numberRanges,
accountChart: tenants.accountChart,
extraModules: tenants.extraModules, extraModules: tenants.extraModules,
}) })
.from(authTenantUsers) .from(authTenantUsers)

View File

@@ -1,60 +1,11 @@
import { FastifyInstance } from "fastify" import { FastifyInstance } from "fastify"
import bcrypt from "bcrypt" import bcrypt from "bcrypt"
import { eq } from "drizzle-orm" import { eq } from "drizzle-orm"
import jwt from "jsonwebtoken"
import { secrets } from "../../utils/secrets"
import { authUsers } from "../../../db/schema" // wichtig: Drizzle Schema importieren! import { authUsers } from "../../../db/schema" // wichtig: Drizzle Schema importieren!
export default async function authRoutesAuthenticated(server: FastifyInstance) { export default async function authRoutesAuthenticated(server: FastifyInstance) {
// POST /auth/refresh — re-issues a fresh JWT (and session cookie) for a user
// that is already authenticated by upstream middleware (req.user populated).
server.post("/auth/refresh", {
  schema: {
    tags: ["Auth"],
    summary: "Refresh JWT for current authenticated user",
    response: {
      200: {
        type: "object",
        properties: {
          token: { type: "string" },
        },
        required: ["token"],
      },
      401: {
        type: "object",
        properties: {
          error: { type: "string" },
        },
        required: ["error"],
      },
    },
  },
}, async (req, reply) => {
  // Without an authenticated identity there is nothing to refresh.
  if (!req.user?.user_id) {
    return reply.code(401).send({ error: "Unauthorized" })
  }

  // Sign a new token carrying the same identity claims as the current session.
  const token = jwt.sign(
    {
      user_id: req.user.user_id,
      email: req.user.email,
      tenant_id: req.user.tenant_id,
    },
    secrets.JWT_SECRET!,
    { expiresIn: "6h" }
  )

  // Mirror the token into an httpOnly cookie; cookie lifetime (6h) matches the
  // JWT expiry. sameSite=none + secure only in production (cross-site frontend),
  // lax for local development over http.
  reply.setCookie("token", token, {
    path: "/",
    httpOnly: true,
    sameSite: process.env.NODE_ENV === "production" ? "none" : "lax",
    secure: process.env.NODE_ENV === "production",
    maxAge: 60 * 60 * 6,
  })

  return { token }
})
server.post("/auth/password/change", { server.post("/auth/password/change", {
schema: { schema: {
tags: ["Auth"], tags: ["Auth"],

View File

@@ -137,7 +137,7 @@ export default async function authRoutes(server: FastifyInstance) {
httpOnly: true, httpOnly: true,
sameSite: process.env.NODE_ENV === "production" ? "none" : "lax", sameSite: process.env.NODE_ENV === "production" ? "none" : "lax",
secure: process.env.NODE_ENV === "production", secure: process.env.NODE_ENV === "production",
maxAge: 60 * 60 * 6, maxAge: 60 * 60 * 3,
}); });
return { token }; return { token };

View File

@@ -51,11 +51,9 @@ export default async function meRoutes(server: FastifyInstance) {
name: tenants.name, name: tenants.name,
short: tenants.short, short: tenants.short,
locked: tenants.locked, locked: tenants.locked,
features: tenants.features,
extraModules: tenants.extraModules, extraModules: tenants.extraModules,
businessInfo: tenants.businessInfo, businessInfo: tenants.businessInfo,
numberRanges: tenants.numberRanges, numberRanges: tenants.numberRanges,
accountChart: tenants.accountChart,
dokuboxkey: tenants.dokuboxkey, dokuboxkey: tenants.dokuboxkey,
standardEmailForInvoices: tenants.standardEmailForInvoices, standardEmailForInvoices: tenants.standardEmailForInvoices,
standardPaymentDays: tenants.standardPaymentDays, standardPaymentDays: tenants.standardPaymentDays,

View File

@@ -4,19 +4,10 @@ import dayjs from "dayjs"
import { secrets } from "../utils/secrets" import { secrets } from "../utils/secrets"
import { insertHistoryItem } from "../utils/history" import { insertHistoryItem } from "../utils/history"
import { decrypt, encrypt } from "../utils/crypt"
import { DE_BANK_CODE_TO_NAME } from "../utils/deBankCodes"
import { DE_BANK_CODE_TO_BIC } from "../utils/deBankBics"
import { import {
bankrequisitions, bankrequisitions,
bankstatements,
createddocuments,
customers,
entitybankaccounts,
incominginvoices,
statementallocations, statementallocations,
vendors,
} from "../../db/schema" } from "../../db/schema"
import { import {
@@ -26,322 +17,6 @@ import {
export default async function bankingRoutes(server: FastifyInstance) { export default async function bankingRoutes(server: FastifyInstance) {
/**
 * Canonicalizes an IBAN-like string: removes all whitespace and upper-cases it.
 * Nullish or empty input yields "".
 */
const normalizeIban = (value?: string | null) => {
  const raw = value ? String(value) : ""
  return raw.replace(/\s+/g, "").toUpperCase()
}
/**
 * Picks the counterparty IBAN from a bank-statement row.
 * For customers the debtor side is preferred when the amount is >= 0; for
 * vendors only when it is strictly positive. Falls back to the other side
 * when the preferred IBAN is empty. Returns `{ iban }` or null.
 */
const pickPartnerBankData = (statement: any, partnerType: "customer" | "vendor") => {
  if (!statement) return null
  const amount = Number(statement.amount)
  const debitFirst = partnerType === "customer" ? amount >= 0 : amount > 0
  // Try the preferred side first, then the opposite side as a fallback.
  const candidates = debitFirst
    ? [statement.debIban, statement.credIban]
    : [statement.credIban, statement.debIban]
  for (const candidate of candidates) {
    const iban = normalizeIban(candidate)
    if (iban) {
      return { iban }
    }
  }
  return null
}
/**
 * Returns a copy of `infoData` with `iban` and/or `bankAccountId` merged in.
 * - IBANs are deduplicated into info.bankingIbans (existing entries are
 *   re-normalized first); info.bankingIban is set once as the primary IBAN.
 * - bankAccountId is appended to info.bankAccountIds if not already present.
 * When neither value is given the input (or {}) is returned unchanged; the
 * input object is never mutated.
 */
const mergePartnerIban = (infoData: Record<string, any>, iban: string, bankAccountId?: number | null) => {
  const nothingToMerge = !iban && !bankAccountId
  if (nothingToMerge) return infoData || {}
  const isObject = infoData && typeof infoData === "object"
  const info: Record<string, any> = isObject ? { ...infoData } : {}
  if (iban) {
    const previous: string[] = Array.isArray(info.bankingIbans) ? info.bankingIbans : []
    const normalizedPrevious = previous.map((entry: string) => normalizeIban(entry))
    info.bankingIbans = Array.from(new Set([...normalizedPrevious, iban]))
    if (!info.bankingIban) {
      info.bankingIban = iban
    }
  }
  if (bankAccountId) {
    const ids: number[] = Array.isArray(info.bankAccountIds) ? info.bankAccountIds : []
    if (!ids.includes(bankAccountId)) {
      info.bankAccountIds = [...ids, bankAccountId]
    }
  }
  return info
}
const ibanLengthByCountry: Record<string, number> = {
DE: 22,
AT: 20,
CH: 21,
NL: 18,
BE: 16,
FR: 27,
ES: 24,
IT: 27,
LU: 20,
}
const isValidIbanLocal = (iban: string) => {
const normalized = normalizeIban(iban)
if (!normalized || normalized.length < 15 || normalized.length > 34) return false
if (!/^[A-Z]{2}[0-9]{2}[A-Z0-9]+$/.test(normalized)) return false
const country = normalized.slice(0, 2)
const expectedLength = ibanLengthByCountry[country]
if (expectedLength && normalized.length !== expectedLength) return false
const rearranged = normalized.slice(4) + normalized.slice(0, 4)
let numeric = ""
for (const ch of rearranged) {
if (ch >= "A" && ch <= "Z") numeric += (ch.charCodeAt(0) - 55).toString()
else numeric += ch
}
let remainder = 0
for (const digit of numeric) {
remainder = (remainder * 10 + Number(digit)) % 97
}
return remainder === 1
}
/**
 * Derives bank metadata (name, BIC, bank code) purely offline from an IBAN.
 * Only German IBANs are handled: for "DE" + 22 characters the Bankleitzahl
 * sits at positions 5-12 and is looked up in the static code tables.
 * Returns null for invalid or non-German IBANs.
 */
const resolveGermanBankDataFromIbanLocal = (iban: string) => {
  const normalized = normalizeIban(iban)
  if (!isValidIbanLocal(normalized)) return null
  // For DE IBANs the bank code (BLZ) can be read locally from positions 5-12.
  if (normalized.startsWith("DE") && normalized.length === 22) {
    const bankCode = normalized.slice(4, 12)
    // Fall back to a German "Unbekannt" placeholder when the BLZ is unknown.
    const bankName = DE_BANK_CODE_TO_NAME[bankCode] || `Unbekannt (BLZ ${bankCode})`
    const bic = DE_BANK_CODE_TO_BIC[bankCode] || null
    return {
      bankName,
      bic,
      bankCode,
    }
  }
  return null
}
/**
 * Finds — or lazily creates — the tenant's `entitybankaccounts` row for an
 * IBAN and returns its numeric id (null when the IBAN is empty or the insert
 * returned nothing). IBANs are stored encrypted, so matching requires
 * decrypting every row of the tenant and comparing normalized values.
 * When a match exists, stale bank name / BIC are refreshed from the locally
 * resolved bank data.
 */
const resolveEntityBankAccountId = async (
  tenantId: number,
  userId: string,
  iban: string
) => {
  const normalizedIban = normalizeIban(iban)
  if (!normalizedIban) return null
  // Offline lookup of bank name / BIC (German IBANs only; may be null).
  const bankData = resolveGermanBankDataFromIbanLocal(normalizedIban)
  // Load every account of the tenant — encrypted IBANs cannot be matched in SQL.
  const allAccounts = await server.db
    .select({
      id: entitybankaccounts.id,
      ibanEncrypted: entitybankaccounts.ibanEncrypted,
      bankNameEncrypted: entitybankaccounts.bankNameEncrypted,
      bicEncrypted: entitybankaccounts.bicEncrypted,
    })
    .from(entitybankaccounts)
    .where(eq(entitybankaccounts.tenant, tenantId))
  // Decrypt and compare; rows that fail to decrypt are treated as non-matches.
  const existing = allAccounts.find((row) => {
    if (!row.ibanEncrypted) return false
    try {
      const decryptedIban = decrypt(row.ibanEncrypted as any)
      return normalizeIban(decryptedIban) === normalizedIban
    } catch {
      return false
    }
  })
  if (existing?.id) {
    // Refresh bank name / BIC when the locally resolved data disagrees with
    // what is currently stored (decrypt failures count as empty).
    if (bankData) {
      let currentBankName = ""
      let currentBic = ""
      try {
        currentBankName = String(decrypt(existing.bankNameEncrypted as any) || "").trim()
      } catch {
        currentBankName = ""
      }
      try {
        currentBic = String(decrypt((existing as any).bicEncrypted as any) || "").trim()
      } catch {
        currentBic = ""
      }
      const nextBankName = bankData?.bankName || "Unbekannt"
      const nextBic = bankData?.bic || "UNBEKANNT"
      if (currentBankName !== nextBankName || currentBic !== nextBic) {
        await server.db
          .update(entitybankaccounts)
          .set({
            bankNameEncrypted: encrypt(nextBankName),
            bicEncrypted: encrypt(nextBic),
            updatedAt: new Date(),
            updatedBy: userId,
          })
          .where(and(eq(entitybankaccounts.id, Number(existing.id)), eq(entitybankaccounts.tenant, tenantId)))
      }
    }
    return Number(existing.id)
  }
  // No match — create a new encrypted account row for this tenant.
  const [created] = await server.db
    .insert(entitybankaccounts)
    .values({
      tenant: tenantId,
      ibanEncrypted: encrypt(normalizedIban),
      bicEncrypted: encrypt(bankData?.bic || "UNBEKANNT"),
      bankNameEncrypted: encrypt(bankData?.bankName || "Unbekannt"),
      description: "Automatisch aus Bankbuchung übernommen",
      updatedAt: new Date(),
      updatedBy: userId,
    })
    .returning({ id: entitybankaccounts.id })
  return created?.id ? Number(created.id) : null
}
// GET /banking/iban/:iban — offline IBAN validation + German bank metadata
// lookup. Never calls an external service; bic/bankName/bankCode are null for
// non-German (or unresolvable) IBANs.
server.get("/banking/iban/:iban", async (req, reply) => {
  try {
    const { iban } = req.params as { iban: string }
    const normalized = normalizeIban(iban)
    if (!normalized) {
      return reply.code(400).send({ error: "IBAN missing" })
    }
    const valid = isValidIbanLocal(normalized)
    const bankData = resolveGermanBankDataFromIbanLocal(normalized)
    return reply.send({
      iban: normalized,
      valid,
      bic: bankData?.bic || null,
      bankName: bankData?.bankName || null,
      bankCode: bankData?.bankCode || null,
    })
  } catch (err) {
    server.log.error(err)
    return reply.code(500).send({ error: "Failed to resolve IBAN data" })
  }
})
/**
 * After a statement allocation is linked to a created document, copies the
 * counterparty IBAN from the bank statement into the matched customer's
 * infoData (bankingIban/bankingIbans/bankAccountIds). Silently returns when
 * any link in the chain (statement, document, customer, IBAN) is missing.
 */
const assignIbanFromStatementToCustomer = async (tenantId: number, userId: string, statementId: number, createdDocumentId?: number) => {
  if (!createdDocumentId) return
  const [statement] = await server.db
    .select()
    .from(bankstatements)
    .where(and(eq(bankstatements.id, statementId), eq(bankstatements.tenant, tenantId)))
    .limit(1)
  if (!statement) return
  // Resolve the customer via the created document the allocation points at.
  const [doc] = await server.db
    .select({ customer: createddocuments.customer })
    .from(createddocuments)
    .where(and(eq(createddocuments.id, createdDocumentId), eq(createddocuments.tenant, tenantId)))
    .limit(1)
  const customerId = doc?.customer
  if (!customerId) return
  const partnerBank = pickPartnerBankData(statement, "customer")
  if (!partnerBank?.iban) return
  const [customer] = await server.db
    .select({ id: customers.id, infoData: customers.infoData })
    .from(customers)
    .where(and(eq(customers.id, customerId), eq(customers.tenant, tenantId)))
    .limit(1)
  if (!customer) return
  // Ensure an entitybankaccounts row exists for the IBAN (may create one).
  const bankAccountId = await resolveEntityBankAccountId(
    tenantId,
    userId,
    partnerBank.iban
  )
  // Merge IBAN + bank account id into the customer's infoData (non-mutating).
  const newInfoData = mergePartnerIban(
    (customer.infoData || {}) as Record<string, any>,
    partnerBank.iban,
    bankAccountId
  )
  await server.db
    .update(customers)
    .set({
      infoData: newInfoData,
      updatedAt: new Date(),
      updatedBy: userId,
    })
    .where(and(eq(customers.id, customerId), eq(customers.tenant, tenantId)))
}
/**
 * Vendor-side twin of assignIbanFromStatementToCustomer: when a statement
 * allocation is linked to an incoming invoice, copies the counterparty IBAN
 * into the matched vendor's infoData. Silently returns when any link in the
 * chain (statement, invoice, vendor, IBAN) is missing.
 */
const assignIbanFromStatementToVendor = async (tenantId: number, userId: string, statementId: number, incomingInvoiceId?: number) => {
  if (!incomingInvoiceId) return
  const [statement] = await server.db
    .select()
    .from(bankstatements)
    .where(and(eq(bankstatements.id, statementId), eq(bankstatements.tenant, tenantId)))
    .limit(1)
  if (!statement) return
  // Resolve the vendor via the incoming invoice the allocation points at.
  const [invoice] = await server.db
    .select({ vendor: incominginvoices.vendor })
    .from(incominginvoices)
    .where(and(eq(incominginvoices.id, incomingInvoiceId), eq(incominginvoices.tenant, tenantId)))
    .limit(1)
  const vendorId = invoice?.vendor
  if (!vendorId) return
  const partnerBank = pickPartnerBankData(statement, "vendor")
  if (!partnerBank?.iban) return
  const [vendor] = await server.db
    .select({ id: vendors.id, infoData: vendors.infoData })
    .from(vendors)
    .where(and(eq(vendors.id, vendorId), eq(vendors.tenant, tenantId)))
    .limit(1)
  if (!vendor) return
  // Ensure an entitybankaccounts row exists for the IBAN (may create one).
  const bankAccountId = await resolveEntityBankAccountId(
    tenantId,
    userId,
    partnerBank.iban
  )
  // Merge IBAN + bank account id into the vendor's infoData (non-mutating).
  const newInfoData = mergePartnerIban(
    (vendor.infoData || {}) as Record<string, any>,
    partnerBank.iban,
    bankAccountId
  )
  await server.db
    .update(vendors)
    .set({
      infoData: newInfoData,
      updatedAt: new Date(),
      updatedBy: userId,
    })
    .where(and(eq(vendors.id, vendorId), eq(vendors.tenant, tenantId)))
}
// ------------------------------------------------------------------ // ------------------------------------------------------------------
// 🔐 GoCardLess Token Handling // 🔐 GoCardLess Token Handling
@@ -496,35 +171,9 @@ export default async function bankingRoutes(server: FastifyInstance) {
const createdRecord = inserted[0] const createdRecord = inserted[0]
if (createdRecord?.createddocument) {
try {
await assignIbanFromStatementToCustomer(
req.user.tenant_id,
req.user.user_id,
Number(createdRecord.bankstatement),
Number(createdRecord.createddocument)
)
} catch (err) {
server.log.warn({ err, allocationId: createdRecord.id }, "Konnte IBAN nicht automatisch beim Kunden hinterlegen")
}
}
if (createdRecord?.incominginvoice) {
try {
await assignIbanFromStatementToVendor(
req.user.tenant_id,
req.user.user_id,
Number(createdRecord.bankstatement),
Number(createdRecord.incominginvoice)
)
} catch (err) {
server.log.warn({ err, allocationId: createdRecord.id }, "Konnte IBAN nicht automatisch beim Lieferanten hinterlegen")
}
}
await insertHistoryItem(server, { await insertHistoryItem(server, {
entity: "bankstatements", entity: "bankstatements",
entityId: Number(createdRecord.bankstatement), entityId: createdRecord.id,
action: "created", action: "created",
created_by: req.user.user_id, created_by: req.user.user_id,
tenant_id: req.user.tenant_id, tenant_id: req.user.tenant_id,
@@ -567,7 +216,7 @@ export default async function bankingRoutes(server: FastifyInstance) {
await insertHistoryItem(server, { await insertHistoryItem(server, {
entity: "bankstatements", entity: "bankstatements",
entityId: Number(old.bankstatement), entityId: id,
action: "deleted", action: "deleted",
created_by: req.user.user_id, created_by: req.user.user_id,
tenant_id: req.user.tenant_id, tenant_id: req.user.tenant_id,

View File

@@ -1,58 +0,0 @@
import { FastifyInstance } from "fastify";
import { eq } from "drizzle-orm";
import { db } from "../../../db"; // <--- PFAD ZUR DB INSTANZ ANPASSEN
import { devices } from "../../../db/schema";
// Payload we expect from the ESP32 health ping.
interface HealthBody {
  terminal_id: string; // external device id, matched against devices.externalId
  ip_address?: string;
  wifi_rssi?: number;
  uptime_seconds?: number;
  heap_free?: number;
  [key: string]: any; // allows additional, unknown fields
}

/**
 * Device management routes. POST /health receives periodic health pings from
 * terminals, stamps `lastSeen` and stores the raw payload as `lastDebugInfo`.
 */
export default async function devicesManagementRoutes(server: FastifyInstance) {
  server.post<{ Body: HealthBody }>(
    "/health",
    async (req, reply) => {
      try {
        const data = req.body;

        // 1. Validation: do we have a device id?
        if (!data.terminal_id) {
          console.warn("Health Check ohne terminal_id empfangen:", data);
          return reply.code(400).send({ error: "terminal_id missing" });
        }

        console.log(`Health Ping von Device ${data.terminal_id}`, data);

        // 2. Database update: find the device by its externalId.
        const result = await server.db
          .update(devices)
          .set({
            lastSeen: new Date(), // sets the timestamp to NOW
            lastDebugInfo: data // stores the whole JSON payload
          })
          .where(eq(devices.externalId, data.terminal_id))
          .returning({ id: devices.id }); // returns the id if a row matched

        // 3. Check whether the device was found.
        if (result.length === 0) {
          console.warn(`Unbekanntes Terminal versucht Health Check: ${data.terminal_id}`);
          // Optional: send 404 or silently ignore (security consideration)
          return reply.code(404).send({ error: "Device not found" });
        }

        // All good.
        return reply.code(200).send({ status: "ok" });
      } catch (err: any) {
        console.error("Health Check Error:", err);
        return reply.code(500).send({ error: err.message });
      }
    }
  );
}

View File

@@ -1,39 +1,37 @@
import { FastifyInstance } from "fastify"; import { FastifyInstance } from "fastify";
import { and, desc, eq } from "drizzle-orm"; import {and, desc, eq} from "drizzle-orm";
import { authProfiles, devices, stafftimeevents } from "../../../db/schema"; import {authProfiles, devices, stafftimeevents} from "../../../db/schema";
export default async function devicesRFIDRoutes(server: FastifyInstance) { export default async function devicesRFIDRoutes(server: FastifyInstance) {
server.post( server.post(
"/rfid/createevent/:terminal_id", "/rfid/createevent/:terminal_id",
async (req, reply) => { async (req, reply) => {
try { try {
// 1. Timestamp aus dem Body holen (optional)
const { rfid_id, timestamp } = req.body as {
rfid_id: string,
timestamp?: number // Kann undefined sein (Live) oder Zahl (Offline)
};
const { terminal_id } = req.params as { terminal_id: string }; const {rfid_id} = req.body as {rfid_id: string};
const {terminal_id} = req.params as {terminal_id: string};
if (!rfid_id || !terminal_id) { if(!rfid_id ||!terminal_id) {
console.log(`Missing Params`); console.log(`Missing Params`);
return reply.code(400).send(`Missing Params`); return reply.code(400).send(`Missing Params`)
} }
// 2. Gerät suchen
const device = await server.db const device = await server.db
.select() .select()
.from(devices) .from(devices)
.where(eq(devices.externalId, terminal_id)) .where(
eq(devices.externalId, terminal_id)
)
.limit(1) .limit(1)
.then(rows => rows[0]); .then(rows => rows[0]);
if (!device) { if(!device) {
console.log(`Device ${terminal_id} not found`); console.log(`Device ${terminal_id} not found`);
return reply.code(400).send(`Device ${terminal_id} not found`); return reply.code(400).send(`Device ${terminal_id} not found`)
} }
// 3. User-Profil suchen
const profile = await server.db const profile = await server.db
.select() .select()
.from(authProfiles) .from(authProfiles)
@@ -46,56 +44,55 @@ export default async function devicesRFIDRoutes(server: FastifyInstance) {
.limit(1) .limit(1)
.then(rows => rows[0]); .then(rows => rows[0]);
if (!profile) { if(!profile) {
console.log(`Profile for Token ${rfid_id} not found`); console.log(`Profile for Token ${rfid_id} not found`);
return reply.code(400).send(`Profile for Token ${rfid_id} not found`); return reply.code(400).send(`Profile for Token ${rfid_id} not found`)
} }
// 4. Letztes Event suchen (für Status-Toggle Work Start/End)
const lastEvent = await server.db const lastEvent = await server.db
.select() .select()
.from(stafftimeevents) .from(stafftimeevents)
.where(eq(stafftimeevents.user_id, profile.user_id)) .where(
.orderBy(desc(stafftimeevents.eventtime)) eq(stafftimeevents.user_id, profile.user_id)
)
.orderBy(desc(stafftimeevents.eventtime)) // <-- Sortierung: Neuestes zuerst
.limit(1) .limit(1)
.then(rows => rows[0]); .then(rows => rows[0]);
// 5. Zeitstempel Logik (WICHTIG!) console.log(lastEvent)
// Der ESP32 sendet Unix-Timestamp in SEKUNDEN. JS braucht MILLISEKUNDEN.
// Wenn kein Timestamp kommt (0 oder undefined), nehmen wir JETZT.
const actualEventTime = (timestamp && timestamp > 0)
? new Date(timestamp * 1000)
: new Date();
// 6. Event Typ bestimmen (Toggle Logik)
// Falls noch nie gestempelt wurde (lastEvent undefined), fangen wir mit start an.
const nextEventType = (lastEvent?.eventtype === "work_start")
? "work_end"
: "work_start";
const dataToInsert = { const dataToInsert = {
tenant_id: device.tenant, tenant_id: device.tenant,
user_id: profile.user_id, user_id: profile.user_id,
actortype: "system", actortype: "system",
eventtime: actualEventTime, // Hier nutzen wir die berechnete Zeit eventtime: new Date(),
eventtype: nextEventType, eventtype: lastEvent.eventtype === "work_start" ? "work_end" : "work_start",
source: "TERMINAL" // Habe ich von WEB auf TERMINAL geändert (optional) source: "WEB"
}; }
console.log(`New Event for ${profile.user_id}: ${nextEventType} @ ${actualEventTime.toISOString()}`); console.log(dataToInsert)
const [created] = await server.db const [created] = await server.db
.insert(stafftimeevents) .insert(stafftimeevents)
//@ts-ignore //@ts-ignore
.values(dataToInsert) .values(dataToInsert)
.returning(); .returning()
return created;
return created
} catch (err: any) { } catch (err: any) {
console.error(err); console.error(err)
return reply.code(400).send({ error: err.message }); return reply.code(400).send({ error: err.message })
} }
console.log(req.body)
return
} }
); );
} }

View File

@@ -1,4 +1,6 @@
import { FastifyInstance } from "fastify"; import { FastifyInstance } from "fastify";
import jwt from "jsonwebtoken";
import {insertHistoryItem} from "../utils/history";
import {buildExportZip} from "../utils/export/datev"; import {buildExportZip} from "../utils/export/datev";
import {s3} from "../utils/s3"; import {s3} from "../utils/s3";
import {GetObjectCommand, PutObjectCommand} from "@aws-sdk/client-s3" import {GetObjectCommand, PutObjectCommand} from "@aws-sdk/client-s3"
@@ -7,8 +9,6 @@ import dayjs from "dayjs";
import {randomUUID} from "node:crypto"; import {randomUUID} from "node:crypto";
import {secrets} from "../utils/secrets"; import {secrets} from "../utils/secrets";
import {createSEPAExport} from "../utils/export/sepa"; import {createSEPAExport} from "../utils/export/sepa";
import {generatedexports} from "../../db/schema";
import {eq} from "drizzle-orm";
const createDatevExport = async (server:FastifyInstance,req:any,startDate,endDate,beraternr,mandantennr) => { const createDatevExport = async (server:FastifyInstance,req:any,startDate,endDate,beraternr,mandantennr) => {
try { try {
@@ -45,21 +45,25 @@ const createDatevExport = async (server:FastifyInstance,req:any,startDate,endDat
console.log(url) console.log(url)
// 5) In Haupt-DB speichern // 5) In Supabase-DB speichern
const inserted = await server.db const { data, error } = await server.supabase
.insert(generatedexports) .from("exports")
.values({ .insert([
tenantId: req.user.tenant_id, {
startDate: new Date(startDate), tenant_id: req.user.tenant_id,
endDate: new Date(endDate), start_date: startDate,
validUntil: dayjs().add(24, "hours").toDate(), end_date: endDate,
filePath: fileKey, valid_until: dayjs().add(24,"hours").toISOString(),
url, file_path: fileKey,
type: "datev", url: url,
}) created_at: new Date().toISOString(),
.returning() },
])
.select()
.single()
console.log(inserted[0]) console.log(data)
console.log(error)
} catch (error) { } catch (error) {
console.log(error) console.log(error)
} }
@@ -116,22 +120,9 @@ export default async function exportRoutes(server: FastifyInstance) {
//List Exports Available for Download //List Exports Available for Download
server.get("/exports", async (req,reply) => { server.get("/exports", async (req,reply) => {
const data = await server.db const {data,error} = await server.supabase.from("exports").select().eq("tenant_id",req.user.tenant_id)
.select({
id: generatedexports.id,
created_at: generatedexports.createdAt,
tenant_id: generatedexports.tenantId,
start_date: generatedexports.startDate,
end_date: generatedexports.endDate,
valid_until: generatedexports.validUntil,
type: generatedexports.type,
url: generatedexports.url,
file_path: generatedexports.filePath,
})
.from(generatedexports)
.where(eq(generatedexports.tenantId, req.user.tenant_id))
console.log(data) console.log(data,error)
reply.send(data) reply.send(data)
}) })

View File

@@ -1,6 +1,6 @@
import { FastifyInstance } from "fastify"; import { FastifyInstance } from "fastify";
import {createInvoicePDF, createTimeSheetPDF} from "../utils/pdf"; import {createInvoicePDF, createTimeSheetPDF} from "../utils/pdf";
import {encodeBase64ToNiimbot, generateLabel, useNextNumberRangeNumber} from "../utils/functions"; //import {encodeBase64ToNiimbot, generateLabel, useNextNumberRangeNumber} from "../utils/functions";
import dayjs from "dayjs"; import dayjs from "dayjs";
//import { ready as zplReady } from 'zpl-renderer-js' //import { ready as zplReady } from 'zpl-renderer-js'
//import { renderZPL } from "zpl-image"; //import { renderZPL } from "zpl-image";
@@ -15,6 +15,7 @@ import timezone from "dayjs/plugin/timezone.js";
import {generateTimesEvaluation} from "../modules/time/evaluation.service"; import {generateTimesEvaluation} from "../modules/time/evaluation.service";
import {citys} from "../../db/schema"; import {citys} from "../../db/schema";
import {eq} from "drizzle-orm"; import {eq} from "drizzle-orm";
import {useNextNumberRangeNumber} from "../utils/functions";
import {executeManualGeneration, finishManualGeneration} from "../modules/serialexecution.service"; import {executeManualGeneration, finishManualGeneration} from "../modules/serialexecution.service";
dayjs.extend(customParseFormat) dayjs.extend(customParseFormat)
dayjs.extend(isoWeek) dayjs.extend(isoWeek)
@@ -99,25 +100,31 @@ export default async function functionRoutes(server: FastifyInstance) {
server.get('/functions/check-zip/:zip', async (req, reply) => { server.get('/functions/check-zip/:zip', async (req, reply) => {
const { zip } = req.params as { zip: string } const { zip } = req.params as { zip: string }
const normalizedZip = String(zip || "").replace(/\D/g, "")
if (normalizedZip.length !== 5) { if (!zip) {
return reply.code(400).send({ error: 'ZIP must contain exactly 5 digits' }) return reply.code(400).send({ error: 'ZIP is required' })
} }
try { try {
const data = await server.db //@ts-ignore
const data = await server.db.select().from(citys).where(eq(citys.zip,zip))
/*const { data, error } = await server.supabase
.from('citys')
.select() .select()
.from(citys) .eq('zip', zip)
.where(eq(citys.zip, Number(normalizedZip))) .maybeSingle()
if (error) {
console.log(error)
return reply.code(500).send({ error: 'Database error' })
}*/
if (!data.length) { if (!data) {
return reply.code(404).send({ error: 'ZIP not found' }) return reply.code(404).send({ error: 'ZIP not found' })
} }
const city = data[0]
//districtMap //districtMap
const bundeslaender = [ const bundeslaender = [
{ code: 'DE-BW', name: 'Baden-Württemberg' }, { code: 'DE-BW', name: 'Baden-Württemberg' },
@@ -141,8 +148,9 @@ export default async function functionRoutes(server: FastifyInstance) {
return reply.send({ return reply.send({
...city, ...data,
state_code: bundeslaender.find(i => i.name === city.countryName)?.code || null //@ts-ignore
state_code: bundeslaender.find(i => i.name === data.countryName)
}) })
} catch (err) { } catch (err) {
console.log(err) console.log(err)
@@ -171,25 +179,44 @@ export default async function functionRoutes(server: FastifyInstance) {
await server.services.prepareIncomingInvoices.run(req.user.tenant_id) await server.services.prepareIncomingInvoices.run(req.user.tenant_id)
}) })
server.post('/functions/services/syncdokubox', async (req, reply) => {
await server.services.dokuboxSync.run() /*server.post('/print/zpl/preview', async (req, reply) => {
const { zpl, widthMm = 50, heightMm = 30, dpmm = 8, asBase64 = false } = req.body as {zpl:string,widthMm:number,heightMm:number,dpmm:number,asBase64:string}
console.log(widthMm,heightMm,dpmm)
if (!zpl) {
return reply.code(400).send({ error: 'Missing ZPL string' })
}
try {
// 1⃣ Renderer initialisieren
const { api } = await zplReady
// 2⃣ Rendern (liefert base64-encoded PNG)
const base64Png = await api.zplToBase64Async(zpl, widthMm, heightMm, dpmm)
return await encodeBase64ToNiimbot(base64Png, 'top')
} catch (err) {
console.error('[ZPL Preview Error]', err)
return reply.code(500).send({ error: err.message || 'Failed to render ZPL' })
}
}) })
server.post('/print/label', async (req, reply) => { server.post('/print/label', async (req, reply) => {
const { context, width = 584, height = 354 } = req.body as {context:any,width:number,height:number} const { context, width=584, heigth=354 } = req.body as {context:any,width:number,heigth:number}
try { try {
const base64 = await generateLabel(context,width,height) const base64 = await generateLabel(context,width,heigth)
return { return {
encoded: await encodeBase64ToNiimbot(base64, 'top'), encoded: await encodeBase64ToNiimbot(base64, 'top'),
base64: base64 base64: base64
} }
} catch (err) { } catch (err) {
console.error('[Label Render Error]', err) console.error('[ZPL Preview Error]', err)
return reply.code(500).send({ error: err.message || 'Failed to render label' }) return reply.code(500).send({ error: err.message || 'Failed to render ZPL' })
} }
}) })*/
} }

View File

@@ -3,11 +3,12 @@ import { FastifyInstance } from "fastify";
export default async function routes(server: FastifyInstance) { export default async function routes(server: FastifyInstance) {
server.get("/ping", async () => { server.get("/ping", async () => {
// Testquery gegen DB // Testquery gegen DB
const result = await server.db.execute("SELECT NOW()"); const { data, error } = await server.supabase.from("tenants").select("id").limit(1);
return { return {
status: "ok", status: "ok",
db: JSON.stringify(result.rows[0]), db: error ? "not connected" : "connected",
tenant_count: data?.length ?? 0
}; };
}); });
} }

View File

@@ -3,9 +3,8 @@ import { FastifyPluginAsync } from 'fastify'
import { createConversation } from '../modules/helpdesk/helpdesk.conversation.service.js' import { createConversation } from '../modules/helpdesk/helpdesk.conversation.service.js'
import { addMessage } from '../modules/helpdesk/helpdesk.message.service.js' import { addMessage } from '../modules/helpdesk/helpdesk.message.service.js'
import { getOrCreateContact } from '../modules/helpdesk/helpdesk.contact.service.js' import { getOrCreateContact } from '../modules/helpdesk/helpdesk.contact.service.js'
import { findCustomerOrContactByEmailOrDomain } from "../utils/helpers"; import {extractDomain, findCustomerOrContactByEmailOrDomain} from "../utils/helpers";
import { eq } from "drizzle-orm"; import {useNextNumberRangeNumber} from "../utils/functions";
import { helpdesk_conversations, helpdesk_messages } from "../../db/schema";
// ------------------------------------------------------------- // -------------------------------------------------------------
// 📧 Interne M2M-Route für eingehende E-Mails // 📧 Interne M2M-Route für eingehende E-Mails
@@ -53,12 +52,12 @@ const helpdeskInboundEmailRoutes: FastifyPluginAsync = async (server) => {
// 3⃣ Konversation anhand In-Reply-To suchen // 3⃣ Konversation anhand In-Reply-To suchen
let conversationId: string | null = null let conversationId: string | null = null
if (in_reply_to) { if (in_reply_to) {
const msg = await server.db const { data: msg } = await server.supabase
.select({ conversationId: helpdesk_messages.conversationId }) .from('helpdesk_messages')
.from(helpdesk_messages) .select('conversation_id')
.where(eq(helpdesk_messages.externalMessageId, in_reply_to)) .eq('external_message_id', in_reply_to)
.limit(1) .maybeSingle()
conversationId = msg[0]?.conversationId || null conversationId = msg?.conversation_id || null
} }
// 4⃣ Neue Konversation anlegen falls keine existiert // 4⃣ Neue Konversation anlegen falls keine existiert
@@ -74,12 +73,12 @@ const helpdeskInboundEmailRoutes: FastifyPluginAsync = async (server) => {
}) })
conversationId = conversation.id conversationId = conversation.id
} else { } else {
const rows = await server.db const { data } = await server.supabase
.select() .from('helpdesk_conversations')
.from(helpdesk_conversations) .select('*')
.where(eq(helpdesk_conversations.id, conversationId)) .eq('id', conversationId)
.limit(1) .single()
conversation = rows[0] conversation = data
} }
// 5⃣ Nachricht speichern // 5⃣ Nachricht speichern
@@ -97,7 +96,7 @@ const helpdeskInboundEmailRoutes: FastifyPluginAsync = async (server) => {
return res.status(201).send({ return res.status(201).send({
success: true, success: true,
conversation_id: conversationId, conversation_id: conversationId,
ticket_number: conversation?.ticket_number || conversation?.ticketNumber, ticket_number: conversation.ticket_number,
}) })
}) })
} }

View File

@@ -3,9 +3,70 @@ import { FastifyPluginAsync } from 'fastify'
import { createConversation } from '../modules/helpdesk/helpdesk.conversation.service.js' import { createConversation } from '../modules/helpdesk/helpdesk.conversation.service.js'
import { addMessage } from '../modules/helpdesk/helpdesk.message.service.js' import { addMessage } from '../modules/helpdesk/helpdesk.message.service.js'
import { getOrCreateContact } from '../modules/helpdesk/helpdesk.contact.service.js' import { getOrCreateContact } from '../modules/helpdesk/helpdesk.contact.service.js'
import { findCustomerOrContactByEmailOrDomain } from "../utils/helpers";
import { eq } from "drizzle-orm"; /**
import { helpdesk_channel_instances } from "../../db/schema"; * Öffentliche Route zum Empfang eingehender Kontaktformular-Nachrichten.
* Authentifizierung: über `public_token` aus helpdesk_channel_instances
*/
function extractDomain(email) {
if (!email) return null
const parts = email.split("@")
return parts.length === 2 ? parts[1].toLowerCase() : null
}
async function findCustomerOrContactByEmailOrDomain(server,fromMail, tenantId) {
const sender = fromMail
const senderDomain = extractDomain(sender)
if (!senderDomain) return null
// 1⃣ Direkter Match über contacts
const { data: contactMatch } = await server.supabase
.from("contacts")
.select("id, customer")
.eq("email", sender)
.eq("tenant", tenantId)
.maybeSingle()
if (contactMatch?.customer_id) return {
customer: contactMatch.customer,
contact: contactMatch.id
}
// 2⃣ Kunden laden, bei denen E-Mail oder Rechnungsmail passt
const { data: customers, error } = await server.supabase
.from("customers")
.select("id, infoData")
.eq("tenant", tenantId)
if (error) {
console.error(`[Helpdesk] Fehler beim Laden der Kunden:`, error.message)
return null
}
// 3⃣ Durch Kunden iterieren und prüfen
for (const c of customers || []) {
const info = c.infoData || {}
const email = info.email?.toLowerCase()
const invoiceEmail = info.invoiceEmail?.toLowerCase()
const emailDomain = extractDomain(email)
const invoiceDomain = extractDomain(invoiceEmail)
// exakter Match oder Domain-Match
if (
sender === email ||
sender === invoiceEmail ||
senderDomain === emailDomain ||
senderDomain === invoiceDomain
) {
return {customer: c.id, contact:null}
}
}
return null
}
const helpdeskInboundRoutes: FastifyPluginAsync = async (server) => { const helpdeskInboundRoutes: FastifyPluginAsync = async (server) => {
// Öffentliche POST-Route // Öffentliche POST-Route
@@ -24,18 +85,17 @@ const helpdeskInboundRoutes: FastifyPluginAsync = async (server) => {
} }
// 1⃣ Kanalinstanz anhand des Tokens ermitteln // 1⃣ Kanalinstanz anhand des Tokens ermitteln
const channels = await server.db const { data: channel, error: channelError } = await server.supabase
.select() .from('helpdesk_channel_instances')
.from(helpdesk_channel_instances) .select('*')
.where(eq(helpdesk_channel_instances.publicToken, public_token)) .eq('public_token', public_token)
.limit(1) .single()
const channel = channels[0]
if (!channel) { if (channelError || !channel) {
return res.status(404).send({ error: 'Invalid channel token' }) return res.status(404).send({ error: 'Invalid channel token' })
} }
const tenant_id = channel.tenantId const tenant_id = channel.tenant_id
const channel_instance_id = channel.id const channel_instance_id = channel.id
// @ts-ignore // @ts-ignore

View File

@@ -5,13 +5,6 @@ import { addMessage, getMessages } from '../modules/helpdesk/helpdesk.message.se
import { getOrCreateContact } from '../modules/helpdesk/helpdesk.contact.service.js' import { getOrCreateContact } from '../modules/helpdesk/helpdesk.contact.service.js'
import {decrypt, encrypt} from "../utils/crypt"; import {decrypt, encrypt} from "../utils/crypt";
import nodemailer from "nodemailer" import nodemailer from "nodemailer"
import { eq } from "drizzle-orm";
import {
helpdesk_channel_instances,
helpdesk_contacts,
helpdesk_conversations,
helpdesk_messages,
} from "../../db/schema";
const helpdeskRoutes: FastifyPluginAsync = async (server) => { const helpdeskRoutes: FastifyPluginAsync = async (server) => {
// 📩 1. Liste aller Konversationen // 📩 1. Liste aller Konversationen
@@ -65,30 +58,15 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
const tenant_id = req.user?.tenant_id const tenant_id = req.user?.tenant_id
const {id: conversation_id} = req.params as {id: string} const {id: conversation_id} = req.params as {id: string}
const rows = await server.db const { data, error } = await server.supabase
.select({ .from('helpdesk_conversations')
conversation: helpdesk_conversations, .select('*, helpdesk_contacts(*)')
contact: helpdesk_contacts .eq('tenant_id', tenant_id)
}) .eq('id', conversation_id)
.from(helpdesk_conversations) .single()
.leftJoin(helpdesk_contacts, eq(helpdesk_contacts.id, helpdesk_conversations.contactId))
.where(eq(helpdesk_conversations.id, conversation_id))
const data = rows[0] if (error) return res.status(404).send({ error: 'Conversation not found' })
if (!data || data.conversation.tenantId !== tenant_id) return res.status(404).send({ error: 'Conversation not found' }) return res.send(data)
return res.send({
...data.conversation,
channel_instance_id: data.conversation.channelInstanceId,
contact_id: data.conversation.contactId,
contact_person_id: data.conversation.contactPersonId,
created_at: data.conversation.createdAt,
customer_id: data.conversation.customerId,
last_message_at: data.conversation.lastMessageAt,
tenant_id: data.conversation.tenantId,
ticket_number: data.conversation.ticketNumber,
helpdesk_contacts: data.contact,
})
}) })
// 🔄 4. Konversation Status ändern // 🔄 4. Konversation Status ändern
@@ -203,39 +181,36 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
safeConfig.smtp.pass = encrypt(safeConfig.smtp.pass) safeConfig.smtp.pass = encrypt(safeConfig.smtp.pass)
} }
const inserted = await server.db // Speichern in Supabase
.insert(helpdesk_channel_instances) const { data, error } = await server.supabase
.values({ .from("helpdesk_channel_instances")
tenantId: tenant_id, .insert({
typeId: type_id, tenant_id,
type_id,
name, name,
config: safeConfig, config: safeConfig,
isActive: is_active, is_active,
}) })
.returning() .select()
.single()
const data = inserted[0] if (error) throw error
if (!data) throw new Error("Konnte Channel nicht erstellen")
const responseConfig: any = data.config
// sensible Felder aus Response entfernen // sensible Felder aus Response entfernen
if (responseConfig?.imap) { if (data.config?.imap) {
delete responseConfig.imap.host delete data.config.imap.host
delete responseConfig.imap.user delete data.config.imap.user
delete responseConfig.imap.pass delete data.config.imap.pass
} }
if (responseConfig?.smtp) { if (data.config?.smtp) {
delete responseConfig.smtp.host delete data.config.smtp.host
delete responseConfig.smtp.user delete data.config.smtp.user
delete responseConfig.smtp.pass delete data.config.smtp.pass
} }
reply.send({ reply.send({
message: "E-Mail-Channel erfolgreich erstellt", message: "E-Mail-Channel erfolgreich erstellt",
channel: { channel: data,
...data,
config: responseConfig
},
}) })
} catch (err) { } catch (err) {
console.error("Fehler bei Channel-Erstellung:", err) console.error("Fehler bei Channel-Erstellung:", err)
@@ -259,29 +234,29 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
const { text } = req.body as { text: string } const { text } = req.body as { text: string }
// 🔹 Konversation inkl. Channel + Kontakt laden // 🔹 Konversation inkl. Channel + Kontakt laden
const rows = await server.db const { data: conv, error: convErr } = await server.supabase
.select({ .from("helpdesk_conversations")
conversation: helpdesk_conversations, .select(`
contact: helpdesk_contacts, id,
channel: helpdesk_channel_instances, tenant_id,
}) subject,
.from(helpdesk_conversations) channel_instance_id,
.leftJoin(helpdesk_contacts, eq(helpdesk_contacts.id, helpdesk_conversations.contactId)) helpdesk_contacts(email),
.leftJoin(helpdesk_channel_instances, eq(helpdesk_channel_instances.id, helpdesk_conversations.channelInstanceId)) helpdesk_channel_instances(config, name),
.where(eq(helpdesk_conversations.id, conversationId)) ticket_number
.limit(1) `)
.eq("id", conversationId)
const conv = rows[0] .single()
console.log(conv) console.log(conv)
if (!conv) { if (convErr || !conv) {
reply.status(404).send({ error: "Konversation nicht gefunden" }) reply.status(404).send({ error: "Konversation nicht gefunden" })
return return
} }
const contact = conv.contact as unknown as {email: string} const contact = conv.helpdesk_contacts as unknown as {email: string}
const channel = conv.channel as unknown as {name: string, config: any} const channel = conv.helpdesk_channel_instances as unknown as {name: string}
console.log(contact) console.log(contact)
if (!contact?.email) { if (!contact?.email) {
@@ -313,7 +288,7 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
const mailOptions = { const mailOptions = {
from: `"${channel?.name}" <${user}>`, from: `"${channel?.name}" <${user}>`,
to: contact.email, to: contact.email,
subject: `${conv.conversation.ticketNumber} | ${conv.conversation.subject}` || `${conv.conversation.ticketNumber} | Antwort vom FEDEO Helpdesk`, subject: `${conv.ticket_number} | ${conv.subject}` || `${conv.ticket_number} | Antwort vom FEDEO Helpdesk`,
text, text,
} }
@@ -321,22 +296,24 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
console.log(`[Helpdesk SMTP] Gesendet an ${contact.email}: ${info.messageId}`) console.log(`[Helpdesk SMTP] Gesendet an ${contact.email}: ${info.messageId}`)
// 💾 Nachricht speichern // 💾 Nachricht speichern
await server.db const { error: insertErr } = await server.supabase
.insert(helpdesk_messages) .from("helpdesk_messages")
.values({ .insert({
tenantId: conv.conversation.tenantId, tenant_id: conv.tenant_id,
conversationId: conversationId, conversation_id: conversationId,
direction: "outgoing", direction: "outgoing",
payload: { type: "text", text }, payload: { type: "text", text },
externalMessageId: info.messageId, external_message_id: info.messageId,
receivedAt: new Date(), received_at: new Date().toISOString(),
}) })
if (insertErr) throw insertErr
// 🔁 Konversation aktualisieren // 🔁 Konversation aktualisieren
await server.db await server.supabase
.update(helpdesk_conversations) .from("helpdesk_conversations")
.set({ lastMessageAt: new Date() }) .update({ last_message_at: new Date().toISOString() })
.where(eq(helpdesk_conversations.id, conversationId)) .eq("id", conversationId)
reply.send({ reply.send({
message: "E-Mail erfolgreich gesendet", message: "E-Mail erfolgreich gesendet",

View File

@@ -1,39 +1,12 @@
// src/routes/resources/history.ts // src/routes/resources/history.ts
import { FastifyInstance } from "fastify"; import { FastifyInstance } from "fastify";
import { and, asc, eq, inArray } from "drizzle-orm";
import { authProfiles, historyitems } from "../../db/schema";
const columnMap: Record<string, any> = { const columnMap: Record<string, string> = {
customers: historyitems.customer,
members: historyitems.customer,
vendors: historyitems.vendor,
projects: historyitems.project,
plants: historyitems.plant,
contacts: historyitems.contact,
tasks: historyitems.task,
vehicles: historyitems.vehicle,
events: historyitems.event,
files: historyitems.file,
products: historyitems.product,
inventoryitems: historyitems.inventoryitem,
inventoryitemgroups: historyitems.inventoryitemgroup,
checks: historyitems.check,
costcentres: historyitems.costcentre,
ownaccounts: historyitems.ownaccount,
documentboxes: historyitems.documentbox,
hourrates: historyitems.hourrate,
services: historyitems.service,
customerspaces: historyitems.customerspace,
customerinventoryitems: historyitems.customerinventoryitem,
memberrelations: historyitems.memberrelation,
};
const insertFieldMap: Record<string, string> = {
customers: "customer", customers: "customer",
members: "customer",
vendors: "vendor", vendors: "vendor",
projects: "project", projects: "project",
plants: "plant", plants: "plant",
contracts: "contract",
contacts: "contact", contacts: "contact",
tasks: "task", tasks: "task",
vehicles: "vehicle", vehicles: "vehicle",
@@ -42,61 +15,17 @@ const insertFieldMap: Record<string, string> = {
products: "product", products: "product",
inventoryitems: "inventoryitem", inventoryitems: "inventoryitem",
inventoryitemgroups: "inventoryitemgroup", inventoryitemgroups: "inventoryitemgroup",
absencerequests: "absencerequest",
checks: "check", checks: "check",
costcentres: "costcentre", costcentres: "costcentre",
ownaccounts: "ownaccount", ownaccounts: "ownaccount",
documentboxes: "documentbox", documentboxes: "documentbox",
hourrates: "hourrate", hourrates: "hourrate",
services: "service", services: "service",
customerspaces: "customerspace", roles: "role",
customerinventoryitems: "customerinventoryitem", };
memberrelations: "memberrelation",
}
const parseId = (value: string) => {
if (/^\d+$/.test(value)) return Number(value)
return value
}
export default async function resourceHistoryRoutes(server: FastifyInstance) { export default async function resourceHistoryRoutes(server: FastifyInstance) {
server.get("/history", {
schema: {
tags: ["History"],
summary: "Get all history entries for the active tenant",
},
}, async (req: any) => {
const data = await server.db
.select()
.from(historyitems)
.where(eq(historyitems.tenant, req.user?.tenant_id))
.orderBy(asc(historyitems.createdAt));
const userIds = Array.from(
new Set(data.map((item) => item.createdBy).filter(Boolean))
) as string[];
const profiles = userIds.length > 0
? await server.db
.select()
.from(authProfiles)
.where(and(
eq(authProfiles.tenant_id, req.user?.tenant_id),
inArray(authProfiles.user_id, userIds)
))
: [];
const profileByUserId = new Map(
profiles.map((profile) => [profile.user_id, profile])
);
return data.map((historyitem) => ({
...historyitem,
created_at: historyitem.createdAt,
created_by: historyitem.createdBy,
created_by_profile: historyitem.createdBy ? profileByUserId.get(historyitem.createdBy) || null : null,
}));
});
server.get<{ server.get<{
Params: { resource: string; id: string } Params: { resource: string; id: string }
}>("/resource/:resource/:id/history", { }>("/resource/:resource/:id/history", {
@@ -120,36 +49,29 @@ export default async function resourceHistoryRoutes(server: FastifyInstance) {
return reply.code(400).send({ error: `History not supported for resource '${resource}'` }); return reply.code(400).send({ error: `History not supported for resource '${resource}'` });
} }
const data = await server.db const { data, error } = await server.supabase
.select() .from("historyitems")
.from(historyitems) .select("*")
.where(eq(column, parseId(id))) .eq(column, id)
.orderBy(asc(historyitems.createdAt)); .order("created_at", { ascending: true });
const userIds = Array.from( if (error) {
new Set(data.map((item) => item.createdBy).filter(Boolean)) server.log.error(error);
) as string[] return reply.code(500).send({ error: "Failed to fetch history" });
}
const profiles = userIds.length > 0 const {data:users, error:usersError} = await server.supabase
? await server.db .from("auth_users")
.select() .select("*, auth_profiles(*), tenants!auth_tenant_users(*)")
.from(authProfiles)
.where(and(
eq(authProfiles.tenant_id, req.user?.tenant_id),
inArray(authProfiles.user_id, userIds)
))
: []
const profileByUserId = new Map( const filteredUsers = (users ||[]).filter(i => i.tenants.find((t:any) => t.id === req.user?.tenant_id))
profiles.map((profile) => [profile.user_id, profile])
)
const dataCombined = data.map((historyitem) => ({ const dataCombined = data.map(historyitem => {
return {
...historyitem, ...historyitem,
created_at: historyitem.createdAt, created_by_profile: filteredUsers.find(i => i.id === historyitem.created_by) ? filteredUsers.find(i => i.id === historyitem.created_by).auth_profiles[0] : null
created_by: historyitem.createdBy, }
created_by_profile: historyitem.createdBy ? profileByUserId.get(historyitem.createdBy) || null : null, })
}))
@@ -206,33 +128,29 @@ export default async function resourceHistoryRoutes(server: FastifyInstance) {
const userId = (req.user as any)?.user_id; const userId = (req.user as any)?.user_id;
const fkField = insertFieldMap[resource]; const fkField = columnMap[resource];
if (!fkField) { if (!fkField) {
return reply.code(400).send({ error: `Unknown resource: ${resource}` }); return reply.code(400).send({ error: `Unknown resource: ${resource}` });
} }
const inserted = await server.db const { data, error } = await server.supabase
.insert(historyitems) .from("historyitems")
.values({ .insert({
text, text,
[fkField]: parseId(id), [fkField]: id,
oldVal: old_val || null, oldVal: old_val || null,
newVal: new_val || null, newVal: new_val || null,
config: config || null, config: config || null,
tenant: (req.user as any)?.tenant_id, tenant: (req.user as any)?.tenant_id,
createdBy: userId created_by: userId
}) })
.returning() .select()
.single();
const data = inserted[0] if (error) {
if (!data) { return reply.code(500).send({ error: error.message });
return reply.code(500).send({ error: "Failed to create history entry" });
} }
return reply.code(201).send({ return reply.code(201).send(data);
...data,
created_at: data.createdAt,
created_by: data.createdBy
});
}); });
} }

View File

@@ -1,63 +0,0 @@
import { FastifyInstance } from "fastify"
import jwt from "jsonwebtoken"
import { and, eq } from "drizzle-orm"
import { authTenantUsers } from "../../../db/schema"
import { secrets } from "../../utils/secrets"
export default async function authM2mInternalRoutes(server: FastifyInstance) {
server.post("/auth/m2m/token", {
schema: {
tags: ["Auth"],
summary: "Exchange M2M API key for a short-lived JWT",
body: {
type: "object",
properties: {
expires_in_seconds: { type: "number" }
}
}
}
}, async (req, reply) => {
try {
if (!req.user?.user_id || !req.user?.tenant_id || !req.user?.email) {
return reply.code(401).send({ error: "Unauthorized" })
}
const membership = await server.db
.select()
.from(authTenantUsers)
.where(and(
eq(authTenantUsers.user_id, req.user.user_id),
eq(authTenantUsers.tenant_id, Number(req.user.tenant_id))
))
.limit(1)
if (!membership[0]) {
return reply.code(403).send({ error: "User is not assigned to tenant" })
}
const requestedTtl = Number((req.body as any)?.expires_in_seconds ?? 900)
const ttlSeconds = Math.min(3600, Math.max(60, requestedTtl))
const token = jwt.sign(
{
user_id: req.user.user_id,
email: req.user.email,
tenant_id: req.user.tenant_id,
},
secrets.JWT_SECRET!,
{ expiresIn: ttlSeconds }
)
return {
token_type: "Bearer",
access_token: token,
expires_in_seconds: ttlSeconds,
user_id: req.user.user_id,
tenant_id: req.user.tenant_id
}
} catch (err) {
console.error("POST /internal/auth/m2m/token ERROR:", err)
return reply.code(500).send({ error: "Internal Server Error" })
}
})
}

View File

@@ -1,22 +1,21 @@
// routes/notifications.routes.ts // routes/notifications.routes.ts
import { FastifyInstance } from 'fastify'; import { FastifyInstance } from 'fastify';
import { NotificationService, UserDirectory } from '../modules/notification.service'; import { NotificationService, UserDirectory } from '../modules/notification.service';
import { eq } from "drizzle-orm";
import { authUsers } from "../../db/schema";
// Beispiel: E-Mail aus eigener User-Tabelle laden // Beispiel: E-Mail aus eigener User-Tabelle laden
const getUserDirectory: UserDirectory = async (server:FastifyInstance, userId, tenantId) => { const getUserDirectory: UserDirectory = async (server:FastifyInstance, userId, tenantId) => {
const rows = await server.db const { data, error } = await server.supabase
.select({ email: authUsers.email }) .from('auth_users')
.from(authUsers) .select('email')
.where(eq(authUsers.id, userId)) .eq('id', userId)
.limit(1) .maybeSingle();
const data = rows[0] if (error || !data) return null;
if (!data) return null;
return { email: data.email }; return { email: data.email };
}; };
export default async function notificationsRoutes(server: FastifyInstance) { export default async function notificationsRoutes(server: FastifyInstance) {
// wichtig: server.supabase ist über app verfügbar
const svc = new NotificationService(server, getUserDirectory); const svc = new NotificationService(server, getUserDirectory);
server.post('/notifications/trigger', async (req, reply) => { server.post('/notifications/trigger', async (req, reply) => {

View File

@@ -1,19 +1,40 @@
import { FastifyRequest, FastifyReply, FastifyInstance } from 'fastify'; import { FastifyRequest, FastifyReply, FastifyInstance } from 'fastify';
import { publicLinkService } from '../../modules/publiclinks.service'; import { publicLinkService } from '../../modules/publiclinks.service';
import dayjs from 'dayjs'; // Falls nicht installiert: npm install dayjs
export default async function publiclinksNonAuthenticatedRoutes(server: FastifyInstance) { export default async function publiclinksNonAuthenticatedRoutes(server: FastifyInstance) {
server.get("/workflows/context/:token", async (req, reply) => { server.get("/workflows/context/:token", async (req, reply) => {
const { token } = req.params as { token: string }; const { token } = req.params as { token: string };
// Wir lesen die PIN aus dem Header (Best Practice für Security)
const pin = req.headers['x-public-pin'] as string | undefined; const pin = req.headers['x-public-pin'] as string | undefined;
try { try {
const context = await publicLinkService.getLinkContext(server, token, pin); const context = await publicLinkService.getLinkContext(server, token, pin);
return reply.send(context); return reply.send(context);
} catch (error: any) { } catch (error: any) {
if (error.message === "Link_NotFound") return reply.code(404).send({ error: "Link nicht gefunden" }); // Spezifische Fehlercodes für das Frontend
if (error.message === "Pin_Required") return reply.code(401).send({ error: "PIN erforderlich", requirePin: true }); if (error.message === "Link_NotFound") {
if (error.message === "Pin_Invalid") return reply.code(403).send({ error: "PIN falsch", requirePin: true }); return reply.code(404).send({ error: "Link nicht gefunden oder abgelaufen" });
}
if (error.message === "Pin_Required") {
return reply.code(401).send({
error: "PIN erforderlich",
code: "PIN_REQUIRED",
requirePin: true
});
}
if (error.message === "Pin_Invalid") {
return reply.code(403).send({
error: "PIN falsch",
code: "PIN_INVALID",
requirePin: true
});
}
server.log.error(error); server.log.error(error);
return reply.code(500).send({ error: "Interner Server Fehler" }); return reply.code(500).send({ error: "Interner Server Fehler" });
@@ -22,31 +43,49 @@ export default async function publiclinksNonAuthenticatedRoutes(server: FastifyI
server.post("/workflows/submit/:token", async (req, reply) => { server.post("/workflows/submit/:token", async (req, reply) => {
const { token } = req.params as { token: string }; const { token } = req.params as { token: string };
// PIN sicher aus dem Header lesen
const pin = req.headers['x-public-pin'] as string | undefined; const pin = req.headers['x-public-pin'] as string | undefined;
const body = req.body as any; // Der Body enthält { profile, project, service, ... }
const payload = req.body;
console.log(payload)
try { try {
const quantity = parseFloat(body.quantity) || 0; // Service aufrufen (führt die 3 Schritte aus: Lieferschein -> Zeit -> History)
// Wir nutzen das vom User gewählte deliveryDate
// Falls kein Datum geschickt wurde, Fallback auf Heute
const baseDate = body.deliveryDate ? dayjs(body.deliveryDate) : dayjs();
const payload = {
...body,
// Wir mappen das deliveryDate auf die Zeitstempel
// Start ist z.B. 08:00 Uhr am gewählten Tag, Ende ist Start + Menge
startDate: baseDate.hour(8).minute(0).toDate(),
endDate: baseDate.hour(8).add(quantity, 'hour').toDate(),
deliveryDate: baseDate.format('YYYY-MM-DD')
};
const result = await publicLinkService.submitFormData(server, token, payload, pin); const result = await publicLinkService.submitFormData(server, token, payload, pin);
// 201 Created zurückgeben
return reply.code(201).send(result); return reply.code(201).send(result);
} catch (error: any) { } catch (error: any) {
server.log.error(error); console.log(error);
return reply.code(500).send({ error: "Fehler beim Speichern", details: error.message });
// Fehler-Mapping für saubere HTTP Codes
if (error.message === "Link_NotFound") {
return reply.code(404).send({ error: "Link ungültig oder nicht aktiv" });
}
if (error.message === "Pin_Required") {
return reply.code(401).send({ error: "PIN erforderlich" });
}
if (error.message === "Pin_Invalid") {
return reply.code(403).send({ error: "PIN ist falsch" });
}
if (error.message === "Profile_Missing") {
return reply.code(400).send({ error: "Kein Mitarbeiter-Profil gefunden (weder im Link noch in der Eingabe)" });
}
if (error.message === "Project not found" || error.message === "Service not found") {
return reply.code(400).send({ error: "Ausgewähltes Projekt oder Leistung existiert nicht mehr." });
}
// Fallback für alle anderen Fehler (z.B. DB Constraints)
return reply.code(500).send({
error: "Interner Fehler beim Speichern",
details: error.message
});
} }
}); });
} }

File diff suppressed because it is too large Load Diff

View File

@@ -1,9 +1,9 @@
import { FastifyInstance } from "fastify" import { FastifyInstance } from "fastify"
import { asc, desc, eq } from "drizzle-orm" import { asc, desc } from "drizzle-orm"
import { sortData } from "../utils/sort" import { sortData } from "../utils/sort"
// Schema imports // Schema imports
import { accounts, units, countrys, tenants } from "../../db/schema" import { accounts, units,countrys } from "../../db/schema"
const TABLE_MAP: Record<string, any> = { const TABLE_MAP: Record<string, any> = {
accounts, accounts,
@@ -35,49 +35,11 @@ export default async function resourceRoutesSpecial(server: FastifyInstance) {
} }
// --------------------------------------- // ---------------------------------------
// 📌 SELECT: select-string wird in dieser Route bewusst ignoriert // 📌 SELECT: wir ignorieren select string (wie Supabase)
// Drizzle kann kein dynamisches Select aus String! // Drizzle kann kein dynamisches Select aus String!
// Wir geben IMMER alle Spalten zurück → kompatibel zum Frontend // Wir geben IMMER alle Spalten zurück → kompatibel zum Frontend
// --------------------------------------- // ---------------------------------------
if (resource === "accounts") {
const [tenant] = await server.db
.select({
accountChart: tenants.accountChart,
})
.from(tenants)
.where(eq(tenants.id, Number(req.user.tenant_id)))
.limit(1)
const activeAccountChart = tenant?.accountChart || "skr03"
let data
if (sort && (accounts as any)[sort]) {
const col = (accounts as any)[sort]
data = ascQuery === "true"
? await server.db
.select()
.from(accounts)
.where(eq(accounts.accountChart, activeAccountChart))
.orderBy(asc(col))
: await server.db
.select()
.from(accounts)
.where(eq(accounts.accountChart, activeAccountChart))
.orderBy(desc(col))
} else {
data = await server.db
.select()
.from(accounts)
.where(eq(accounts.accountChart, activeAccountChart))
}
return sortData(
data,
sort as any,
ascQuery === "true"
)
}
let query = server.db.select().from(table) let query = server.db.select().from(table)
// --------------------------------------- // ---------------------------------------

View File

@@ -124,7 +124,6 @@ export default async function staffTimeRoutes(server: FastifyInstance) {
eventtype: "invalidated", eventtype: "invalidated",
source: "WEB", source: "WEB",
related_event_id: id, related_event_id: id,
invalidates_event_id: id,
metadata: { metadata: {
reason: reason || "Bearbeitung", reason: reason || "Bearbeitung",
replaced_by_edit: true replaced_by_edit: true

View File

@@ -1,7 +1,5 @@
import { FastifyInstance } from 'fastify' import { FastifyInstance } from 'fastify'
import { StaffTimeEntryConnect } from '../../types/staff' import { StaffTimeEntryConnect } from '../../types/staff'
import { asc, eq } from "drizzle-orm";
import { stafftimenetryconnects } from "../../../db/schema";
export default async function staffTimeConnectRoutes(server: FastifyInstance) { export default async function staffTimeConnectRoutes(server: FastifyInstance) {
@@ -10,21 +8,16 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
'/staff/time/:id/connects', '/staff/time/:id/connects',
async (req, reply) => { async (req, reply) => {
const { id } = req.params const { id } = req.params
const { started_at, stopped_at, project_id, notes } = req.body const { started_at, stopped_at, project_id, customer_id, task_id, ticket_id, notes } = req.body
const parsedProjectId = project_id ? Number(project_id) : null
const data = await server.db const { data, error } = await server.supabase
.insert(stafftimenetryconnects) .from('staff_time_entry_connects')
.values({ .insert([{ time_entry_id: id, started_at, stopped_at, project_id, customer_id, task_id, ticket_id, notes }])
stafftimeentry: id, .select()
started_at: new Date(started_at), .maybeSingle()
stopped_at: new Date(stopped_at),
project_id: parsedProjectId,
notes
})
.returning()
return reply.send(data[0]) if (error) return reply.code(400).send({ error: error.message })
return reply.send(data)
} }
) )
@@ -33,12 +26,13 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
'/staff/time/:id/connects', '/staff/time/:id/connects',
async (req, reply) => { async (req, reply) => {
const { id } = req.params const { id } = req.params
const data = await server.db const { data, error } = await server.supabase
.select() .from('staff_time_entry_connects')
.from(stafftimenetryconnects) .select('*')
.where(eq(stafftimenetryconnects.stafftimeentry, id)) .eq('time_entry_id', id)
.orderBy(asc(stafftimenetryconnects.started_at)) .order('started_at', { ascending: true })
if (error) return reply.code(400).send({ error: error.message })
return reply.send(data) return reply.send(data)
} }
) )
@@ -48,20 +42,15 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
'/staff/time/connects/:connectId', '/staff/time/connects/:connectId',
async (req, reply) => { async (req, reply) => {
const { connectId } = req.params const { connectId } = req.params
const patchData = { ...req.body } as any const { data, error } = await server.supabase
if (patchData.started_at) patchData.started_at = new Date(patchData.started_at) .from('staff_time_entry_connects')
if (patchData.stopped_at) patchData.stopped_at = new Date(patchData.stopped_at) .update({ ...req.body, updated_at: new Date().toISOString() })
if (patchData.project_id !== undefined) { .eq('id', connectId)
patchData.project_id = patchData.project_id ? Number(patchData.project_id) : null .select()
} .maybeSingle()
const data = await server.db if (error) return reply.code(400).send({ error: error.message })
.update(stafftimenetryconnects) return reply.send(data)
.set({ ...patchData, updated_at: new Date() })
.where(eq(stafftimenetryconnects.id, connectId))
.returning()
return reply.send(data[0])
} }
) )
@@ -70,10 +59,12 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
'/staff/time/connects/:connectId', '/staff/time/connects/:connectId',
async (req, reply) => { async (req, reply) => {
const { connectId } = req.params const { connectId } = req.params
await server.db const { error } = await server.supabase
.delete(stafftimenetryconnects) .from('staff_time_entry_connects')
.where(eq(stafftimenetryconnects.id, connectId)) .delete()
.eq('id', connectId)
if (error) return reply.code(400).send({ error: error.message })
return reply.send({ success: true }) return reply.send({ success: true })
} }
) )

View File

@@ -1,26 +1,18 @@
import { FastifyInstance } from "fastify" import { FastifyInstance } from "fastify"
import jwt from "jsonwebtoken" import jwt from "jsonwebtoken"
import { secrets } from "../utils/secrets" import { secrets } from "../utils/secrets"
import { createHash, randomBytes } from "node:crypto"
import { import {
authTenantUsers, authTenantUsers,
authUsers, authUsers,
authProfiles, authProfiles,
tenants, tenants
m2mApiKeys
} from "../../db/schema" } from "../../db/schema"
import {and, desc, eq, inArray} from "drizzle-orm" import {and, eq, inArray} from "drizzle-orm"
export default async function tenantRoutes(server: FastifyInstance) { export default async function tenantRoutes(server: FastifyInstance) {
const generateApiKey = () => {
const raw = randomBytes(32).toString("base64url")
return `fedeo_m2m_${raw}`
}
const hashApiKey = (apiKey: string) =>
createHash("sha256").update(apiKey, "utf8").digest("hex")
// ------------------------------------------------------------- // -------------------------------------------------------------
@@ -81,7 +73,7 @@ export default async function tenantRoutes(server: FastifyInstance) {
httpOnly: true, httpOnly: true,
sameSite: process.env.NODE_ENV === "production" ? "none" : "lax", sameSite: process.env.NODE_ENV === "production" ? "none" : "lax",
secure: process.env.NODE_ENV === "production", secure: process.env.NODE_ENV === "production",
maxAge: 60 * 60 * 6, maxAge: 60 * 60 * 3,
}) })
return { token } return { token }
@@ -249,172 +241,4 @@ export default async function tenantRoutes(server: FastifyInstance) {
} }
}) })
// -------------------------------------------------------------
// M2M API KEYS
// -------------------------------------------------------------
server.get("/tenant/api-keys", async (req, reply) => {
try {
const tenantId = req.user?.tenant_id
if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })
const keys = await server.db
.select({
id: m2mApiKeys.id,
name: m2mApiKeys.name,
tenant_id: m2mApiKeys.tenantId,
user_id: m2mApiKeys.userId,
active: m2mApiKeys.active,
key_prefix: m2mApiKeys.keyPrefix,
created_at: m2mApiKeys.createdAt,
updated_at: m2mApiKeys.updatedAt,
expires_at: m2mApiKeys.expiresAt,
last_used_at: m2mApiKeys.lastUsedAt,
})
.from(m2mApiKeys)
.where(eq(m2mApiKeys.tenantId, tenantId))
.orderBy(desc(m2mApiKeys.createdAt))
return keys
} catch (err) {
console.error("/tenant/api-keys GET ERROR:", err)
return reply.code(500).send({ error: "Internal Server Error" })
}
})
server.post("/tenant/api-keys", async (req, reply) => {
try {
const tenantId = req.user?.tenant_id
const creatorUserId = req.user?.user_id
if (!tenantId || !creatorUserId) {
return reply.code(401).send({ error: "Unauthorized" })
}
const { name, user_id, expires_at } = req.body as {
name: string
user_id: string
expires_at?: string | null
}
if (!name || !user_id) {
return reply.code(400).send({ error: "name and user_id are required" })
}
const userMembership = await server.db
.select()
.from(authTenantUsers)
.where(and(
eq(authTenantUsers.tenant_id, tenantId),
eq(authTenantUsers.user_id, user_id)
))
.limit(1)
if (!userMembership[0]) {
return reply.code(400).send({ error: "user_id is not assigned to this tenant" })
}
const plainApiKey = generateApiKey()
const keyPrefix = plainApiKey.slice(0, 16)
const keyHash = hashApiKey(plainApiKey)
const inserted = await server.db
.insert(m2mApiKeys)
.values({
tenantId,
userId: user_id,
createdBy: creatorUserId,
name,
keyPrefix,
keyHash,
expiresAt: expires_at ? new Date(expires_at) : null,
})
.returning({
id: m2mApiKeys.id,
name: m2mApiKeys.name,
tenant_id: m2mApiKeys.tenantId,
user_id: m2mApiKeys.userId,
key_prefix: m2mApiKeys.keyPrefix,
created_at: m2mApiKeys.createdAt,
expires_at: m2mApiKeys.expiresAt,
active: m2mApiKeys.active,
})
return reply.code(201).send({
...inserted[0],
api_key: plainApiKey, // only returned once
})
} catch (err) {
console.error("/tenant/api-keys POST ERROR:", err)
return reply.code(500).send({ error: "Internal Server Error" })
}
})
server.patch("/tenant/api-keys/:id", async (req, reply) => {
try {
const tenantId = req.user?.tenant_id
if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })
const { id } = req.params as { id: string }
const { name, active, expires_at } = req.body as {
name?: string
active?: boolean
expires_at?: string | null
}
const updateData: any = {
updatedAt: new Date()
}
if (name !== undefined) updateData.name = name
if (active !== undefined) updateData.active = active
if (expires_at !== undefined) updateData.expiresAt = expires_at ? new Date(expires_at) : null
const updated = await server.db
.update(m2mApiKeys)
.set(updateData)
.where(and(
eq(m2mApiKeys.id, id),
eq(m2mApiKeys.tenantId, tenantId)
))
.returning({
id: m2mApiKeys.id,
name: m2mApiKeys.name,
tenant_id: m2mApiKeys.tenantId,
user_id: m2mApiKeys.userId,
active: m2mApiKeys.active,
key_prefix: m2mApiKeys.keyPrefix,
updated_at: m2mApiKeys.updatedAt,
expires_at: m2mApiKeys.expiresAt,
last_used_at: m2mApiKeys.lastUsedAt,
})
if (!updated[0]) {
return reply.code(404).send({ error: "API key not found" })
}
return updated[0]
} catch (err) {
console.error("/tenant/api-keys PATCH ERROR:", err)
return reply.code(500).send({ error: "Internal Server Error" })
}
})
server.delete("/tenant/api-keys/:id", async (req, reply) => {
try {
const tenantId = req.user?.tenant_id
if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })
const { id } = req.params as { id: string }
await server.db
.delete(m2mApiKeys)
.where(and(
eq(m2mApiKeys.id, id),
eq(m2mApiKeys.tenantId, tenantId)
))
return { success: true }
} catch (err) {
console.error("/tenant/api-keys DELETE ERROR:", err)
return reply.code(500).send({ error: "Internal Server Error" })
}
})
} }

View File

@@ -1,344 +0,0 @@
import { FastifyInstance } from "fastify"
import { and, eq, isNull, asc, inArray } from "drizzle-orm"
// WICHTIG: Hier müssen die Schemas der Entitäten importiert werden!
import {
wikiPages,
authUsers,
// Bereits vorhanden
customers,
projects,
plants,
products,
inventoryitems,
customerinventoryitems,
customerspaces,
// NEU HINZUGEFÜGT (Basierend auf deinem DataStore)
tasks,
contacts,
contracts,
vehicles,
vendors,
spaces,
inventoryitemgroups,
services,
hourrates,
events,
productcategories,
servicecategories,
ownaccounts
} from "../../db/schema/"
// Konfiguration: Welche Entitäten sollen im Wiki auftauchen?
const ENTITY_CONFIG: Record<string, { table: any, labelField: any, rootLabel: string, idField: 'id' | 'uuid' }> = {
// --- BEREITS VORHANDEN ---
'customers': { table: customers, labelField: customers.name, rootLabel: 'Kunden', idField: 'id' },
'projects': { table: projects, labelField: projects.name, rootLabel: 'Projekte', idField: 'id' },
'plants': { table: plants, labelField: plants.name, rootLabel: 'Objekte', idField: 'id' },
'products': { table: products, labelField: products.name, rootLabel: 'Artikel', idField: 'id' },
'inventoryitems': { table: inventoryitems, labelField: inventoryitems.name, rootLabel: 'Inventarartikel', idField: 'id' },
'customerinventoryitems': { table: customerinventoryitems, labelField: customerinventoryitems.name, rootLabel: 'Kundeninventar', idField: 'id' },
'customerspaces': { table: customerspaces, labelField: customerspaces.name, rootLabel: 'Kundenlagerplätze', idField: 'id' },
// --- NEU BASIEREND AUF DATASTORE ---
'tasks': { table: tasks, labelField: tasks.name, rootLabel: 'Aufgaben', idField: 'id' },
'contacts': { table: contacts, labelField: contacts.fullName, rootLabel: 'Kontakte', idField: 'id' },
'contracts': { table: contracts, labelField: contracts.name, rootLabel: 'Verträge', idField: 'id' },
'vehicles': { table: vehicles, labelField: vehicles.license_plate, rootLabel: 'Fahrzeuge', idField: 'id' },
'vendors': { table: vendors, labelField: vendors.name, rootLabel: 'Lieferanten', idField: 'id' },
'spaces': { table: spaces, labelField: spaces.name, rootLabel: 'Lagerplätze', idField: 'id' },
'inventoryitemgroups': { table: inventoryitemgroups, labelField: inventoryitemgroups.name, rootLabel: 'Inventarartikelgruppen', idField: 'id' },
'services': { table: services, labelField: services.name, rootLabel: 'Leistungen', idField: 'id' },
'hourrates': { table: hourrates, labelField: hourrates.name, rootLabel: 'Stundensätze', idField: 'id' },
'events': { table: events, labelField: events.name, rootLabel: 'Termine', idField: 'id' },
'productcategories': { table: productcategories, labelField: productcategories.name, rootLabel: 'Artikelkategorien', idField: 'id' },
'servicecategories': { table: servicecategories, labelField: servicecategories.name, rootLabel: 'Leistungskategorien', idField: 'id' },
'ownaccounts': { table: ownaccounts, labelField: ownaccounts.name, rootLabel: 'Zusätzliche Buchungskonten', idField: 'id' },
}
// Types
interface WikiTreeQuery {
entityType?: string
entityId?: number
entityUuid?: string
}
interface WikiCreateBody {
title: string
parentId?: string
isFolder?: boolean
entityType?: string
entityId?: number
entityUuid?: string
}
interface WikiUpdateBody {
title?: string
content?: any
parentId?: string | null
sortOrder?: number
isFolder?: boolean
}
export default async function wikiRoutes(server: FastifyInstance) {
// ---------------------------------------------------------
// 1. GET /wiki/tree
// Lädt Struktur: Entweder gefiltert (Widget) oder Global (mit virtuellen Ordnern)
// ---------------------------------------------------------
server.get<{ Querystring: WikiTreeQuery }>("/wiki/tree", async (req, reply) => {
const user = req.user
const { entityType, entityId, entityUuid } = req.query
// FALL A: WIDGET-ANSICHT (Spezifische Entität)
// Wenn wir spezifisch filtern, wollen wir nur die echten Seiten ohne virtuelle Ordner
if (entityType && (entityId || entityUuid)) {
const filters = [
eq(wikiPages.tenantId, user.tenant_id),
eq(wikiPages.entityType, entityType)
]
if (entityId) filters.push(eq(wikiPages.entityId, Number(entityId)))
else if (entityUuid) filters.push(eq(wikiPages.entityUuid, entityUuid))
return server.db
.select({
id: wikiPages.id,
parentId: wikiPages.parentId,
title: wikiPages.title,
isFolder: wikiPages.isFolder,
sortOrder: wikiPages.sortOrder,
entityType: wikiPages.entityType,
updatedAt: wikiPages.updatedAt,
})
.from(wikiPages)
.where(and(...filters))
.orderBy(asc(wikiPages.sortOrder), asc(wikiPages.title))
}
// FALL B: GLOBALE ANSICHT (Haupt-Wiki)
// Wir laden ALLES und bauen virtuelle Ordner für die Entitäten
// 1. Alle Wiki-Seiten des Tenants laden
const allPages = await server.db
.select({
id: wikiPages.id,
parentId: wikiPages.parentId,
title: wikiPages.title,
isFolder: wikiPages.isFolder,
sortOrder: wikiPages.sortOrder,
entityType: wikiPages.entityType,
entityId: wikiPages.entityId, // Wichtig für Zuordnung
entityUuid: wikiPages.entityUuid, // Wichtig für Zuordnung
updatedAt: wikiPages.updatedAt,
})
.from(wikiPages)
.where(eq(wikiPages.tenantId, user.tenant_id))
.orderBy(asc(wikiPages.sortOrder), asc(wikiPages.title))
// Trennen in Standard-Seiten und Entity-Seiten
const standardPages = allPages.filter(p => !p.entityType)
const entityPages = allPages.filter(p => p.entityType)
const virtualNodes: any[] = []
// 2. Virtuelle Ordner generieren
// Wir iterieren durch unsere Config (Kunden, Projekte...)
await Promise.all(Object.entries(ENTITY_CONFIG).map(async ([typeKey, config]) => {
// Haben wir überhaupt Notizen für diesen Typ?
const pagesForType = entityPages.filter(p => p.entityType === typeKey)
if (pagesForType.length === 0) return
// IDs sammeln, um Namen aus der DB zu holen
// Wir unterscheiden zwischen ID (int) und UUID
let entities: any[] = []
if (config.idField === 'id') {
const ids = [...new Set(pagesForType.map(p => p.entityId).filter((id): id is number => id !== null))]
if (ids.length > 0) {
//@ts-ignore - Drizzle Typisierung bei dynamischen Tables ist tricky
entities = await server.db.select({ id: config.table.id, label: config.labelField })
.from(config.table)
//@ts-ignore
.where(inArray(config.table.id, ids))
}
} else {
// Falls UUID genutzt wird (z.B. IoT Devices)
const uuids = [...new Set(pagesForType.map(p => p.entityUuid).filter((uuid): uuid is string => uuid !== null))]
if (uuids.length > 0) {
//@ts-ignore
entities = await server.db.select({ id: config.table.id, label: config.labelField })
.from(config.table)
//@ts-ignore
.where(inArray(config.table.id, uuids))
}
}
if (entities.length === 0) return
// 3. Virtuellen Root Ordner erstellen (z.B. "Kunden")
const rootId = `virtual-root-${typeKey}`
virtualNodes.push({
id: rootId,
parentId: null, // Ganz oben im Baum
title: config.rootLabel,
isFolder: true,
isVirtual: true, // Flag fürs Frontend (read-only Folder)
sortOrder: 1000 // Ganz unten anzeigen
})
// 4. Virtuelle Entity Ordner erstellen (z.B. "Müller GmbH")
entities.forEach(entity => {
const entityNodeId = `virtual-entity-${typeKey}-${entity.id}`
virtualNodes.push({
id: entityNodeId,
parentId: rootId,
title: entity.label || 'Unbekannt',
isFolder: true,
isVirtual: true,
sortOrder: 0
})
// 5. Die echten Notizen verschieben
// Wir suchen alle Notizen, die zu dieser Entity gehören
const myPages = pagesForType.filter(p =>
(config.idField === 'id' && p.entityId === entity.id) ||
(config.idField === 'uuid' && p.entityUuid === entity.id)
)
myPages.forEach(page => {
// Nur Root-Notizen der Entity verschieben.
// Sub-Pages bleiben wo sie sind (parentId zeigt ja schon auf die richtige Seite)
if (!page.parentId) {
// Wir modifizieren das Objekt für die Response (nicht in der DB!)
// Wir müssen es clonen, sonst ändern wir es für alle Referenzen
const pageClone = { ...page }
pageClone.parentId = entityNodeId
virtualNodes.push(pageClone)
} else {
// Sub-Pages einfach so hinzufügen
virtualNodes.push(page)
}
})
})
}))
// Ergebnis: Normale Seiten + Virtuelle Struktur
return [...standardPages, ...virtualNodes]
})
// ---------------------------------------------------------
// 2. GET /wiki/:id
// Lädt EINEN Eintrag komplett MIT Content
// ---------------------------------------------------------
server.get<{ Params: { id: string } }>("/wiki/:id", async (req, reply) => {
const user = req.user
const { id } = req.params
const page = await server.db.query.wikiPages.findFirst({
where: and(
eq(wikiPages.id, id),
eq(wikiPages.tenantId, user.tenant_id)
),
with: {
author: {
columns: { id: true } // Name falls vorhanden
}
}
})
if (!page) return reply.code(404).send({ error: "Page not found" })
return page
})
// ---------------------------------------------------------
// 3. POST /wiki
// Erstellt neuen Eintrag
// ---------------------------------------------------------
server.post<{ Body: WikiCreateBody }>("/wiki", async (req, reply) => {
const user = req.user
const body = req.body
if (!body.title) return reply.code(400).send({ error: "Title required" })
const hasEntity = !!body.entityType
const [newPage] = await server.db
.insert(wikiPages)
.values({
tenantId: user.tenant_id,
title: body.title,
parentId: body.parentId || null,
isFolder: body.isFolder ?? false,
entityType: hasEntity ? body.entityType : null,
entityId: hasEntity && body.entityId ? body.entityId : null,
entityUuid: hasEntity && body.entityUuid ? body.entityUuid : null,
//@ts-ignore
createdBy: user.id,
//@ts-ignore
updatedBy: user.id
})
.returning()
return newPage
})
// ---------------------------------------------------------
// 4. PATCH /wiki/:id
// Universal-Update
// ---------------------------------------------------------
server.patch<{ Params: { id: string }; Body: WikiUpdateBody }>(
"/wiki/:id",
async (req, reply) => {
const user = req.user
const { id } = req.params
const body = req.body
const existing = await server.db.query.wikiPages.findFirst({
where: and(eq(wikiPages.id, id), eq(wikiPages.tenantId, user.tenant_id)),
columns: { id: true }
})
if (!existing) return reply.code(404).send({ error: "Not found" })
const [updatedPage] = await server.db
.update(wikiPages)
.set({
title: body.title,
content: body.content,
parentId: body.parentId,
sortOrder: body.sortOrder,
isFolder: body.isFolder,
updatedAt: new Date(),
//@ts-ignore
updatedBy: user.id
})
.where(eq(wikiPages.id, id))
.returning()
return updatedPage
}
)
// ---------------------------------------------------------
// 5. DELETE /wiki/:id
// Löscht Eintrag
// ---------------------------------------------------------
server.delete<{ Params: { id: string } }>("/wiki/:id", async (req, reply) => {
const user = req.user
const { id } = req.params
const result = await server.db
.delete(wikiPages)
.where(and(
eq(wikiPages.id, id),
eq(wikiPages.tenantId, user.tenant_id)
))
.returning({ id: wikiPages.id })
if (result.length === 0) return reply.code(404).send({ error: "Not found" })
return { success: true, deletedId: result[0].id }
})
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,5 @@
import {diffTranslations, getDiffLabel} from "./diffTranslations"; import {diffTranslations} from "./diffTranslations";
export type DiffChange = { export type DiffChange = {
key: string; key: string;
@@ -43,6 +43,8 @@ export function diffObjects(
const oldVal = obj1?.[key]; const oldVal = obj1?.[key];
const newVal = obj2?.[key]; const newVal = obj2?.[key];
console.log(oldVal, key, newVal);
// Wenn beides null/undefined → ignorieren // Wenn beides null/undefined → ignorieren
if ( if (
(oldVal === null || oldVal === undefined || oldVal === "" || JSON.stringify(oldVal) === "[]") && (oldVal === null || oldVal === undefined || oldVal === "" || JSON.stringify(oldVal) === "[]") &&
@@ -70,11 +72,12 @@ export function diffObjects(
if (type === "unchanged") continue; if (type === "unchanged") continue;
const translation = diffTranslations[key]; const translation = diffTranslations[key];
let label = getDiffLabel(key); let label = key;
let resolvedOld = oldVal; let resolvedOld = oldVal;
let resolvedNew = newVal; let resolvedNew = newVal;
if (translation) { if (translation) {
label = translation.label;
if (translation.resolve) { if (translation.resolve) {
const { oldVal: resOld, newVal: resNew } = translation.resolve( const { oldVal: resOld, newVal: resNew } = translation.resolve(
oldVal, oldVal,

View File

@@ -6,149 +6,6 @@ type ValueResolver = (
ctx?: Record<string, any> ctx?: Record<string, any>
) => { oldVal: any; newVal: any }; ) => { oldVal: any; newVal: any };
const TOKEN_TRANSLATIONS: Record<string, string> = {
account: "Konto",
active: "Aktiv",
address: "Adresse",
amount: "Betrag",
archived: "Archiviert",
article: "Artikel",
bank: "Bank",
barcode: "Barcode",
birthday: "Geburtstag",
category: "Kategorie",
city: "Ort",
color: "Farbe",
comment: "Kommentar",
company: "Firma",
contact: "Kontakt",
contract: "Vertrag",
cost: "Kosten",
country: "Land",
created: "Erstellt",
customer: "Kunde",
date: "Datum",
default: "Standard",
deleted: "Gelöscht",
delivery: "Lieferung",
description: "Beschreibung",
document: "Dokument",
driver: "Fahrer",
due: "Fällig",
duration: "Dauer",
email: "E-Mail",
employee: "Mitarbeiter",
enabled: "Aktiviert",
end: "Ende",
event: "Ereignis",
file: "Datei",
first: "Vorname",
fixed: "Festgeschrieben",
group: "Gruppe",
hour: "Stunde",
iban: "IBAN",
id: "ID",
incoming: "Eingang",
invoice: "Rechnung",
item: "Eintrag",
language: "Sprache",
last: "Nachname",
license: "Kennzeichen",
link: "Link",
list: "Liste",
location: "Standort",
manufacturer: "Hersteller",
markup: "Verkaufsaufschlag",
message: "Nachricht",
mobile: "Mobil",
name: "Name",
note: "Notiz",
notes: "Notizen",
number: "Nummer",
order: "Bestellung",
own: "Eigen",
payment: "Zahlung",
phone: "Telefon",
plant: "Objekt",
postal: "Post",
price: "Preis",
percentage: "%",
product: "Produkt",
profile: "Profil",
project: "Projekt",
purchase: "Kauf",
quantity: "Menge",
rate: "Satz",
reference: "Referenz",
requisition: "Anfrage",
resource: "Ressource",
role: "Rolle",
serial: "Serien",
service: "Leistung",
selling: "Verkauf",
sellign: "Verkauf",
space: "Lagerplatz",
start: "Start",
statement: "Buchung",
status: "Status",
street: "Straße",
surcharge: "Aufschlag",
tax: "Steuer",
tel: "Telefon",
tenant: "Mandant",
time: "Zeit",
title: "Titel",
total: "Gesamt",
type: "Typ",
unit: "Einheit",
updated: "Aktualisiert",
user: "Benutzer",
ustid: "USt-ID",
value: "Wert",
vendor: "Lieferant",
vehicle: "Fahrzeug",
weekly: "Wöchentlich",
working: "Arbeits",
zip: "Postleitzahl",
composed: "Zusammensetzung",
material: "Material",
worker: "Arbeit",
};
function tokenizeKey(key: string): string[] {
return key
.replace(/([a-z0-9])([A-Z])/g, "$1_$2")
.replace(/[^a-zA-Z0-9]+/g, "_")
.split("_")
.filter(Boolean)
.map((p) => p.toLowerCase());
}
function capitalize(word: string) {
if (!word) return word;
return word.charAt(0).toUpperCase() + word.slice(1);
}
function fallbackLabelFromKey(key: string): string {
const parts = tokenizeKey(key);
if (!parts.length) return key;
if (parts.length > 1 && parts[parts.length - 1] === "id") {
const base = parts.slice(0, -1).map((p) => TOKEN_TRANSLATIONS[p] || capitalize(p)).join(" ");
return `${base} ID`.trim();
}
return parts
.map((p) => TOKEN_TRANSLATIONS[p] || capitalize(p))
.join(" ")
.replace(/\s+/g, " ")
.trim();
}
export function getDiffLabel(key: string): string {
return diffTranslations[key]?.label || fallbackLabelFromKey(key);
}
export const diffTranslations: Record< export const diffTranslations: Record<
string, string,
{ label: string; resolve?: ValueResolver } { label: string; resolve?: ValueResolver }
@@ -187,7 +44,7 @@ export const diffTranslations: Record<
}), }),
}, },
resources: { resources: {
label: "Ressourcen", label: "Resourcen",
resolve: (o, n) => ({ resolve: (o, n) => ({
oldVal: Array.isArray(o) ? o.map((i: any) => i.title).join(", ") : "-", oldVal: Array.isArray(o) ? o.map((i: any) => i.title).join(", ") : "-",
newVal: Array.isArray(n) ? n.map((i: any) => i.title).join(", ") : "-", newVal: Array.isArray(n) ? n.map((i: any) => i.title).join(", ") : "-",
@@ -229,18 +86,10 @@ export const diffTranslations: Record<
approved: { label: "Genehmigt" }, approved: { label: "Genehmigt" },
manufacturer: { label: "Hersteller" }, manufacturer: { label: "Hersteller" },
purchasePrice: { label: "Kaufpreis" }, purchasePrice: { label: "Kaufpreis" },
markupPercentage: { label: "Verkaufsaufschlag in %" },
markup_percentage: { label: "Verkaufsaufschlag in %" },
sellingPrice: { label: "Verkaufspreis" },
selling_price: { label: "Verkaufspreis" },
sellingPriceComposed: { label: "Verkaufspreis Zusammensetzung" },
purchaseDate: { label: "Kaufdatum" }, purchaseDate: { label: "Kaufdatum" },
serialNumber: { label: "Seriennummer" }, serialNumber: { label: "Seriennummer" },
customerInventoryId: { label: "Kundeninventar-ID" },
customerinventoryitems: { label: "Kundeninventar" },
usePlanning: { label: "In Plantafel verwenden" }, usePlanning: { label: "In Plantafel verwenden" },
currentSpace: { label: "Lagerplatz" }, currentSpace: { label: "Lagerplatz" },
customerspace: { label: "Kundenlagerplatz" },
customer: { customer: {
label: "Kunde", label: "Kunde",
@@ -259,7 +108,6 @@ export const diffTranslations: Record<
description: { label: "Beschreibung" }, description: { label: "Beschreibung" },
categorie: { label: "Kategorie" }, categorie: { label: "Kategorie" },
category: { label: "Kategorie" },
profile: { profile: {
label: "Mitarbeiter", label: "Mitarbeiter",
@@ -299,8 +147,6 @@ export const diffTranslations: Record<
}, },
projecttype: { label: "Projekttyp" }, projecttype: { label: "Projekttyp" },
contracttype: { label: "Vertragstyp" },
billingInterval: { label: "Abrechnungsintervall" },
fixed: { fixed: {
label: "Festgeschrieben", label: "Festgeschrieben",

View File

@@ -301,7 +301,7 @@ export async function buildExportZip(
else if(account.taxType === '7I') buschluessel = "18"; else if(account.taxType === '7I') buschluessel = "18";
else buschluessel = "-"; else buschluessel = "-";
let amountGross =/* account.amountGross ? account.amountGross : */(account.amountNet || 0) + (account.amountTax || 0); let amountGross = account.amountGross ? account.amountGross : (account.amountNet || 0) + (account.amountTax || 0);
let shSelector = Math.sign(amountGross) === -1 ? "H" : "S"; let shSelector = Math.sign(amountGross) === -1 ? "H" : "S";
let text = `ER ${ii.reference}: ${escapeString(ii.description)}`.substring(0,59); let text = `ER ${ii.reference}: ${escapeString(ii.description)}`.substring(0,59);
const vend = ii.vendor; // durch Mapping verfügbar const vend = ii.vendor; // durch Mapping verfügbar
@@ -325,27 +325,27 @@ export async function buildExportZip(
if(alloc.createddocument && alloc.createddocument.customer) { if(alloc.createddocument && alloc.createddocument.customer) {
const cd = alloc.createddocument; const cd = alloc.createddocument;
const cust = cd.customer; const cust = cd.customer;
bookingLines.push(`${displayCurrency(alloc.amount,true)};"H";;;;;${cust?.customerNumber};${datevKonto};"3";${dayjs(cd.documentDate).format("DDMM")};"${cd.documentNumber}";;;"${`ZE${alloc.description}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${cust?.name}";"Kundennummer";"${cust?.customerNumber}";"Belegnummer";"${cd.documentNumber}";"Leistungsdatum";"${dayjs(cd.deliveryDate).format("DD.MM.YYYY")}";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`); bookingLines.push(`${displayCurrency(alloc.amount,true)};"H";;;;;${cust?.customerNumber};${datevKonto};"3";${dayjs(cd.documentDate).format("DDMM")};"${cd.documentNumber}";;;"${`ZE${alloc.description}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${cust?.name}";"Kundennummer";"${cust?.customerNumber}";"Belegnummer";"${cd.documentNumber}";"Leistungsdatum";"${dayjs(cd.deliveryDate).format("DD.MM.YYYY")}";"Belegdatum";"${dayjs(cd.documentDate).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
} else if(alloc.incominginvoice && alloc.incominginvoice.vendor) { } else if(alloc.incominginvoice && alloc.incominginvoice.vendor) {
const ii = alloc.incominginvoice; const ii = alloc.incominginvoice;
const vend = ii.vendor; const vend = ii.vendor;
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${vend?.vendorNumber};"";${dayjs(ii.date).format("DDMM")};"${ii.reference}";;;"${`ZA${alloc.description} ${bsText} `.substring(0,59)}";;;;;;;"Geschäftspartner";"${vend?.name}";"Kundennummer";"${vend?.vendorNumber}";"Belegnummer";"${ii.reference}";"Leistungsdatum";"${dayjs(ii.date).format("DD.MM.YYYY")}";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`); bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${vend?.vendorNumber};"";${dayjs(ii.date).format("DDMM")};"${ii.reference}";;;"${`ZA${alloc.description} ${bsText} `.substring(0,59)}";;;;;;;"Geschäftspartner";"${vend?.name}";"Kundennummer";"${vend?.vendorNumber}";"Belegnummer";"${ii.reference}";"Leistungsdatum";"${dayjs(ii.date).format("DD.MM.YYYY")}";"Belegdatum";"${dayjs(ii.date).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
} else if(alloc.account) { } else if(alloc.account) {
const acc = alloc.account; const acc = alloc.account;
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA"; let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${acc.number};"";${dateVal};"";;;"${`${vorzeichen} ${acc.number} - ${escapeString(acc.label)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${bs.credName || ''}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`); bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${acc.number};"";${dateVal};"";;;"${`${vorzeichen} ${acc.number} - ${escapeString(acc.label)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${bs.credName || ''}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
} else if(alloc.vendor) { } else if(alloc.vendor) {
const vend = alloc.vendor; const vend = alloc.vendor;
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA"; let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${vend.vendorNumber};"";${dateVal};"";;;"${`${vorzeichen} ${vend.vendorNumber} - ${escapeString(vend.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${vend.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`); bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${vend.vendorNumber};"";${dateVal};"";;;"${`${vorzeichen} ${vend.vendorNumber} - ${escapeString(vend.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${vend.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
} else if(alloc.customer) { } else if(alloc.customer) {
const cust = alloc.customer; const cust = alloc.customer;
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA"; let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${cust.customerNumber};"";${dateVal};"";;;"${`${vorzeichen} ${cust.customerNumber} - ${escapeString(cust.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${cust.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`); bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${cust.customerNumber};"";${dateVal};"";;;"${`${vorzeichen} ${cust.customerNumber} - ${escapeString(cust.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${cust.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
} else if(alloc.ownaccount) { } else if(alloc.ownaccount) {
const own = alloc.ownaccount; const own = alloc.ownaccount;
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA"; let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${own.number};"";${dateVal};"";;;"${`${vorzeichen} ${own.number} - ${escapeString(own.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${own.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`); bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${own.number};"";${dateVal};"";;;"${`${vorzeichen} ${own.number} - ${escapeString(own.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${own.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
} }
}); });

View File

@@ -1,25 +1,12 @@
import xmlbuilder from "xmlbuilder"; import xmlbuilder from "xmlbuilder";
import {randomUUID} from "node:crypto"; import {randomUUID} from "node:crypto";
import dayjs from "dayjs"; import dayjs from "dayjs";
import { and, eq, inArray } from "drizzle-orm";
import { createddocuments, tenants } from "../../../db/schema";
export const createSEPAExport = async (server,idsToExport, tenant_id) => { export const createSEPAExport = async (server,idsToExport, tenant_id) => {
const data = await server.db const {data,error} = await server.supabase.from("createddocuments").select().eq("tenant", tenant_id).in("id", idsToExport)
.select() const {data:tenantData,error:tenantError} = await server.supabase.from("tenants").select().eq("id", tenant_id).single()
.from(createddocuments)
.where(and(
eq(createddocuments.tenant, tenant_id),
inArray(createddocuments.id, idsToExport)
))
const tenantRows = await server.db
.select()
.from(tenants)
.where(eq(tenants.id, tenant_id))
.limit(1)
const tenantData = tenantRows[0]
console.log(tenantData) console.log(tenantData)
console.log(tenantError)
console.log(data) console.log(data)

View File

@@ -1,8 +1,11 @@
import { FastifyInstance } from "fastify" import {FastifyInstance} from "fastify";
import { PNG } from "pngjs" // import { PNG } from 'pngjs'
import { Utils } from "@mmote/niimbluelib" // import { ready as zplReady } from 'zpl-renderer-js'
import bwipjs from "bwip-js" // import { Utils } from '@mmote/niimbluelib'
import Sharp from "sharp" // import { createCanvas } from 'canvas'
// import bwipjs from 'bwip-js'
// import Sharp from 'sharp'
// import fs from 'fs'
import { tenants } from "../../db/schema" import { tenants } from "../../db/schema"
import { eq } from "drizzle-orm" import { eq } from "drizzle-orm"
@@ -12,6 +15,7 @@ export const useNextNumberRangeNumber = async (
tenantId: number, tenantId: number,
numberRange: string numberRange: string
) => { ) => {
// 1⃣ Tenant laden
const [tenant] = await server.db const [tenant] = await server.db
.select() .select()
.from(tenants) .from(tenants)
@@ -29,20 +33,23 @@ export const useNextNumberRangeNumber = async (
const current = numberRanges[numberRange] const current = numberRanges[numberRange]
// 2⃣ Used Number generieren
const usedNumber = const usedNumber =
(current.prefix || "") + (current.prefix || "") +
current.nextNumber + current.nextNumber +
(current.suffix || "") (current.suffix || "")
// 3⃣ nextNumber erhöhen
const updatedRanges = { const updatedRanges = {
// @ts-ignore // @ts-ignore
...numberRanges, ...numberRanges,
[numberRange]: { [numberRange]: {
...current, ...current,
nextNumber: current.nextNumber + 1, nextNumber: current.nextNumber + 1
}, }
} }
// 4⃣ Tenant aktualisieren
await server.db await server.db
.update(tenants) .update(tenants)
.set({ numberRanges: updatedRanges }) .set({ numberRanges: updatedRanges })
@@ -51,17 +58,24 @@ export const useNextNumberRangeNumber = async (
return { usedNumber } return { usedNumber }
} }
export async function encodeBase64ToNiimbot(base64Png: string, printDirection: "top" | "left" = "top") {
const buffer = Buffer.from(base64Png, "base64") /*
const png = PNG.sync.read(buffer) export async function encodeBase64ToNiimbot(base64Png, printDirection = 'top') {
// 1⃣ PNG dekodieren
const buffer = Buffer.from(base64Png, 'base64')
const png = PNG.sync.read(buffer) // liefert {width, height, data: Uint8Array(RGBA)}
const { width, height, data } = png const { width, height, data } = png
const cols = printDirection === "left" ? height : width console.log(width, height, data)
const rows = printDirection === "left" ? width : height const cols = printDirection === 'left' ? height : width
const rowsData: any[] = [] const rows = printDirection === 'left' ? width : height
const rowsData = []
if (cols % 8 !== 0) throw new Error("Column count must be multiple of 8") console.log(cols)
if (cols % 8 !== 0) throw new Error('Column count must be multiple of 8')
// 2⃣ Zeilenweise durchgehen und Bits bilden
for (let row = 0; row < rows; row++) { for (let row = 0; row < rows; row++) {
let isVoid = true let isVoid = true
let blackPixelsCount = 0 let blackPixelsCount = 0
@@ -70,8 +84,8 @@ export async function encodeBase64ToNiimbot(base64Png: string, printDirection: "
for (let colOct = 0; colOct < cols / 8; colOct++) { for (let colOct = 0; colOct < cols / 8; colOct++) {
let pixelsOctet = 0 let pixelsOctet = 0
for (let colBit = 0; colBit < 8; colBit++) { for (let colBit = 0; colBit < 8; colBit++) {
const x = printDirection === "left" ? row : colOct * 8 + colBit const x = printDirection === 'left' ? row : colOct * 8 + colBit
const y = printDirection === "left" ? height - 1 - (colOct * 8 + colBit) : row const y = printDirection === 'left' ? height - 1 - (colOct * 8 + colBit) : row
const idx = (y * width + x) * 4 const idx = (y * width + x) * 4
const lum = 0.299 * data[idx] + 0.587 * data[idx + 1] + 0.114 * data[idx + 2] const lum = 0.299 * data[idx] + 0.587 * data[idx + 1] + 0.114 * data[idx + 2]
const isBlack = lum < 128 const isBlack = lum < 128
@@ -85,7 +99,7 @@ export async function encodeBase64ToNiimbot(base64Png: string, printDirection: "
} }
const newPart = { const newPart = {
dataType: isVoid ? "void" : "pixels", dataType: isVoid ? 'void' : 'pixels',
rowNumber: row, rowNumber: row,
repeat: 1, repeat: 1,
rowData: isVoid ? undefined : rowData, rowData: isVoid ? undefined : rowData,
@@ -97,15 +111,14 @@ export async function encodeBase64ToNiimbot(base64Png: string, printDirection: "
} else { } else {
const last = rowsData[rowsData.length - 1] const last = rowsData[rowsData.length - 1]
let same = newPart.dataType === last.dataType let same = newPart.dataType === last.dataType
if (same && newPart.dataType === "pixels") { if (same && newPart.dataType === 'pixels') {
same = Utils.u8ArraysEqual(newPart.rowData, last.rowData) same = Utils.u8ArraysEqual(newPart.rowData, last.rowData)
} }
if (same) last.repeat++ if (same) last.repeat++
else rowsData.push(newPart) else rowsData.push(newPart)
if (row % 200 === 199) { if (row % 200 === 199) {
rowsData.push({ rowsData.push({
dataType: "check", dataType: 'check',
rowNumber: row, rowNumber: row,
repeat: 0, repeat: 0,
rowData: undefined, rowData: undefined,
@@ -118,69 +131,44 @@ export async function encodeBase64ToNiimbot(base64Png: string, printDirection: "
return { cols, rows, rowsData } return { cols, rows, rowsData }
} }
function escapeXml(value: string) { export async function generateLabel(context,width,height) {
return String(value) // Canvas für Hintergrund & Text
.replace(/&/g, "&amp;") const canvas = createCanvas(width, height)
.replace(/</g, "&lt;") const ctx = canvas.getContext('2d')
.replace(/>/g, "&gt;")
.replace(/\"/g, "&quot;")
.replace(/'/g, "&apos;")
}
export async function generateLabel(context: any = {}, width = 584, height = 354) { // Hintergrund weiß
const normalizedWidth = Math.ceil(Number(width) / 8) * 8 ctx.fillStyle = '#FFFFFF'
const normalizedHeight = Math.max(1, Number(height) || 203) ctx.fillRect(0, 0, width, height)
const idFont = Math.max(24, Math.round(normalizedHeight * 0.125)) // Überschrift
const nameFont = Math.max(17, Math.round(normalizedHeight * 0.078)) ctx.fillStyle = '#000000'
const customerFont = Math.max(14, Math.round(normalizedHeight * 0.06)) ctx.font = '32px Arial'
const serialFont = Math.max(12, Math.round(normalizedHeight * 0.052)) ctx.fillText(context.text, 20, 40)
const labelId = context.customerInventoryId || context.datamatrix || context.id || "N/A"
const labelName = context.name || context.text || "Kundeninventarartikel"
const customerName = context.customerName || ""
const serial = context.serialNumber ? `SN: ${context.serialNumber}` : ""
const nameLine1 = String(labelName).slice(0, 30)
const nameLine2 = String(labelName).slice(30, 60)
// 3) DataMatrix
const dataMatrixPng = await bwipjs.toBuffer({ const dataMatrixPng = await bwipjs.toBuffer({
bcid: "datamatrix", bcid: 'datamatrix',
text: String(labelId), text: context.datamatrix,
scale: normalizedWidth >= 560 ? 7 : 5, scale: 6,
includetext: false,
}) })
const dataMatrixMeta = await Sharp(dataMatrixPng).metadata()
const dataMatrixWidth = dataMatrixMeta.width || 0
const dataMatrixHeight = dataMatrixMeta.height || 0
const dmLeft = Math.max(8, normalizedWidth - dataMatrixWidth - 28)
const dmTop = Math.max(8, Math.floor((normalizedHeight - dataMatrixHeight) / 2))
const textMaxWidth = Math.max(120, dmLeft - 20)
const textSvg = ` // Basisbild aus Canvas
<svg width="${normalizedWidth}" height="${normalizedHeight}" xmlns="http://www.w3.org/2000/svg"> const base = await Sharp(canvas.toBuffer())
<rect width="100%" height="100%" fill="white"/> .png()
<text x="12" y="${Math.round(normalizedHeight * 0.15)}" font-size="${idFont}" font-family="Arial, Helvetica, sans-serif" font-weight="700" fill="black">${escapeXml(String(labelId).slice(0, 26))}</text> .toBuffer()
<text x="12" y="${Math.round(normalizedHeight * 0.29)}" font-size="${nameFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(nameLine1)}</text>
<text x="12" y="${Math.round(normalizedHeight * 0.37)}" font-size="${nameFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(nameLine2)}</text>
<text x="12" y="${Math.round(normalizedHeight * 0.49)}" font-size="${customerFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(String(customerName).slice(0, 40))}</text>
<text x="12" y="${Math.round(normalizedHeight * 0.58)}" font-size="${serialFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(String(serial).slice(0, 42))}</text>
<rect x="0" y="0" width="${textMaxWidth}" height="${normalizedHeight}" fill="none"/>
</svg>`.trim()
const final = await Sharp({ // Alles zusammen compositen
create: { const final = await Sharp(base)
width: normalizedWidth,
height: normalizedHeight,
channels: 3,
background: { r: 255, g: 255, b: 255 },
},
})
.composite([ .composite([
{ input: Buffer.from(textSvg), top: 0, left: 0 }, { input: dataMatrixPng, top: 60, left: 20 },
{ input: dataMatrixPng, top: dmTop, left: dmLeft },
]) ])
.png() .png()
.toBuffer() .toBuffer()
return final.toString("base64") fs.writeFileSync('label.png', final)
}
// Optional: Base64 zurückgeben (z.B. für API)
const base64 = final.toString('base64')
return base64
}*/

View File

@@ -11,7 +11,7 @@ import { s3 } from "./s3";
import { secrets } from "./secrets"; import { secrets } from "./secrets";
// Drizzle schema // Drizzle schema
import { vendors, accounts, tenants } from "../../db/schema"; import { vendors, accounts } from "../../db/schema";
import {eq} from "drizzle-orm"; import {eq} from "drizzle-orm";
let openai: OpenAI | null = null; let openai: OpenAI | null = null;
@@ -86,13 +86,12 @@ const InstructionFormat = z.object({
}); });
// --------------------------------------------------------- // ---------------------------------------------------------
// MAIN FUNCTION // MAIN FUNCTION REPLACES SUPABASE VERSION
// --------------------------------------------------------- // ---------------------------------------------------------
export const getInvoiceDataFromGPT = async function ( export const getInvoiceDataFromGPT = async function (
server: FastifyInstance, server: FastifyInstance,
file: any, file: any,
tenantId: number, tenantId: number
learningContext?: string
) { ) {
await initOpenAi(); await initOpenAi();
@@ -163,22 +162,13 @@ export const getInvoiceDataFromGPT = async function (
.from(vendors) .from(vendors)
.where(eq(vendors.tenant,tenantId)); .where(eq(vendors.tenant,tenantId));
const [tenant] = await server.db
.select({ accountChart: tenants.accountChart })
.from(tenants)
.where(eq(tenants.id, tenantId))
.limit(1)
const activeAccountChart = tenant?.accountChart || "skr03"
const accountList = await server.db const accountList = await server.db
.select({ .select({
id: accounts.id, id: accounts.id,
label: accounts.label, label: accounts.label,
number: accounts.number, number: accounts.number,
}) })
.from(accounts) .from(accounts);
.where(eq(accounts.accountChart, activeAccountChart));
// --------------------------------------------------------- // ---------------------------------------------------------
// 4) GPT ANALYSIS // 4) GPT ANALYSIS
@@ -198,13 +188,8 @@ export const getInvoiceDataFromGPT = async function (
"You extract structured invoice data.\n\n" + "You extract structured invoice data.\n\n" +
`VENDORS: ${JSON.stringify(vendorList)}\n` + `VENDORS: ${JSON.stringify(vendorList)}\n` +
`ACCOUNTS: ${JSON.stringify(accountList)}\n\n` + `ACCOUNTS: ${JSON.stringify(accountList)}\n\n` +
(learningContext
? `HISTORICAL_PATTERNS: ${learningContext}\n\n`
: "") +
"Match issuer by name to vendor.id.\n" + "Match issuer by name to vendor.id.\n" +
"Match invoice items to account id based on label/number.\n" + "Match invoice items to account id based on label/number.\n" +
"Use historical patterns as soft hints for vendor/account/payment mapping.\n" +
"Do not invent values when the invoice text contradicts the hints.\n" +
"Convert dates to YYYY-MM-DD.\n" + "Convert dates to YYYY-MM-DD.\n" +
"Keep invoice items in original order.\n", "Keep invoice items in original order.\n",
}, },

View File

@@ -1,42 +1,4 @@
import { FastifyInstance } from "fastify" import { FastifyInstance } from "fastify"
import { historyitems } from "../../db/schema";
const HISTORY_ENTITY_LABELS: Record<string, string> = {
customers: "Kunden",
members: "Mitglieder",
vendors: "Lieferanten",
projects: "Projekte",
plants: "Objekte",
contacts: "Kontakte",
inventoryitems: "Inventarartikel",
customerinventoryitems: "Kundeninventar",
products: "Artikel",
profiles: "Mitarbeiter",
absencerequests: "Abwesenheiten",
events: "Termine",
tasks: "Aufgaben",
vehicles: "Fahrzeuge",
costcentres: "Kostenstellen",
ownaccounts: "zusätzliche Buchungskonten",
documentboxes: "Dokumentenboxen",
hourrates: "Stundensätze",
services: "Leistungen",
roles: "Rollen",
checks: "Überprüfungen",
spaces: "Lagerplätze",
customerspaces: "Kundenlagerplätze",
trackingtrips: "Fahrten",
createddocuments: "Dokumente",
inventoryitemgroups: "Inventarartikelgruppen",
bankstatements: "Buchungen",
incominginvoices: "Eingangsrechnungen",
files: "Dateien",
memberrelations: "Mitgliedsverhältnisse",
}
export function getHistoryEntityLabel(entity: string) {
return HISTORY_ENTITY_LABELS[entity] || entity
}
export async function insertHistoryItem( export async function insertHistoryItem(
server: FastifyInstance, server: FastifyInstance,
@@ -51,18 +13,15 @@ export async function insertHistoryItem(
text?: string text?: string
} }
) { ) {
const entityLabel = getHistoryEntityLabel(params.entity)
const textMap = { const textMap = {
created: `Neuer Eintrag in ${entityLabel} erstellt`, created: `Neuer Eintrag in ${params.entity} erstellt`,
updated: `Eintrag in ${entityLabel} geändert`, updated: `Eintrag in ${params.entity} geändert`,
unchanged: `Eintrag in ${entityLabel} unverändert`, archived: `Eintrag in ${params.entity} archiviert`,
archived: `Eintrag in ${entityLabel} archiviert`, deleted: `Eintrag in ${params.entity} gelöscht`
deleted: `Eintrag in ${entityLabel} gelöscht`
} }
const columnMap: Record<string, string> = { const columnMap: Record<string, string> = {
customers: "customer", customers: "customer",
members: "customer",
vendors: "vendor", vendors: "vendor",
projects: "project", projects: "project",
plants: "plant", plants: "plant",
@@ -82,15 +41,10 @@ export async function insertHistoryItem(
roles: "role", roles: "role",
checks: "check", checks: "check",
spaces: "space", spaces: "space",
customerspaces: "customerspace",
customerinventoryitems: "customerinventoryitem",
trackingtrips: "trackingtrip", trackingtrips: "trackingtrip",
createddocuments: "createddocument", createddocuments: "createddocument",
inventoryitemgroups: "inventoryitemgroup", inventoryitemgroups: "inventoryitemgroup",
bankstatements: "bankstatement", bankstatements: "bankstatement"
incominginvoices: "incomingInvoice",
files: "file",
memberrelations: "memberrelation",
} }
const fkColumn = columnMap[params.entity] const fkColumn = columnMap[params.entity]
@@ -99,20 +53,18 @@ export async function insertHistoryItem(
return return
} }
const stringifyHistoryValue = (value: any) => {
if (value === undefined || value === null) return null
return typeof value === "string" ? value : JSON.stringify(value)
}
const entry = { const entry = {
tenant: params.tenant_id, tenant: params.tenant_id,
createdBy: params.created_by, created_by: params.created_by,
text: params.text || textMap[params.action], text: params.text || textMap[params.action],
action: params.action, action: params.action,
[fkColumn]: params.entityId, [fkColumn]: params.entityId,
oldVal: stringifyHistoryValue(params.oldVal), oldVal: params.oldVal ? JSON.stringify(params.oldVal) : null,
newVal: stringifyHistoryValue(params.newVal) newVal: params.newVal ? JSON.stringify(params.newVal) : null
} }
await server.db.insert(historyitems).values(entry as any) const { error } = await server.supabase.from("historyitems").insert([entry])
if (error) { // @ts-ignore
console.log(error)
}
} }

View File

@@ -2,9 +2,6 @@ import {PDFDocument, StandardFonts, rgb} from "pdf-lib"
import dayjs from "dayjs" import dayjs from "dayjs"
import {renderAsCurrency, splitStringBySpace} from "./stringRendering"; import {renderAsCurrency, splitStringBySpace} from "./stringRendering";
import {FastifyInstance} from "fastify"; import {FastifyInstance} from "fastify";
import { GetObjectCommand } from "@aws-sdk/client-s3";
import { s3 } from "./s3";
import { secrets } from "./secrets";
const getCoordinatesForPDFLib = (x:number ,y:number, page:any) => { const getCoordinatesForPDFLib = (x:number ,y:number, page:any) => {
/* /*
@@ -28,21 +25,9 @@ const getCoordinatesForPDFLib = (x:number ,y:number, page:any) => {
const getBackgroundSourceBuffer = async (server:FastifyInstance, path:string) => { const getBackgroundSourceBuffer = async (server:FastifyInstance, path:string) => {
console.log(path) const {data:backgroundPDFData,error:backgroundPDFError} = await server.supabase.storage.from("files").download(path)
const { Body } = await s3.send( return backgroundPDFData.arrayBuffer()
new GetObjectCommand({
Bucket: secrets.S3_BUCKET,
Key: path
})
)
const chunks: Buffer[] = []
for await (const chunk of Body as any) {
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk))
}
return Buffer.concat(chunks)
} }
const getDuration = (time) => { const getDuration = (time) => {

View File

@@ -3,14 +3,10 @@ import {
bankaccounts, bankaccounts,
bankrequisitions, bankrequisitions,
bankstatements, bankstatements,
entitybankaccounts,
contacts, contacts,
contracts, contracts,
contracttypes,
costcentres, costcentres,
createddocuments, createddocuments,
customerinventoryitems,
customerspaces,
customers, customers,
files, files,
filetags, filetags,
@@ -20,7 +16,6 @@ import {
inventoryitemgroups, inventoryitemgroups,
inventoryitems, inventoryitems,
letterheads, letterheads,
memberrelations,
ownaccounts, ownaccounts,
plants, plants,
productcategories, productcategories,
@@ -41,28 +36,17 @@ import {
export const resourceConfig = { export const resourceConfig = {
projects: { projects: {
searchColumns: ["name","customerRef","projectNumber","notes"], searchColumns: ["name"],
mtoLoad: ["customer","plant","contract","projecttype"], mtoLoad: ["customer","plant","contract","projecttype"],
mtmLoad: ["tasks", "files","createddocuments"], mtmLoad: ["tasks", "files","createddocuments"],
table: projects, table: projects,
numberRangeHolder: "projectNumber" numberRangeHolder: "projectNumber"
}, },
customers: { customers: {
searchColumns: ["name", "nameAddition", "customerNumber", "firstname", "lastname", "notes"], searchColumns: ["name", "customerNumber", "firstname", "lastname", "notes"],
mtmLoad: ["contacts","projects","plants","createddocuments","contracts","customerinventoryitems","customerspaces"],
table: customers,
numberRangeHolder: "customerNumber",
},
members: {
searchColumns: ["name", "nameAddition", "customerNumber", "firstname", "lastname", "notes"],
mtmLoad: ["contacts","projects","plants","createddocuments","contracts"], mtmLoad: ["contacts","projects","plants","createddocuments","contracts"],
table: customers, table: customers,
numberRangeHolder: "customerNumber", numberRangeHolder: "customerNumber",
relationKey: "customer",
},
memberrelations: {
table: memberrelations,
searchColumns: ["type", "billingInterval"],
}, },
contacts: { contacts: {
searchColumns: ["firstName", "lastName", "email", "phone", "notes"], searchColumns: ["firstName", "lastName", "email", "phone", "notes"],
@@ -71,17 +55,12 @@ export const resourceConfig = {
}, },
contracts: { contracts: {
table: contracts, table: contracts,
searchColumns: ["name", "notes", "contractNumber", "paymentType", "billingInterval", "sepaRef", "bankingName"], searchColumns: ["name", "notes", "contractNumber", "paymentType", "sepaRef", "bankingName"],
numberRangeHolder: "contractNumber", numberRangeHolder: "contractNumber",
mtoLoad: ["customer", "contracttype"], mtoLoad: ["customer"],
},
contracttypes: {
table: contracttypes,
searchColumns: ["name", "description", "paymentType", "billingInterval"],
}, },
plants: { plants: {
table: plants, table: plants,
searchColumns: ["name"],
mtoLoad: ["customer"], mtoLoad: ["customer"],
mtmLoad: ["projects","tasks","files"], mtmLoad: ["projects","tasks","files"],
}, },
@@ -106,12 +85,6 @@ export const resourceConfig = {
table: inventoryitems, table: inventoryitems,
numberRangeHolder: "articleNumber", numberRangeHolder: "articleNumber",
}, },
customerinventoryitems: {
table: customerinventoryitems,
numberRangeHolder: "customerInventoryId",
mtoLoad: ["customer", "customerspace", "product", "vendor"],
searchColumns: ["name", "customerInventoryId", "serialNumber", "description", "manufacturer", "manufacturerNumber"],
},
inventoryitemgroups: { inventoryitemgroups: {
table: inventoryitemgroups table: inventoryitemgroups
}, },
@@ -146,13 +119,6 @@ export const resourceConfig = {
searchColumns: ["name","space_number","type","info_data"], searchColumns: ["name","space_number","type","info_data"],
numberRangeHolder: "spaceNumber", numberRangeHolder: "spaceNumber",
}, },
customerspaces: {
table: customerspaces,
searchColumns: ["name","space_number","type","info_data","description"],
numberRangeHolder: "space_number",
mtoLoad: ["customer"],
mtmLoad: ["customerinventoryitems"],
},
ownaccounts: { ownaccounts: {
table: ownaccounts, table: ownaccounts,
searchColumns: ["name","description","number"], searchColumns: ["name","description","number"],
@@ -203,10 +169,6 @@ export const resourceConfig = {
bankrequisitions: { bankrequisitions: {
table: bankrequisitions, table: bankrequisitions,
}, },
entitybankaccounts: {
table: entitybankaccounts,
searchColumns: ["description"],
},
serialexecutions: { serialexecutions: {
table: serialExecutions table: serialExecutions
} }

View File

@@ -14,6 +14,8 @@ export let secrets = {
PORT: number PORT: number
HOST: string HOST: string
DATABASE_URL: string DATABASE_URL: string
SUPABASE_URL: string
SUPABASE_SERVICE_ROLE_KEY: string
S3_BUCKET: string S3_BUCKET: string
ENCRYPTION_KEY: string ENCRYPTION_KEY: string
MAILER_SMTP_HOST: string MAILER_SMTP_HOST: string

View File

@@ -1,74 +0,0 @@
// scripts/fill-file-sizes.ts
import 'dotenv/config';
import { db } from '../../db';
import { files } from '../../db/schema';
import { eq, isNull } from 'drizzle-orm';
import { HeadObjectCommand } from "@aws-sdk/client-s3";
import { s3, initS3 } from '../utils/s3';
import { loadSecrets, secrets } from '../utils/secrets';

/**
 * One-off backfill: fetches the byte size of every stored file from S3
 * via a cheap HeadObject metadata request (no body transfer) and writes
 * it onto the matching `files` row.
 *
 * Prints "." per updated row and "X" per failure as a progress strip,
 * then a summary, and terminates the process.
 */
async function migrate() {
    console.log("🚀 Starte Migration der Dateigrößen...");

    // Bootstrap: secrets first, then the S3 client that depends on them.
    await loadSecrets();
    await initS3();

    // Deliberately fetch ALL rows (not just size == null) so that
    // already-filled values get re-verified as well.
    const records = await db.select().from(files);
    console.log(`📦 ${records.length} Dateien in der Datenbank gefunden.`);

    let updated = 0;
    let failed = 0;

    for (const record of records) {
        // Rows without a storage key cannot be resolved in S3 at all.
        if (!record.path) {
            console.log(`⏭️ Überspringe Datei ${record.id} (Kein Pfad)`);
            continue;
        }

        try {
            // HeadObject loads only metadata, not the object body -> fast.
            const head = await s3.send(new HeadObjectCommand({
                Bucket: secrets.S3_BUCKET, // Oder secrets.S3_BUCKET_NAME je nach deiner Config
                Key: record.path
            }));

            await db.update(files)
                .set({ size: head.ContentLength || 0 })
                .where(eq(files.id, record.id));

            process.stdout.write("."); // progress indicator
            updated++;
        } catch (error: any) {
            process.stdout.write("X");
            // An error.name === 'NotFound' here means the DB row points at
            // an object that no longer exists in the bucket; we only count
            // the failure and keep going.
            failed++;
        }
    }

    console.log("\n\n------------------------------------------------");
    console.log(`✅ Fertig!`);
    console.log(`Updated: ${updated}`);
    console.log(`Fehler: ${failed} (Meistens Dateien, die im Bucket fehlen)`);
    console.log("------------------------------------------------");
    process.exit(0);
}

migrate().catch(err => {
    console.error("Fataler Fehler:", err);
    process.exit(1);
});

View File

@@ -1,200 +0,0 @@
import 'dotenv/config';
import { v2 as webdav } from 'webdav-server';
import { db } from '../../db';
import { tenants, files, folders } from '../../db/schema';
import { Readable } from 'stream';
import { GetObjectCommand, HeadObjectCommand } from "@aws-sdk/client-s3";
import { s3, initS3 } from '../utils/s3';
import { secrets, loadSecrets } from '../utils/secrets';

// ============================================================================
// 1. SETUP
// ============================================================================
// Single hard-coded user with full rights on the whole tree.
// NOTE(review): 'admin'/'admin' credentials — acceptable for local/dev use
// only; confirm this service is never exposed publicly.
const userManager = new webdav.SimpleUserManager();
const user = userManager.addUser('admin', 'admin', true);
const privilegeManager = new webdav.SimplePathPrivilegeManager();
privilegeManager.setRights(user, '/', [ 'all' ]);
const server = new webdav.WebDAVServer({
    httpAuthentication: new webdav.HTTPDigestAuthentication(userManager, 'Default realm'),
    privilegeManager: privilegeManager,
    port: 3200,
    // Permissive CORS headers so browser-based WebDAV clients can connect.
    headers: {
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Methods': 'GET,POST,PUT,DELETE,OPTIONS,PROPFIND,PROPPATCH,MKCOL,COPY,MOVE,LOCK,UNLOCK',
        'Access-Control-Allow-Headers': 'Authorization, Content-Type, Depth, User-Agent, X-Expected-Entity-Length, If-Modified-Since, Cache-Control, Range, Overwrite, Destination',
    }
});

// ============================================================================
// 2. CACHE
// ============================================================================
// Lookup tables filled while building the tree, consumed by the stream/size
// overrides below: WebDAV path -> S3 object key, and WebDAV path -> byte size.
const pathToS3KeyMap = new Map<string, string>();
const pathToSizeMap = new Map<string, number>();

// ============================================================================
// 3. LOGIC
// ============================================================================
/**
 * Loads tenants/folders/files from the database, builds an in-memory WebDAV
 * tree (one root folder per tenant), registers it on the server, installs
 * S3-backed read/size overrides, and starts listening on port 3200.
 *
 * "Filtered Mode": files whose stored size is missing or <= 0 are hidden
 * from the tree entirely (counted in hiddenFilesCount for the startup log).
 */
async function startServer() {
    console.log('------------------------------------------------');
    console.log('[WebDAV] Starte Server (Filtered Mode)...');
    try {
        await loadSecrets();
        await initS3();
        console.log('[WebDAV] S3 Verbindung OK.');
        console.log('[WebDAV] Lade Datenbank...');
        // Entire dataset is loaded up front; the tree is built once at startup.
        const allTenants = await db.select().from(tenants);
        const allFolders = await db.select().from(folders);
        const allFiles = await db.select().from(files);
        // Counter for the final statistics line.
        let hiddenFilesCount = 0;
        // --------------------------------------------------------------------
        // BUILDER
        // --------------------------------------------------------------------
        // Recursively builds the subtree for one folder (parentFolderId=null
        // means the tenant root). Side effects: fills both caches above and
        // increments hiddenFilesCount for filtered-out files.
        const buildFolderContent = (tenantId: string, parentFolderId: string | null, currentWebDavPath: string) => {
            const currentDir: any = {};
            // 1. SUBFOLDERS
            const subFolders = allFolders.filter(f => f.tenant === tenantId && f.parent === parentFolderId);
            subFolders.forEach(folder => {
                // '/' in a name would break WebDAV paths, so replace it.
                const folderName = folder.name.replace(/\//g, '-');
                const nextPath = `${currentWebDavPath}/${folderName}`;
                currentDir[folderName] = buildFolderContent(tenantId, folder.id, nextPath);
            });
            // 2. FILES
            //@ts-ignore
            const dirFiles = allFiles.filter(f => f.tenant === tenantId && f.folder === parentFolderId);
            dirFiles.forEach(file => {
                // ============================================================
                // FILTER: hide files without a positive size (0 bytes / null)
                // ============================================================
                const fileSize = Number(file.size || 0);
                if (fileSize <= 0) {
                    // Skip the file entirely; it never appears in the tree.
                    hiddenFilesCount++;
                    return;
                }
                // ============================================================
                // Determine display name: last path segment wins, then name.
                let fileName = 'Unbenannt';
                if (file.path) fileName = file.path.split('/').pop() || 'Unbenannt';
                else if (file.name) fileName = file.name;
                // A) placeholder entry in the WebDAV tree — real bytes are
                //    streamed from S3 by the _openReadStream override.
                currentDir[fileName] = `Ref: ${file.id}`;
                // B) fill the lookup maps
                const webDavFullPath = `${currentWebDavPath}/${fileName}`;
                if (file.path) {
                    pathToS3KeyMap.set(webDavFullPath, file.path);
                }
                // C) cache the size (guaranteed > 0 at this point)
                pathToSizeMap.set(webDavFullPath, fileSize);
            });
            return currentDir;
        };
        // --------------------------------------------------------------------
        // ASSEMBLE THE TREE: one top-level folder per tenant.
        // --------------------------------------------------------------------
        const dbTree: any = {};
        allTenants.forEach(tenant => {
            const tName = tenant.name.replace(/\//g, '-');
            const rootPath = `/${tName}`;
            //@ts-ignore
            const content = buildFolderContent(tenant.id, null, rootPath);
            // Placeholder note for tenants with no visible files (optional).
            if (Object.keys(content).length === 0) {
                content['(Leer).txt'] = 'Keine gültigen Dateien vorhanden.';
            }
            dbTree[tName] = content;
        });
        if (Object.keys(dbTree).length === 0) {
            dbTree['Status.txt'] = 'Datenbank leer.';
        }
        // --------------------------------------------------------------------
        // REGISTER the in-memory tree on the server's root filesystem.
        // --------------------------------------------------------------------
        const rootFS = server.rootFileSystem();
        //@ts-ignore
        rootFS.addSubTree(server.createExternalContext(), dbTree);
        // ====================================================================
        // OVERRIDE 1: DOWNLOAD — stream file contents straight from S3.
        // NOTE(review): patches a private webdav-server method; verify the
        // signature still matches when upgrading the library.
        // ====================================================================
        (rootFS as any)._openReadStream = async (path: webdav.Path, ctx: any, callback: any) => {
            const p = path.toString();
            const s3Key = pathToS3KeyMap.get(p);
            if (s3Key) {
                try {
                    const command = new GetObjectCommand({ Bucket: secrets.S3_BUCKET, Key: s3Key });
                    const response = await s3.send(command);
                    if (response.Body) return callback(null, response.Body as Readable);
                } catch (e: any) {
                    console.error(`[S3 ERROR] ${e.message}`);
                    // Surface the error as file content rather than failing the request.
                    return callback(null, Readable.from([`Error: ${e.message}`]));
                }
            }
            // Paths without an S3 mapping (e.g. the placeholder .txt files).
            return callback(null, Readable.from(['System File']));
        };
        // ====================================================================
        // OVERRIDE 2: SIZE — serve cached sizes, fall back to a live S3 head.
        // ====================================================================
        (rootFS as any)._size = async (path: webdav.Path, ctx: any, callback: any) => {
            const p = path.toString();
            const cachedSize = pathToSizeMap.get(p);
            if (cachedSize !== undefined) return callback(null, cachedSize);
            // Fallback S3 check (rarely reached thanks to the size filter);
            // result is cached for subsequent requests.
            const s3Key = pathToS3KeyMap.get(p);
            if (s3Key) {
                try {
                    const command = new HeadObjectCommand({ Bucket: secrets.S3_BUCKET, Key: s3Key });
                    const response = await s3.send(command);
                    const realSize = response.ContentLength || 0;
                    pathToSizeMap.set(p, realSize);
                    return callback(null, realSize);
                } catch (e) {
                    return callback(null, 0);
                }
            }
            return callback(null, 0);
        };
        // --------------------------------------------------------------------
        // START
        // --------------------------------------------------------------------
        server.start(() => {
            console.log('[WebDAV] 🚀 READY auf http://localhost:3200');
            console.log(`[WebDAV] Sichtbare Dateien: ${pathToS3KeyMap.size}`);
            console.log(`[WebDAV] Ausgeblendet (0 Bytes): ${hiddenFilesCount}`);
        });
    } catch (error) {
        console.error('[WebDAV] 💥 ERROR:', error);
    }
}
startServer();

View File

@@ -1,70 +1,37 @@
version: "3"
services: services:
frontend: web:
image: git.federspiel.tech/flfeders/fedeo/frontend:dev image: reg.federspiel.software/fedeo/software:beta
restart: always restart: always
environment: environment:
- NUXT_PUBLIC_API_BASE=https://app.fedeo.de/backend - INFISICAL_CLIENT_ID=abc
- NUXT_PUBLIC_PDF_LICENSE=eyJkYXRhIjoiZXlKMElqb2laR1YyWld4dmNHVnlJaXdpWVhaMUlqb3hOemt3TmpNNU9UazVMQ0prYlNJNkltRndjQzVtWldSbGJ5NWtaU0lzSW00aU9pSXpOemt3Wm1Vek5UazBZbVU0TlRRNElpd2laWGh3SWpveE56a3dOak01T1RrNUxDSmtiWFFpT2lKemNHVmphV1pwWXlJc0luQWlPaUoyYVdWM1pYSWlmUT09Iiwic2lnbmF0dXJlIjoicWU4K0ZxQUJDNUp5bEJUU094Vkd5RTJMbk9UNmpyc2EyRStsN2tNNWhkM21KK2ZvVjYwaTFKeFdhZGtqSDRNWXZxQklMc0dpdWh5d2pMbUFjRHZuWGxOcTRMcXFLRm53dzVtaG1LK3lTeDRXbzVaS1loK1VZdFBzWUZjV3oyUHVGMmJraGJrVjJ6RzRlTGtRU09wdmJKY3JUZU1rN0N1VkN6Q1UraHF5T0ZVVXllWnRmaHlmcWswZEFFL0RMR1hvTDFSQXFjNkNkYU9FTDRTdC9Idy9DQnFieTE2aisvT3RxQUlLcy9NWTR6SVk3RTI3bWo4RUx5VjhXNkdXNXhqc0VUVzNKN0RRMUVlb3RhVlNLT29kc3pVRlhUYzVlbHVuSm04ZlcwM1ErMUhtSnpmWGoyS1dwM1dnamJDazZYSHozamFML2lOdUYvZFZNaWYvc2FoR3NnPT0ifQ== - INFISICAL_CLIENT_SECRET=abc
networks:
- traefik
labels:
- "traefik.enable=true"
- "traefik.docker.network=traefik"
- "traefik.port=3000"
# Middlewares
- "traefik.http.middlewares.fedeo-frontend-redirect-web-secure.redirectscheme.scheme=https"
# Web Entrypoint
- "traefik.http.routers.fedeo-frontend.middlewares=fedeo-frontend-redirect-web-secure"
- "traefik.http.routers.fedeo-frontend.rule=Host(`app.fedeo.de`) && PathPrefix(`/`)"
- "traefik.http.routers.fedeo-frontend.entrypoints=web"
# Web Secure Entrypoint
- "traefik.http.routers.fedeo-frontend-secure.rule=Host(`app.fedeo.de`) && PathPrefix(`/`)"
- "traefik.http.routers.fedeo-frontend-secure.entrypoints=web-secured" #
- "traefik.http.routers.fedeo-frontend-secure.tls.certresolver=mytlschallenge"
backend: backend:
image: git.federspiel.tech/flfeders/fedeo/backend:dev image: reg.federspiel.software/fedeo/backend:main
restart: always restart: always
environment: environment:
- INFISICAL_CLIENT_ID=a6838bd6-9983-4bf4-9be2-ace830b9abdf - NUXT_PUBLIC_API_BASE=
- INFISICAL_CLIENT_SECRET=4e3441acc0adbffd324aa50e668a95a556a3f55ec6bb85954e176e35a3392003 - NUXT_PUBLIC_PDF_LICENSE=
- NODE_ENV=production db:
networks: image: postgres
- traefik restart: always
labels: shm_size: 128mb
- "traefik.enable=true" environment:
- "traefik.docker.network=traefik" POSTGRES_PASSWORD: abc
- "traefik.port=3100" POSTGRES_USER: sandelcom
# Middlewares POSTGRES_DB: sensorfy
- "traefik.http.middlewares.fedeo-backend-redirect-web-secure.redirectscheme.scheme=https" volumes:
- "traefik.http.middlewares.fedeo-backend-strip.stripprefix.prefixes=/backend" - ./pg-data:/var/lib/postgresql/data
# Web Entrypoint ports:
- "traefik.http.routers.fedeo-backend.middlewares=fedeo-backend-redirect-web-secure" - "5432:5432"
- "traefik.http.routers.fedeo-backend.rule=Host(`app.fedeo.de`) && PathPrefix(`/backend`)"
- "traefik.http.routers.fedeo-backend.entrypoints=web"
# Web Secure Entrypoint
- "traefik.http.routers.fedeo-backend-secure.rule=Host(`app.fedeo.de`) && PathPrefix(`/backend`)"
- "traefik.http.routers.fedeo-backend-secure.entrypoints=web-secured" #
- "traefik.http.routers.fedeo-backend-secure.tls.certresolver=mytlschallenge"
- "traefik.http.routers.fedeo-backend-secure.middlewares=fedeo-backend-strip"
# db:
# image: postgres
# restart: always
# shm_size: 128mb
# environment:
# POSTGRES_PASSWORD: abc
# POSTGRES_USER: sandelcom
# POSTGRES_DB: sensorfy
# volumes:
# - ./pg-data:/var/lib/postgresql/data
# ports:
# - "5432:5432"
traefik: traefik:
image: traefik:v2.11 image: traefik:v2.2
restart: unless-stopped restart: unless-stopped
container_name: traefik container_name: traefik
command: command:
- "--api.insecure=false" - "--api.insecure=false"
- "--api.dashboard=false" - "--api.dashboard=true"
- "--api.debug=false" - "--api.debug=false"
- "--providers.docker=true" - "--providers.docker=true"
- "--providers.docker.exposedbydefault=false" - "--providers.docker.exposedbydefault=false"
@@ -76,18 +43,19 @@ services:
- "--accesslog.bufferingsize=5000" - "--accesslog.bufferingsize=5000"
- "--accesslog.fields.defaultMode=keep" - "--accesslog.fields.defaultMode=keep"
- "--accesslog.fields.headers.defaultMode=keep" - "--accesslog.fields.headers.defaultMode=keep"
- "--certificatesresolvers.mytlschallenge.acme.tlschallenge=true" # - "--certificatesresolvers.mytlschallenge.acme.tlschallenge=true" # <== Enable TLS-ALPN-01 to generate and renew ACME certs
- "--certificatesresolvers.mytlschallenge.acme.email=moin@fedeo.de" - "--certificatesresolvers.mytlschallenge.acme.email=info@sandelcom.de" # <== Setting email for certs
- "--certificatesresolvers.mytlschallenge.acme.storage=/letsencrypt/acme.json" - "--certificatesresolvers.mytlschallenge.acme.storage=/letsencrypt/acme.json" # <== Defining acme file to store cert information
ports: ports:
- 80:80 - 80:80
- 8080:8080
- 443:443 - 443:443
volumes: volumes:
- "./traefik/letsencrypt:/letsencrypt" # <== Volume for certs (TLS) - "./traefik/letsencrypt:/letsencrypt" # <== Volume for certs (TLS)
- "/var/run/docker.sock:/var/run/docker.sock:ro" - "/var/run/docker.sock:/var/run/docker.sock:ro"
- "./traefik/logs:/logs" - "./traefik/logs:/logs"
networks: labels:
- traefik #### Labels define the behavior and rules of the traefik proxy for this container ####
networks: - "traefik.enable=true" # <== Enable traefik on itself to view dashboard and assign subdomain to view it
traefik: - "traefik.http.routers.api.rule=Host(`srv1.drinkingteam.de`)" # <== Setting the domain for the dashboard
external: false - "traefik.http.routers.api.service=api@internal" # <== Enabling the api to be a service to access

View File

@@ -1,182 +0,0 @@
<script setup>
// Widget for assigning bank accounts (entitybankaccounts) to an entity.
// v-model carries the array of assigned account IDs; the component can
// also create a brand-new account inline and assign it immediately.
const props = defineProps({
    // Array of assigned bank-account IDs (v-model).
    modelValue: {
        type: Array,
        default: () => []
    },
    // Disables all assign/remove/create interactions.
    disabled: {
        type: Boolean,
        default: false
    }
})
const emit = defineEmits(["update:modelValue"])
const toast = useToast()
// All known bank accounts, loaded once on setup (and after inline create).
const accounts = ref([])
const ibanSearch = ref("")
const showCreate = ref(false)
const resolvingIban = ref(false)
// Form model for the "create new account" modal.
const createPayload = ref({
    iban: "",
    bic: "",
    bankName: "",
    description: ""
})
// Canonical IBAN form for comparisons: strip whitespace, uppercase.
const normalizeIban = (value) => String(value || "").replace(/\s+/g, "").toUpperCase()
const loadAccounts = async () => {
    accounts.value = await useEntities("entitybankaccounts").select()
}
// Defensive: treat a non-array modelValue as "nothing assigned".
const assignedIds = computed(() => {
    return Array.isArray(props.modelValue) ? props.modelValue : []
})
// Full account objects for the currently assigned IDs (for badge display).
const assignedAccounts = computed(() => {
    return accounts.value.filter((a) => assignedIds.value.includes(a.id))
})
const updateAssigned = (ids) => {
    emit("update:modelValue", ids)
}
// Looks up an existing account by normalized IBAN and assigns it.
// Shows a toast when no match is found or it is already assigned.
const assignByIban = async () => {
    const search = normalizeIban(ibanSearch.value)
    if (!search) return
    const match = accounts.value.find((a) => normalizeIban(a.iban) === search)
    if (!match) {
        toast.add({ title: "Kein Bankkonto mit dieser IBAN gefunden.", color: "rose" })
        return
    }
    if (assignedIds.value.includes(match.id)) {
        toast.add({ title: "Dieses Bankkonto ist bereits zugewiesen.", color: "amber" })
        return
    }
    updateAssigned([...assignedIds.value, match.id])
    ibanSearch.value = ""
}
const removeAssigned = (id) => {
    updateAssigned(assignedIds.value.filter((i) => i !== id))
}
// Creates a new account from the modal form, reloads the list,
// assigns the new ID and resets/closes the modal.
const createAndAssign = async () => {
    if (!createPayload.value.iban || !createPayload.value.bic || !createPayload.value.bankName) {
        toast.add({ title: "IBAN, BIC und Bankinstitut sind Pflichtfelder.", color: "rose" })
        return
    }
    const created = await useEntities("entitybankaccounts").create(createPayload.value, true)
    await loadAccounts()
    updateAssigned([...assignedIds.value, created.id])
    createPayload.value = { iban: "", bic: "", bankName: "", description: "" }
    showCreate.value = false
}
// Auto-fills BIC and bank name from the entered IBAN via the backend
// resolver. Failures are swallowed on purpose: the user can always
// fill the fields manually.
const resolveCreatePayloadFromIban = async () => {
    const normalized = normalizeIban(createPayload.value.iban)
    if (!normalized) return
    resolvingIban.value = true
    try {
        const data = await useFunctions().useBankingResolveIban(normalized)
        if (!data) return
        createPayload.value.iban = data.iban || normalized
        if (data.bic) createPayload.value.bic = data.bic
        if (data.bankName) createPayload.value.bankName = data.bankName
    } catch (e) {
        // intentionally ignored: user can still enter fields manually
    } finally {
        resolvingIban.value = false
    }
}
// Initial load (fire-and-forget).
loadAccounts()
</script>
<template>
    <div class="flex flex-col gap-2 w-full">
        <!-- Badges for currently assigned accounts, removable via X button -->
        <div class="flex flex-wrap gap-2" v-if="assignedAccounts.length > 0">
            <UBadge
                v-for="account in assignedAccounts"
                :key="account.id"
                color="primary"
                variant="subtle"
            >
                {{ account.displayLabel || account.iban }}
                <UButton
                    v-if="!disabled"
                    variant="ghost"
                    color="gray"
                    size="2xs"
                    icon="i-heroicons-x-mark"
                    class="ml-1"
                    @click="removeAssigned(account.id)"
                />
            </UBadge>
        </div>
        <!-- IBAN search + assign, plus shortcut to the create modal -->
        <InputGroup class="w-full">
            <UInput
                v-model="ibanSearch"
                class="flex-auto"
                placeholder="IBAN eingeben und zuweisen"
                :disabled="disabled"
                @keydown.enter.prevent="assignByIban"
            />
            <UButton :disabled="disabled" @click="assignByIban">
                Zuweisen
            </UButton>
            <UButton :disabled="disabled" color="gray" variant="outline" @click="showCreate = true">
                Neu
            </UButton>
        </InputGroup>
    </div>
    <!-- Modal: create a new bank account and assign it in one step -->
    <UModal v-model="showCreate">
        <UCard>
            <template #header>Neue Bankverbindung erstellen</template>
            <div class="space-y-3">
                <UFormGroup label="IBAN">
                    <InputGroup>
                        <UInput
                            v-model="createPayload.iban"
                            @blur="resolveCreatePayloadFromIban"
                            @keydown.enter.prevent="resolveCreatePayloadFromIban"
                        />
                        <UButton
                            color="gray"
                            variant="outline"
                            :loading="resolvingIban"
                            @click="resolveCreatePayloadFromIban"
                        >
                            Ermitteln
                        </UButton>
                    </InputGroup>
                </UFormGroup>
                <UFormGroup label="BIC">
                    <UInput v-model="createPayload.bic" />
                </UFormGroup>
                <UFormGroup label="Bankinstitut">
                    <UInput v-model="createPayload.bankName" />
                </UFormGroup>
                <UFormGroup label="Beschreibung (optional)">
                    <UInput v-model="createPayload.description" />
                </UFormGroup>
            </div>
            <template #footer>
                <div class="flex justify-end gap-2">
                    <UButton color="gray" variant="outline" @click="showCreate = false">Abbrechen</UButton>
                    <UButton @click="createAndAssign">Erstellen und zuweisen</UButton>
                </div>
            </template>
        </UCard>
    </UModal>
</template>

View File

@@ -1,235 +0,0 @@
<template>
    <!-- Draggable floating calculator panel; position comes from useDraggable -->
    <div
        ref="el"
        :style="style"
        class="fixed z-[999] w-72 bg-white dark:bg-gray-900 shadow-2xl rounded-xl border border-gray-200 dark:border-gray-800 p-4 select-none touch-none"
    >
        <!-- Header: drag handle, history toggle, close button -->
        <div class="flex items-center justify-between mb-4 cursor-move border-b pb-2 dark:border-gray-800">
            <div class="flex items-center gap-2 text-gray-500">
                <UIcon name="i-heroicons-calculator" />
                <span class="text-xs font-bold uppercase tracking-wider">Kalkulator</span>
            </div>
            <div class="flex items-center gap-1">
                <UTooltip text="Verlauf">
                    <UButton
                        color="gray"
                        variant="ghost"
                        :icon="showHistory ? 'i-heroicons-clock-solid' : 'i-heroicons-clock'"
                        size="xs"
                        @click="showHistory = !showHistory"
                    />
                </UTooltip>
                <UTooltip text="Schließen (Esc)">
                    <UButton
                        color="gray"
                        variant="ghost"
                        icon="i-heroicons-x-mark"
                        size="xs"
                        @click="store.isOpen = false"
                    />
                </UTooltip>
            </div>
        </div>
        <!-- Calculator view (display + keypad) -->
        <div v-if="!showHistory">
            <!-- Display; clicking copies the current value to the clipboard -->
            <div
                class="bg-gray-100 dark:bg-gray-800 p-3 rounded-lg mb-4 text-right border border-gray-200 dark:border-gray-700 cursor-pointer group relative"
                @click="copyDisplay"
            >
                <div class="text-[10px] text-gray-500 h-4 font-mono uppercase tracking-tighter">
                    Speicher: {{ Number(store.memory).toFixed(2).replace('.', ',') }}
                </div>
                <div class="text-2xl font-mono truncate tracking-tighter">{{ store.display }}</div>
                <div class="absolute inset-0 flex items-center justify-center bg-primary-500/10 opacity-0 group-hover:opacity-100 transition-opacity rounded-lg">
                    <span class="text-[10px] font-bold text-primary-600 uppercase">
                        {{ copied ? 'Kopiert!' : 'Klicken zum Kopieren' }}
                    </span>
                </div>
            </div>
            <!-- Keypad: VAT shortcuts, memory keys, digits and operators -->
            <div class="grid grid-cols-4 gap-2">
                <UTooltip text="Brutto (+19%)"><UButton color="green" variant="soft" block size="xs" @click="applyTax(19)">+19%</UButton></UTooltip>
                <UTooltip text="Brutto (+7%)"><UButton color="green" variant="soft" block size="xs" @click="applyTax(7)">+7%</UButton></UTooltip>
                <UTooltip text="Netto (-19%)"><UButton color="rose" variant="soft" block size="xs" @click="removeTax(19)">-19%</UButton></UTooltip>
                <UTooltip text="Netto (-7%)"><UButton color="rose" variant="soft" block size="xs" @click="removeTax(7)">-7%</UButton></UTooltip>
                <UTooltip text="Löschen"><UButton color="gray" variant="ghost" block @click="clear">C</UButton></UTooltip>
                <UTooltip text="Speicher +"><UButton color="gray" variant="ghost" block @click="addToSum">M+</UButton></UTooltip>
                <UTooltip text="Speicher Reset"><UButton color="gray" variant="ghost" block @click="store.memory = 0">MC</UButton></UTooltip>
                <UButton color="primary" variant="soft" @click="setOperator('/')">/</UButton>
                <UButton v-for="n in [7, 8, 9]" :key="n" color="white" @click="appendNumber(n)">{{ n }}</UButton>
                <UButton color="primary" variant="soft" @click="setOperator('*')">×</UButton>
                <UButton v-for="n in [4, 5, 6]" :key="n" color="white" @click="appendNumber(n)">{{ n }}</UButton>
                <UButton color="primary" variant="soft" @click="setOperator('-')">-</UButton>
                <UButton v-for="n in [1, 2, 3]" :key="n" color="white" @click="appendNumber(n)">{{ n }}</UButton>
                <UButton color="primary" variant="soft" @click="setOperator('+')">+</UButton>
                <UButton color="white" class="col-span-2" @click="appendNumber(0)">0</UButton>
                <UButton color="white" @click="addComma">,</UButton>
                <UButton color="primary" block @click="calculate">=</UButton>
            </div>
        </div>
        <!-- History view: clicking an entry loads its result into the display -->
        <div v-else class="h-[270px] flex flex-col animate-in fade-in duration-200">
            <div class="flex-1 overflow-y-auto space-y-2 pr-1 custom-scrollbar">
                <div v-if="store.history.length === 0" class="text-center text-gray-400 text-xs mt-10 italic">
                    Keine Berechnungen im Verlauf
                </div>
                <div
                    v-for="(item, i) in store.history" :key="i"
                    class="p-2 bg-gray-50 dark:bg-gray-800 rounded text-right border-l-2 border-primary-500 cursor-pointer hover:bg-primary-50 dark:hover:bg-primary-900/20 transition-colors"
                    @click="useHistoryItem(item.result)"
                >
                    <div class="text-[10px] text-gray-400">{{ item.expression }} =</div>
                    <div class="text-sm font-bold">{{ item.result }}</div>
                </div>
            </div>
            <UButton
                color="gray"
                variant="ghost"
                size="xs"
                block
                class="mt-2"
                icon="i-heroicons-trash"
                @click="store.history = []"
            >
                Verlauf leeren
            </UButton>
        </div>
    </div>
</template>
<script setup>
// Floating, draggable calculator with VAT helpers, a memory register and
// a click-through history. Display, memory and history live in the Pinia
// calculator store so they survive open/close; only transient input state
// (pending operator, reset flag) is local to this component.
// Display values use German decimal commas; math is done on dot-floats.
import { useDraggable, useClipboard } from '@vueuse/core'
import { useCalculatorStore } from '~/stores/calculator'
const store = useCalculatorStore()
const { copy, copied } = useClipboard()
const el = ref(null)
// Initial position: near the top-right corner of the viewport.
const { style } = useDraggable(el, {
    initialValue: { x: window.innerWidth - 350, y: 150 },
})
// True after "=", an operator or M+ — next digit replaces the display.
const shouldResetDisplay = ref(false)
const showHistory = ref(false)
// Pending binary operation: left operand and operator, set by setOperator.
const previousValue = ref(null)
const lastOperator = ref(null)
// --- Logic ---
// Appends a digit; replaces the display when it shows '0' or a stale result.
const appendNumber = (num) => {
    if (store.display === '0' || shouldResetDisplay.value) {
        store.display = String(num)
        shouldResetDisplay.value = false
    } else {
        store.display += String(num)
    }
}
// Adds the decimal comma at most once.
const addComma = () => {
    if (!store.display.includes(',')) {
        store.display += ','
    }
}
// Stores the current value as the left operand and arms the operator.
const setOperator = (op) => {
    previousValue.value = parseFloat(store.display.replace(',', '.'))
    lastOperator.value = op
    shouldResetDisplay.value = true
}
// Evaluates the pending operation, records it in the history and shows
// the result (rounded to 4 decimals, comma-formatted). Division by zero
// yields 0 instead of Infinity.
const calculate = () => {
    if (lastOperator.value === null) return
    const currentVal = parseFloat(store.display.replace(',', '.'))
    const prevVal = previousValue.value
    let result = 0
    switch (lastOperator.value) {
        case '+': result = prevVal + currentVal; break
        case '-': result = prevVal - currentVal; break
        case '*': result = prevVal * currentVal; break
        case '/': result = currentVal !== 0 ? prevVal / currentVal : 0; break
    }
    const expression = `${prevVal} ${lastOperator.value} ${currentVal}`
    const resultString = String(Number(result.toFixed(4))).replace('.', ',')
    store.addHistory(expression, resultString)
    store.display = resultString
    lastOperator.value = null
    shouldResetDisplay.value = true
}
// Resets display and any pending operation (memory is kept).
const clear = () => {
    store.display = '0'
    previousValue.value = null
    lastOperator.value = null
}
// VAT helpers: net -> gross (+percent) and gross -> net (-percent).
const applyTax = (percent) => {
    const current = parseFloat(store.display.replace(',', '.'))
    store.display = (current * (1 + percent / 100)).toFixed(2).replace('.', ',')
}
const removeTax = (percent) => {
    const current = parseFloat(store.display.replace(',', '.'))
    store.display = (current / (1 + percent / 100)).toFixed(2).replace('.', ',')
}
// M+: adds the display value to the memory register.
const addToSum = () => {
    store.memory += parseFloat(store.display.replace(',', '.'))
    shouldResetDisplay.value = true
}
const copyDisplay = () => {
    copy(store.display)
}
// Loads a past result back into the display and leaves the history view.
const useHistoryItem = (val) => {
    store.display = val
    showHistory.value = false
}
// --- Shortcuts ---
defineShortcuts({
    '0': () => appendNumber(0),
    '1': () => appendNumber(1),
    '2': () => appendNumber(2),
    '3': () => appendNumber(3),
    '4': () => appendNumber(4),
    '5': () => appendNumber(5),
    '6': () => appendNumber(6),
    '7': () => appendNumber(7),
    '8': () => appendNumber(8),
    '9': () => appendNumber(9),
    'comma': addComma,
    'plus': () => setOperator('+'),
    'minus': () => setOperator('-'),
    'enter': { usingInput: true, handler: calculate },
    'backspace': () => {
        store.display = store.display.length > 1 ? store.display.slice(0, -1) : '0'
    },
    // Escape closes the window via the store (only while it is open).
    'escape': {
        usingInput: true,
        whenever: [computed(() => store.isOpen)],
        handler: () => { store.isOpen = false }
    }
})
</script>
<style scoped>
/* Slim custom scrollbar for the history list. */
.custom-scrollbar::-webkit-scrollbar {
    width: 4px;
}
.custom-scrollbar::-webkit-scrollbar-track {
    @apply bg-transparent;
}
.custom-scrollbar::-webkit-scrollbar-thumb {
    @apply bg-gray-200 dark:bg-gray-700 rounded-full;
}
</style>

View File

@@ -29,6 +29,7 @@ const props = defineProps({
const emit = defineEmits(["returnData"]) const emit = defineEmits(["returnData"])
const {type} = props const {type} = props
defineShortcuts({ defineShortcuts({
@@ -52,10 +53,11 @@ const route = useRoute()
const dataStore = useDataStore() const dataStore = useDataStore()
const modal = useModal() const modal = useModal()
const dataType = dataStore.dataTypes[type] const dataType = dataStore.dataTypes[type]
const openTab = ref(0) const openTab = ref(0)
const item = ref(JSON.parse(props.item)) const item = ref(JSON.parse(props.item))
// console.log(item.value) console.log(item.value)
const oldItem = ref(null) const oldItem = ref(null)
const generateOldItemData = () => { const generateOldItemData = () => {
@@ -64,50 +66,6 @@ const generateOldItemData = () => {
generateOldItemData() generateOldItemData()
// --- ÄNDERUNG START: Computed Property statt Watcher/Function ---
// Dies berechnet den Status automatisch neu, egal woher die Daten kommen (Init oder User-Eingabe)
const saveAllowed = computed(() => {
if (!item.value) return false
const isFilledValue = (value) => {
if (Array.isArray(value)) return value.length > 0
if (typeof value === "string") return value.trim().length > 0
return value !== null && value !== undefined && value !== false
}
let allowedCount = 0
// Nur Input-Felder berücksichtigen
const relevantColumns = dataType.templateColumns.filter(i => {
if (!i.inputType) return false
if (i.showFunction && !i.showFunction(item.value)) return false
if (i.disabledFunction && i.disabledFunction(item.value)) return false
return true
})
relevantColumns.forEach(datapoint => {
if(datapoint.required) {
if(datapoint.key.includes(".")){
const [parentKey, childKey] = datapoint.key.split('.')
// Prüfung: Existiert Parent UND ist Child "truthy" (nicht null/undefined/empty)
if(item.value[parentKey] && isFilledValue(item.value[parentKey][childKey])) {
allowedCount += 1
}
} else {
if(isFilledValue(item.value[datapoint.key])) {
allowedCount += 1
}
}
} else {
// Wenn nicht required, zählt es immer als "erlaubt"
allowedCount += 1
}
})
return allowedCount >= relevantColumns.length
})
// --- ÄNDERUNG ENDE ---
const setupCreate = () => { const setupCreate = () => {
dataType.templateColumns.forEach(datapoint => { dataType.templateColumns.forEach(datapoint => {
if(datapoint.key.includes(".")){ if(datapoint.key.includes(".")){
@@ -120,7 +78,10 @@ const setupCreate = () => {
} else { } else {
item.value[datapoint.key] = {} item.value[datapoint.key] = {}
} }
} }
}) })
} }
setupCreate() setupCreate()
@@ -130,45 +91,49 @@ const setupQuery = () => {
console.log(props.mode) console.log(props.mode)
if(props.mode === "create" && (route.query || props.createQuery)) { if(props.mode === "create" && (route.query || props.createQuery)) {
let data = !props.inModal ? route.query : props.createQuery let data = !props.inModal ? route.query : props.createQuery
Object.keys(data).forEach(key => { Object.keys(data).forEach(key => {
if (dataType.templateColumns.find(i => i.key === key)) { if(dataType.templateColumns.find(i => i.key === key)) {
if (["customer", "contract", "plant", "contact", "project"].includes(key)) { if (["customer", "contract", "plant", "contact", "project"].includes(key)) {
item.value[key] = Number(data[key]) item.value[key] = Number(data[key])
} else { } else {
item.value[key] = data[key] item.value[key] = data[key]
} }
} else if (key === "resources") { } else if(key === "resources") {
/*item.value[key] = data[key]*/ /*item.value[key] = data[key]*/
JSON.parse(data[key]).forEach(async (i) => { JSON.parse(data[key]).forEach(async (i) => {
console.log(i) console.log(i)
let type = i.substring(0, 1) let type = i.substring(0,1)
let id = i.substring(2, i.length) let id = i.substring(2,i.length)
console.log(type) console.log(type)
console.log(id) console.log(id)
let holder = "" let holder = ""
if (type === "P") { if(type === "P"){
holder = "profiles" holder = "profiles"
} else if (type === "F") { } else if(type === "F"){
holder = "vehicles" holder = "vehicles"
id = Number(id) id = Number(id)
} else if (type === "I") { } else if(type === "I"){
holder = "inventoryitems" holder = "inventoryitems"
id = Number(id) id = Number(id)
} else if (type === "G") { } else if(type === "G"){
holder = "inventoryitemgroups" holder = "inventoryitemgroups"
} }
if (typeof item.value[holder] === "object") { if(typeof item.value[holder] === "object") {
item.value[holder].push(id) item.value[holder].push(id)
} else { } else {
item.value[holder] = [id] item.value[holder] = [id]
} }
}) })
} }
}) })
// calcSaveAllowed() -> Entfernt, da computed automatisch reagiert
} }
} }
setupQuery() setupQuery()
@@ -183,14 +148,14 @@ const loadOptions = async () => {
}) })
for await(const option of optionsToLoad) { for await(const option of optionsToLoad) {
if (option.option === "countrys") { if(option.option === "countrys") {
loadedOptions.value[option.option] = useEntities("countrys").selectSpecial() loadedOptions.value[option.option] = useEntities("countrys").selectSpecial()
} else if (option.option === "units") { } else if(option.option === "units") {
loadedOptions.value[option.option] = useEntities("units").selectSpecial() loadedOptions.value[option.option] = useEntities("units").selectSpecial()
} else { } else {
loadedOptions.value[option.option] = (await useEntities(option.option).select()) loadedOptions.value[option.option] = (await useEntities(option.option).select())
if (dataType.templateColumns.find(x => x.key === option.key).selectDataTypeFilter) { if(dataType.templateColumns.find(x => x.key === option.key).selectDataTypeFilter){
loadedOptions.value[option.option] = loadedOptions.value[option.option].filter(i => dataType.templateColumns.find(x => x.key === option.key).selectDataTypeFilter(i, item)) loadedOptions.value[option.option] = loadedOptions.value[option.option].filter(i => dataType.templateColumns.find(x => x.key === option.key).selectDataTypeFilter(i, item))
} }
} }
@@ -200,22 +165,46 @@ const loadOptions = async () => {
loadOptions() loadOptions()
const contentChanged = (content, datapoint) => { const contentChanged = (content, datapoint) => {
if (datapoint.key.includes(".")) { if(datapoint.key.includes(".")){
item.value[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]].html = content.html item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]].html = content.html
item.value[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]].text = content.text item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]].text = content.text
item.value[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]].json = content.json item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]].json = content.json
} else { } else {
item.value[datapoint.key].html = content.html item[datapoint.key].html = content.html
item.value[datapoint.key].text = content.text item[datapoint.key].text = content.text
item.value[datapoint.key].json = content.json item[datapoint.key].json = content.json
} }
} }
const saveAllowed = ref(false)
const calcSaveAllowed = (item) => {
let allowedCount = 0
dataType.templateColumns.filter(i => i.inputType).forEach(datapoint => {
if(datapoint.required) {
if(datapoint.key.includes(".")){
if(item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]]) allowedCount += 1
} else {
if(item[datapoint.key]) allowedCount += 1
}
} else {
allowedCount += 1
}
})
saveAllowed.value = allowedCount >= dataType.templateColumns.filter(i => i.inputType).length
}
//calcSaveAllowed()
watch(item.value, async (newItem, oldItem) => {
calcSaveAllowed(newItem)
})
const createItem = async () => { const createItem = async () => {
let ret = null let ret = null
if (props.inModal) { if(props.inModal) {
ret = await useEntities(type).create(item.value, true) ret = await useEntities(type).create(item.value, true)
} else { } else {
@@ -229,7 +218,7 @@ const createItem = async () => {
const updateItem = async () => { const updateItem = async () => {
let ret = null let ret = null
if (props.inModal) { if(props.inModal) {
ret = await useEntities(type).update(item.value.id, item.value, true) ret = await useEntities(type).update(item.value.id, item.value, true)
emit('returnData', ret) emit('returnData', ret)
modal.close() modal.close()
@@ -237,7 +226,11 @@ const updateItem = async () => {
ret = await useEntities(type).update(item.value.id, item.value) ret = await useEntities(type).update(item.value.id, item.value)
emit('returnData', ret) emit('returnData', ret)
} }
} }
</script> </script>
<template> <template>
@@ -252,15 +245,16 @@ const updateItem = async () => {
<UButton <UButton
icon="i-heroicons-chevron-left" icon="i-heroicons-chevron-left"
variant="outline" variant="outline"
@click="router.back()" @click="router.back()/*router.push(`/standardEntity/${type}`)*/"
> >
<!-- {{dataType.label}}-->
</UButton> </UButton>
</template> </template>
<template #center> <template #center>
<h1 <h1
v-if="item" v-if="item"
:class="['text-xl','font-medium', 'text-center']" :class="['text-xl','font-medium', 'text-center']"
>{{ item.id ? `${dataType.labelSingle} bearbeiten` : `${dataType.labelSingle} erstellen` }}</h1> >{{item.id ? `${dataType.labelSingle} bearbeiten` : `${dataType.labelSingle} erstellen` }}</h1>
</template> </template>
<template #right> <template #right>
<ArchiveButton <ArchiveButton
@@ -301,7 +295,7 @@ const updateItem = async () => {
<h1 <h1
v-if="item" v-if="item"
:class="['text-xl','font-medium']" :class="['text-xl','font-medium']"
>{{ item.id ? `${dataType.labelSingle} bearbeiten` : `${dataType.labelSingle} erstellen` }}</h1> >{{item.id ? `${dataType.labelSingle} bearbeiten` : `${dataType.labelSingle} erstellen` }}</h1>
</template> </template>
<template #right> <template #right>
<UButton <UButton
@@ -336,7 +330,11 @@ const updateItem = async () => {
v-for="(columnName,index) in dataType.inputColumns" v-for="(columnName,index) in dataType.inputColumns"
:class="platform === 'mobile' ? ['w-full'] : [`w-1/${dataType.inputColumns.length}`, ... index < dataType.inputColumns.length -1 ? ['mr-5'] : []]" :class="platform === 'mobile' ? ['w-full'] : [`w-1/${dataType.inputColumns.length}`, ... index < dataType.inputColumns.length -1 ? ['mr-5'] : []]"
> >
<UDivider>{{ columnName }}</UDivider> <UDivider>{{columnName}}</UDivider>
<!--
Die Form Group darf nur in der ersten bearbeitet werden und muss dann runterkopiert werden
-->
<div <div
v-for="datapoint in dataType.templateColumns.filter(i => i.inputType && i.inputColumn === columnName)" v-for="datapoint in dataType.templateColumns.filter(i => i.inputType && i.inputColumn === columnName)"
@@ -364,7 +362,7 @@ const updateItem = async () => {
:placeholder="datapoint.inputIsNumberRange ? 'Leer lassen für automatisch generierte Nummer' : ''" :placeholder="datapoint.inputIsNumberRange ? 'Leer lassen für automatisch generierte Nummer' : ''"
> >
<template #trailing v-if="datapoint.inputTrailing"> <template #trailing v-if="datapoint.inputTrailing">
<span class="text-gray-500 dark:text-gray-400 text-xs">{{ datapoint.inputTrailing }}</span> <span class="text-gray-500 dark:text-gray-400 text-xs">{{datapoint.inputTrailing}}</span>
</template> </template>
</UInput> </UInput>
<UToggle <UToggle
@@ -438,11 +436,7 @@ const updateItem = async () => {
/> />
</template> </template>
</UPopover> </UPopover>
<BankAccountAssignInput <!-- TODO: DISABLED FOR TIPTAP -->
v-else-if="datapoint.inputType === 'bankaccountassign'"
v-model="item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]]"
:disabled="datapoint.disabledFunction ? datapoint.disabledFunction(item) : false"
/>
<Tiptap <Tiptap
v-else-if="datapoint.inputType === 'editor'" v-else-if="datapoint.inputType === 'editor'"
@updateContent="(i) => contentChanged(i,datapoint)" @updateContent="(i) => contentChanged(i,datapoint)"
@@ -469,7 +463,7 @@ const updateItem = async () => {
:placeholder="datapoint.inputIsNumberRange ? 'Leer lassen für automatisch generierte Nummer' : ''" :placeholder="datapoint.inputIsNumberRange ? 'Leer lassen für automatisch generierte Nummer' : ''"
> >
<template #trailing v-if="datapoint.inputTrailing"> <template #trailing v-if="datapoint.inputTrailing">
{{ datapoint.inputTrailing }} {{datapoint.inputTrailing}}
</template> </template>
</UInput> </UInput>
<UToggle <UToggle
@@ -543,11 +537,7 @@ const updateItem = async () => {
/> />
</template> </template>
</UPopover> </UPopover>
<BankAccountAssignInput <!-- TODO: Color/Required for TipTap and MaterialComposing -->
v-else-if="datapoint.inputType === 'bankaccountassign'"
v-model="item[datapoint.key]"
:disabled="datapoint.disabledFunction ? datapoint.disabledFunction(item) : false"
/>
<Tiptap <Tiptap
v-else-if="datapoint.inputType === 'editor'" v-else-if="datapoint.inputType === 'editor'"
@updateContent="(i) => contentChanged(i,datapoint)" @updateContent="(i) => contentChanged(i,datapoint)"
@@ -572,8 +562,35 @@ const updateItem = async () => {
icon="i-heroicons-x-mark" icon="i-heroicons-x-mark"
/> />
</InputGroup> </InputGroup>
<!-- <div
v-if="profileStore.ownTenant.ownFields"
>
<UDivider
class="mt-3"
>Eigene Felder</UDivider>
<UFormGroup
v-for="field in profileStore.ownTenant.ownFields.contracts"
:key="field.key"
:label="field.label"
>
<UInput
v-if="field.type === 'text'"
v-model="item.ownFields[field.key]"
/>
<USelectMenu
v-else-if="field.type === 'select'"
:options="field.options"
v-model="item.ownFields[field.key]"
/>
</UFormGroup>
</div>-->
</UFormGroup> </UFormGroup>
</div> </div>
</div> </div>
</div> </div>
<UFormGroup <UFormGroup
@@ -599,7 +616,7 @@ const updateItem = async () => {
:placeholder="datapoint.inputIsNumberRange ? 'Leer lassen für automatisch generierte Nummer' : ''" :placeholder="datapoint.inputIsNumberRange ? 'Leer lassen für automatisch generierte Nummer' : ''"
> >
<template #trailing v-if="datapoint.inputTrailing"> <template #trailing v-if="datapoint.inputTrailing">
<span class="text-gray-500 dark:text-gray-400 text-xs">{{ datapoint.inputTrailing }}</span> <span class="text-gray-500 dark:text-gray-400 text-xs">{{datapoint.inputTrailing}}</span>
</template> </template>
</UInput> </UInput>
<UToggle <UToggle
@@ -673,11 +690,7 @@ const updateItem = async () => {
/> />
</template> </template>
</UPopover> </UPopover>
<BankAccountAssignInput <!-- TODO: DISABLED FOR TIPTAP -->
v-else-if="datapoint.inputType === 'bankaccountassign'"
v-model="item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]]"
:disabled="datapoint.disabledFunction ? datapoint.disabledFunction(item) : false"
/>
<Tiptap <Tiptap
v-else-if="datapoint.inputType === 'editor'" v-else-if="datapoint.inputType === 'editor'"
@updateContent="(i) => contentChanged(i,datapoint)" @updateContent="(i) => contentChanged(i,datapoint)"
@@ -704,7 +717,7 @@ const updateItem = async () => {
:placeholder="datapoint.inputIsNumberRange ? 'Leer lassen für automatisch generierte Nummer' : ''" :placeholder="datapoint.inputIsNumberRange ? 'Leer lassen für automatisch generierte Nummer' : ''"
> >
<template #trailing v-if="datapoint.inputTrailing"> <template #trailing v-if="datapoint.inputTrailing">
{{ datapoint.inputTrailing }} {{datapoint.inputTrailing}}
</template> </template>
</UInput> </UInput>
<UToggle <UToggle
@@ -778,11 +791,7 @@ const updateItem = async () => {
/> />
</template> </template>
</UPopover> </UPopover>
<BankAccountAssignInput <!-- TODO: Color/Required for TipTap and MaterialComposing -->
v-else-if="datapoint.inputType === 'bankaccountassign'"
v-model="item[datapoint.key]"
:disabled="datapoint.disabledFunction ? datapoint.disabledFunction(item) : false"
/>
<Tiptap <Tiptap
v-else-if="datapoint.inputType === 'editor'" v-else-if="datapoint.inputType === 'editor'"
@updateContent="(i) => contentChanged(i,datapoint)" @updateContent="(i) => contentChanged(i,datapoint)"
@@ -807,6 +816,30 @@ const updateItem = async () => {
icon="i-heroicons-x-mark" icon="i-heroicons-x-mark"
/> />
</InputGroup> </InputGroup>
<!-- <div
v-if="profileStore.ownTenant.ownFields"
>
<UDivider
class="mt-3"
>Eigene Felder</UDivider>
<UFormGroup
v-for="field in profileStore.ownTenant.ownFields.contracts"
:key="field.key"
:label="field.label"
>
<UInput
v-if="field.type === 'text'"
v-model="item.ownFields[field.key]"
/>
<USelectMenu
v-else-if="field.type === 'select'"
:options="field.options"
v-model="item.ownFields[field.key]"
/>
</UFormGroup>
</div>-->
</UFormGroup> </UFormGroup>
</UForm> </UForm>
</UDashboardPanelContent> </UDashboardPanelContent>

View File

@@ -69,12 +69,6 @@ const profileStore = useProfileStore()
const tempStore = useTempStore() const tempStore = useTempStore()
const dataType = dataStore.dataTypes[type] const dataType = dataStore.dataTypes[type]
const canCreate = computed(() => {
if (type === "members") {
return has("members-create") || has("customers-create")
}
return has(`${type}-create`)
})
const selectedColumns = ref(tempStore.columns[type] ? tempStore.columns[type] : dataType.templateColumns.filter(i => !i.disabledInTable)) const selectedColumns = ref(tempStore.columns[type] ? tempStore.columns[type] : dataType.templateColumns.filter(i => !i.disabledInTable))
const columns = computed(() => dataType.templateColumns.filter((column) => !column.disabledInTable && selectedColumns.value.find(i => i.key === column.key))) const columns = computed(() => dataType.templateColumns.filter((column) => !column.disabledInTable && selectedColumns.value.find(i => i.key === column.key)))
@@ -144,7 +138,7 @@ const filteredRows = computed(() => {
/> />
<UButton <UButton
v-if="platform !== 'mobile' && canCreate/*&& useRole().checkRight(`${type}-create`)*/" v-if="platform !== 'mobile' && has(`${type}-create`)/*&& useRole().checkRight(`${type}-create`)*/"
@click="router.push(`/standardEntity/${type}/create`)" @click="router.push(`/standardEntity/${type}/create`)"
class="ml-3" class="ml-3"
>+ {{dataType.labelSingle}}</UButton> >+ {{dataType.labelSingle}}</UButton>

Some files were not shown because too many files have changed in this diff Show More