Compare commits
50 Commits
7125d15b3f
...
dev
| Author | SHA1 | Date | |
|---|---|---|---|
| 52c182cb5f | |||
| 9cef3964e9 | |||
| cf0fb724a2 | |||
| bbb893dd6c | |||
| 724f152d70 | |||
| 27be8241bf | |||
| d27e437ba6 | |||
| f5253b29f4 | |||
| 0141a243ce | |||
| a0e1b8c0eb | |||
| 45fb45845a | |||
| 409db82368 | |||
| 30d761f899 | |||
| 70636f6ac5 | |||
| 59392a723c | |||
| c782492ab5 | |||
| 844af30b18 | |||
| 6fded3993a | |||
| f26d6bd4f3 | |||
| 2621cc0d8d | |||
| a8238dc9ba | |||
| 49d35f080d | |||
| 189a52b3cd | |||
| 3f8ce5daf7 | |||
| 087ba1126e | |||
| db4e9612a0 | |||
| cb4917c536 | |||
| 9f32eb5439 | |||
| f596b46364 | |||
| 117da523d2 | |||
| c2901dc0a9 | |||
| 8c2a8a7998 | |||
| 1dc74947f4 | |||
| f63e793c88 | |||
| 29a84b899d | |||
| be706a70f8 | |||
| 474b3e762c | |||
| f793d4cce6 | |||
| c3f46cd184 | |||
| 6bf336356d | |||
| 55699da42c | |||
| 053f184a33 | |||
| 6541cb2adf | |||
| 7dca84947e | |||
| 45fd6fda08 | |||
| 31e80fb386 | |||
| 7ea28cc6c0 | |||
| c0faa398b8 | |||
| 19be1f0d03 | |||
| c43d3225e3 |
3
backend/.secretlintrc.json
Normal file
3
backend/.secretlintrc.json
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"rules": []
|
||||
}
|
||||
@@ -2,11 +2,12 @@
|
||||
import { drizzle } from "drizzle-orm/node-postgres";
|
||||
import { Pool } from "pg";
|
||||
import * as schema from "./schema";
|
||||
import {secrets} from "../src/utils/secrets";
|
||||
|
||||
console.log("[DB INIT] 1. Suche Connection String...");
|
||||
|
||||
// Checken woher die URL kommt
|
||||
let connectionString = process.env.DATABASE_URL;
|
||||
let connectionString = process.env.DATABASE_URL || secrets.DATABASE_URL;
|
||||
if (connectionString) {
|
||||
console.log("[DB INIT] -> Gefunden in process.env.DATABASE_URL");
|
||||
} else {
|
||||
|
||||
2
backend/db/migrations/0003_woozy_adam_destine.sql
Normal file
2
backend/db/migrations/0003_woozy_adam_destine.sql
Normal file
@@ -0,0 +1,2 @@
|
||||
-- No-op migration: Datei war im Journal referenziert, aber fehlte im Repository.
|
||||
SELECT 1;
|
||||
2
backend/db/migrations/0004_stormy_onslaught.sql
Normal file
2
backend/db/migrations/0004_stormy_onslaught.sql
Normal file
@@ -0,0 +1,2 @@
|
||||
-- No-op migration: Datei war im Journal referenziert, aber fehlte im Repository.
|
||||
SELECT 1;
|
||||
123
backend/db/migrations/0005_green_shinobi_shaw.sql
Normal file
123
backend/db/migrations/0005_green_shinobi_shaw.sql
Normal file
@@ -0,0 +1,123 @@
|
||||
CREATE TABLE "m2m_api_keys" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"tenant_id" bigint NOT NULL,
|
||||
"user_id" uuid NOT NULL,
|
||||
"created_by" uuid,
|
||||
"name" text NOT NULL,
|
||||
"key_prefix" text NOT NULL,
|
||||
"key_hash" text NOT NULL,
|
||||
"active" boolean DEFAULT true NOT NULL,
|
||||
"last_used_at" timestamp with time zone,
|
||||
"expires_at" timestamp with time zone,
|
||||
CONSTRAINT "m2m_api_keys_key_hash_unique" UNIQUE("key_hash")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "staff_time_events" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"tenant_id" bigint NOT NULL,
|
||||
"user_id" uuid NOT NULL,
|
||||
"actor_type" text NOT NULL,
|
||||
"actor_user_id" uuid,
|
||||
"event_time" timestamp with time zone NOT NULL,
|
||||
"event_type" text NOT NULL,
|
||||
"source" text NOT NULL,
|
||||
"invalidates_event_id" uuid,
|
||||
"related_event_id" uuid,
|
||||
"metadata" jsonb,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
CONSTRAINT "time_events_actor_user_check" CHECK (
|
||||
(actor_type = 'system' AND actor_user_id IS NULL)
|
||||
OR
|
||||
(actor_type = 'user' AND actor_user_id IS NOT NULL)
|
||||
)
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "serialtypes" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "serialtypes_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"intervall" text,
|
||||
"icon" text,
|
||||
"tenant" bigint NOT NULL,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "serial_executions" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"execution_date" timestamp NOT NULL,
|
||||
"status" text DEFAULT 'draft',
|
||||
"created_by" text,
|
||||
"created_at" timestamp DEFAULT now(),
|
||||
"summary" text
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "public_links" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"token" text NOT NULL,
|
||||
"tenant" integer NOT NULL,
|
||||
"default_profile" uuid,
|
||||
"is_protected" boolean DEFAULT false NOT NULL,
|
||||
"pin_hash" text,
|
||||
"config" jsonb DEFAULT '{}'::jsonb,
|
||||
"name" text NOT NULL,
|
||||
"description" text,
|
||||
"active" boolean DEFAULT true NOT NULL,
|
||||
"created_at" timestamp DEFAULT now(),
|
||||
"updated_at" timestamp DEFAULT now(),
|
||||
CONSTRAINT "public_links_token_unique" UNIQUE("token")
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "wiki_pages" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"tenant_id" bigint NOT NULL,
|
||||
"parent_id" uuid,
|
||||
"title" text NOT NULL,
|
||||
"content" jsonb,
|
||||
"is_folder" boolean DEFAULT false NOT NULL,
|
||||
"sort_order" integer DEFAULT 0 NOT NULL,
|
||||
"entity_type" text,
|
||||
"entity_id" bigint,
|
||||
"entity_uuid" uuid,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"created_by" uuid,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "time_events" DISABLE ROW LEVEL SECURITY;--> statement-breakpoint
|
||||
DROP TABLE "time_events" CASCADE;--> statement-breakpoint
|
||||
ALTER TABLE "projects" ALTER COLUMN "active_phase" SET DEFAULT 'Erstkontakt';--> statement-breakpoint
|
||||
ALTER TABLE "createddocuments" ADD COLUMN "serialexecution" uuid;--> statement-breakpoint
|
||||
ALTER TABLE "devices" ADD COLUMN "last_seen" timestamp with time zone;--> statement-breakpoint
|
||||
ALTER TABLE "devices" ADD COLUMN "last_debug_info" jsonb;--> statement-breakpoint
|
||||
ALTER TABLE "files" ADD COLUMN "size" bigint;--> statement-breakpoint
|
||||
ALTER TABLE "m2m_api_keys" ADD CONSTRAINT "m2m_api_keys_tenant_id_tenants_id_fk" FOREIGN KEY ("tenant_id") REFERENCES "public"."tenants"("id") ON DELETE cascade ON UPDATE cascade;--> statement-breakpoint
|
||||
ALTER TABLE "m2m_api_keys" ADD CONSTRAINT "m2m_api_keys_user_id_auth_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."auth_users"("id") ON DELETE cascade ON UPDATE cascade;--> statement-breakpoint
|
||||
ALTER TABLE "m2m_api_keys" ADD CONSTRAINT "m2m_api_keys_created_by_auth_users_id_fk" FOREIGN KEY ("created_by") REFERENCES "public"."auth_users"("id") ON DELETE set null ON UPDATE cascade;--> statement-breakpoint
|
||||
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_tenant_id_tenants_id_fk" FOREIGN KEY ("tenant_id") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_user_id_auth_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_actor_user_id_auth_users_id_fk" FOREIGN KEY ("actor_user_id") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_invalidates_event_id_staff_time_events_id_fk" FOREIGN KEY ("invalidates_event_id") REFERENCES "public"."staff_time_events"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "staff_time_events" ADD CONSTRAINT "staff_time_events_related_event_id_staff_time_events_id_fk" FOREIGN KEY ("related_event_id") REFERENCES "public"."staff_time_events"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "serialtypes" ADD CONSTRAINT "serialtypes_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "serialtypes" ADD CONSTRAINT "serialtypes_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "serial_executions" ADD CONSTRAINT "serial_executions_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "public_links" ADD CONSTRAINT "public_links_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "public_links" ADD CONSTRAINT "public_links_default_profile_auth_profiles_id_fk" FOREIGN KEY ("default_profile") REFERENCES "public"."auth_profiles"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_tenant_id_tenants_id_fk" FOREIGN KEY ("tenant_id") REFERENCES "public"."tenants"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_parent_id_wiki_pages_id_fk" FOREIGN KEY ("parent_id") REFERENCES "public"."wiki_pages"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_created_by_auth_users_id_fk" FOREIGN KEY ("created_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "wiki_pages" ADD CONSTRAINT "wiki_pages_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
CREATE INDEX "idx_time_events_tenant_user_time" ON "staff_time_events" USING btree ("tenant_id","user_id","event_time");--> statement-breakpoint
|
||||
CREATE INDEX "idx_time_events_created_at" ON "staff_time_events" USING btree ("created_at");--> statement-breakpoint
|
||||
CREATE INDEX "idx_time_events_invalidates" ON "staff_time_events" USING btree ("invalidates_event_id");--> statement-breakpoint
|
||||
CREATE INDEX "wiki_pages_tenant_idx" ON "wiki_pages" USING btree ("tenant_id");--> statement-breakpoint
|
||||
CREATE INDEX "wiki_pages_parent_idx" ON "wiki_pages" USING btree ("parent_id");--> statement-breakpoint
|
||||
CREATE INDEX "wiki_pages_entity_int_idx" ON "wiki_pages" USING btree ("tenant_id","entity_type","entity_id");--> statement-breakpoint
|
||||
CREATE INDEX "wiki_pages_entity_uuid_idx" ON "wiki_pages" USING btree ("tenant_id","entity_type","entity_uuid");--> statement-breakpoint
|
||||
ALTER TABLE "createddocuments" ADD CONSTRAINT "createddocuments_serialexecution_serial_executions_id_fk" FOREIGN KEY ("serialexecution") REFERENCES "public"."serial_executions"("id") ON DELETE no action ON UPDATE no action;
|
||||
1
backend/db/migrations/0006_nifty_price_lock.sql
Normal file
1
backend/db/migrations/0006_nifty_price_lock.sql
Normal file
@@ -0,0 +1 @@
|
||||
ALTER TABLE "services" ADD COLUMN "priceUpdateLocked" boolean DEFAULT false NOT NULL;
|
||||
1
backend/db/migrations/0007_bright_default_tax_type.sql
Normal file
1
backend/db/migrations/0007_bright_default_tax_type.sql
Normal file
@@ -0,0 +1 @@
|
||||
ALTER TABLE "customers" ADD COLUMN "customTaxType" text;
|
||||
16
backend/db/migrations/0008_quick_contracttypes.sql
Normal file
16
backend/db/migrations/0008_quick_contracttypes.sql
Normal file
@@ -0,0 +1,16 @@
|
||||
CREATE TABLE "contracttypes" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "contracttypes_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"description" text,
|
||||
"paymentType" text,
|
||||
"recurring" boolean DEFAULT false NOT NULL,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "contracttypes" ADD CONSTRAINT "contracttypes_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "contracttypes" ADD CONSTRAINT "contracttypes_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
|
||||
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE "contracts" ADD COLUMN "contracttype" bigint;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "contracts" ADD CONSTRAINT "contracts_contracttype_contracttypes_id_fk" FOREIGN KEY ("contracttype") REFERENCES "public"."contracttypes"("id") ON DELETE no action ON UPDATE no action;
|
||||
3
backend/db/migrations/0010_sudden_billing_interval.sql
Normal file
3
backend/db/migrations/0010_sudden_billing_interval.sql
Normal file
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE "contracttypes" ADD COLUMN "billingInterval" text;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "contracts" ADD COLUMN "billingInterval" text;
|
||||
16
backend/db/migrations/0011_mighty_member_bankaccounts.sql
Normal file
16
backend/db/migrations/0011_mighty_member_bankaccounts.sql
Normal file
@@ -0,0 +1,16 @@
|
||||
CREATE TABLE "entitybankaccounts" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "entitybankaccounts_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"iban_encrypted" jsonb NOT NULL,
|
||||
"bic_encrypted" jsonb NOT NULL,
|
||||
"bank_name_encrypted" jsonb NOT NULL,
|
||||
"description" text,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid,
|
||||
"archived" boolean DEFAULT false NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "entitybankaccounts" ADD CONSTRAINT "entitybankaccounts_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "entitybankaccounts" ADD CONSTRAINT "entitybankaccounts_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
|
||||
73
backend/db/migrations/0012_shiny_customer_inventory.sql
Normal file
73
backend/db/migrations/0012_shiny_customer_inventory.sql
Normal file
@@ -0,0 +1,73 @@
|
||||
CREATE TABLE "customerspaces" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "customerspaces_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"type" text NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"customer" bigint NOT NULL,
|
||||
"spaceNumber" text NOT NULL,
|
||||
"parentSpace" bigint,
|
||||
"infoData" jsonb DEFAULT '{"zip":"","city":"","streetNumber":""}'::jsonb NOT NULL,
|
||||
"description" text,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "customerinventoryitems" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "customerinventoryitems_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"description" text,
|
||||
"tenant" bigint NOT NULL,
|
||||
"customer" bigint NOT NULL,
|
||||
"customerspace" bigint,
|
||||
"customerInventoryId" text NOT NULL,
|
||||
"serialNumber" text,
|
||||
"quantity" bigint DEFAULT 0 NOT NULL,
|
||||
"manufacturer" text,
|
||||
"manufacturerNumber" text,
|
||||
"purchaseDate" date,
|
||||
"purchasePrice" double precision DEFAULT 0,
|
||||
"currentValue" double precision,
|
||||
"product" bigint,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_customer_customers_id_fk" FOREIGN KEY ("customer") REFERENCES "public"."customers"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_parentSpace_customerspaces_id_fk" FOREIGN KEY ("parentSpace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_customer_customers_id_fk" FOREIGN KEY ("customer") REFERENCES "public"."customers"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_customerspace_customerspaces_id_fk" FOREIGN KEY ("customerspace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_product_products_id_fk" FOREIGN KEY ("product") REFERENCES "public"."products"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "customerinventoryitems_tenant_customerInventoryId_idx" ON "customerinventoryitems" USING btree ("tenant","customerInventoryId");
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD COLUMN "customerspace" bigint;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD COLUMN "customerinventoryitem" bigint;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_customerspace_customerspaces_id_fk" FOREIGN KEY ("customerspace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_customerinventoryitem_customerinventoryitems_id_fk" FOREIGN KEY ("customerinventoryitem") REFERENCES "public"."customerinventoryitems"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "tenants" ALTER COLUMN "numberRanges" SET DEFAULT '{"vendors":{"prefix":"","suffix":"","nextNumber":10000},"customers":{"prefix":"","suffix":"","nextNumber":10000},"products":{"prefix":"AT-","suffix":"","nextNumber":1000},"quotes":{"prefix":"AN-","suffix":"","nextNumber":1000},"confirmationOrders":{"prefix":"AB-","suffix":"","nextNumber":1000},"invoices":{"prefix":"RE-","suffix":"","nextNumber":1000},"spaces":{"prefix":"LP-","suffix":"","nextNumber":1000},"customerspaces":{"prefix":"KLP-","suffix":"","nextNumber":1000},"inventoryitems":{"prefix":"IA-","suffix":"","nextNumber":1000},"customerinventoryitems":{"prefix":"KIA-","suffix":"","nextNumber":1000},"projects":{"prefix":"PRJ-","suffix":"","nextNumber":1000},"costcentres":{"prefix":"KST-","suffix":"","nextNumber":1000}}'::jsonb;
|
||||
--> statement-breakpoint
|
||||
UPDATE "tenants"
|
||||
SET "numberRanges" = COALESCE("numberRanges", '{}'::jsonb) || jsonb_build_object(
|
||||
'customerspaces', COALESCE("numberRanges"->'customerspaces', '{"prefix":"KLP-","suffix":"","nextNumber":1000}'::jsonb),
|
||||
'customerinventoryitems', COALESCE("numberRanges"->'customerinventoryitems', '{"prefix":"KIA-","suffix":"","nextNumber":1000}'::jsonb)
|
||||
);
|
||||
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE "customerinventoryitems" ADD COLUMN "vendor" bigint;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_vendor_vendors_id_fk" FOREIGN KEY ("vendor") REFERENCES "public"."vendors"("id") ON DELETE no action ON UPDATE no action;
|
||||
20
backend/db/migrations/0014_smart_memberrelations.sql
Normal file
20
backend/db/migrations/0014_smart_memberrelations.sql
Normal file
@@ -0,0 +1,20 @@
|
||||
CREATE TABLE "memberrelations" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "memberrelations_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"type" text NOT NULL,
|
||||
"billingInterval" text NOT NULL,
|
||||
"billingAmount" double precision DEFAULT 0 NOT NULL,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customers" ADD COLUMN "memberrelation" bigint;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "memberrelations" ADD CONSTRAINT "memberrelations_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "memberrelations" ADD CONSTRAINT "memberrelations_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "customers" ADD CONSTRAINT "customers_memberrelation_memberrelations_id_fk" FOREIGN KEY ("memberrelation") REFERENCES "public"."memberrelations"("id") ON DELETE no action ON UPDATE no action;
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
ALTER TABLE "historyitems" ADD COLUMN "memberrelation" bigint;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_memberrelation_memberrelations_id_fk" FOREIGN KEY ("memberrelation") REFERENCES "public"."memberrelations"("id") ON DELETE no action ON UPDATE no action;
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
ALTER TABLE "customers" ADD COLUMN IF NOT EXISTS "memberrelation" bigint;
|
||||
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
SELECT 1
|
||||
FROM pg_constraint
|
||||
WHERE conname = 'customers_memberrelation_memberrelations_id_fk'
|
||||
) THEN
|
||||
ALTER TABLE "customers"
|
||||
ADD CONSTRAINT "customers_memberrelation_memberrelations_id_fk"
|
||||
FOREIGN KEY ("memberrelation")
|
||||
REFERENCES "public"."memberrelations"("id")
|
||||
ON DELETE no action
|
||||
ON UPDATE no action;
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
UPDATE "customers"
|
||||
SET "memberrelation" = ("infoData"->>'memberrelation')::bigint
|
||||
WHERE
|
||||
"memberrelation" IS NULL
|
||||
AND "type" = 'Mitglied'
|
||||
AND jsonb_typeof(COALESCE("infoData", '{}'::jsonb)) = 'object'
|
||||
AND COALESCE("infoData", '{}'::jsonb) ? 'memberrelation'
|
||||
AND ("infoData"->>'memberrelation') ~ '^[0-9]+$';
|
||||
|
||||
UPDATE "customers"
|
||||
SET "infoData" = COALESCE("infoData", '{}'::jsonb) - 'memberrelation'
|
||||
WHERE
|
||||
"type" = 'Mitglied'
|
||||
AND jsonb_typeof(COALESCE("infoData", '{}'::jsonb)) = 'object'
|
||||
AND COALESCE("infoData", '{}'::jsonb) ? 'memberrelation';
|
||||
108
backend/db/migrations/0017_slow_the_hood.sql
Normal file
108
backend/db/migrations/0017_slow_the_hood.sql
Normal file
@@ -0,0 +1,108 @@
|
||||
CREATE TABLE "contracttypes" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "contracttypes_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"description" text,
|
||||
"paymentType" text,
|
||||
"recurring" boolean DEFAULT false NOT NULL,
|
||||
"billingInterval" text,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "customerinventoryitems" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "customerinventoryitems_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"description" text,
|
||||
"tenant" bigint NOT NULL,
|
||||
"customer" bigint NOT NULL,
|
||||
"customerspace" bigint,
|
||||
"customerInventoryId" text NOT NULL,
|
||||
"serialNumber" text,
|
||||
"quantity" bigint DEFAULT 0 NOT NULL,
|
||||
"manufacturer" text,
|
||||
"manufacturerNumber" text,
|
||||
"purchaseDate" date,
|
||||
"purchasePrice" double precision DEFAULT 0,
|
||||
"currentValue" double precision,
|
||||
"product" bigint,
|
||||
"vendor" bigint,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "customerspaces" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "customerspaces_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"name" text NOT NULL,
|
||||
"type" text NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"customer" bigint NOT NULL,
|
||||
"spaceNumber" text NOT NULL,
|
||||
"parentSpace" bigint,
|
||||
"infoData" jsonb DEFAULT '{"zip":"","city":"","streetNumber":""}'::jsonb NOT NULL,
|
||||
"description" text,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "entitybankaccounts" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "entitybankaccounts_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"iban_encrypted" jsonb NOT NULL,
|
||||
"bic_encrypted" jsonb NOT NULL,
|
||||
"bank_name_encrypted" jsonb NOT NULL,
|
||||
"description" text,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid,
|
||||
"archived" boolean DEFAULT false NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "memberrelations" (
|
||||
"id" bigint PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (sequence name "memberrelations_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"tenant" bigint NOT NULL,
|
||||
"type" text NOT NULL,
|
||||
"billingInterval" text NOT NULL,
|
||||
"billingAmount" double precision DEFAULT 0 NOT NULL,
|
||||
"archived" boolean DEFAULT false NOT NULL,
|
||||
"updated_at" timestamp with time zone,
|
||||
"updated_by" uuid
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "tenants" ALTER COLUMN "numberRanges" SET DEFAULT '{"vendors":{"prefix":"","suffix":"","nextNumber":10000},"customers":{"prefix":"","suffix":"","nextNumber":10000},"products":{"prefix":"AT-","suffix":"","nextNumber":1000},"quotes":{"prefix":"AN-","suffix":"","nextNumber":1000},"confirmationOrders":{"prefix":"AB-","suffix":"","nextNumber":1000},"invoices":{"prefix":"RE-","suffix":"","nextNumber":1000},"spaces":{"prefix":"LP-","suffix":"","nextNumber":1000},"customerspaces":{"prefix":"KLP-","suffix":"","nextNumber":1000},"inventoryitems":{"prefix":"IA-","suffix":"","nextNumber":1000},"customerinventoryitems":{"prefix":"KIA-","suffix":"","nextNumber":1000},"projects":{"prefix":"PRJ-","suffix":"","nextNumber":1000},"costcentres":{"prefix":"KST-","suffix":"","nextNumber":1000}}'::jsonb;--> statement-breakpoint
|
||||
ALTER TABLE "contracts" ADD COLUMN "contracttype" bigint;--> statement-breakpoint
|
||||
ALTER TABLE "contracts" ADD COLUMN "billingInterval" text;--> statement-breakpoint
|
||||
ALTER TABLE "customers" ADD COLUMN "customTaxType" text;--> statement-breakpoint
|
||||
ALTER TABLE "customers" ADD COLUMN "memberrelation" bigint;--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD COLUMN "customerspace" bigint;--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD COLUMN "customerinventoryitem" bigint;--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD COLUMN "memberrelation" bigint;--> statement-breakpoint
|
||||
ALTER TABLE "services" ADD COLUMN "priceUpdateLocked" boolean DEFAULT false NOT NULL;--> statement-breakpoint
|
||||
ALTER TABLE "contracttypes" ADD CONSTRAINT "contracttypes_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "contracttypes" ADD CONSTRAINT "contracttypes_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_customer_customers_id_fk" FOREIGN KEY ("customer") REFERENCES "public"."customers"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_customerspace_customerspaces_id_fk" FOREIGN KEY ("customerspace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_product_products_id_fk" FOREIGN KEY ("product") REFERENCES "public"."products"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_vendor_vendors_id_fk" FOREIGN KEY ("vendor") REFERENCES "public"."vendors"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerinventoryitems" ADD CONSTRAINT "customerinventoryitems_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_customer_customers_id_fk" FOREIGN KEY ("customer") REFERENCES "public"."customers"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_parentSpace_customerspaces_id_fk" FOREIGN KEY ("parentSpace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customerspaces" ADD CONSTRAINT "customerspaces_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "entitybankaccounts" ADD CONSTRAINT "entitybankaccounts_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "entitybankaccounts" ADD CONSTRAINT "entitybankaccounts_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "memberrelations" ADD CONSTRAINT "memberrelations_tenant_tenants_id_fk" FOREIGN KEY ("tenant") REFERENCES "public"."tenants"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "memberrelations" ADD CONSTRAINT "memberrelations_updated_by_auth_users_id_fk" FOREIGN KEY ("updated_by") REFERENCES "public"."auth_users"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "contracts" ADD CONSTRAINT "contracts_contracttype_contracttypes_id_fk" FOREIGN KEY ("contracttype") REFERENCES "public"."contracttypes"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "customers" ADD CONSTRAINT "customers_memberrelation_memberrelations_id_fk" FOREIGN KEY ("memberrelation") REFERENCES "public"."memberrelations"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_customerspace_customerspaces_id_fk" FOREIGN KEY ("customerspace") REFERENCES "public"."customerspaces"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_customerinventoryitem_customerinventoryitems_id_fk" FOREIGN KEY ("customerinventoryitem") REFERENCES "public"."customerinventoryitems"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "historyitems" ADD CONSTRAINT "historyitems_memberrelation_memberrelations_id_fk" FOREIGN KEY ("memberrelation") REFERENCES "public"."memberrelations"("id") ON DELETE no action ON UPDATE no action;
|
||||
3
backend/db/migrations/0018_account_chart.sql
Normal file
3
backend/db/migrations/0018_account_chart.sql
Normal file
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE "accounts" ADD COLUMN "accountChart" text DEFAULT 'skr03' NOT NULL;
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "tenants" ADD COLUMN "accountChart" text DEFAULT 'skr03' NOT NULL;
|
||||
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE "createddocuments"
|
||||
ALTER COLUMN "customSurchargePercentage" TYPE double precision
|
||||
USING "customSurchargePercentage"::double precision;
|
||||
@@ -36,6 +36,104 @@
|
||||
"when": 1765716877146,
|
||||
"tag": "0004_stormy_onslaught",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 5,
|
||||
"version": "7",
|
||||
"when": 1771096926109,
|
||||
"tag": "0005_green_shinobi_shaw",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 6,
|
||||
"version": "7",
|
||||
"when": 1772000000000,
|
||||
"tag": "0006_nifty_price_lock",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 7,
|
||||
"version": "7",
|
||||
"when": 1772000100000,
|
||||
"tag": "0007_bright_default_tax_type",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 8,
|
||||
"version": "7",
|
||||
"when": 1773000000000,
|
||||
"tag": "0008_quick_contracttypes",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 9,
|
||||
"version": "7",
|
||||
"when": 1773000100000,
|
||||
"tag": "0009_heavy_contract_contracttype",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 10,
|
||||
"version": "7",
|
||||
"when": 1773000200000,
|
||||
"tag": "0010_sudden_billing_interval",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 11,
|
||||
"version": "7",
|
||||
"when": 1773000300000,
|
||||
"tag": "0011_mighty_member_bankaccounts",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 12,
|
||||
"version": "7",
|
||||
"when": 1773000400000,
|
||||
"tag": "0012_shiny_customer_inventory",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 13,
|
||||
"version": "7",
|
||||
"when": 1773000500000,
|
||||
"tag": "0013_brisk_customer_inventory_vendor",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 14,
|
||||
"version": "7",
|
||||
"when": 1773000600000,
|
||||
"tag": "0014_smart_memberrelations",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 15,
|
||||
"version": "7",
|
||||
"when": 1773000700000,
|
||||
"tag": "0015_wise_memberrelation_history",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 16,
|
||||
"version": "7",
|
||||
"when": 1773000800000,
|
||||
"tag": "0016_fix_memberrelation_column_usage",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 17,
|
||||
"version": "7",
|
||||
"when": 1771704862789,
|
||||
"tag": "0017_slow_the_hood",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 18,
|
||||
"version": "7",
|
||||
"when": 1773000900000,
|
||||
"tag": "0018_account_chart",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -16,6 +16,7 @@ export const accounts = pgTable("accounts", {
|
||||
|
||||
number: text("number").notNull(),
|
||||
label: text("label").notNull(),
|
||||
accountChart: text("accountChart").notNull().default("skr03"),
|
||||
|
||||
description: text("description"),
|
||||
})
|
||||
|
||||
@@ -11,6 +11,7 @@ import {
|
||||
import { tenants } from "./tenants"
|
||||
import { customers } from "./customers"
|
||||
import { contacts } from "./contacts"
|
||||
import { contracttypes } from "./contracttypes"
|
||||
import { authUsers } from "./auth_users"
|
||||
|
||||
export const contracts = pgTable(
|
||||
@@ -48,6 +49,9 @@ export const contracts = pgTable(
|
||||
contact: bigint("contact", { mode: "number" }).references(
|
||||
() => contacts.id
|
||||
),
|
||||
contracttype: bigint("contracttype", { mode: "number" }).references(
|
||||
() => contracttypes.id
|
||||
),
|
||||
|
||||
bankingIban: text("bankingIban"),
|
||||
bankingBIC: text("bankingBIC"),
|
||||
@@ -57,6 +61,7 @@ export const contracts = pgTable(
|
||||
sepaDate: timestamp("sepaDate", { withTimezone: true }),
|
||||
|
||||
paymentType: text("paymentType"),
|
||||
billingInterval: text("billingInterval"),
|
||||
invoiceDispatch: text("invoiceDispatch"),
|
||||
|
||||
ownFields: jsonb("ownFields").notNull().default({}),
|
||||
|
||||
40
backend/db/schema/contracttypes.ts
Normal file
40
backend/db/schema/contracttypes.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import {
|
||||
pgTable,
|
||||
bigint,
|
||||
timestamp,
|
||||
text,
|
||||
boolean,
|
||||
uuid,
|
||||
} from "drizzle-orm/pg-core"
|
||||
|
||||
import { tenants } from "./tenants"
|
||||
import { authUsers } from "./auth_users"
|
||||
|
||||
export const contracttypes = pgTable("contracttypes", {
|
||||
id: bigint("id", { mode: "number" })
|
||||
.primaryKey()
|
||||
.generatedByDefaultAsIdentity(),
|
||||
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
|
||||
tenant: bigint("tenant", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => tenants.id),
|
||||
|
||||
name: text("name").notNull(),
|
||||
description: text("description"),
|
||||
|
||||
paymentType: text("paymentType"),
|
||||
recurring: boolean("recurring").notNull().default(false),
|
||||
billingInterval: text("billingInterval"),
|
||||
|
||||
archived: boolean("archived").notNull().default(false),
|
||||
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true }),
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
})
|
||||
|
||||
export type ContractType = typeof contracttypes.$inferSelect
|
||||
export type NewContractType = typeof contracttypes.$inferInsert
|
||||
@@ -6,6 +6,7 @@ import {
|
||||
jsonb,
|
||||
boolean,
|
||||
smallint,
|
||||
doublePrecision,
|
||||
uuid,
|
||||
} from "drizzle-orm/pg-core"
|
||||
|
||||
@@ -96,7 +97,7 @@ export const createddocuments = pgTable("createddocuments", {
|
||||
|
||||
taxType: text("taxType"),
|
||||
|
||||
customSurchargePercentage: smallint("customSurchargePercentage")
|
||||
customSurchargePercentage: doublePrecision("customSurchargePercentage")
|
||||
.notNull()
|
||||
.default(0),
|
||||
|
||||
|
||||
66
backend/db/schema/customerinventoryitems.ts
Normal file
66
backend/db/schema/customerinventoryitems.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import {
|
||||
pgTable,
|
||||
bigint,
|
||||
timestamp,
|
||||
text,
|
||||
boolean,
|
||||
doublePrecision,
|
||||
uuid,
|
||||
date,
|
||||
} from "drizzle-orm/pg-core"
|
||||
|
||||
import { tenants } from "./tenants"
|
||||
import { customers } from "./customers"
|
||||
import { customerspaces } from "./customerspaces"
|
||||
import { products } from "./products"
|
||||
import { vendors } from "./vendors"
|
||||
import { authUsers } from "./auth_users"
|
||||
|
||||
export const customerinventoryitems = pgTable("customerinventoryitems", {
|
||||
id: bigint("id", { mode: "number" })
|
||||
.primaryKey()
|
||||
.generatedByDefaultAsIdentity(),
|
||||
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
|
||||
name: text("name").notNull(),
|
||||
|
||||
description: text("description"),
|
||||
|
||||
tenant: bigint("tenant", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => tenants.id),
|
||||
|
||||
customer: bigint("customer", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => customers.id),
|
||||
|
||||
customerspace: bigint("customerspace", { mode: "number" }).references(
|
||||
() => customerspaces.id
|
||||
),
|
||||
|
||||
customerInventoryId: text("customerInventoryId").notNull(),
|
||||
serialNumber: text("serialNumber"),
|
||||
|
||||
quantity: bigint("quantity", { mode: "number" }).notNull().default(0),
|
||||
|
||||
manufacturer: text("manufacturer"),
|
||||
manufacturerNumber: text("manufacturerNumber"),
|
||||
|
||||
purchaseDate: date("purchaseDate"),
|
||||
purchasePrice: doublePrecision("purchasePrice").default(0),
|
||||
currentValue: doublePrecision("currentValue"),
|
||||
|
||||
product: bigint("product", { mode: "number" }).references(() => products.id),
|
||||
vendor: bigint("vendor", { mode: "number" }).references(() => vendors.id),
|
||||
|
||||
archived: boolean("archived").notNull().default(false),
|
||||
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true }),
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
})
|
||||
|
||||
export type CustomerInventoryItem = typeof customerinventoryitems.$inferSelect
|
||||
export type NewCustomerInventoryItem = typeof customerinventoryitems.$inferInsert
|
||||
@@ -10,6 +10,7 @@ import {
|
||||
} from "drizzle-orm/pg-core"
|
||||
import { tenants } from "./tenants"
|
||||
import { authUsers } from "./auth_users"
|
||||
import { memberrelations } from "./memberrelations"
|
||||
|
||||
export const customers = pgTable(
|
||||
"customers",
|
||||
@@ -62,6 +63,8 @@ export const customers = pgTable(
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
|
||||
customPaymentType: text("custom_payment_type"), // ENUM payment_types separat?
|
||||
customTaxType: text("customTaxType"),
|
||||
memberrelation: bigint("memberrelation", { mode: "number" }).references(() => memberrelations.id),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
54
backend/db/schema/customerspaces.ts
Normal file
54
backend/db/schema/customerspaces.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import {
|
||||
pgTable,
|
||||
bigint,
|
||||
timestamp,
|
||||
text,
|
||||
boolean,
|
||||
jsonb,
|
||||
uuid,
|
||||
} from "drizzle-orm/pg-core"
|
||||
|
||||
import { tenants } from "./tenants"
|
||||
import { customers } from "./customers"
|
||||
import { authUsers } from "./auth_users"
|
||||
|
||||
export const customerspaces = pgTable("customerspaces", {
|
||||
id: bigint("id", { mode: "number" })
|
||||
.primaryKey()
|
||||
.generatedByDefaultAsIdentity(),
|
||||
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
|
||||
name: text("name").notNull(),
|
||||
type: text("type").notNull(),
|
||||
|
||||
tenant: bigint("tenant", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => tenants.id),
|
||||
|
||||
customer: bigint("customer", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => customers.id),
|
||||
|
||||
space_number: text("spaceNumber").notNull(),
|
||||
|
||||
parentSpace: bigint("parentSpace", { mode: "number" }).references(
|
||||
() => customerspaces.id
|
||||
),
|
||||
|
||||
info_data: jsonb("infoData")
|
||||
.notNull()
|
||||
.default({ zip: "", city: "", streetNumber: "" }),
|
||||
|
||||
description: text("description"),
|
||||
|
||||
archived: boolean("archived").notNull().default(false),
|
||||
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true }),
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
})
|
||||
|
||||
export type CustomerSpace = typeof customerspaces.$inferSelect
|
||||
export type NewCustomerSpace = typeof customerspaces.$inferInsert
|
||||
39
backend/db/schema/entitybankaccounts.ts
Normal file
39
backend/db/schema/entitybankaccounts.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import {
|
||||
pgTable,
|
||||
bigint,
|
||||
timestamp,
|
||||
text,
|
||||
boolean,
|
||||
jsonb,
|
||||
uuid,
|
||||
} from "drizzle-orm/pg-core"
|
||||
import { tenants } from "./tenants"
|
||||
import { authUsers } from "./auth_users"
|
||||
|
||||
export const entitybankaccounts = pgTable("entitybankaccounts", {
|
||||
id: bigint("id", { mode: "number" })
|
||||
.primaryKey()
|
||||
.generatedByDefaultAsIdentity(),
|
||||
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
|
||||
tenant: bigint("tenant", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => tenants.id),
|
||||
|
||||
ibanEncrypted: jsonb("iban_encrypted").notNull(),
|
||||
bicEncrypted: jsonb("bic_encrypted").notNull(),
|
||||
bankNameEncrypted: jsonb("bank_name_encrypted").notNull(),
|
||||
|
||||
description: text("description"),
|
||||
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true }),
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
|
||||
archived: boolean("archived").notNull().default(false),
|
||||
})
|
||||
|
||||
export type EntityBankAccount = typeof entitybankaccounts.$inferSelect
|
||||
export type NewEntityBankAccount = typeof entitybankaccounts.$inferInsert
|
||||
@@ -20,6 +20,8 @@ import { tasks } from "./tasks"
|
||||
import { vehicles } from "./vehicles"
|
||||
import { bankstatements } from "./bankstatements"
|
||||
import { spaces } from "./spaces"
|
||||
import { customerspaces } from "./customerspaces"
|
||||
import { customerinventoryitems } from "./customerinventoryitems"
|
||||
import { costcentres } from "./costcentres"
|
||||
import { ownaccounts } from "./ownaccounts"
|
||||
import { createddocuments } from "./createddocuments"
|
||||
@@ -32,6 +34,7 @@ import { events } from "./events"
|
||||
import { inventoryitemgroups } from "./inventoryitemgroups"
|
||||
import { authUsers } from "./auth_users"
|
||||
import {files} from "./files";
|
||||
import { memberrelations } from "./memberrelations";
|
||||
|
||||
export const historyitems = pgTable("historyitems", {
|
||||
id: bigint("id", { mode: "number" })
|
||||
@@ -99,6 +102,12 @@ export const historyitems = pgTable("historyitems", {
|
||||
|
||||
space: bigint("space", { mode: "number" }).references(() => spaces.id),
|
||||
|
||||
customerspace: bigint("customerspace", { mode: "number" }).references(() => customerspaces.id),
|
||||
|
||||
customerinventoryitem: bigint("customerinventoryitem", { mode: "number" }).references(() => customerinventoryitems.id),
|
||||
|
||||
memberrelation: bigint("memberrelation", { mode: "number" }).references(() => memberrelations.id),
|
||||
|
||||
config: jsonb("config"),
|
||||
|
||||
projecttype: bigint("projecttype", { mode: "number" }).references(
|
||||
|
||||
@@ -13,15 +13,19 @@ export * from "./checks"
|
||||
export * from "./citys"
|
||||
export * from "./contacts"
|
||||
export * from "./contracts"
|
||||
export * from "./contracttypes"
|
||||
export * from "./costcentres"
|
||||
export * from "./countrys"
|
||||
export * from "./createddocuments"
|
||||
export * from "./createdletters"
|
||||
export * from "./customers"
|
||||
export * from "./customerspaces"
|
||||
export * from "./customerinventoryitems"
|
||||
export * from "./devices"
|
||||
export * from "./documentboxes"
|
||||
export * from "./enums"
|
||||
export * from "./events"
|
||||
export * from "./entitybankaccounts"
|
||||
export * from "./files"
|
||||
export * from "./filetags"
|
||||
export * from "./folders"
|
||||
@@ -42,7 +46,9 @@ export * from "./incominginvoices"
|
||||
export * from "./inventoryitemgroups"
|
||||
export * from "./inventoryitems"
|
||||
export * from "./letterheads"
|
||||
export * from "./memberrelations"
|
||||
export * from "./movements"
|
||||
export * from "./m2m_api_keys"
|
||||
export * from "./notifications_event_types"
|
||||
export * from "./notifications_items"
|
||||
export * from "./notifications_preferences"
|
||||
|
||||
48
backend/db/schema/m2m_api_keys.ts
Normal file
48
backend/db/schema/m2m_api_keys.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import {
|
||||
pgTable,
|
||||
uuid,
|
||||
bigint,
|
||||
text,
|
||||
timestamp,
|
||||
boolean,
|
||||
} from "drizzle-orm/pg-core"
|
||||
|
||||
import { tenants } from "./tenants"
|
||||
import { authUsers } from "./auth_users"
|
||||
|
||||
export const m2mApiKeys = pgTable("m2m_api_keys", {
|
||||
id: uuid("id").primaryKey().defaultRandom(),
|
||||
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
|
||||
tenantId: bigint("tenant_id", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => tenants.id, { onDelete: "cascade", onUpdate: "cascade" }),
|
||||
|
||||
userId: uuid("user_id")
|
||||
.notNull()
|
||||
.references(() => authUsers.id, { onDelete: "cascade", onUpdate: "cascade" }),
|
||||
|
||||
createdBy: uuid("created_by").references(() => authUsers.id, {
|
||||
onDelete: "set null",
|
||||
onUpdate: "cascade",
|
||||
}),
|
||||
|
||||
name: text("name").notNull(),
|
||||
keyPrefix: text("key_prefix").notNull(),
|
||||
keyHash: text("key_hash").notNull().unique(),
|
||||
|
||||
active: boolean("active").notNull().default(true),
|
||||
|
||||
lastUsedAt: timestamp("last_used_at", { withTimezone: true }),
|
||||
expiresAt: timestamp("expires_at", { withTimezone: true }),
|
||||
})
|
||||
|
||||
export type M2mApiKey = typeof m2mApiKeys.$inferSelect
|
||||
export type NewM2mApiKey = typeof m2mApiKeys.$inferInsert
|
||||
39
backend/db/schema/memberrelations.ts
Normal file
39
backend/db/schema/memberrelations.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import {
|
||||
pgTable,
|
||||
bigint,
|
||||
timestamp,
|
||||
text,
|
||||
boolean,
|
||||
uuid,
|
||||
doublePrecision,
|
||||
} from "drizzle-orm/pg-core"
|
||||
|
||||
import { tenants } from "./tenants"
|
||||
import { authUsers } from "./auth_users"
|
||||
|
||||
export const memberrelations = pgTable("memberrelations", {
|
||||
id: bigint("id", { mode: "number" })
|
||||
.primaryKey()
|
||||
.generatedByDefaultAsIdentity(),
|
||||
|
||||
createdAt: timestamp("created_at", { withTimezone: true })
|
||||
.notNull()
|
||||
.defaultNow(),
|
||||
|
||||
tenant: bigint("tenant", { mode: "number" })
|
||||
.notNull()
|
||||
.references(() => tenants.id),
|
||||
|
||||
type: text("type").notNull(),
|
||||
billingInterval: text("billingInterval").notNull(),
|
||||
billingAmount: doublePrecision("billingAmount").notNull().default(0),
|
||||
|
||||
archived: boolean("archived").notNull().default(false),
|
||||
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true }),
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
})
|
||||
|
||||
export type MemberRelation = typeof memberrelations.$inferSelect
|
||||
export type NewMemberRelation = typeof memberrelations.$inferInsert
|
||||
|
||||
@@ -54,6 +54,7 @@ export const services = pgTable("services", {
|
||||
|
||||
materialComposition: jsonb("materialComposition").notNull().default([]),
|
||||
personalComposition: jsonb("personalComposition").notNull().default([]),
|
||||
priceUpdateLocked: boolean("priceUpdateLocked").notNull().default(false),
|
||||
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true }),
|
||||
updatedBy: uuid("updated_by").references(() => authUsers.id),
|
||||
|
||||
@@ -74,6 +74,48 @@ export const tenants = pgTable(
|
||||
timeTracking: true,
|
||||
planningBoard: true,
|
||||
workingTimeTracking: true,
|
||||
dashboard: true,
|
||||
historyitems: true,
|
||||
tasks: true,
|
||||
wiki: true,
|
||||
files: true,
|
||||
createdletters: true,
|
||||
documentboxes: true,
|
||||
helpdesk: true,
|
||||
email: true,
|
||||
members: true,
|
||||
customers: true,
|
||||
vendors: true,
|
||||
contactsList: true,
|
||||
staffTime: true,
|
||||
createDocument: true,
|
||||
serialInvoice: true,
|
||||
incomingInvoices: true,
|
||||
costcentres: true,
|
||||
accounts: true,
|
||||
ownaccounts: true,
|
||||
banking: true,
|
||||
spaces: true,
|
||||
customerspaces: true,
|
||||
customerinventoryitems: true,
|
||||
inventoryitems: true,
|
||||
inventoryitemgroups: true,
|
||||
products: true,
|
||||
productcategories: true,
|
||||
services: true,
|
||||
servicecategories: true,
|
||||
memberrelations: true,
|
||||
staffProfiles: true,
|
||||
hourrates: true,
|
||||
projecttypes: true,
|
||||
contracttypes: true,
|
||||
plants: true,
|
||||
settingsNumberRanges: true,
|
||||
settingsEmailAccounts: true,
|
||||
settingsBanking: true,
|
||||
settingsTexttemplates: true,
|
||||
settingsTenant: true,
|
||||
export: true,
|
||||
}),
|
||||
|
||||
ownFields: jsonb("ownFields"),
|
||||
@@ -88,10 +130,13 @@ export const tenants = pgTable(
|
||||
confirmationOrders: { prefix: "AB-", suffix: "", nextNumber: 1000 },
|
||||
invoices: { prefix: "RE-", suffix: "", nextNumber: 1000 },
|
||||
spaces: { prefix: "LP-", suffix: "", nextNumber: 1000 },
|
||||
customerspaces: { prefix: "KLP-", suffix: "", nextNumber: 1000 },
|
||||
inventoryitems: { prefix: "IA-", suffix: "", nextNumber: 1000 },
|
||||
customerinventoryitems: { prefix: "KIA-", suffix: "", nextNumber: 1000 },
|
||||
projects: { prefix: "PRJ-", suffix: "", nextNumber: 1000 },
|
||||
costcentres: { prefix: "KST-", suffix: "", nextNumber: 1000 },
|
||||
}),
|
||||
accountChart: text("accountChart").notNull().default("skr03"),
|
||||
|
||||
standardEmailForInvoices: text("standardEmailForInvoices"),
|
||||
|
||||
|
||||
@@ -6,6 +6,6 @@ export default defineConfig({
|
||||
schema: "./db/schema",
|
||||
out: "./db/migrations",
|
||||
dbCredentials: {
|
||||
url: secrets.DATABASE_URL,
|
||||
url: secrets.DATABASE_URL || "postgres://postgres:wJw7aNpEBJdcxgoct6GXNpvY4Cn6ECqu@fedeo-db-001.vpn.internal:5432/fedeo",
|
||||
},
|
||||
})
|
||||
@@ -9,7 +9,10 @@
|
||||
"dev:dav": "tsx watch src/webdav/server.ts",
|
||||
"build": "tsc",
|
||||
"start": "node dist/src/index.js",
|
||||
"schema:index": "ts-node scripts/generate-schema-index.ts"
|
||||
"schema:index": "ts-node scripts/generate-schema-index.ts",
|
||||
"bankcodes:update": "tsx scripts/generate-de-bank-codes.ts",
|
||||
"members:import:csv": "tsx scripts/import-members-csv.ts",
|
||||
"accounts:import:skr42": "ts-node scripts/import-skr42-accounts.ts"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -29,7 +32,6 @@
|
||||
"@infisical/sdk": "^4.0.6",
|
||||
"@mmote/niimbluelib": "^0.0.1-alpha.29",
|
||||
"@prisma/client": "^6.15.0",
|
||||
"@supabase/supabase-js": "^2.56.1",
|
||||
"@zip.js/zip.js": "^2.7.73",
|
||||
"archiver": "^7.0.1",
|
||||
"axios": "^1.12.1",
|
||||
|
||||
95
backend/scripts/generate-de-bank-codes.ts
Normal file
95
backend/scripts/generate-de-bank-codes.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
import fs from "node:fs/promises"
|
||||
import path from "node:path"
|
||||
import https from "node:https"
|
||||
|
||||
const DEFAULT_SOURCE_URL =
|
||||
"https://www.bundesbank.de/resource/blob/602632/bec25ca5df1eb62fefadd8325dafe67c/472B63F073F071307366337C94F8C870/blz-aktuell-txt-data.txt"
|
||||
|
||||
const OUTPUT_NAME_FILE = path.resolve("src/utils/deBankCodes.ts")
|
||||
const OUTPUT_BIC_FILE = path.resolve("src/utils/deBankBics.ts")
|
||||
|
||||
function fetchBuffer(url: string): Promise<Buffer> {
|
||||
return new Promise((resolve, reject) => {
|
||||
https
|
||||
.get(url, (res) => {
|
||||
if (res.statusCode && res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
|
||||
return resolve(fetchBuffer(res.headers.location))
|
||||
}
|
||||
|
||||
if (res.statusCode !== 200) {
|
||||
return reject(new Error(`Download failed with status ${res.statusCode}`))
|
||||
}
|
||||
|
||||
const chunks: Buffer[] = []
|
||||
res.on("data", (chunk) => chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)))
|
||||
res.on("end", () => resolve(Buffer.concat(chunks)))
|
||||
res.on("error", reject)
|
||||
})
|
||||
.on("error", reject)
|
||||
})
|
||||
}
|
||||
|
||||
function escapeTsString(value: string) {
|
||||
return value.replace(/\\/g, "\\\\").replace(/"/g, '\\"')
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const source = process.env.BLZ_SOURCE_URL || DEFAULT_SOURCE_URL
|
||||
const sourceFile = process.env.BLZ_SOURCE_FILE
|
||||
let raw: Buffer
|
||||
|
||||
if (sourceFile) {
|
||||
console.log(`Reading BLZ source file: ${sourceFile}`)
|
||||
raw = await fs.readFile(sourceFile)
|
||||
} else {
|
||||
console.log(`Downloading BLZ source: ${source}`)
|
||||
raw = await fetchBuffer(source)
|
||||
}
|
||||
const content = raw.toString("latin1")
|
||||
|
||||
const lines = content.split(/\r?\n/)
|
||||
const nameMap = new Map<string, string>()
|
||||
const bicMap = new Map<string, string>()
|
||||
|
||||
for (const line of lines) {
|
||||
if (!line || line.length < 150) continue
|
||||
const blz = line.slice(0, 8).trim()
|
||||
const name = line.slice(9, 67).trim()
|
||||
const bic = line.slice(139, 150).trim()
|
||||
|
||||
if (!/^\d{8}$/.test(blz) || !name) continue
|
||||
if (!nameMap.has(blz)) nameMap.set(blz, name)
|
||||
if (bic && !bicMap.has(blz)) bicMap.set(blz, bic)
|
||||
}
|
||||
|
||||
const sortedNames = [...nameMap.entries()].sort(([a], [b]) => a.localeCompare(b))
|
||||
const sortedBics = [...bicMap.entries()].sort(([a], [b]) => a.localeCompare(b))
|
||||
|
||||
const nameOutputLines = [
|
||||
"// Lokale Bankleitzahl-zu-Institut Zuordnung (DE).",
|
||||
"// Quelle: Deutsche Bundesbank, BLZ-Datei (vollstaendig).",
|
||||
"export const DE_BANK_CODE_TO_NAME: Record<string, string> = {",
|
||||
...sortedNames.map(([blz, name]) => ` "${blz}": "${escapeTsString(name)}",`),
|
||||
"}",
|
||||
"",
|
||||
]
|
||||
|
||||
const bicOutputLines = [
|
||||
"// Lokale Bankleitzahl-zu-BIC Zuordnung (DE).",
|
||||
"// Quelle: Deutsche Bundesbank, BLZ-Datei (vollstaendig).",
|
||||
"export const DE_BANK_CODE_TO_BIC: Record<string, string> = {",
|
||||
...sortedBics.map(([blz, bic]) => ` "${blz}": "${escapeTsString(bic)}",`),
|
||||
"}",
|
||||
"",
|
||||
]
|
||||
|
||||
await fs.writeFile(OUTPUT_NAME_FILE, nameOutputLines.join("\n"), "utf8")
|
||||
await fs.writeFile(OUTPUT_BIC_FILE, bicOutputLines.join("\n"), "utf8")
|
||||
console.log(`Wrote ${sortedNames.length} bank names to ${OUTPUT_NAME_FILE}`)
|
||||
console.log(`Wrote ${sortedBics.length} bank BICs to ${OUTPUT_BIC_FILE}`)
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
})
|
||||
@@ -1,6 +1,5 @@
|
||||
import Fastify from "fastify";
|
||||
import swaggerPlugin from "./plugins/swagger"
|
||||
import supabasePlugin from "./plugins/supabase";
|
||||
import dayjsPlugin from "./plugins/dayjs";
|
||||
import healthRoutes from "./routes/health";
|
||||
import meRoutes from "./routes/auth/me";
|
||||
@@ -43,6 +42,7 @@ import helpdeskInboundEmailRoutes from "./routes/helpdesk.inbound.email";
|
||||
import deviceRoutes from "./routes/internal/devices";
|
||||
import tenantRoutesInternal from "./routes/internal/tenant";
|
||||
import staffTimeRoutesInternal from "./routes/internal/time";
|
||||
import authM2mInternalRoutes from "./routes/internal/auth.m2m";
|
||||
|
||||
//Devices
|
||||
import devicesRFIDRoutes from "./routes/devices/rfid";
|
||||
@@ -73,7 +73,6 @@ async function main() {
|
||||
|
||||
// Plugins Global verfügbar
|
||||
await app.register(swaggerPlugin);
|
||||
await app.register(supabasePlugin);
|
||||
await app.register(tenantPlugin);
|
||||
await app.register(dayjsPlugin);
|
||||
await app.register(dbPlugin);
|
||||
@@ -109,6 +108,7 @@ async function main() {
|
||||
|
||||
await app.register(async (m2mApp) => {
|
||||
await m2mApp.register(authM2m)
|
||||
await m2mApp.register(authM2mInternalRoutes)
|
||||
await m2mApp.register(helpdeskInboundEmailRoutes)
|
||||
await m2mApp.register(deviceRoutes)
|
||||
await m2mApp.register(tenantRoutesInternal)
|
||||
|
||||
@@ -8,9 +8,108 @@ import {
|
||||
files,
|
||||
filetags,
|
||||
incominginvoices,
|
||||
vendors,
|
||||
} from "../../../db/schema"
|
||||
|
||||
import { eq, and, isNull, not } from "drizzle-orm"
|
||||
import { eq, and, isNull, not, desc } from "drizzle-orm"
|
||||
|
||||
type InvoiceAccount = {
|
||||
account?: number | null
|
||||
description?: string | null
|
||||
taxType?: string | number | null
|
||||
}
|
||||
|
||||
const normalizeAccounts = (accounts: unknown): InvoiceAccount[] => {
|
||||
if (!Array.isArray(accounts)) return []
|
||||
return accounts
|
||||
.map((entry: any) => ({
|
||||
account: typeof entry?.account === "number" ? entry.account : null,
|
||||
description: typeof entry?.description === "string" ? entry.description : null,
|
||||
taxType: entry?.taxType ?? null,
|
||||
}))
|
||||
.filter((entry) => entry.account !== null || entry.description || entry.taxType !== null)
|
||||
}
|
||||
|
||||
const buildLearningContext = (historicalInvoices: any[]) => {
|
||||
if (!historicalInvoices.length) return null
|
||||
|
||||
const vendorProfiles = new Map<number, {
|
||||
vendorName: string
|
||||
paymentTypes: Map<string, number>
|
||||
accountUsage: Map<number, number>
|
||||
sampleDescriptions: string[]
|
||||
}>()
|
||||
|
||||
const recentExamples: any[] = []
|
||||
|
||||
for (const invoice of historicalInvoices) {
|
||||
const accounts = normalizeAccounts(invoice.accounts)
|
||||
const vendorId = typeof invoice.vendorId === "number" ? invoice.vendorId : null
|
||||
const vendorName = typeof invoice.vendorName === "string" ? invoice.vendorName : "Unknown"
|
||||
|
||||
if (vendorId) {
|
||||
if (!vendorProfiles.has(vendorId)) {
|
||||
vendorProfiles.set(vendorId, {
|
||||
vendorName,
|
||||
paymentTypes: new Map(),
|
||||
accountUsage: new Map(),
|
||||
sampleDescriptions: [],
|
||||
})
|
||||
}
|
||||
|
||||
const profile = vendorProfiles.get(vendorId)!
|
||||
if (invoice.paymentType) {
|
||||
const key = String(invoice.paymentType)
|
||||
profile.paymentTypes.set(key, (profile.paymentTypes.get(key) ?? 0) + 1)
|
||||
}
|
||||
for (const account of accounts) {
|
||||
if (typeof account.account === "number") {
|
||||
profile.accountUsage.set(account.account, (profile.accountUsage.get(account.account) ?? 0) + 1)
|
||||
}
|
||||
}
|
||||
if (invoice.description && profile.sampleDescriptions.length < 3) {
|
||||
profile.sampleDescriptions.push(String(invoice.description).slice(0, 120))
|
||||
}
|
||||
}
|
||||
|
||||
if (recentExamples.length < 20) {
|
||||
recentExamples.push({
|
||||
vendorId,
|
||||
vendorName,
|
||||
paymentType: invoice.paymentType ?? null,
|
||||
accounts: accounts.map((entry) => ({
|
||||
account: entry.account,
|
||||
description: entry.description ?? null,
|
||||
taxType: entry.taxType ?? null,
|
||||
})),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const vendorPatterns = Array.from(vendorProfiles.entries())
|
||||
.map(([vendorId, profile]) => {
|
||||
const commonPaymentType = Array.from(profile.paymentTypes.entries())
|
||||
.sort((a, b) => b[1] - a[1])[0]?.[0] ?? null
|
||||
const topAccounts = Array.from(profile.accountUsage.entries())
|
||||
.sort((a, b) => b[1] - a[1])
|
||||
.slice(0, 4)
|
||||
.map(([accountId, count]) => ({ accountId, count }))
|
||||
|
||||
return {
|
||||
vendorId,
|
||||
vendorName: profile.vendorName,
|
||||
commonPaymentType,
|
||||
topAccounts,
|
||||
sampleDescriptions: profile.sampleDescriptions,
|
||||
}
|
||||
})
|
||||
.slice(0, 50)
|
||||
|
||||
return JSON.stringify({
|
||||
vendorPatterns,
|
||||
recentExamples,
|
||||
})
|
||||
}
|
||||
|
||||
export function prepareIncomingInvoices(server: FastifyInstance) {
|
||||
const processInvoices = async (tenantId:number) => {
|
||||
@@ -72,13 +171,34 @@ export function prepareIncomingInvoices(server: FastifyInstance) {
|
||||
continue
|
||||
}
|
||||
|
||||
const historicalInvoices = await server.db
|
||||
.select({
|
||||
vendorId: incominginvoices.vendor,
|
||||
vendorName: vendors.name,
|
||||
paymentType: incominginvoices.paymentType,
|
||||
description: incominginvoices.description,
|
||||
accounts: incominginvoices.accounts,
|
||||
})
|
||||
.from(incominginvoices)
|
||||
.leftJoin(vendors, eq(incominginvoices.vendor, vendors.id))
|
||||
.where(
|
||||
and(
|
||||
eq(incominginvoices.tenant, tenantId),
|
||||
eq(incominginvoices.archived, false)
|
||||
)
|
||||
)
|
||||
.orderBy(desc(incominginvoices.createdAt))
|
||||
.limit(120)
|
||||
|
||||
const learningContext = buildLearningContext(historicalInvoices)
|
||||
|
||||
// -------------------------------------------------------------
|
||||
// 3️⃣ Jede Datei einzeln durch GPT jagen & IncomingInvoice erzeugen
|
||||
// -------------------------------------------------------------
|
||||
for (const file of filesRes) {
|
||||
console.log(`Processing file ${file.id} for tenant ${tenantId}`)
|
||||
|
||||
const data = await getInvoiceDataFromGPT(server,file, tenantId)
|
||||
const data = await getInvoiceDataFromGPT(server,file, tenantId, learningContext ?? undefined)
|
||||
|
||||
if (!data) {
|
||||
server.log.warn(`GPT returned no data for file ${file.id}`)
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
// modules/helpdesk/helpdesk.contact.service.ts
|
||||
import { FastifyInstance } from 'fastify'
|
||||
import { and, eq, or } from "drizzle-orm";
|
||||
import { helpdesk_contacts } from "../../../db/schema";
|
||||
|
||||
export async function getOrCreateContact(
|
||||
server: FastifyInstance,
|
||||
@@ -9,30 +11,35 @@ export async function getOrCreateContact(
|
||||
if (!email && !phone) throw new Error('Contact must have at least an email or phone')
|
||||
|
||||
// Bestehenden Kontakt prüfen
|
||||
const { data: existing, error: findError } = await server.supabase
|
||||
.from('helpdesk_contacts')
|
||||
.select('*')
|
||||
.eq('tenant_id', tenant_id)
|
||||
.or(`email.eq.${email || ''},phone.eq.${phone || ''}`)
|
||||
.maybeSingle()
|
||||
const matchConditions = []
|
||||
if (email) matchConditions.push(eq(helpdesk_contacts.email, email))
|
||||
if (phone) matchConditions.push(eq(helpdesk_contacts.phone, phone))
|
||||
|
||||
if (findError) throw findError
|
||||
if (existing) return existing
|
||||
const existing = await server.db
|
||||
.select()
|
||||
.from(helpdesk_contacts)
|
||||
.where(
|
||||
and(
|
||||
eq(helpdesk_contacts.tenantId, tenant_id),
|
||||
or(...matchConditions)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existing[0]) return existing[0]
|
||||
|
||||
// Anlegen
|
||||
const { data: created, error: insertError } = await server.supabase
|
||||
.from('helpdesk_contacts')
|
||||
.insert({
|
||||
tenant_id,
|
||||
const created = await server.db
|
||||
.insert(helpdesk_contacts)
|
||||
.values({
|
||||
tenantId: tenant_id,
|
||||
email,
|
||||
phone,
|
||||
display_name,
|
||||
customer_id,
|
||||
contact_id
|
||||
displayName: display_name,
|
||||
customerId: customer_id,
|
||||
contactId: contact_id
|
||||
})
|
||||
.select()
|
||||
.single()
|
||||
.returning()
|
||||
|
||||
if (insertError) throw insertError
|
||||
return created
|
||||
return created[0]
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
import { FastifyInstance } from 'fastify'
|
||||
import { getOrCreateContact } from './helpdesk.contact.service.js'
|
||||
import {useNextNumberRangeNumber} from "../../utils/functions";
|
||||
import { and, desc, eq } from "drizzle-orm";
|
||||
import { customers, helpdesk_contacts, helpdesk_conversations } from "../../../db/schema";
|
||||
|
||||
export async function createConversation(
|
||||
server: FastifyInstance,
|
||||
@@ -25,24 +27,34 @@ export async function createConversation(
|
||||
|
||||
const {usedNumber } = await useNextNumberRangeNumber(server, tenant_id, "tickets")
|
||||
|
||||
const { data, error } = await server.supabase
|
||||
.from('helpdesk_conversations')
|
||||
.insert({
|
||||
tenant_id,
|
||||
contact_id: contactRecord.id,
|
||||
channel_instance_id,
|
||||
const inserted = await server.db
|
||||
.insert(helpdesk_conversations)
|
||||
.values({
|
||||
tenantId: tenant_id,
|
||||
contactId: contactRecord.id,
|
||||
channelInstanceId: channel_instance_id,
|
||||
subject: subject || null,
|
||||
status: 'open',
|
||||
created_at: new Date().toISOString(),
|
||||
customer_id,
|
||||
contact_person_id,
|
||||
ticket_number: usedNumber
|
||||
createdAt: new Date(),
|
||||
customerId: customer_id,
|
||||
contactPersonId: contact_person_id,
|
||||
ticketNumber: usedNumber
|
||||
})
|
||||
.select()
|
||||
.single()
|
||||
.returning()
|
||||
|
||||
if (error) throw error
|
||||
return data
|
||||
const data = inserted[0]
|
||||
|
||||
return {
|
||||
...data,
|
||||
channel_instance_id: data.channelInstanceId,
|
||||
contact_id: data.contactId,
|
||||
contact_person_id: data.contactPersonId,
|
||||
created_at: data.createdAt,
|
||||
customer_id: data.customerId,
|
||||
last_message_at: data.lastMessageAt,
|
||||
tenant_id: data.tenantId,
|
||||
ticket_number: data.ticketNumber,
|
||||
}
|
||||
}
|
||||
|
||||
export async function getConversations(
|
||||
@@ -52,22 +64,34 @@ export async function getConversations(
|
||||
) {
|
||||
const { status, limit = 50 } = opts || {}
|
||||
|
||||
let query = server.supabase.from('helpdesk_conversations').select('*, customer_id(*)').eq('tenant_id', tenant_id)
|
||||
const filters = [eq(helpdesk_conversations.tenantId, tenant_id)]
|
||||
if (status) filters.push(eq(helpdesk_conversations.status, status))
|
||||
|
||||
if (status) query = query.eq('status', status)
|
||||
query = query.order('last_message_at', { ascending: false }).limit(limit)
|
||||
|
||||
const { data, error } = await query
|
||||
if (error) throw error
|
||||
|
||||
const mappedData = data.map(entry => {
|
||||
return {
|
||||
...entry,
|
||||
customer: entry.customer_id
|
||||
}
|
||||
const data = await server.db
|
||||
.select({
|
||||
conversation: helpdesk_conversations,
|
||||
contact: helpdesk_contacts,
|
||||
customer: customers,
|
||||
})
|
||||
.from(helpdesk_conversations)
|
||||
.leftJoin(helpdesk_contacts, eq(helpdesk_contacts.id, helpdesk_conversations.contactId))
|
||||
.leftJoin(customers, eq(customers.id, helpdesk_conversations.customerId))
|
||||
.where(and(...filters))
|
||||
.orderBy(desc(helpdesk_conversations.lastMessageAt))
|
||||
.limit(limit)
|
||||
|
||||
return mappedData
|
||||
return data.map((entry) => ({
|
||||
...entry.conversation,
|
||||
helpdesk_contacts: entry.contact,
|
||||
channel_instance_id: entry.conversation.channelInstanceId,
|
||||
contact_id: entry.conversation.contactId,
|
||||
contact_person_id: entry.conversation.contactPersonId,
|
||||
created_at: entry.conversation.createdAt,
|
||||
customer_id: entry.customer,
|
||||
last_message_at: entry.conversation.lastMessageAt,
|
||||
tenant_id: entry.conversation.tenantId,
|
||||
ticket_number: entry.conversation.ticketNumber,
|
||||
}))
|
||||
}
|
||||
|
||||
export async function updateConversationStatus(
|
||||
@@ -78,13 +102,22 @@ export async function updateConversationStatus(
|
||||
const valid = ['open', 'in_progress', 'waiting_for_customer', 'answered', 'closed']
|
||||
if (!valid.includes(status)) throw new Error('Invalid status')
|
||||
|
||||
const { data, error } = await server.supabase
|
||||
.from('helpdesk_conversations')
|
||||
.update({ status })
|
||||
.eq('id', conversation_id)
|
||||
.select()
|
||||
.single()
|
||||
const updated = await server.db
|
||||
.update(helpdesk_conversations)
|
||||
.set({ status })
|
||||
.where(eq(helpdesk_conversations.id, conversation_id))
|
||||
.returning()
|
||||
|
||||
if (error) throw error
|
||||
return data
|
||||
const data = updated[0]
|
||||
return {
|
||||
...data,
|
||||
channel_instance_id: data.channelInstanceId,
|
||||
contact_id: data.contactId,
|
||||
contact_person_id: data.contactPersonId,
|
||||
created_at: data.createdAt,
|
||||
customer_id: data.customerId,
|
||||
last_message_at: data.lastMessageAt,
|
||||
tenant_id: data.tenantId,
|
||||
ticket_number: data.ticketNumber,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
// modules/helpdesk/helpdesk.message.service.ts
|
||||
import { FastifyInstance } from 'fastify'
|
||||
import { asc, eq } from "drizzle-orm";
|
||||
import { helpdesk_conversations, helpdesk_messages } from "../../../db/schema";
|
||||
|
||||
export async function addMessage(
|
||||
server: FastifyInstance,
|
||||
@@ -23,38 +25,53 @@ export async function addMessage(
|
||||
) {
|
||||
if (!payload?.text) throw new Error('Message payload requires text content')
|
||||
|
||||
const { data: message, error } = await server.supabase
|
||||
.from('helpdesk_messages')
|
||||
.insert({
|
||||
tenant_id,
|
||||
conversation_id,
|
||||
author_user_id,
|
||||
const inserted = await server.db
|
||||
.insert(helpdesk_messages)
|
||||
.values({
|
||||
tenantId: tenant_id,
|
||||
conversationId: conversation_id,
|
||||
authorUserId: author_user_id,
|
||||
direction,
|
||||
payload,
|
||||
raw_meta,
|
||||
created_at: new Date().toISOString(),
|
||||
rawMeta: raw_meta,
|
||||
externalMessageId: external_message_id,
|
||||
receivedAt: new Date(),
|
||||
})
|
||||
.select()
|
||||
.single()
|
||||
.returning()
|
||||
|
||||
if (error) throw error
|
||||
const message = inserted[0]
|
||||
|
||||
// Letzte Nachricht aktualisieren
|
||||
await server.supabase
|
||||
.from('helpdesk_conversations')
|
||||
.update({ last_message_at: new Date().toISOString() })
|
||||
.eq('id', conversation_id)
|
||||
await server.db
|
||||
.update(helpdesk_conversations)
|
||||
.set({ lastMessageAt: new Date() })
|
||||
.where(eq(helpdesk_conversations.id, conversation_id))
|
||||
|
||||
return message
|
||||
return {
|
||||
...message,
|
||||
author_user_id: message.authorUserId,
|
||||
conversation_id: message.conversationId,
|
||||
created_at: message.createdAt,
|
||||
external_message_id: message.externalMessageId,
|
||||
raw_meta: message.rawMeta,
|
||||
tenant_id: message.tenantId,
|
||||
}
|
||||
}
|
||||
|
||||
export async function getMessages(server: FastifyInstance, conversation_id: string) {
|
||||
const { data, error } = await server.supabase
|
||||
.from('helpdesk_messages')
|
||||
.select('*')
|
||||
.eq('conversation_id', conversation_id)
|
||||
.order('created_at', { ascending: true })
|
||||
const data = await server.db
|
||||
.select()
|
||||
.from(helpdesk_messages)
|
||||
.where(eq(helpdesk_messages.conversationId, conversation_id))
|
||||
.orderBy(asc(helpdesk_messages.createdAt))
|
||||
|
||||
if (error) throw error
|
||||
return data
|
||||
return data.map((message) => ({
|
||||
...message,
|
||||
author_user_id: message.authorUserId,
|
||||
conversation_id: message.conversationId,
|
||||
created_at: message.createdAt,
|
||||
external_message_id: message.externalMessageId,
|
||||
raw_meta: message.rawMeta,
|
||||
tenant_id: message.tenantId,
|
||||
}))
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
// services/notification.service.ts
|
||||
import type { FastifyInstance } from 'fastify';
|
||||
import {secrets} from "../utils/secrets";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { notificationsEventTypes, notificationsItems } from "../../db/schema";
|
||||
|
||||
export type NotificationStatus = 'queued' | 'sent' | 'failed';
|
||||
|
||||
@@ -34,16 +36,16 @@ export class NotificationService {
|
||||
*/
|
||||
async trigger(input: TriggerInput) {
|
||||
const { tenantId, userId, eventType, title, message, payload } = input;
|
||||
const supabase = this.server.supabase;
|
||||
|
||||
// 1) Event-Typ prüfen (aktiv?)
|
||||
const { data: eventTypeRow, error: etErr } = await supabase
|
||||
.from('notifications_event_types')
|
||||
.select('event_key,is_active')
|
||||
.eq('event_key', eventType)
|
||||
.maybeSingle();
|
||||
const eventTypeRows = await this.server.db
|
||||
.select()
|
||||
.from(notificationsEventTypes)
|
||||
.where(eq(notificationsEventTypes.eventKey, eventType))
|
||||
.limit(1)
|
||||
const eventTypeRow = eventTypeRows[0]
|
||||
|
||||
if (etErr || !eventTypeRow || eventTypeRow.is_active !== true) {
|
||||
if (!eventTypeRow || eventTypeRow.isActive !== true) {
|
||||
throw new Error(`Unbekannter oder inaktiver Event-Typ: ${eventType}`);
|
||||
}
|
||||
|
||||
@@ -54,40 +56,40 @@ export class NotificationService {
|
||||
}
|
||||
|
||||
// 3) Notification anlegen (status: queued)
|
||||
const { data: inserted, error: insErr } = await supabase
|
||||
.from('notifications_items')
|
||||
.insert({
|
||||
tenant_id: tenantId,
|
||||
user_id: userId,
|
||||
event_type: eventType,
|
||||
const insertedRows = await this.server.db
|
||||
.insert(notificationsItems)
|
||||
.values({
|
||||
tenantId,
|
||||
userId,
|
||||
eventType,
|
||||
title,
|
||||
message,
|
||||
payload: payload ?? null,
|
||||
channel: 'email',
|
||||
status: 'queued'
|
||||
})
|
||||
.select('id')
|
||||
.single();
|
||||
.returning({ id: notificationsItems.id })
|
||||
const inserted = insertedRows[0]
|
||||
|
||||
if (insErr || !inserted) {
|
||||
throw new Error(`Fehler beim Einfügen der Notification: ${insErr?.message}`);
|
||||
if (!inserted) {
|
||||
throw new Error("Fehler beim Einfügen der Notification");
|
||||
}
|
||||
|
||||
// 4) E-Mail versenden
|
||||
try {
|
||||
await this.sendEmail(user.email, title, message);
|
||||
|
||||
await supabase
|
||||
.from('notifications_items')
|
||||
.update({ status: 'sent', sent_at: new Date().toISOString() })
|
||||
.eq('id', inserted.id);
|
||||
await this.server.db
|
||||
.update(notificationsItems)
|
||||
.set({ status: 'sent', sentAt: new Date() })
|
||||
.where(eq(notificationsItems.id, inserted.id));
|
||||
|
||||
return { success: true, id: inserted.id };
|
||||
} catch (err: any) {
|
||||
await supabase
|
||||
.from('notifications_items')
|
||||
.update({ status: 'failed', error: String(err?.message || err) })
|
||||
.eq('id', inserted.id);
|
||||
await this.server.db
|
||||
.update(notificationsItems)
|
||||
.set({ status: 'failed', error: String(err?.message || err) })
|
||||
.where(eq(notificationsItems.id, inserted.id));
|
||||
|
||||
this.server.log.error({ err, notificationId: inserted.id }, 'E-Mail Versand fehlgeschlagen');
|
||||
return { success: false, error: err?.message || 'E-Mail Versand fehlgeschlagen' };
|
||||
|
||||
249
backend/src/modules/service-price-recalculation.service.ts
Normal file
249
backend/src/modules/service-price-recalculation.service.ts
Normal file
@@ -0,0 +1,249 @@
|
||||
import { and, eq } from "drizzle-orm";
|
||||
import * as schema from "../../db/schema";
|
||||
import { FastifyInstance } from "fastify";
|
||||
|
||||
// Loose shape of one composition entry as stored in the services JSON columns
// (materialComposition / personalComposition). Values may arrive as strings or
// numbers from the stored JSON, so every field is optional and loosely typed;
// unknown extra keys are preserved via the index signature.
type CompositionRow = {
  product?: number | string | null;        // referenced product id (material rows)
  service?: number | string | null;        // referenced nested service id (material rows)
  hourrate?: string | null;                // referenced hourrate uuid (personal rows)
  quantity?: number | string | null;       // multiplier applied to unit prices
  price?: number | string | null;          // selling price per unit
  purchasePrice?: number | string | null;  // purchase price per unit
  [key: string]: any;                      // keep any additional stored keys
};
|
||||
|
||||
function toNumber(value: any): number {
|
||||
const num = Number(value ?? 0);
|
||||
return Number.isFinite(num) ? num : 0;
|
||||
}
|
||||
|
||||
function round2(value: number): number {
|
||||
return Number(value.toFixed(2));
|
||||
}
|
||||
|
||||
function getJsonNumber(source: unknown, key: string): number {
|
||||
if (!source || typeof source !== "object") return 0;
|
||||
return toNumber((source as Record<string, unknown>)[key]);
|
||||
}
|
||||
|
||||
function normalizeId(value: unknown): number | null {
|
||||
if (value === null || value === undefined || value === "") return null;
|
||||
const num = Number(value);
|
||||
return Number.isFinite(num) ? num : null;
|
||||
}
|
||||
|
||||
function normalizeUuid(value: unknown): string | null {
|
||||
if (typeof value !== "string") return null;
|
||||
const trimmed = value.trim();
|
||||
return trimmed.length ? trimmed : null;
|
||||
}
|
||||
|
||||
function sanitizeCompositionRows(value: unknown): CompositionRow[] {
|
||||
if (!Array.isArray(value)) return [];
|
||||
return value.filter((entry): entry is CompositionRow => !!entry && typeof entry === "object");
|
||||
}
|
||||
|
||||
/**
 * Recalculates composed selling/purchase prices for all services of a tenant.
 *
 * Loads every service, product and hourrate of the tenant, resolves each
 * service's material/personal composition rows (products, nested services,
 * hourrates) to their current prices, and writes recomputed totals back —
 * but only to services whose calculated state actually differs from what is
 * stored. Services flagged `priceUpdateLocked` and services without any
 * composition keep their stored values.
 *
 * @param server    Fastify instance providing the Drizzle `db` handle.
 * @param tenantId  Tenant whose services are recalculated.
 * @param updatedBy Optional audit identifier written to `updatedBy`.
 */
export async function recalculateServicePricesForTenant(server: FastifyInstance, tenantId: number, updatedBy?: string | null) {
  // Fetch all three tables in parallel; everything below runs in memory.
  const [services, products, hourrates] = await Promise.all([
    server.db.select().from(schema.services).where(eq(schema.services.tenant, tenantId)),
    server.db.select().from(schema.products).where(eq(schema.products.tenant, tenantId)),
    server.db.select().from(schema.hourrates).where(eq(schema.hourrates.tenant, tenantId)),
  ]);

  // id -> row lookups for O(1) resolution of composition references.
  const serviceMap = new Map(services.map((item) => [item.id, item]));
  const productMap = new Map(products.map((item) => [item.id, item]));
  const hourrateMap = new Map(hourrates.map((item) => [item.id, item]));

  // Memoized per-service results (services may be nested inside each other).
  const memo = new Map<number, {
    sellingTotal: number;
    purchaseTotal: number;
    materialTotal: number;
    materialPurchaseTotal: number;
    workerTotal: number;
    workerPurchaseTotal: number;
    materialComposition: CompositionRow[];
    personalComposition: CompositionRow[];
  }>();
  // Recursion guard: ids currently being evaluated, used to break cycles.
  const stack = new Set<number>();

  const calculateService = (serviceId: number) => {
    if (memo.has(serviceId)) return memo.get(serviceId)!;

    const service = serviceMap.get(serviceId);
    // Neutral result used for unknown ids and cyclic references.
    // Deliberately NOT memoized in those cases so a cycle participant can
    // still be calculated properly when reached from the top level.
    const emptyResult = {
      sellingTotal: 0,
      purchaseTotal: 0,
      materialTotal: 0,
      materialPurchaseTotal: 0,
      workerTotal: 0,
      workerPurchaseTotal: 0,
      materialComposition: [],
      personalComposition: [],
    };

    if (!service) return emptyResult;
    if (stack.has(serviceId)) return emptyResult;

    // Locked services stay untouched by automatic price updates.
    if (service.priceUpdateLocked) {
      const lockedResult = {
        // Prefer the composed total; fall back to the flat sellingPrice
        // when the composed total is 0/absent (|| is intentional here).
        sellingTotal: getJsonNumber(service.sellingPriceComposed, "total") || toNumber(service.sellingPrice),
        purchaseTotal: getJsonNumber(service.purchasePriceComposed, "total"),
        materialTotal: getJsonNumber(service.sellingPriceComposed, "material"),
        materialPurchaseTotal: getJsonNumber(service.purchasePriceComposed, "material"),
        workerTotal: getJsonNumber(service.sellingPriceComposed, "worker"),
        workerPurchaseTotal: getJsonNumber(service.purchasePriceComposed, "worker"),
        materialComposition: sanitizeCompositionRows(service.materialComposition),
        personalComposition: sanitizeCompositionRows(service.personalComposition),
      };
      memo.set(serviceId, lockedResult);
      return lockedResult;
    }

    stack.add(serviceId);
    try {
      const materialComposition = sanitizeCompositionRows(service.materialComposition);
      const personalComposition = sanitizeCompositionRows(service.personalComposition);
      const hasMaterialComposition = materialComposition.length > 0;
      const hasPersonalComposition = personalComposition.length > 0;

      // Without any composition there is no automatic overwrite:
      // manually maintained prices must be preserved.
      if (!hasMaterialComposition && !hasPersonalComposition) {
        const manualResult = {
          sellingTotal: getJsonNumber(service.sellingPriceComposed, "total") || toNumber(service.sellingPrice),
          purchaseTotal: getJsonNumber(service.purchasePriceComposed, "total"),
          materialTotal: getJsonNumber(service.sellingPriceComposed, "material"),
          materialPurchaseTotal: getJsonNumber(service.purchasePriceComposed, "material"),
          workerTotal: getJsonNumber(service.sellingPriceComposed, "worker"),
          workerPurchaseTotal: getJsonNumber(service.purchasePriceComposed, "worker"),
          materialComposition,
          personalComposition,
        };
        memo.set(serviceId, manualResult);
        return manualResult;
      }

      let materialTotal = 0;
      let materialPurchaseTotal = 0;

      // Resolve each material row to current prices (product row, or a
      // recursively calculated nested service), accumulating totals.
      const normalizedMaterialComposition = materialComposition.map((entry) => {
        const quantity = toNumber(entry.quantity);
        const productId = normalizeId(entry.product);
        const childServiceId = normalizeId(entry.service);

        // Fall back to the prices stored on the row itself when no
        // product/service reference resolves.
        let sellingPrice = toNumber(entry.price);
        let purchasePrice = toNumber(entry.purchasePrice);

        // NOTE(review): id 0 is treated as "no reference" by these truthy checks.
        if (productId) {
          const product = productMap.get(productId);
          // NOTE(review): snake_case fields here vs camelCase elsewhere —
          // confirm the actual column names on schema.products.
          sellingPrice = toNumber(product?.selling_price);
          purchasePrice = toNumber(product?.purchase_price);
        } else if (childServiceId) {
          const child = calculateService(childServiceId);
          sellingPrice = toNumber(child.sellingTotal);
          purchasePrice = toNumber(child.purchaseTotal);
        }

        materialTotal += quantity * sellingPrice;
        materialPurchaseTotal += quantity * purchasePrice;

        return {
          ...entry,
          price: round2(sellingPrice),
          purchasePrice: round2(purchasePrice),
        };
      });

      let workerTotal = 0;
      let workerPurchaseTotal = 0;
      // Resolve each personal row against the tenant's hourrates.
      const normalizedPersonalComposition = personalComposition.map((entry) => {
        const quantity = toNumber(entry.quantity);
        const hourrateId = normalizeUuid(entry.hourrate);

        let sellingPrice = toNumber(entry.price);
        let purchasePrice = toNumber(entry.purchasePrice);

        if (hourrateId) {
          const hourrate = hourrateMap.get(hourrateId);
          if (hourrate) {
            // NOTE(review): mixed naming (sellingPrice vs purchase_price) —
            // verify against schema.hourrates.
            sellingPrice = toNumber(hourrate.sellingPrice);
            purchasePrice = toNumber(hourrate.purchase_price);
          }
        }

        workerTotal += quantity * sellingPrice;
        workerPurchaseTotal += quantity * purchasePrice;

        return {
          ...entry,
          price: round2(sellingPrice),
          purchasePrice: round2(purchasePrice),
        };
      });

      const result = {
        sellingTotal: round2(materialTotal + workerTotal),
        purchaseTotal: round2(materialPurchaseTotal + workerPurchaseTotal),
        materialTotal: round2(materialTotal),
        materialPurchaseTotal: round2(materialPurchaseTotal),
        workerTotal: round2(workerTotal),
        workerPurchaseTotal: round2(workerPurchaseTotal),
        materialComposition: normalizedMaterialComposition,
        personalComposition: normalizedPersonalComposition,
      };

      memo.set(serviceId, result);
      return result;
    } finally {
      // Always release the cycle guard, even if a lookup throws.
      stack.delete(serviceId);
    }
  };

  // Warm the memo for every service of the tenant.
  for (const service of services) {
    calculateService(service.id);
  }

  // Persist only unlocked services whose calculated state differs from
  // what is currently stored (JSON.stringify used as a cheap deep-equal).
  const updates = services
    .filter((service) => !service.priceUpdateLocked)
    .map(async (service) => {
      const calc = memo.get(service.id);
      if (!calc) return;

      const sellingPriceComposed = {
        worker: calc.workerTotal,
        material: calc.materialTotal,
        total: calc.sellingTotal,
      };

      const purchasePriceComposed = {
        worker: calc.workerPurchaseTotal,
        material: calc.materialPurchaseTotal,
        total: calc.purchaseTotal,
      };

      const unchanged =
        JSON.stringify(service.materialComposition ?? []) === JSON.stringify(calc.materialComposition) &&
        JSON.stringify(service.personalComposition ?? []) === JSON.stringify(calc.personalComposition) &&
        JSON.stringify(service.sellingPriceComposed ?? {}) === JSON.stringify(sellingPriceComposed) &&
        JSON.stringify(service.purchasePriceComposed ?? {}) === JSON.stringify(purchasePriceComposed) &&
        round2(toNumber(service.sellingPrice)) === calc.sellingTotal;

      if (unchanged) return;

      await server.db
        .update(schema.services)
        .set({
          materialComposition: calc.materialComposition,
          personalComposition: calc.personalComposition,
          sellingPriceComposed,
          purchasePriceComposed,
          sellingPrice: calc.sellingTotal,
          updatedAt: new Date(),
          updatedBy: updatedBy ?? null,
        })
        .where(and(eq(schema.services.id, service.id), eq(schema.services.tenant, tenantId)));
    });

  // All row updates run concurrently; any single failure rejects the whole call.
  await Promise.all(updates);
}
|
||||
@@ -1,6 +1,9 @@
|
||||
import { FastifyInstance } from "fastify";
|
||||
import fp from "fastify-plugin";
|
||||
import { secrets } from "../utils/secrets";
|
||||
import { and, eq } from "drizzle-orm";
|
||||
import { authUsers, m2mApiKeys } from "../../db/schema";
|
||||
import { createHash } from "node:crypto";
|
||||
|
||||
/**
|
||||
* Fastify Plugin für Machine-to-Machine Authentifizierung.
|
||||
@@ -12,26 +15,99 @@ import { secrets } from "../utils/secrets";
|
||||
* server.register(m2mAuthPlugin, { allowedPrefix: '/internal' })
|
||||
*/
|
||||
export default fp(async (server: FastifyInstance, opts: { allowedPrefix?: string } = {}) => {
|
||||
//const allowedPrefix = opts.allowedPrefix || "/internal";
|
||||
const hashApiKey = (apiKey: string) =>
|
||||
createHash("sha256").update(apiKey, "utf8").digest("hex")
|
||||
|
||||
server.addHook("preHandler", async (req, reply) => {
|
||||
try {
|
||||
// Nur prüfen, wenn Route unterhalb des Prefix liegt
|
||||
//if (!req.url.startsWith(allowedPrefix)) return;
|
||||
const apiKeyHeader = req.headers["x-api-key"];
|
||||
const apiKey = Array.isArray(apiKeyHeader) ? apiKeyHeader[0] : apiKeyHeader;
|
||||
|
||||
const apiKey = req.headers["x-api-key"];
|
||||
|
||||
if (!apiKey || apiKey !== secrets.M2M_API_KEY) {
|
||||
if (!apiKey) {
|
||||
server.log.warn(`[M2M Auth] Ungültiger oder fehlender API-Key bei ${req.url}`);
|
||||
return reply.status(401).send({ error: "Unauthorized" });
|
||||
}
|
||||
|
||||
// Zusatzinformationen im Request (z. B. interne Kennung)
|
||||
const keyHash = hashApiKey(apiKey);
|
||||
|
||||
const keyRows = await server.db
|
||||
.select({
|
||||
id: m2mApiKeys.id,
|
||||
tenantId: m2mApiKeys.tenantId,
|
||||
userId: m2mApiKeys.userId,
|
||||
active: m2mApiKeys.active,
|
||||
expiresAt: m2mApiKeys.expiresAt,
|
||||
name: m2mApiKeys.name,
|
||||
userEmail: authUsers.email,
|
||||
})
|
||||
.from(m2mApiKeys)
|
||||
.innerJoin(authUsers, eq(authUsers.id, m2mApiKeys.userId))
|
||||
.where(and(
|
||||
eq(m2mApiKeys.keyHash, keyHash),
|
||||
eq(m2mApiKeys.active, true)
|
||||
))
|
||||
.limit(1)
|
||||
|
||||
let key = keyRows[0]
|
||||
if (!key) {
|
||||
const fallbackValid = apiKey === secrets.M2M_API_KEY
|
||||
if (!fallbackValid) {
|
||||
server.log.warn(`[M2M Auth] Ungültiger API-Key bei ${req.url}`)
|
||||
return reply.status(401).send({ error: "Unauthorized" })
|
||||
}
|
||||
|
||||
// Backward compatibility mode for one global key.
|
||||
// The caller must provide user/tenant identifiers in headers.
|
||||
const tenantIdHeader = req.headers["x-tenant-id"]
|
||||
const userIdHeader = req.headers["x-user-id"]
|
||||
const tenantId = Number(Array.isArray(tenantIdHeader) ? tenantIdHeader[0] : tenantIdHeader)
|
||||
const userId = Array.isArray(userIdHeader) ? userIdHeader[0] : userIdHeader
|
||||
|
||||
if (!tenantId || !userId) {
|
||||
return reply.status(401).send({ error: "Missing x-tenant-id or x-user-id for legacy M2M key" })
|
||||
}
|
||||
|
||||
const users = await server.db
|
||||
.select({ email: authUsers.email })
|
||||
.from(authUsers)
|
||||
.where(eq(authUsers.id, userId))
|
||||
.limit(1)
|
||||
|
||||
if (!users[0]) {
|
||||
return reply.status(401).send({ error: "Unknown user for legacy M2M key" })
|
||||
}
|
||||
|
||||
req.user = {
|
||||
user_id: userId,
|
||||
email: users[0].email,
|
||||
tenant_id: tenantId
|
||||
}
|
||||
} else {
|
||||
if (key.expiresAt && new Date(key.expiresAt).getTime() < Date.now()) {
|
||||
return reply.status(401).send({ error: "Expired API key" })
|
||||
}
|
||||
|
||||
req.user = {
|
||||
user_id: key.userId,
|
||||
email: key.userEmail,
|
||||
tenant_id: key.tenantId
|
||||
}
|
||||
|
||||
await server.db
|
||||
.update(m2mApiKeys)
|
||||
.set({ lastUsedAt: new Date(), updatedAt: new Date() })
|
||||
.where(eq(m2mApiKeys.id, key.id))
|
||||
}
|
||||
|
||||
(req as any).m2m = {
|
||||
verified: true,
|
||||
type: "internal",
|
||||
key: apiKey,
|
||||
};
|
||||
|
||||
req.role = "m2m"
|
||||
req.permissions = []
|
||||
req.hasPermission = () => false
|
||||
} catch (err) {
|
||||
// @ts-ignore
|
||||
server.log.error("[M2M Auth] Fehler beim Prüfen des API-Keys:", err);
|
||||
|
||||
@@ -1,28 +1,22 @@
|
||||
import fp from "fastify-plugin"
|
||||
import {drizzle, NodePgDatabase} from "drizzle-orm/node-postgres"
|
||||
import * as schema from "../../db/schema"
|
||||
import {secrets} from "../utils/secrets";
|
||||
import { Pool } from "pg"
|
||||
// src/plugins/db.ts
|
||||
import fp from "fastify-plugin";
|
||||
import { NodePgDatabase } from "drizzle-orm/node-postgres";
|
||||
import * as schema from "../../db/schema";
|
||||
import { db, pool } from "../../db"; // <--- Importiert jetzt die globale Instanz
|
||||
|
||||
export default fp(async (server, opts) => {
|
||||
|
||||
const pool = new Pool({
|
||||
connectionString: secrets.DATABASE_URL,
|
||||
max: 10, // je nach Last
|
||||
})
|
||||
// Wir nutzen die db, die wir in src/db/index.ts erstellt haben
|
||||
server.decorate("db", db);
|
||||
|
||||
const db = drizzle(pool , {schema})
|
||||
|
||||
// Dekorieren -> überall server.db
|
||||
server.decorate("db", db)
|
||||
|
||||
// Graceful Shutdown
|
||||
// Graceful Shutdown: Wenn Fastify ausgeht, schließen wir den Pool
|
||||
server.addHook("onClose", async () => {
|
||||
await pool.end()
|
||||
})
|
||||
console.log("[DB] Closing connection pool...");
|
||||
await pool.end();
|
||||
});
|
||||
|
||||
console.log("Drizzle database connected")
|
||||
})
|
||||
console.log("[Fastify] Database attached from shared instance");
|
||||
});
|
||||
|
||||
declare module "fastify" {
|
||||
interface FastifyInstance {
|
||||
|
||||
@@ -58,8 +58,6 @@ const queryConfigPlugin: FastifyPluginAsync<QueryConfigPluginOptions> = async (
|
||||
|
||||
const query = req.query as Record<string, any>
|
||||
|
||||
console.log(query)
|
||||
|
||||
// Pagination deaktivieren?
|
||||
const disablePagination =
|
||||
query.noPagination === 'true' ||
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
import { FastifyInstance } from "fastify";
|
||||
import fp from "fastify-plugin";
|
||||
import { createClient, SupabaseClient } from "@supabase/supabase-js";
|
||||
import {secrets} from "../utils/secrets";
|
||||
|
||||
export default fp(async (server: FastifyInstance) => {
|
||||
const supabaseUrl = secrets.SUPABASE_URL
|
||||
const supabaseServiceKey = secrets.SUPABASE_SERVICE_ROLE_KEY
|
||||
const supabase: SupabaseClient = createClient(supabaseUrl, supabaseServiceKey);
|
||||
|
||||
// Fastify um supabase erweitern
|
||||
server.decorate("supabase", supabase);
|
||||
});
|
||||
|
||||
declare module "fastify" {
|
||||
interface FastifyInstance {
|
||||
supabase: SupabaseClient;
|
||||
}
|
||||
}
|
||||
@@ -5,26 +5,33 @@ import swaggerUi from "@fastify/swagger-ui";
|
||||
|
||||
export default fp(async (server: FastifyInstance) => {
|
||||
await server.register(swagger, {
|
||||
mode: "dynamic", // wichtig: generiert echtes OpenAPI JSON
|
||||
mode: "dynamic",
|
||||
openapi: {
|
||||
info: {
|
||||
title: "Multi-Tenant API",
|
||||
description: "API Dokumentation für dein Backend",
|
||||
title: "FEDEO Backend API",
|
||||
description: "OpenAPI specification for the FEDEO backend",
|
||||
version: "1.0.0",
|
||||
},
|
||||
servers: [{ url: "http://localhost:3000" }],
|
||||
servers: [{ url: "/" }],
|
||||
components: {
|
||||
securitySchemes: {
|
||||
bearerAuth: {
|
||||
type: "http",
|
||||
scheme: "bearer",
|
||||
bearerFormat: "JWT"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
// @ts-ignore
|
||||
await server.register(swaggerUi, {
|
||||
routePrefix: "/docs", // UI erreichbar unter http://localhost:3000/docs
|
||||
swagger: {
|
||||
info: {
|
||||
title: "Multi-Tenant API",
|
||||
version: "1.0.0",
|
||||
},
|
||||
},
|
||||
exposeRoute: true,
|
||||
routePrefix: "/docs",
|
||||
});
|
||||
|
||||
// Stable raw spec path
|
||||
server.get("/openapi.json", async (_req, reply) => {
|
||||
return reply.send(server.swagger());
|
||||
});
|
||||
});
|
||||
@@ -1,5 +1,7 @@
|
||||
import { FastifyInstance, FastifyRequest } from "fastify";
|
||||
import fp from "fastify-plugin";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { tenants } from "../../db/schema";
|
||||
|
||||
export default fp(async (server: FastifyInstance) => {
|
||||
server.addHook("preHandler", async (req, reply) => {
|
||||
@@ -9,11 +11,12 @@ export default fp(async (server: FastifyInstance) => {
|
||||
return;
|
||||
}
|
||||
// Tenant aus DB laden
|
||||
const { data: tenant } = await server.supabase
|
||||
.from("tenants")
|
||||
.select("*")
|
||||
.eq("portalDomain", host)
|
||||
.single();
|
||||
const rows = await server.db
|
||||
.select()
|
||||
.from(tenants)
|
||||
.where(eq(tenants.portalDomain, host))
|
||||
.limit(1);
|
||||
const tenant = rows[0];
|
||||
|
||||
|
||||
if(!tenant) {
|
||||
|
||||
@@ -94,6 +94,7 @@ export default async function adminRoutes(server: FastifyInstance) {
|
||||
short: tenants.short,
|
||||
locked: tenants.locked,
|
||||
numberRanges: tenants.numberRanges,
|
||||
accountChart: tenants.accountChart,
|
||||
extraModules: tenants.extraModules,
|
||||
})
|
||||
.from(authTenantUsers)
|
||||
|
||||
@@ -1,11 +1,60 @@
|
||||
import { FastifyInstance } from "fastify"
|
||||
import bcrypt from "bcrypt"
|
||||
import { eq } from "drizzle-orm"
|
||||
import jwt from "jsonwebtoken"
|
||||
import { secrets } from "../../utils/secrets"
|
||||
|
||||
import { authUsers } from "../../../db/schema" // wichtig: Drizzle Schema importieren!
|
||||
|
||||
export default async function authRoutesAuthenticated(server: FastifyInstance) {
|
||||
|
||||
server.post("/auth/refresh", {
|
||||
schema: {
|
||||
tags: ["Auth"],
|
||||
summary: "Refresh JWT for current authenticated user",
|
||||
response: {
|
||||
200: {
|
||||
type: "object",
|
||||
properties: {
|
||||
token: { type: "string" },
|
||||
},
|
||||
required: ["token"],
|
||||
},
|
||||
401: {
|
||||
type: "object",
|
||||
properties: {
|
||||
error: { type: "string" },
|
||||
},
|
||||
required: ["error"],
|
||||
},
|
||||
},
|
||||
},
|
||||
}, async (req, reply) => {
|
||||
if (!req.user?.user_id) {
|
||||
return reply.code(401).send({ error: "Unauthorized" })
|
||||
}
|
||||
|
||||
const token = jwt.sign(
|
||||
{
|
||||
user_id: req.user.user_id,
|
||||
email: req.user.email,
|
||||
tenant_id: req.user.tenant_id,
|
||||
},
|
||||
secrets.JWT_SECRET!,
|
||||
{ expiresIn: "6h" }
|
||||
)
|
||||
|
||||
reply.setCookie("token", token, {
|
||||
path: "/",
|
||||
httpOnly: true,
|
||||
sameSite: process.env.NODE_ENV === "production" ? "none" : "lax",
|
||||
secure: process.env.NODE_ENV === "production",
|
||||
maxAge: 60 * 60 * 6,
|
||||
})
|
||||
|
||||
return { token }
|
||||
})
|
||||
|
||||
server.post("/auth/password/change", {
|
||||
schema: {
|
||||
tags: ["Auth"],
|
||||
|
||||
@@ -137,7 +137,7 @@ export default async function authRoutes(server: FastifyInstance) {
|
||||
httpOnly: true,
|
||||
sameSite: process.env.NODE_ENV === "production" ? "none" : "lax",
|
||||
secure: process.env.NODE_ENV === "production",
|
||||
maxAge: 60 * 60 * 3,
|
||||
maxAge: 60 * 60 * 6,
|
||||
});
|
||||
|
||||
return { token };
|
||||
|
||||
@@ -51,9 +51,11 @@ export default async function meRoutes(server: FastifyInstance) {
|
||||
name: tenants.name,
|
||||
short: tenants.short,
|
||||
locked: tenants.locked,
|
||||
features: tenants.features,
|
||||
extraModules: tenants.extraModules,
|
||||
businessInfo: tenants.businessInfo,
|
||||
numberRanges: tenants.numberRanges,
|
||||
accountChart: tenants.accountChart,
|
||||
dokuboxkey: tenants.dokuboxkey,
|
||||
standardEmailForInvoices: tenants.standardEmailForInvoices,
|
||||
standardPaymentDays: tenants.standardPaymentDays,
|
||||
|
||||
@@ -4,10 +4,19 @@ import dayjs from "dayjs"
|
||||
|
||||
import { secrets } from "../utils/secrets"
|
||||
import { insertHistoryItem } from "../utils/history"
|
||||
import { decrypt, encrypt } from "../utils/crypt"
|
||||
import { DE_BANK_CODE_TO_NAME } from "../utils/deBankCodes"
|
||||
import { DE_BANK_CODE_TO_BIC } from "../utils/deBankBics"
|
||||
|
||||
import {
|
||||
bankrequisitions,
|
||||
bankstatements,
|
||||
createddocuments,
|
||||
customers,
|
||||
entitybankaccounts,
|
||||
incominginvoices,
|
||||
statementallocations,
|
||||
vendors,
|
||||
} from "../../db/schema"
|
||||
|
||||
import {
|
||||
@@ -17,6 +26,322 @@ import {
|
||||
|
||||
|
||||
export default async function bankingRoutes(server: FastifyInstance) {
|
||||
const normalizeIban = (value?: string | null) =>
|
||||
String(value || "").replace(/\s+/g, "").toUpperCase()
|
||||
|
||||
const pickPartnerBankData = (statement: any, partnerType: "customer" | "vendor") => {
|
||||
if (!statement) return null
|
||||
|
||||
const prefersDebit = partnerType === "customer"
|
||||
? Number(statement.amount) >= 0
|
||||
: Number(statement.amount) > 0
|
||||
|
||||
const primary = prefersDebit
|
||||
? { iban: statement.debIban }
|
||||
: { iban: statement.credIban }
|
||||
const fallback = prefersDebit
|
||||
? { iban: statement.credIban }
|
||||
: { iban: statement.debIban }
|
||||
|
||||
const primaryIban = normalizeIban(primary.iban)
|
||||
if (primaryIban) {
|
||||
return {
|
||||
iban: primaryIban,
|
||||
}
|
||||
}
|
||||
|
||||
const fallbackIban = normalizeIban(fallback.iban)
|
||||
if (fallbackIban) {
|
||||
return {
|
||||
iban: fallbackIban,
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
const mergePartnerIban = (infoData: Record<string, any>, iban: string, bankAccountId?: number | null) => {
|
||||
if (!iban && !bankAccountId) return infoData || {}
|
||||
const info = infoData && typeof infoData === "object" ? { ...infoData } : {}
|
||||
|
||||
if (iban) {
|
||||
const existing = Array.isArray(info.bankingIbans) ? info.bankingIbans : []
|
||||
const merged = [...new Set([...existing.map((i: string) => normalizeIban(i)), iban])]
|
||||
info.bankingIbans = merged
|
||||
if (!info.bankingIban) info.bankingIban = iban
|
||||
}
|
||||
|
||||
if (bankAccountId) {
|
||||
const existingIds = Array.isArray(info.bankAccountIds) ? info.bankAccountIds : []
|
||||
if (!existingIds.includes(bankAccountId)) {
|
||||
info.bankAccountIds = [...existingIds, bankAccountId]
|
||||
}
|
||||
}
|
||||
|
||||
return info
|
||||
}
|
||||
|
||||
const ibanLengthByCountry: Record<string, number> = {
|
||||
DE: 22,
|
||||
AT: 20,
|
||||
CH: 21,
|
||||
NL: 18,
|
||||
BE: 16,
|
||||
FR: 27,
|
||||
ES: 24,
|
||||
IT: 27,
|
||||
LU: 20,
|
||||
}
|
||||
|
||||
const isValidIbanLocal = (iban: string) => {
|
||||
const normalized = normalizeIban(iban)
|
||||
if (!normalized || normalized.length < 15 || normalized.length > 34) return false
|
||||
if (!/^[A-Z]{2}[0-9]{2}[A-Z0-9]+$/.test(normalized)) return false
|
||||
|
||||
const country = normalized.slice(0, 2)
|
||||
const expectedLength = ibanLengthByCountry[country]
|
||||
if (expectedLength && normalized.length !== expectedLength) return false
|
||||
|
||||
const rearranged = normalized.slice(4) + normalized.slice(0, 4)
|
||||
let numeric = ""
|
||||
for (const ch of rearranged) {
|
||||
if (ch >= "A" && ch <= "Z") numeric += (ch.charCodeAt(0) - 55).toString()
|
||||
else numeric += ch
|
||||
}
|
||||
|
||||
let remainder = 0
|
||||
for (const digit of numeric) {
|
||||
remainder = (remainder * 10 + Number(digit)) % 97
|
||||
}
|
||||
|
||||
return remainder === 1
|
||||
}
|
||||
|
||||
const resolveGermanBankDataFromIbanLocal = (iban: string) => {
|
||||
const normalized = normalizeIban(iban)
|
||||
if (!isValidIbanLocal(normalized)) return null
|
||||
|
||||
// Für DE-IBANs kann die BLZ aus Position 5-12 lokal gelesen werden.
|
||||
if (normalized.startsWith("DE") && normalized.length === 22) {
|
||||
const bankCode = normalized.slice(4, 12)
|
||||
const bankName = DE_BANK_CODE_TO_NAME[bankCode] || `Unbekannt (BLZ ${bankCode})`
|
||||
const bic = DE_BANK_CODE_TO_BIC[bankCode] || null
|
||||
return {
|
||||
bankName,
|
||||
bic,
|
||||
bankCode,
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
const resolveEntityBankAccountId = async (
|
||||
tenantId: number,
|
||||
userId: string,
|
||||
iban: string
|
||||
) => {
|
||||
const normalizedIban = normalizeIban(iban)
|
||||
if (!normalizedIban) return null
|
||||
|
||||
const bankData = resolveGermanBankDataFromIbanLocal(normalizedIban)
|
||||
|
||||
const allAccounts = await server.db
|
||||
.select({
|
||||
id: entitybankaccounts.id,
|
||||
ibanEncrypted: entitybankaccounts.ibanEncrypted,
|
||||
bankNameEncrypted: entitybankaccounts.bankNameEncrypted,
|
||||
bicEncrypted: entitybankaccounts.bicEncrypted,
|
||||
})
|
||||
.from(entitybankaccounts)
|
||||
.where(eq(entitybankaccounts.tenant, tenantId))
|
||||
|
||||
const existing = allAccounts.find((row) => {
|
||||
if (!row.ibanEncrypted) return false
|
||||
try {
|
||||
const decryptedIban = decrypt(row.ibanEncrypted as any)
|
||||
return normalizeIban(decryptedIban) === normalizedIban
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
})
|
||||
|
||||
if (existing?.id) {
|
||||
if (bankData) {
|
||||
let currentBankName = ""
|
||||
let currentBic = ""
|
||||
try {
|
||||
currentBankName = String(decrypt(existing.bankNameEncrypted as any) || "").trim()
|
||||
} catch {
|
||||
currentBankName = ""
|
||||
}
|
||||
try {
|
||||
currentBic = String(decrypt((existing as any).bicEncrypted as any) || "").trim()
|
||||
} catch {
|
||||
currentBic = ""
|
||||
}
|
||||
|
||||
const nextBankName = bankData?.bankName || "Unbekannt"
|
||||
const nextBic = bankData?.bic || "UNBEKANNT"
|
||||
if (currentBankName !== nextBankName || currentBic !== nextBic) {
|
||||
await server.db
|
||||
.update(entitybankaccounts)
|
||||
.set({
|
||||
bankNameEncrypted: encrypt(nextBankName),
|
||||
bicEncrypted: encrypt(nextBic),
|
||||
updatedAt: new Date(),
|
||||
updatedBy: userId,
|
||||
})
|
||||
.where(and(eq(entitybankaccounts.id, Number(existing.id)), eq(entitybankaccounts.tenant, tenantId)))
|
||||
}
|
||||
}
|
||||
|
||||
return Number(existing.id)
|
||||
}
|
||||
|
||||
const [created] = await server.db
|
||||
.insert(entitybankaccounts)
|
||||
.values({
|
||||
tenant: tenantId,
|
||||
ibanEncrypted: encrypt(normalizedIban),
|
||||
bicEncrypted: encrypt(bankData?.bic || "UNBEKANNT"),
|
||||
bankNameEncrypted: encrypt(bankData?.bankName || "Unbekannt"),
|
||||
description: "Automatisch aus Bankbuchung übernommen",
|
||||
updatedAt: new Date(),
|
||||
updatedBy: userId,
|
||||
})
|
||||
.returning({ id: entitybankaccounts.id })
|
||||
|
||||
return created?.id ? Number(created.id) : null
|
||||
}
|
||||
|
||||
server.get("/banking/iban/:iban", async (req, reply) => {
|
||||
try {
|
||||
const { iban } = req.params as { iban: string }
|
||||
const normalized = normalizeIban(iban)
|
||||
if (!normalized) {
|
||||
return reply.code(400).send({ error: "IBAN missing" })
|
||||
}
|
||||
|
||||
const valid = isValidIbanLocal(normalized)
|
||||
const bankData = resolveGermanBankDataFromIbanLocal(normalized)
|
||||
|
||||
return reply.send({
|
||||
iban: normalized,
|
||||
valid,
|
||||
bic: bankData?.bic || null,
|
||||
bankName: bankData?.bankName || null,
|
||||
bankCode: bankData?.bankCode || null,
|
||||
})
|
||||
} catch (err) {
|
||||
server.log.error(err)
|
||||
return reply.code(500).send({ error: "Failed to resolve IBAN data" })
|
||||
}
|
||||
})
|
||||
|
||||
const assignIbanFromStatementToCustomer = async (tenantId: number, userId: string, statementId: number, createdDocumentId?: number) => {
|
||||
if (!createdDocumentId) return
|
||||
|
||||
const [statement] = await server.db
|
||||
.select()
|
||||
.from(bankstatements)
|
||||
.where(and(eq(bankstatements.id, statementId), eq(bankstatements.tenant, tenantId)))
|
||||
.limit(1)
|
||||
|
||||
if (!statement) return
|
||||
|
||||
const [doc] = await server.db
|
||||
.select({ customer: createddocuments.customer })
|
||||
.from(createddocuments)
|
||||
.where(and(eq(createddocuments.id, createdDocumentId), eq(createddocuments.tenant, tenantId)))
|
||||
.limit(1)
|
||||
|
||||
const customerId = doc?.customer
|
||||
if (!customerId) return
|
||||
|
||||
const partnerBank = pickPartnerBankData(statement, "customer")
|
||||
if (!partnerBank?.iban) return
|
||||
|
||||
const [customer] = await server.db
|
||||
.select({ id: customers.id, infoData: customers.infoData })
|
||||
.from(customers)
|
||||
.where(and(eq(customers.id, customerId), eq(customers.tenant, tenantId)))
|
||||
.limit(1)
|
||||
|
||||
if (!customer) return
|
||||
|
||||
const bankAccountId = await resolveEntityBankAccountId(
|
||||
tenantId,
|
||||
userId,
|
||||
partnerBank.iban
|
||||
)
|
||||
|
||||
const newInfoData = mergePartnerIban(
|
||||
(customer.infoData || {}) as Record<string, any>,
|
||||
partnerBank.iban,
|
||||
bankAccountId
|
||||
)
|
||||
await server.db
|
||||
.update(customers)
|
||||
.set({
|
||||
infoData: newInfoData,
|
||||
updatedAt: new Date(),
|
||||
updatedBy: userId,
|
||||
})
|
||||
.where(and(eq(customers.id, customerId), eq(customers.tenant, tenantId)))
|
||||
}
|
||||
|
||||
const assignIbanFromStatementToVendor = async (tenantId: number, userId: string, statementId: number, incomingInvoiceId?: number) => {
|
||||
if (!incomingInvoiceId) return
|
||||
|
||||
const [statement] = await server.db
|
||||
.select()
|
||||
.from(bankstatements)
|
||||
.where(and(eq(bankstatements.id, statementId), eq(bankstatements.tenant, tenantId)))
|
||||
.limit(1)
|
||||
|
||||
if (!statement) return
|
||||
|
||||
const [invoice] = await server.db
|
||||
.select({ vendor: incominginvoices.vendor })
|
||||
.from(incominginvoices)
|
||||
.where(and(eq(incominginvoices.id, incomingInvoiceId), eq(incominginvoices.tenant, tenantId)))
|
||||
.limit(1)
|
||||
|
||||
const vendorId = invoice?.vendor
|
||||
if (!vendorId) return
|
||||
|
||||
const partnerBank = pickPartnerBankData(statement, "vendor")
|
||||
if (!partnerBank?.iban) return
|
||||
|
||||
const [vendor] = await server.db
|
||||
.select({ id: vendors.id, infoData: vendors.infoData })
|
||||
.from(vendors)
|
||||
.where(and(eq(vendors.id, vendorId), eq(vendors.tenant, tenantId)))
|
||||
.limit(1)
|
||||
|
||||
if (!vendor) return
|
||||
|
||||
const bankAccountId = await resolveEntityBankAccountId(
|
||||
tenantId,
|
||||
userId,
|
||||
partnerBank.iban
|
||||
)
|
||||
|
||||
const newInfoData = mergePartnerIban(
|
||||
(vendor.infoData || {}) as Record<string, any>,
|
||||
partnerBank.iban,
|
||||
bankAccountId
|
||||
)
|
||||
await server.db
|
||||
.update(vendors)
|
||||
.set({
|
||||
infoData: newInfoData,
|
||||
updatedAt: new Date(),
|
||||
updatedBy: userId,
|
||||
})
|
||||
.where(and(eq(vendors.id, vendorId), eq(vendors.tenant, tenantId)))
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------
|
||||
// 🔐 GoCardLess Token Handling
|
||||
@@ -171,9 +496,35 @@ export default async function bankingRoutes(server: FastifyInstance) {
|
||||
|
||||
const createdRecord = inserted[0]
|
||||
|
||||
if (createdRecord?.createddocument) {
|
||||
try {
|
||||
await assignIbanFromStatementToCustomer(
|
||||
req.user.tenant_id,
|
||||
req.user.user_id,
|
||||
Number(createdRecord.bankstatement),
|
||||
Number(createdRecord.createddocument)
|
||||
)
|
||||
} catch (err) {
|
||||
server.log.warn({ err, allocationId: createdRecord.id }, "Konnte IBAN nicht automatisch beim Kunden hinterlegen")
|
||||
}
|
||||
}
|
||||
|
||||
if (createdRecord?.incominginvoice) {
|
||||
try {
|
||||
await assignIbanFromStatementToVendor(
|
||||
req.user.tenant_id,
|
||||
req.user.user_id,
|
||||
Number(createdRecord.bankstatement),
|
||||
Number(createdRecord.incominginvoice)
|
||||
)
|
||||
} catch (err) {
|
||||
server.log.warn({ err, allocationId: createdRecord.id }, "Konnte IBAN nicht automatisch beim Lieferanten hinterlegen")
|
||||
}
|
||||
}
|
||||
|
||||
await insertHistoryItem(server, {
|
||||
entity: "bankstatements",
|
||||
entityId: createdRecord.id,
|
||||
entityId: Number(createdRecord.bankstatement),
|
||||
action: "created",
|
||||
created_by: req.user.user_id,
|
||||
tenant_id: req.user.tenant_id,
|
||||
@@ -216,7 +567,7 @@ export default async function bankingRoutes(server: FastifyInstance) {
|
||||
|
||||
await insertHistoryItem(server, {
|
||||
entity: "bankstatements",
|
||||
entityId: id,
|
||||
entityId: Number(old.bankstatement),
|
||||
action: "deleted",
|
||||
created_by: req.user.user_id,
|
||||
tenant_id: req.user.tenant_id,
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
import { FastifyInstance } from "fastify";
|
||||
import jwt from "jsonwebtoken";
|
||||
import {insertHistoryItem} from "../utils/history";
|
||||
import {buildExportZip} from "../utils/export/datev";
|
||||
import {s3} from "../utils/s3";
|
||||
import {GetObjectCommand, PutObjectCommand} from "@aws-sdk/client-s3"
|
||||
@@ -9,6 +7,8 @@ import dayjs from "dayjs";
|
||||
import {randomUUID} from "node:crypto";
|
||||
import {secrets} from "../utils/secrets";
|
||||
import {createSEPAExport} from "../utils/export/sepa";
|
||||
import {generatedexports} from "../../db/schema";
|
||||
import {eq} from "drizzle-orm";
|
||||
|
||||
const createDatevExport = async (server:FastifyInstance,req:any,startDate,endDate,beraternr,mandantennr) => {
|
||||
try {
|
||||
@@ -45,25 +45,21 @@ const createDatevExport = async (server:FastifyInstance,req:any,startDate,endDat
|
||||
|
||||
console.log(url)
|
||||
|
||||
// 5) In Supabase-DB speichern
|
||||
const { data, error } = await server.supabase
|
||||
.from("exports")
|
||||
.insert([
|
||||
{
|
||||
tenant_id: req.user.tenant_id,
|
||||
start_date: startDate,
|
||||
end_date: endDate,
|
||||
valid_until: dayjs().add(24,"hours").toISOString(),
|
||||
file_path: fileKey,
|
||||
url: url,
|
||||
created_at: new Date().toISOString(),
|
||||
},
|
||||
])
|
||||
.select()
|
||||
.single()
|
||||
// 5) In Haupt-DB speichern
|
||||
const inserted = await server.db
|
||||
.insert(generatedexports)
|
||||
.values({
|
||||
tenantId: req.user.tenant_id,
|
||||
startDate: new Date(startDate),
|
||||
endDate: new Date(endDate),
|
||||
validUntil: dayjs().add(24, "hours").toDate(),
|
||||
filePath: fileKey,
|
||||
url,
|
||||
type: "datev",
|
||||
})
|
||||
.returning()
|
||||
|
||||
console.log(data)
|
||||
console.log(error)
|
||||
console.log(inserted[0])
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
}
|
||||
@@ -120,9 +116,22 @@ export default async function exportRoutes(server: FastifyInstance) {
|
||||
//List Exports Available for Download
|
||||
|
||||
server.get("/exports", async (req,reply) => {
|
||||
const {data,error} = await server.supabase.from("exports").select().eq("tenant_id",req.user.tenant_id)
|
||||
const data = await server.db
|
||||
.select({
|
||||
id: generatedexports.id,
|
||||
created_at: generatedexports.createdAt,
|
||||
tenant_id: generatedexports.tenantId,
|
||||
start_date: generatedexports.startDate,
|
||||
end_date: generatedexports.endDate,
|
||||
valid_until: generatedexports.validUntil,
|
||||
type: generatedexports.type,
|
||||
url: generatedexports.url,
|
||||
file_path: generatedexports.filePath,
|
||||
})
|
||||
.from(generatedexports)
|
||||
.where(eq(generatedexports.tenantId, req.user.tenant_id))
|
||||
|
||||
console.log(data,error)
|
||||
console.log(data)
|
||||
reply.send(data)
|
||||
|
||||
})
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { FastifyInstance } from "fastify";
|
||||
import {createInvoicePDF, createTimeSheetPDF} from "../utils/pdf";
|
||||
//import {encodeBase64ToNiimbot, generateLabel, useNextNumberRangeNumber} from "../utils/functions";
|
||||
import {encodeBase64ToNiimbot, generateLabel, useNextNumberRangeNumber} from "../utils/functions";
|
||||
import dayjs from "dayjs";
|
||||
//import { ready as zplReady } from 'zpl-renderer-js'
|
||||
//import { renderZPL } from "zpl-image";
|
||||
@@ -15,7 +15,6 @@ import timezone from "dayjs/plugin/timezone.js";
|
||||
import {generateTimesEvaluation} from "../modules/time/evaluation.service";
|
||||
import {citys} from "../../db/schema";
|
||||
import {eq} from "drizzle-orm";
|
||||
import {useNextNumberRangeNumber} from "../utils/functions";
|
||||
import {executeManualGeneration, finishManualGeneration} from "../modules/serialexecution.service";
|
||||
dayjs.extend(customParseFormat)
|
||||
dayjs.extend(isoWeek)
|
||||
@@ -100,31 +99,25 @@ export default async function functionRoutes(server: FastifyInstance) {
|
||||
|
||||
server.get('/functions/check-zip/:zip', async (req, reply) => {
|
||||
const { zip } = req.params as { zip: string }
|
||||
const normalizedZip = String(zip || "").replace(/\D/g, "")
|
||||
|
||||
if (!zip) {
|
||||
return reply.code(400).send({ error: 'ZIP is required' })
|
||||
if (normalizedZip.length !== 5) {
|
||||
return reply.code(400).send({ error: 'ZIP must contain exactly 5 digits' })
|
||||
}
|
||||
|
||||
try {
|
||||
//@ts-ignore
|
||||
const data = await server.db.select().from(citys).where(eq(citys.zip,zip))
|
||||
|
||||
|
||||
/*const { data, error } = await server.supabase
|
||||
.from('citys')
|
||||
const data = await server.db
|
||||
.select()
|
||||
.eq('zip', zip)
|
||||
.maybeSingle()
|
||||
.from(citys)
|
||||
.where(eq(citys.zip, Number(normalizedZip)))
|
||||
|
||||
if (error) {
|
||||
console.log(error)
|
||||
return reply.code(500).send({ error: 'Database error' })
|
||||
}*/
|
||||
|
||||
if (!data) {
|
||||
if (!data.length) {
|
||||
return reply.code(404).send({ error: 'ZIP not found' })
|
||||
}
|
||||
|
||||
const city = data[0]
|
||||
|
||||
//districtMap
|
||||
const bundeslaender = [
|
||||
{ code: 'DE-BW', name: 'Baden-Württemberg' },
|
||||
@@ -148,9 +141,8 @@ export default async function functionRoutes(server: FastifyInstance) {
|
||||
|
||||
|
||||
return reply.send({
|
||||
...data,
|
||||
//@ts-ignore
|
||||
state_code: bundeslaender.find(i => i.name === data.countryName)
|
||||
...city,
|
||||
state_code: bundeslaender.find(i => i.name === city.countryName)?.code || null
|
||||
})
|
||||
} catch (err) {
|
||||
console.log(err)
|
||||
@@ -184,44 +176,20 @@ export default async function functionRoutes(server: FastifyInstance) {
|
||||
await server.services.dokuboxSync.run()
|
||||
})
|
||||
|
||||
|
||||
/*server.post('/print/zpl/preview', async (req, reply) => {
|
||||
const { zpl, widthMm = 50, heightMm = 30, dpmm = 8, asBase64 = false } = req.body as {zpl:string,widthMm:number,heightMm:number,dpmm:number,asBase64:string}
|
||||
|
||||
console.log(widthMm,heightMm,dpmm)
|
||||
|
||||
if (!zpl) {
|
||||
return reply.code(400).send({ error: 'Missing ZPL string' })
|
||||
}
|
||||
|
||||
try {
|
||||
// 1️⃣ Renderer initialisieren
|
||||
const { api } = await zplReady
|
||||
|
||||
// 2️⃣ Rendern (liefert base64-encoded PNG)
|
||||
const base64Png = await api.zplToBase64Async(zpl, widthMm, heightMm, dpmm)
|
||||
|
||||
return await encodeBase64ToNiimbot(base64Png, 'top')
|
||||
} catch (err) {
|
||||
console.error('[ZPL Preview Error]', err)
|
||||
return reply.code(500).send({ error: err.message || 'Failed to render ZPL' })
|
||||
}
|
||||
})
|
||||
|
||||
server.post('/print/label', async (req, reply) => {
|
||||
const { context, width=584, heigth=354 } = req.body as {context:any,width:number,heigth:number}
|
||||
const { context, width = 584, height = 354 } = req.body as {context:any,width:number,height:number}
|
||||
|
||||
try {
|
||||
const base64 = await generateLabel(context,width,heigth)
|
||||
const base64 = await generateLabel(context,width,height)
|
||||
|
||||
return {
|
||||
encoded: await encodeBase64ToNiimbot(base64, 'top'),
|
||||
base64: base64
|
||||
}
|
||||
} catch (err) {
|
||||
console.error('[ZPL Preview Error]', err)
|
||||
return reply.code(500).send({ error: err.message || 'Failed to render ZPL' })
|
||||
console.error('[Label Render Error]', err)
|
||||
return reply.code(500).send({ error: err.message || 'Failed to render label' })
|
||||
}
|
||||
})*/
|
||||
})
|
||||
|
||||
}
|
||||
@@ -3,12 +3,11 @@ import { FastifyInstance } from "fastify";
|
||||
export default async function routes(server: FastifyInstance) {
|
||||
server.get("/ping", async () => {
|
||||
// Testquery gegen DB
|
||||
const { data, error } = await server.supabase.from("tenants").select("id").limit(1);
|
||||
const result = await server.db.execute("SELECT NOW()");
|
||||
|
||||
return {
|
||||
status: "ok",
|
||||
db: error ? "not connected" : "connected",
|
||||
tenant_count: data?.length ?? 0
|
||||
db: JSON.stringify(result.rows[0]),
|
||||
};
|
||||
});
|
||||
}
|
||||
@@ -3,8 +3,9 @@ import { FastifyPluginAsync } from 'fastify'
|
||||
import { createConversation } from '../modules/helpdesk/helpdesk.conversation.service.js'
|
||||
import { addMessage } from '../modules/helpdesk/helpdesk.message.service.js'
|
||||
import { getOrCreateContact } from '../modules/helpdesk/helpdesk.contact.service.js'
|
||||
import {extractDomain, findCustomerOrContactByEmailOrDomain} from "../utils/helpers";
|
||||
import {useNextNumberRangeNumber} from "../utils/functions";
|
||||
import { findCustomerOrContactByEmailOrDomain } from "../utils/helpers";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { helpdesk_conversations, helpdesk_messages } from "../../db/schema";
|
||||
|
||||
// -------------------------------------------------------------
|
||||
// 📧 Interne M2M-Route für eingehende E-Mails
|
||||
@@ -52,12 +53,12 @@ const helpdeskInboundEmailRoutes: FastifyPluginAsync = async (server) => {
|
||||
// 3️⃣ Konversation anhand In-Reply-To suchen
|
||||
let conversationId: string | null = null
|
||||
if (in_reply_to) {
|
||||
const { data: msg } = await server.supabase
|
||||
.from('helpdesk_messages')
|
||||
.select('conversation_id')
|
||||
.eq('external_message_id', in_reply_to)
|
||||
.maybeSingle()
|
||||
conversationId = msg?.conversation_id || null
|
||||
const msg = await server.db
|
||||
.select({ conversationId: helpdesk_messages.conversationId })
|
||||
.from(helpdesk_messages)
|
||||
.where(eq(helpdesk_messages.externalMessageId, in_reply_to))
|
||||
.limit(1)
|
||||
conversationId = msg[0]?.conversationId || null
|
||||
}
|
||||
|
||||
// 4️⃣ Neue Konversation anlegen falls keine existiert
|
||||
@@ -73,12 +74,12 @@ const helpdeskInboundEmailRoutes: FastifyPluginAsync = async (server) => {
|
||||
})
|
||||
conversationId = conversation.id
|
||||
} else {
|
||||
const { data } = await server.supabase
|
||||
.from('helpdesk_conversations')
|
||||
.select('*')
|
||||
.eq('id', conversationId)
|
||||
.single()
|
||||
conversation = data
|
||||
const rows = await server.db
|
||||
.select()
|
||||
.from(helpdesk_conversations)
|
||||
.where(eq(helpdesk_conversations.id, conversationId))
|
||||
.limit(1)
|
||||
conversation = rows[0]
|
||||
}
|
||||
|
||||
// 5️⃣ Nachricht speichern
|
||||
@@ -96,7 +97,7 @@ const helpdeskInboundEmailRoutes: FastifyPluginAsync = async (server) => {
|
||||
return res.status(201).send({
|
||||
success: true,
|
||||
conversation_id: conversationId,
|
||||
ticket_number: conversation.ticket_number,
|
||||
ticket_number: conversation?.ticket_number || conversation?.ticketNumber,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
@@ -3,70 +3,9 @@ import { FastifyPluginAsync } from 'fastify'
|
||||
import { createConversation } from '../modules/helpdesk/helpdesk.conversation.service.js'
|
||||
import { addMessage } from '../modules/helpdesk/helpdesk.message.service.js'
|
||||
import { getOrCreateContact } from '../modules/helpdesk/helpdesk.contact.service.js'
|
||||
|
||||
/**
|
||||
* Öffentliche Route zum Empfang eingehender Kontaktformular-Nachrichten.
|
||||
* Authentifizierung: über `public_token` aus helpdesk_channel_instances
|
||||
*/
|
||||
|
||||
function extractDomain(email) {
|
||||
if (!email) return null
|
||||
const parts = email.split("@")
|
||||
return parts.length === 2 ? parts[1].toLowerCase() : null
|
||||
}
|
||||
|
||||
async function findCustomerOrContactByEmailOrDomain(server,fromMail, tenantId) {
|
||||
const sender = fromMail
|
||||
const senderDomain = extractDomain(sender)
|
||||
if (!senderDomain) return null
|
||||
|
||||
|
||||
// 1️⃣ Direkter Match über contacts
|
||||
const { data: contactMatch } = await server.supabase
|
||||
.from("contacts")
|
||||
.select("id, customer")
|
||||
.eq("email", sender)
|
||||
.eq("tenant", tenantId)
|
||||
.maybeSingle()
|
||||
|
||||
if (contactMatch?.customer_id) return {
|
||||
customer: contactMatch.customer,
|
||||
contact: contactMatch.id
|
||||
}
|
||||
|
||||
// 2️⃣ Kunden laden, bei denen E-Mail oder Rechnungsmail passt
|
||||
const { data: customers, error } = await server.supabase
|
||||
.from("customers")
|
||||
.select("id, infoData")
|
||||
.eq("tenant", tenantId)
|
||||
|
||||
if (error) {
|
||||
console.error(`[Helpdesk] Fehler beim Laden der Kunden:`, error.message)
|
||||
return null
|
||||
}
|
||||
|
||||
// 3️⃣ Durch Kunden iterieren und prüfen
|
||||
for (const c of customers || []) {
|
||||
const info = c.infoData || {}
|
||||
const email = info.email?.toLowerCase()
|
||||
const invoiceEmail = info.invoiceEmail?.toLowerCase()
|
||||
|
||||
const emailDomain = extractDomain(email)
|
||||
const invoiceDomain = extractDomain(invoiceEmail)
|
||||
|
||||
// exakter Match oder Domain-Match
|
||||
if (
|
||||
sender === email ||
|
||||
sender === invoiceEmail ||
|
||||
senderDomain === emailDomain ||
|
||||
senderDomain === invoiceDomain
|
||||
) {
|
||||
return {customer: c.id, contact:null}
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
import { findCustomerOrContactByEmailOrDomain } from "../utils/helpers";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { helpdesk_channel_instances } from "../../db/schema";
|
||||
|
||||
const helpdeskInboundRoutes: FastifyPluginAsync = async (server) => {
|
||||
// Öffentliche POST-Route
|
||||
@@ -85,17 +24,18 @@ const helpdeskInboundRoutes: FastifyPluginAsync = async (server) => {
|
||||
}
|
||||
|
||||
// 1️⃣ Kanalinstanz anhand des Tokens ermitteln
|
||||
const { data: channel, error: channelError } = await server.supabase
|
||||
.from('helpdesk_channel_instances')
|
||||
.select('*')
|
||||
.eq('public_token', public_token)
|
||||
.single()
|
||||
const channels = await server.db
|
||||
.select()
|
||||
.from(helpdesk_channel_instances)
|
||||
.where(eq(helpdesk_channel_instances.publicToken, public_token))
|
||||
.limit(1)
|
||||
const channel = channels[0]
|
||||
|
||||
if (channelError || !channel) {
|
||||
if (!channel) {
|
||||
return res.status(404).send({ error: 'Invalid channel token' })
|
||||
}
|
||||
|
||||
const tenant_id = channel.tenant_id
|
||||
const tenant_id = channel.tenantId
|
||||
const channel_instance_id = channel.id
|
||||
|
||||
// @ts-ignore
|
||||
|
||||
@@ -5,6 +5,13 @@ import { addMessage, getMessages } from '../modules/helpdesk/helpdesk.message.se
|
||||
import { getOrCreateContact } from '../modules/helpdesk/helpdesk.contact.service.js'
|
||||
import {decrypt, encrypt} from "../utils/crypt";
|
||||
import nodemailer from "nodemailer"
|
||||
import { eq } from "drizzle-orm";
|
||||
import {
|
||||
helpdesk_channel_instances,
|
||||
helpdesk_contacts,
|
||||
helpdesk_conversations,
|
||||
helpdesk_messages,
|
||||
} from "../../db/schema";
|
||||
|
||||
const helpdeskRoutes: FastifyPluginAsync = async (server) => {
|
||||
// 📩 1. Liste aller Konversationen
|
||||
@@ -58,15 +65,30 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
|
||||
const tenant_id = req.user?.tenant_id
|
||||
const {id: conversation_id} = req.params as {id: string}
|
||||
|
||||
const { data, error } = await server.supabase
|
||||
.from('helpdesk_conversations')
|
||||
.select('*, helpdesk_contacts(*)')
|
||||
.eq('tenant_id', tenant_id)
|
||||
.eq('id', conversation_id)
|
||||
.single()
|
||||
const rows = await server.db
|
||||
.select({
|
||||
conversation: helpdesk_conversations,
|
||||
contact: helpdesk_contacts
|
||||
})
|
||||
.from(helpdesk_conversations)
|
||||
.leftJoin(helpdesk_contacts, eq(helpdesk_contacts.id, helpdesk_conversations.contactId))
|
||||
.where(eq(helpdesk_conversations.id, conversation_id))
|
||||
|
||||
if (error) return res.status(404).send({ error: 'Conversation not found' })
|
||||
return res.send(data)
|
||||
const data = rows[0]
|
||||
if (!data || data.conversation.tenantId !== tenant_id) return res.status(404).send({ error: 'Conversation not found' })
|
||||
|
||||
return res.send({
|
||||
...data.conversation,
|
||||
channel_instance_id: data.conversation.channelInstanceId,
|
||||
contact_id: data.conversation.contactId,
|
||||
contact_person_id: data.conversation.contactPersonId,
|
||||
created_at: data.conversation.createdAt,
|
||||
customer_id: data.conversation.customerId,
|
||||
last_message_at: data.conversation.lastMessageAt,
|
||||
tenant_id: data.conversation.tenantId,
|
||||
ticket_number: data.conversation.ticketNumber,
|
||||
helpdesk_contacts: data.contact,
|
||||
})
|
||||
})
|
||||
|
||||
// 🔄 4. Konversation Status ändern
|
||||
@@ -181,36 +203,39 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
|
||||
safeConfig.smtp.pass = encrypt(safeConfig.smtp.pass)
|
||||
}
|
||||
|
||||
// Speichern in Supabase
|
||||
const { data, error } = await server.supabase
|
||||
.from("helpdesk_channel_instances")
|
||||
.insert({
|
||||
tenant_id,
|
||||
type_id,
|
||||
const inserted = await server.db
|
||||
.insert(helpdesk_channel_instances)
|
||||
.values({
|
||||
tenantId: tenant_id,
|
||||
typeId: type_id,
|
||||
name,
|
||||
config: safeConfig,
|
||||
is_active,
|
||||
isActive: is_active,
|
||||
})
|
||||
.select()
|
||||
.single()
|
||||
.returning()
|
||||
|
||||
if (error) throw error
|
||||
const data = inserted[0]
|
||||
if (!data) throw new Error("Konnte Channel nicht erstellen")
|
||||
const responseConfig: any = data.config
|
||||
|
||||
// sensible Felder aus Response entfernen
|
||||
if (data.config?.imap) {
|
||||
delete data.config.imap.host
|
||||
delete data.config.imap.user
|
||||
delete data.config.imap.pass
|
||||
if (responseConfig?.imap) {
|
||||
delete responseConfig.imap.host
|
||||
delete responseConfig.imap.user
|
||||
delete responseConfig.imap.pass
|
||||
}
|
||||
if (data.config?.smtp) {
|
||||
delete data.config.smtp.host
|
||||
delete data.config.smtp.user
|
||||
delete data.config.smtp.pass
|
||||
if (responseConfig?.smtp) {
|
||||
delete responseConfig.smtp.host
|
||||
delete responseConfig.smtp.user
|
||||
delete responseConfig.smtp.pass
|
||||
}
|
||||
|
||||
reply.send({
|
||||
message: "E-Mail-Channel erfolgreich erstellt",
|
||||
channel: data,
|
||||
channel: {
|
||||
...data,
|
||||
config: responseConfig
|
||||
},
|
||||
})
|
||||
} catch (err) {
|
||||
console.error("Fehler bei Channel-Erstellung:", err)
|
||||
@@ -234,29 +259,29 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
|
||||
const { text } = req.body as { text: string }
|
||||
|
||||
// 🔹 Konversation inkl. Channel + Kontakt laden
|
||||
const { data: conv, error: convErr } = await server.supabase
|
||||
.from("helpdesk_conversations")
|
||||
.select(`
|
||||
id,
|
||||
tenant_id,
|
||||
subject,
|
||||
channel_instance_id,
|
||||
helpdesk_contacts(email),
|
||||
helpdesk_channel_instances(config, name),
|
||||
ticket_number
|
||||
`)
|
||||
.eq("id", conversationId)
|
||||
.single()
|
||||
const rows = await server.db
|
||||
.select({
|
||||
conversation: helpdesk_conversations,
|
||||
contact: helpdesk_contacts,
|
||||
channel: helpdesk_channel_instances,
|
||||
})
|
||||
.from(helpdesk_conversations)
|
||||
.leftJoin(helpdesk_contacts, eq(helpdesk_contacts.id, helpdesk_conversations.contactId))
|
||||
.leftJoin(helpdesk_channel_instances, eq(helpdesk_channel_instances.id, helpdesk_conversations.channelInstanceId))
|
||||
.where(eq(helpdesk_conversations.id, conversationId))
|
||||
.limit(1)
|
||||
|
||||
const conv = rows[0]
|
||||
|
||||
console.log(conv)
|
||||
|
||||
if (convErr || !conv) {
|
||||
if (!conv) {
|
||||
reply.status(404).send({ error: "Konversation nicht gefunden" })
|
||||
return
|
||||
}
|
||||
|
||||
const contact = conv.helpdesk_contacts as unknown as {email: string}
|
||||
const channel = conv.helpdesk_channel_instances as unknown as {name: string}
|
||||
const contact = conv.contact as unknown as {email: string}
|
||||
const channel = conv.channel as unknown as {name: string, config: any}
|
||||
|
||||
console.log(contact)
|
||||
if (!contact?.email) {
|
||||
@@ -288,7 +313,7 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
|
||||
const mailOptions = {
|
||||
from: `"${channel?.name}" <${user}>`,
|
||||
to: contact.email,
|
||||
subject: `${conv.ticket_number} | ${conv.subject}` || `${conv.ticket_number} | Antwort vom FEDEO Helpdesk`,
|
||||
subject: `${conv.conversation.ticketNumber} | ${conv.conversation.subject}` || `${conv.conversation.ticketNumber} | Antwort vom FEDEO Helpdesk`,
|
||||
text,
|
||||
}
|
||||
|
||||
@@ -296,24 +321,22 @@ const helpdeskRoutes: FastifyPluginAsync = async (server) => {
|
||||
console.log(`[Helpdesk SMTP] Gesendet an ${contact.email}: ${info.messageId}`)
|
||||
|
||||
// 💾 Nachricht speichern
|
||||
const { error: insertErr } = await server.supabase
|
||||
.from("helpdesk_messages")
|
||||
.insert({
|
||||
tenant_id: conv.tenant_id,
|
||||
conversation_id: conversationId,
|
||||
await server.db
|
||||
.insert(helpdesk_messages)
|
||||
.values({
|
||||
tenantId: conv.conversation.tenantId,
|
||||
conversationId: conversationId,
|
||||
direction: "outgoing",
|
||||
payload: { type: "text", text },
|
||||
external_message_id: info.messageId,
|
||||
received_at: new Date().toISOString(),
|
||||
externalMessageId: info.messageId,
|
||||
receivedAt: new Date(),
|
||||
})
|
||||
|
||||
if (insertErr) throw insertErr
|
||||
|
||||
// 🔁 Konversation aktualisieren
|
||||
await server.supabase
|
||||
.from("helpdesk_conversations")
|
||||
.update({ last_message_at: new Date().toISOString() })
|
||||
.eq("id", conversationId)
|
||||
await server.db
|
||||
.update(helpdesk_conversations)
|
||||
.set({ lastMessageAt: new Date() })
|
||||
.where(eq(helpdesk_conversations.id, conversationId))
|
||||
|
||||
reply.send({
|
||||
message: "E-Mail erfolgreich gesendet",
|
||||
|
||||
@@ -1,12 +1,39 @@
|
||||
// src/routes/resources/history.ts
|
||||
import { FastifyInstance } from "fastify";
|
||||
import { and, asc, eq, inArray } from "drizzle-orm";
|
||||
import { authProfiles, historyitems } from "../../db/schema";
|
||||
|
||||
const columnMap: Record<string, string> = {
|
||||
const columnMap: Record<string, any> = {
|
||||
customers: historyitems.customer,
|
||||
members: historyitems.customer,
|
||||
vendors: historyitems.vendor,
|
||||
projects: historyitems.project,
|
||||
plants: historyitems.plant,
|
||||
contacts: historyitems.contact,
|
||||
tasks: historyitems.task,
|
||||
vehicles: historyitems.vehicle,
|
||||
events: historyitems.event,
|
||||
files: historyitems.file,
|
||||
products: historyitems.product,
|
||||
inventoryitems: historyitems.inventoryitem,
|
||||
inventoryitemgroups: historyitems.inventoryitemgroup,
|
||||
checks: historyitems.check,
|
||||
costcentres: historyitems.costcentre,
|
||||
ownaccounts: historyitems.ownaccount,
|
||||
documentboxes: historyitems.documentbox,
|
||||
hourrates: historyitems.hourrate,
|
||||
services: historyitems.service,
|
||||
customerspaces: historyitems.customerspace,
|
||||
customerinventoryitems: historyitems.customerinventoryitem,
|
||||
memberrelations: historyitems.memberrelation,
|
||||
};
|
||||
|
||||
const insertFieldMap: Record<string, string> = {
|
||||
customers: "customer",
|
||||
members: "customer",
|
||||
vendors: "vendor",
|
||||
projects: "project",
|
||||
plants: "plant",
|
||||
contracts: "contract",
|
||||
contacts: "contact",
|
||||
tasks: "task",
|
||||
vehicles: "vehicle",
|
||||
@@ -15,17 +42,61 @@ const columnMap: Record<string, string> = {
|
||||
products: "product",
|
||||
inventoryitems: "inventoryitem",
|
||||
inventoryitemgroups: "inventoryitemgroup",
|
||||
absencerequests: "absencerequest",
|
||||
checks: "check",
|
||||
costcentres: "costcentre",
|
||||
ownaccounts: "ownaccount",
|
||||
documentboxes: "documentbox",
|
||||
hourrates: "hourrate",
|
||||
services: "service",
|
||||
roles: "role",
|
||||
};
|
||||
customerspaces: "customerspace",
|
||||
customerinventoryitems: "customerinventoryitem",
|
||||
memberrelations: "memberrelation",
|
||||
}
|
||||
|
||||
const parseId = (value: string) => {
|
||||
if (/^\d+$/.test(value)) return Number(value)
|
||||
return value
|
||||
}
|
||||
|
||||
export default async function resourceHistoryRoutes(server: FastifyInstance) {
|
||||
server.get("/history", {
|
||||
schema: {
|
||||
tags: ["History"],
|
||||
summary: "Get all history entries for the active tenant",
|
||||
},
|
||||
}, async (req: any) => {
|
||||
const data = await server.db
|
||||
.select()
|
||||
.from(historyitems)
|
||||
.where(eq(historyitems.tenant, req.user?.tenant_id))
|
||||
.orderBy(asc(historyitems.createdAt));
|
||||
|
||||
const userIds = Array.from(
|
||||
new Set(data.map((item) => item.createdBy).filter(Boolean))
|
||||
) as string[];
|
||||
|
||||
const profiles = userIds.length > 0
|
||||
? await server.db
|
||||
.select()
|
||||
.from(authProfiles)
|
||||
.where(and(
|
||||
eq(authProfiles.tenant_id, req.user?.tenant_id),
|
||||
inArray(authProfiles.user_id, userIds)
|
||||
))
|
||||
: [];
|
||||
|
||||
const profileByUserId = new Map(
|
||||
profiles.map((profile) => [profile.user_id, profile])
|
||||
);
|
||||
|
||||
return data.map((historyitem) => ({
|
||||
...historyitem,
|
||||
created_at: historyitem.createdAt,
|
||||
created_by: historyitem.createdBy,
|
||||
created_by_profile: historyitem.createdBy ? profileByUserId.get(historyitem.createdBy) || null : null,
|
||||
}));
|
||||
});
|
||||
|
||||
server.get<{
|
||||
Params: { resource: string; id: string }
|
||||
}>("/resource/:resource/:id/history", {
|
||||
@@ -49,29 +120,36 @@ export default async function resourceHistoryRoutes(server: FastifyInstance) {
|
||||
return reply.code(400).send({ error: `History not supported for resource '${resource}'` });
|
||||
}
|
||||
|
||||
const { data, error } = await server.supabase
|
||||
.from("historyitems")
|
||||
.select("*")
|
||||
.eq(column, id)
|
||||
.order("created_at", { ascending: true });
|
||||
const data = await server.db
|
||||
.select()
|
||||
.from(historyitems)
|
||||
.where(eq(column, parseId(id)))
|
||||
.orderBy(asc(historyitems.createdAt));
|
||||
|
||||
if (error) {
|
||||
server.log.error(error);
|
||||
return reply.code(500).send({ error: "Failed to fetch history" });
|
||||
}
|
||||
const userIds = Array.from(
|
||||
new Set(data.map((item) => item.createdBy).filter(Boolean))
|
||||
) as string[]
|
||||
|
||||
const {data:users, error:usersError} = await server.supabase
|
||||
.from("auth_users")
|
||||
.select("*, auth_profiles(*), tenants!auth_tenant_users(*)")
|
||||
const profiles = userIds.length > 0
|
||||
? await server.db
|
||||
.select()
|
||||
.from(authProfiles)
|
||||
.where(and(
|
||||
eq(authProfiles.tenant_id, req.user?.tenant_id),
|
||||
inArray(authProfiles.user_id, userIds)
|
||||
))
|
||||
: []
|
||||
|
||||
const filteredUsers = (users ||[]).filter(i => i.tenants.find((t:any) => t.id === req.user?.tenant_id))
|
||||
const profileByUserId = new Map(
|
||||
profiles.map((profile) => [profile.user_id, profile])
|
||||
)
|
||||
|
||||
const dataCombined = data.map(historyitem => {
|
||||
return {
|
||||
const dataCombined = data.map((historyitem) => ({
|
||||
...historyitem,
|
||||
created_by_profile: filteredUsers.find(i => i.id === historyitem.created_by) ? filteredUsers.find(i => i.id === historyitem.created_by).auth_profiles[0] : null
|
||||
}
|
||||
})
|
||||
created_at: historyitem.createdAt,
|
||||
created_by: historyitem.createdBy,
|
||||
created_by_profile: historyitem.createdBy ? profileByUserId.get(historyitem.createdBy) || null : null,
|
||||
}))
|
||||
|
||||
|
||||
|
||||
@@ -128,29 +206,33 @@ export default async function resourceHistoryRoutes(server: FastifyInstance) {
|
||||
const userId = (req.user as any)?.user_id;
|
||||
|
||||
|
||||
const fkField = columnMap[resource];
|
||||
const fkField = insertFieldMap[resource];
|
||||
if (!fkField) {
|
||||
return reply.code(400).send({ error: `Unknown resource: ${resource}` });
|
||||
}
|
||||
|
||||
const { data, error } = await server.supabase
|
||||
.from("historyitems")
|
||||
.insert({
|
||||
const inserted = await server.db
|
||||
.insert(historyitems)
|
||||
.values({
|
||||
text,
|
||||
[fkField]: id,
|
||||
[fkField]: parseId(id),
|
||||
oldVal: old_val || null,
|
||||
newVal: new_val || null,
|
||||
config: config || null,
|
||||
tenant: (req.user as any)?.tenant_id,
|
||||
created_by: userId
|
||||
createdBy: userId
|
||||
})
|
||||
.select()
|
||||
.single();
|
||||
.returning()
|
||||
|
||||
if (error) {
|
||||
return reply.code(500).send({ error: error.message });
|
||||
const data = inserted[0]
|
||||
if (!data) {
|
||||
return reply.code(500).send({ error: "Failed to create history entry" });
|
||||
}
|
||||
|
||||
return reply.code(201).send(data);
|
||||
return reply.code(201).send({
|
||||
...data,
|
||||
created_at: data.createdAt,
|
||||
created_by: data.createdBy
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
63
backend/src/routes/internal/auth.m2m.ts
Normal file
63
backend/src/routes/internal/auth.m2m.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { FastifyInstance } from "fastify"
|
||||
import jwt from "jsonwebtoken"
|
||||
import { and, eq } from "drizzle-orm"
|
||||
import { authTenantUsers } from "../../../db/schema"
|
||||
import { secrets } from "../../utils/secrets"
|
||||
|
||||
export default async function authM2mInternalRoutes(server: FastifyInstance) {
|
||||
server.post("/auth/m2m/token", {
|
||||
schema: {
|
||||
tags: ["Auth"],
|
||||
summary: "Exchange M2M API key for a short-lived JWT",
|
||||
body: {
|
||||
type: "object",
|
||||
properties: {
|
||||
expires_in_seconds: { type: "number" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}, async (req, reply) => {
|
||||
try {
|
||||
if (!req.user?.user_id || !req.user?.tenant_id || !req.user?.email) {
|
||||
return reply.code(401).send({ error: "Unauthorized" })
|
||||
}
|
||||
|
||||
const membership = await server.db
|
||||
.select()
|
||||
.from(authTenantUsers)
|
||||
.where(and(
|
||||
eq(authTenantUsers.user_id, req.user.user_id),
|
||||
eq(authTenantUsers.tenant_id, Number(req.user.tenant_id))
|
||||
))
|
||||
.limit(1)
|
||||
|
||||
if (!membership[0]) {
|
||||
return reply.code(403).send({ error: "User is not assigned to tenant" })
|
||||
}
|
||||
|
||||
const requestedTtl = Number((req.body as any)?.expires_in_seconds ?? 900)
|
||||
const ttlSeconds = Math.min(3600, Math.max(60, requestedTtl))
|
||||
|
||||
const token = jwt.sign(
|
||||
{
|
||||
user_id: req.user.user_id,
|
||||
email: req.user.email,
|
||||
tenant_id: req.user.tenant_id,
|
||||
},
|
||||
secrets.JWT_SECRET!,
|
||||
{ expiresIn: ttlSeconds }
|
||||
)
|
||||
|
||||
return {
|
||||
token_type: "Bearer",
|
||||
access_token: token,
|
||||
expires_in_seconds: ttlSeconds,
|
||||
user_id: req.user.user_id,
|
||||
tenant_id: req.user.tenant_id
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("POST /internal/auth/m2m/token ERROR:", err)
|
||||
return reply.code(500).send({ error: "Internal Server Error" })
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,21 +1,22 @@
|
||||
// routes/notifications.routes.ts
|
||||
import { FastifyInstance } from 'fastify';
|
||||
import { NotificationService, UserDirectory } from '../modules/notification.service';
|
||||
import { eq } from "drizzle-orm";
|
||||
import { authUsers } from "../../db/schema";
|
||||
|
||||
// Beispiel: E-Mail aus eigener User-Tabelle laden
|
||||
const getUserDirectory: UserDirectory = async (server:FastifyInstance, userId, tenantId) => {
|
||||
const { data, error } = await server.supabase
|
||||
.from('auth_users')
|
||||
.select('email')
|
||||
.eq('id', userId)
|
||||
.maybeSingle();
|
||||
if (error || !data) return null;
|
||||
const rows = await server.db
|
||||
.select({ email: authUsers.email })
|
||||
.from(authUsers)
|
||||
.where(eq(authUsers.id, userId))
|
||||
.limit(1)
|
||||
const data = rows[0]
|
||||
if (!data) return null;
|
||||
return { email: data.email };
|
||||
};
|
||||
|
||||
export default async function notificationsRoutes(server: FastifyInstance) {
|
||||
// wichtig: server.supabase ist über app verfügbar
|
||||
|
||||
const svc = new NotificationService(server, getUserDirectory);
|
||||
|
||||
server.post('/notifications/trigger', async (req, reply) => {
|
||||
|
||||
@@ -7,11 +7,16 @@ import {
|
||||
and,
|
||||
count,
|
||||
inArray,
|
||||
or
|
||||
or,
|
||||
sql,
|
||||
} from "drizzle-orm"
|
||||
|
||||
import { resourceConfig } from "../../utils/resource.config";
|
||||
import { useNextNumberRangeNumber } from "../../utils/functions";
|
||||
import { getHistoryEntityLabel, insertHistoryItem } from "../../utils/history";
|
||||
import { diffObjects } from "../../utils/diff";
|
||||
import { recalculateServicePricesForTenant } from "../../modules/service-price-recalculation.service";
|
||||
import { decrypt, encrypt } from "../../utils/crypt";
|
||||
|
||||
// -------------------------------------------------------------
|
||||
// SQL Suche auf mehreren Feldern (Haupttabelle + Relationen)
|
||||
@@ -19,15 +24,202 @@ import { useNextNumberRangeNumber } from "../../utils/functions";
|
||||
function buildSearchCondition(columns: any[], search: string) {
|
||||
if (!search || !columns.length) return null
|
||||
|
||||
const term = `%${search.toLowerCase()}%`
|
||||
const normalizeForSearch = (value: string) =>
|
||||
value
|
||||
.toLowerCase()
|
||||
.normalize("NFD")
|
||||
.replace(/[\u0300-\u036f]/g, "")
|
||||
.replace(/ß/g, "ss")
|
||||
|
||||
const conditions = columns
|
||||
const searchTermsRaw = search
|
||||
.trim()
|
||||
.toLowerCase()
|
||||
.split(/\s+/)
|
||||
.filter(Boolean)
|
||||
.map((col) => ilike(col, term))
|
||||
|
||||
if (conditions.length === 0) return null
|
||||
const searchTermsNormalized = searchTermsRaw.map(normalizeForSearch)
|
||||
|
||||
return or(...conditions)
|
||||
const normalizeSqlExpr = (valueExpr: any) => sql`
|
||||
lower(
|
||||
replace(
|
||||
replace(
|
||||
replace(
|
||||
replace(
|
||||
replace(
|
||||
replace(
|
||||
replace(cast(${valueExpr} as text), 'Ä', 'A'),
|
||||
'Ö', 'O'
|
||||
),
|
||||
'Ü', 'U'
|
||||
),
|
||||
'ä', 'a'
|
||||
),
|
||||
'ö', 'o'
|
||||
),
|
||||
'ü', 'u'
|
||||
),
|
||||
'ß', 'ss'
|
||||
)
|
||||
)
|
||||
`
|
||||
|
||||
const validColumns = columns.filter(Boolean)
|
||||
if (validColumns.length === 0) return null
|
||||
|
||||
// Alle Suchspalten zu einem String zusammenführen, damit Vor-/Nachname zuverlässig
|
||||
// gemeinsam durchsuchbar sind (auch wenn in getrennten Feldern gespeichert).
|
||||
const combinedRawExpr = sql`concat_ws(' ', ${sql.join(validColumns.map((col) => sql`coalesce(cast(${col} as text), '')`), sql`, `)})`
|
||||
const combinedNormalizedExpr = normalizeSqlExpr(combinedRawExpr)
|
||||
|
||||
const perTermConditions = searchTermsRaw.map((rawTerm, idx) => {
|
||||
const normalizedTerm = searchTermsNormalized[idx]
|
||||
const rawLike = `%${rawTerm}%`
|
||||
const normalizedLike = `%${normalizedTerm}%`
|
||||
|
||||
const rawCondition = ilike(combinedRawExpr, rawLike)
|
||||
const normalizedCondition = sql`${combinedNormalizedExpr} like ${normalizedLike}`
|
||||
|
||||
return or(rawCondition, normalizedCondition)
|
||||
})
|
||||
|
||||
if (perTermConditions.length === 0) return null
|
||||
return and(...perTermConditions)
|
||||
}
|
||||
|
||||
function formatDiffValue(value: any): string {
|
||||
if (value === null || value === undefined) return "-"
|
||||
if (typeof value === "boolean") return value ? "Ja" : "Nein"
|
||||
if (typeof value === "object") {
|
||||
try {
|
||||
return JSON.stringify(value)
|
||||
} catch {
|
||||
return "[Objekt]"
|
||||
}
|
||||
}
|
||||
return String(value)
|
||||
}
|
||||
|
||||
const TECHNICAL_HISTORY_KEYS = new Set([
|
||||
"id",
|
||||
"tenant",
|
||||
"tenant_id",
|
||||
"createdAt",
|
||||
"created_at",
|
||||
"createdBy",
|
||||
"created_by",
|
||||
"updatedAt",
|
||||
"updated_at",
|
||||
"updatedBy",
|
||||
"updated_by",
|
||||
"archived",
|
||||
])
|
||||
|
||||
function getUserVisibleChanges(oldRecord: Record<string, any>, updated: Record<string, any>) {
|
||||
return diffObjects(oldRecord, updated).filter((c) => !TECHNICAL_HISTORY_KEYS.has(c.key))
|
||||
}
|
||||
|
||||
function buildFieldUpdateHistoryText(resource: string, label: string, oldValue: any, newValue: any) {
|
||||
const resourceLabel = getHistoryEntityLabel(resource)
|
||||
return `${resourceLabel}: ${label} geändert von "${formatDiffValue(oldValue)}" zu "${formatDiffValue(newValue)}"`
|
||||
}
|
||||
|
||||
function applyResourceWhereFilters(resource: string, table: any, whereCond: any) {
|
||||
if (resource === "members") {
|
||||
return and(whereCond, eq(table.type, "Mitglied"))
|
||||
}
|
||||
return whereCond
|
||||
}
|
||||
|
||||
function isDateLikeField(key: string) {
|
||||
if (key === "deliveryDateType") return false
|
||||
if (key.includes("_at") || key.endsWith("At")) return true
|
||||
if (/Date$/.test(key)) return true
|
||||
return /(^|_|-)date($|_|-)/i.test(key)
|
||||
}
|
||||
|
||||
function normalizeMemberPayload(payload: Record<string, any>) {
|
||||
const infoData = payload.infoData && typeof payload.infoData === "object" ? payload.infoData : {}
|
||||
const normalized = {
|
||||
...payload,
|
||||
type: "Mitglied",
|
||||
isCompany: false,
|
||||
infoData,
|
||||
}
|
||||
|
||||
return normalized
|
||||
}
|
||||
|
||||
function validateMemberPayload(payload: Record<string, any>) {
|
||||
const infoData = payload.infoData && typeof payload.infoData === "object" ? payload.infoData : {}
|
||||
const bankAccountIds = Array.isArray(infoData.bankAccountIds) ? infoData.bankAccountIds.filter(Boolean) : []
|
||||
const firstname = typeof payload.firstname === "string" ? payload.firstname.trim() : ""
|
||||
const lastname = typeof payload.lastname === "string" ? payload.lastname.trim() : ""
|
||||
|
||||
if (!firstname || !lastname) {
|
||||
return "Für Mitglieder sind Vorname und Nachname erforderlich."
|
||||
}
|
||||
|
||||
if (!bankAccountIds.length) {
|
||||
return "Für Mitglieder muss mindestens ein Bankkonto hinterlegt werden."
|
||||
}
|
||||
|
||||
if (infoData.hasSEPA && !infoData.sepaSignedAt) {
|
||||
return "Wenn ein SEPA-Mandat hinterlegt ist, muss ein Unterschriftsdatum gesetzt werden."
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
function maskIban(iban: string) {
|
||||
if (!iban) return ""
|
||||
const cleaned = iban.replace(/\s+/g, "")
|
||||
if (cleaned.length <= 8) return cleaned
|
||||
return `${cleaned.slice(0, 4)} **** **** ${cleaned.slice(-4)}`
|
||||
}
|
||||
|
||||
function decryptEntityBankAccount(row: Record<string, any>) {
|
||||
const iban = row.ibanEncrypted ? decrypt(row.ibanEncrypted as any) : null
|
||||
const bic = row.bicEncrypted ? decrypt(row.bicEncrypted as any) : null
|
||||
const bankName = row.bankNameEncrypted ? decrypt(row.bankNameEncrypted as any) : null
|
||||
|
||||
return {
|
||||
...row,
|
||||
iban,
|
||||
bic,
|
||||
bankName,
|
||||
displayLabel: `${maskIban(iban || "")}${bankName ? ` | ${bankName}` : ""}${row.description ? ` (${row.description})` : ""}`.trim(),
|
||||
}
|
||||
}
|
||||
|
||||
function prepareEntityBankAccountPayload(payload: Record<string, any>, requireAll: boolean) {
|
||||
const iban = typeof payload.iban === "string" ? payload.iban.trim() : ""
|
||||
const bic = typeof payload.bic === "string" ? payload.bic.trim() : ""
|
||||
const bankName = typeof payload.bankName === "string" ? payload.bankName.trim() : ""
|
||||
|
||||
const hasAnyPlainField = Object.prototype.hasOwnProperty.call(payload, "iban")
|
||||
|| Object.prototype.hasOwnProperty.call(payload, "bic")
|
||||
|| Object.prototype.hasOwnProperty.call(payload, "bankName")
|
||||
|
||||
if (!hasAnyPlainField && !requireAll) {
|
||||
return { data: payload }
|
||||
}
|
||||
|
||||
if (!iban || !bic || !bankName) {
|
||||
return { error: "IBAN, BIC und Bankinstitut sind Pflichtfelder." }
|
||||
}
|
||||
|
||||
const result: Record<string, any> = {
|
||||
...payload,
|
||||
ibanEncrypted: encrypt(iban),
|
||||
bicEncrypted: encrypt(bic),
|
||||
bankNameEncrypted: encrypt(bankName),
|
||||
}
|
||||
|
||||
delete result.iban
|
||||
delete result.bic
|
||||
delete result.bankName
|
||||
|
||||
return { data: result }
|
||||
}
|
||||
|
||||
export default async function resourceRoutes(server: FastifyInstance) {
|
||||
@@ -52,6 +244,7 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
const table = config.table
|
||||
|
||||
let whereCond: any = eq(table.tenant, tenantId)
|
||||
whereCond = applyResourceWhereFilters(resource, table, whereCond)
|
||||
let q = server.db.select().from(table).$dynamic()
|
||||
|
||||
const searchCols: any[] = (config.searchColumns || []).map(c => table[c])
|
||||
@@ -121,7 +314,7 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
if(config.mtmListLoad) {
|
||||
for await (const relation of config.mtmListLoad) {
|
||||
const relTable = resourceConfig[relation].table
|
||||
const parentKey = resource.substring(0, resource.length - 1)
|
||||
const parentKey = config.relationKey || resource.substring(0, resource.length - 1)
|
||||
const relationRows = await server.db.select().from(relTable).where(inArray(relTable[parentKey], data.map(i => i.id)))
|
||||
data = data.map(row => ({
|
||||
...row,
|
||||
@@ -130,6 +323,10 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
}
|
||||
}
|
||||
|
||||
if (resource === "entitybankaccounts") {
|
||||
return data.map((row) => decryptEntityBankAccount(row))
|
||||
}
|
||||
|
||||
return data
|
||||
|
||||
} catch (err) {
|
||||
@@ -155,7 +352,10 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
const { search, distinctColumns } = req.query as { search?: string; distinctColumns?: string; };
|
||||
|
||||
let whereCond: any = eq(table.tenant, tenantId);
|
||||
whereCond = applyResourceWhereFilters(resource, table, whereCond)
|
||||
const searchCols: any[] = (config.searchColumns || []).map(c => table[c]);
|
||||
const debugSearchColumnNames: string[] = [...(config.searchColumns || [])];
|
||||
const parsedFilters: Array<{ key: string; value: any }> = []
|
||||
|
||||
let countQuery = server.db.select({ value: count(table.id) }).from(table).$dynamic();
|
||||
let mainQuery = server.db.select().from(table).$dynamic();
|
||||
@@ -173,7 +373,10 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
mainQuery = mainQuery.leftJoin(relTable, eq(table[rel], relTable.id));
|
||||
if (relConfig.searchColumns) {
|
||||
relConfig.searchColumns.forEach(c => {
|
||||
if (relTable[c]) searchCols.push(relTable[c]);
|
||||
if (relTable[c]) {
|
||||
searchCols.push(relTable[c]);
|
||||
debugSearchColumnNames.push(`${rel}.${c}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -182,6 +385,23 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
}
|
||||
|
||||
if (search) {
|
||||
if (resource === "customers") {
|
||||
const rawSearch = search.trim()
|
||||
const terms = rawSearch.toLowerCase().split(/\s+/).filter(Boolean)
|
||||
const normalizedTerms = terms
|
||||
.map((t) => t.normalize("NFD").replace(/[\u0300-\u036f]/g, "").replace(/ß/g, "ss"))
|
||||
|
||||
server.log.info({
|
||||
tag: "customer-search-debug",
|
||||
search: rawSearch,
|
||||
terms,
|
||||
normalizedTerms,
|
||||
searchColumns: debugSearchColumnNames,
|
||||
page: pagination?.page ?? 1,
|
||||
limit: pagination?.limit ?? 100,
|
||||
}, "Paginated customer search request")
|
||||
}
|
||||
|
||||
const searchCond = buildSearchCondition(searchCols, search.trim());
|
||||
if (searchCond) whereCond = and(whereCond, searchCond);
|
||||
}
|
||||
@@ -190,6 +410,7 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
for (const [key, val] of Object.entries(filters)) {
|
||||
const col = (table as any)[key];
|
||||
if (!col) continue;
|
||||
parsedFilters.push({ key, value: val })
|
||||
whereCond = Array.isArray(val) ? and(whereCond, inArray(col, val)) : and(whereCond, eq(col, val as any));
|
||||
}
|
||||
}
|
||||
@@ -219,7 +440,35 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
for (const colName of distinctColumns.split(",").map(c => c.trim())) {
|
||||
const col = (table as any)[colName];
|
||||
if (!col) continue;
|
||||
const dRows = await server.db.select({ v: col }).from(table).where(eq(table.tenant, tenantId));
|
||||
let distinctQuery = server.db.select({ v: col }).from(table).$dynamic();
|
||||
if (config.mtoLoad) {
|
||||
config.mtoLoad.forEach(rel => {
|
||||
const relConfig = resourceConfig[rel + "s"] || resourceConfig[rel];
|
||||
if (!relConfig) return;
|
||||
const relTable = relConfig.table;
|
||||
if (relTable !== table) {
|
||||
distinctQuery = distinctQuery.leftJoin(relTable, eq(table[rel], relTable.id));
|
||||
}
|
||||
});
|
||||
}
|
||||
let distinctWhereCond: any = eq(table.tenant, tenantId)
|
||||
distinctWhereCond = applyResourceWhereFilters(resource, table, distinctWhereCond)
|
||||
|
||||
if (search) {
|
||||
const searchCond = buildSearchCondition(searchCols, search.trim())
|
||||
if (searchCond) distinctWhereCond = and(distinctWhereCond, searchCond)
|
||||
}
|
||||
|
||||
for (const f of parsedFilters) {
|
||||
if (f.key === colName) continue
|
||||
const filterCol = (table as any)[f.key]
|
||||
if (!filterCol) continue
|
||||
distinctWhereCond = Array.isArray(f.value)
|
||||
? and(distinctWhereCond, inArray(filterCol, f.value))
|
||||
: and(distinctWhereCond, eq(filterCol, f.value as any))
|
||||
}
|
||||
|
||||
const dRows = await distinctQuery.where(distinctWhereCond);
|
||||
distinctValues[colName] = [...new Set(dRows.map(r => r.v).filter(v => v != null && v !== ""))].sort();
|
||||
}
|
||||
}
|
||||
@@ -250,7 +499,7 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
if (config.mtmListLoad) {
|
||||
for await (const relation of config.mtmListLoad) {
|
||||
const relTable = resourceConfig[relation].table;
|
||||
const parentKey = resource.substring(0, resource.length - 1);
|
||||
const parentKey = config.relationKey || resource.substring(0, resource.length - 1);
|
||||
const relationRows = await server.db.select().from(relTable).where(inArray(relTable[parentKey], data.map(i => i.id)));
|
||||
data = data.map(row => ({
|
||||
...row,
|
||||
@@ -259,6 +508,10 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
}
|
||||
}
|
||||
|
||||
if (resource === "entitybankaccounts") {
|
||||
data = data.map((row) => decryptEntityBankAccount(row))
|
||||
}
|
||||
|
||||
return {
|
||||
data,
|
||||
queryConfig: { ...queryConfig, total, totalPages: Math.ceil(total / limit), distinctValues }
|
||||
@@ -282,10 +535,13 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
const { resource, no_relations } = req.params as { resource: string, no_relations?: boolean }
|
||||
const table = resourceConfig[resource].table
|
||||
|
||||
let whereCond: any = and(eq(table.id, id), eq(table.tenant, tenantId))
|
||||
whereCond = applyResourceWhereFilters(resource, table, whereCond)
|
||||
|
||||
const projRows = await server.db
|
||||
.select()
|
||||
.from(table)
|
||||
.where(and(eq(table.id, id), eq(table.tenant, tenantId)))
|
||||
.where(whereCond)
|
||||
.limit(1)
|
||||
|
||||
if (!projRows.length)
|
||||
@@ -308,12 +564,16 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
if (resourceConfig[resource].mtmLoad) {
|
||||
for await (const relation of resourceConfig[resource].mtmLoad) {
|
||||
const relTable = resourceConfig[relation].table
|
||||
const parentKey = resource.substring(0, resource.length - 1)
|
||||
const parentKey = resourceConfig[resource].relationKey || resource.substring(0, resource.length - 1)
|
||||
data[relation] = await server.db.select().from(relTable).where(eq(relTable[parentKey], id))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (resource === "entitybankaccounts") {
|
||||
return decryptEntityBankAccount(data)
|
||||
}
|
||||
|
||||
return data
|
||||
} catch (err) {
|
||||
console.error("ERROR /resource/:resource/:id", err)
|
||||
@@ -326,14 +586,32 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
try {
|
||||
if (!req.user?.tenant_id) return reply.code(400).send({ error: "No tenant selected" });
|
||||
const { resource } = req.params as { resource: string };
|
||||
if (resource === "accounts") {
|
||||
return reply.code(403).send({ error: "Accounts are read-only" })
|
||||
}
|
||||
const body = req.body as Record<string, any>;
|
||||
const config = resourceConfig[resource];
|
||||
const table = config.table;
|
||||
|
||||
let createData = { ...body, tenant: req.user.tenant_id, archived: false };
|
||||
let createData: Record<string, any> = { ...body, tenant: req.user.tenant_id, archived: false };
|
||||
|
||||
if (resource === "members") {
|
||||
createData = normalizeMemberPayload(createData)
|
||||
const validationError = validateMemberPayload(createData)
|
||||
if (validationError) {
|
||||
return reply.code(400).send({ error: validationError })
|
||||
}
|
||||
}
|
||||
|
||||
if (resource === "entitybankaccounts") {
|
||||
const prepared = prepareEntityBankAccountPayload(createData, true)
|
||||
if (prepared.error) return reply.code(400).send({ error: prepared.error })
|
||||
createData = prepared.data!
|
||||
}
|
||||
|
||||
if (config.numberRangeHolder && !body[config.numberRangeHolder]) {
|
||||
const result = await useNextNumberRangeNumber(server, req.user.tenant_id, resource)
|
||||
const numberRangeResource = resource === "members" ? "customers" : resource
|
||||
const result = await useNextNumberRangeNumber(server, req.user.tenant_id, numberRangeResource)
|
||||
createData[config.numberRangeHolder] = result.usedNumber
|
||||
}
|
||||
|
||||
@@ -343,6 +621,33 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
})
|
||||
|
||||
const [created] = await server.db.insert(table).values(createData).returning()
|
||||
|
||||
if (["products", "services", "hourrates"].includes(resource)) {
|
||||
await recalculateServicePricesForTenant(server, req.user.tenant_id, req.user?.user_id || null);
|
||||
}
|
||||
|
||||
if (created) {
|
||||
try {
|
||||
const resourceLabel = getHistoryEntityLabel(resource)
|
||||
await insertHistoryItem(server, {
|
||||
tenant_id: req.user.tenant_id,
|
||||
created_by: req.user?.user_id || null,
|
||||
entity: resource,
|
||||
entityId: created.id,
|
||||
action: "created",
|
||||
oldVal: null,
|
||||
newVal: created,
|
||||
text: `Neuer Eintrag in ${resourceLabel} erstellt`,
|
||||
})
|
||||
} catch (historyError) {
|
||||
server.log.warn({ err: historyError, resource }, "Failed to write create history entry")
|
||||
}
|
||||
}
|
||||
|
||||
if (resource === "entitybankaccounts") {
|
||||
return decryptEntityBankAccount(created as Record<string, any>)
|
||||
}
|
||||
|
||||
return created;
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
@@ -354,6 +659,9 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
server.put("/resource/:resource/:id", async (req, reply) => {
|
||||
try {
|
||||
const { resource, id } = req.params as { resource: string; id: string }
|
||||
if (resource === "accounts") {
|
||||
return reply.code(403).send({ error: "Accounts are read-only" })
|
||||
}
|
||||
const body = req.body as Record<string, any>
|
||||
const tenantId = req.user?.tenant_id
|
||||
const userId = req.user?.user_id
|
||||
@@ -363,17 +671,93 @@ export default async function resourceRoutes(server: FastifyInstance) {
|
||||
const table = resourceConfig[resource].table
|
||||
const normalizeDate = (val: any) => { const d = new Date(val); return isNaN(d.getTime()) ? null : d; }
|
||||
|
||||
let data = { ...body, updated_at: new Date().toISOString(), updated_by: userId }
|
||||
const [oldRecord] = await server.db
|
||||
.select()
|
||||
.from(table)
|
||||
.where(and(eq(table.id, id), eq(table.tenant, tenantId)))
|
||||
.limit(1)
|
||||
|
||||
let data: Record<string, any> = { ...body, updated_at: new Date().toISOString(), updated_by: userId }
|
||||
//@ts-ignore
|
||||
delete data.updatedBy; delete data.updatedAt;
|
||||
|
||||
if (resource === "members") {
|
||||
data = normalizeMemberPayload(data)
|
||||
const validationError = validateMemberPayload(data)
|
||||
if (validationError) {
|
||||
return reply.code(400).send({ error: validationError })
|
||||
}
|
||||
}
|
||||
|
||||
if (resource === "entitybankaccounts") {
|
||||
const prepared = prepareEntityBankAccountPayload(data, false)
|
||||
if (prepared.error) return reply.code(400).send({ error: prepared.error })
|
||||
data = {
|
||||
...prepared.data,
|
||||
updated_at: data.updated_at,
|
||||
updated_by: data.updated_by,
|
||||
}
|
||||
}
|
||||
|
||||
Object.keys(data).forEach((key) => {
|
||||
if ((key.includes("_at") || key.includes("At") || key.toLowerCase().includes("date")) && key !== "deliveryDateType") {
|
||||
data[key] = normalizeDate(data[key])
|
||||
const value = data[key]
|
||||
const shouldNormalize =
|
||||
isDateLikeField(key) &&
|
||||
value !== null &&
|
||||
value !== undefined &&
|
||||
(typeof value === "string" || typeof value === "number" || value instanceof Date)
|
||||
|
||||
if (shouldNormalize) {
|
||||
data[key] = normalizeDate(value)
|
||||
}
|
||||
})
|
||||
|
||||
const [updated] = await server.db.update(table).set(data).where(and(eq(table.id, id), eq(table.tenant, tenantId))).returning()
|
||||
let updateWhereCond: any = and(eq(table.id, id), eq(table.tenant, tenantId))
|
||||
updateWhereCond = applyResourceWhereFilters(resource, table, updateWhereCond)
|
||||
const [updated] = await server.db.update(table).set(data).where(updateWhereCond).returning()
|
||||
|
||||
if (["products", "services", "hourrates"].includes(resource)) {
|
||||
await recalculateServicePricesForTenant(server, tenantId, userId);
|
||||
}
|
||||
|
||||
if (updated) {
|
||||
try {
|
||||
const resourceLabel = getHistoryEntityLabel(resource)
|
||||
const changes = oldRecord ? getUserVisibleChanges(oldRecord, updated) : []
|
||||
if (!changes.length) {
|
||||
await insertHistoryItem(server, {
|
||||
tenant_id: tenantId,
|
||||
created_by: userId,
|
||||
entity: resource,
|
||||
entityId: updated.id,
|
||||
action: "updated",
|
||||
oldVal: oldRecord || null,
|
||||
newVal: updated,
|
||||
text: `Eintrag in ${resourceLabel} geändert`,
|
||||
})
|
||||
} else {
|
||||
for (const change of changes) {
|
||||
await insertHistoryItem(server, {
|
||||
tenant_id: tenantId,
|
||||
created_by: userId,
|
||||
entity: resource,
|
||||
entityId: updated.id,
|
||||
action: "updated",
|
||||
oldVal: change.oldValue,
|
||||
newVal: change.newValue,
|
||||
text: buildFieldUpdateHistoryText(resource, change.label, change.oldValue, change.newValue),
|
||||
})
|
||||
}
|
||||
}
|
||||
} catch (historyError) {
|
||||
server.log.warn({ err: historyError, resource, id }, "Failed to write update history entry")
|
||||
}
|
||||
}
|
||||
|
||||
if (resource === "entitybankaccounts") {
|
||||
return decryptEntityBankAccount(updated as Record<string, any>)
|
||||
}
|
||||
|
||||
return updated
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
import { FastifyInstance } from "fastify"
|
||||
import { asc, desc } from "drizzle-orm"
|
||||
import { asc, desc, eq } from "drizzle-orm"
|
||||
import { sortData } from "../utils/sort"
|
||||
|
||||
// Schema imports
|
||||
import { accounts, units,countrys } from "../../db/schema"
|
||||
import { accounts, units, countrys, tenants } from "../../db/schema"
|
||||
|
||||
const TABLE_MAP: Record<string, any> = {
|
||||
accounts,
|
||||
@@ -35,11 +35,49 @@ export default async function resourceRoutesSpecial(server: FastifyInstance) {
|
||||
}
|
||||
|
||||
// ---------------------------------------
|
||||
// 📌 SELECT: wir ignorieren select string (wie Supabase)
|
||||
// 📌 SELECT: select-string wird in dieser Route bewusst ignoriert
|
||||
// Drizzle kann kein dynamisches Select aus String!
|
||||
// Wir geben IMMER alle Spalten zurück → kompatibel zum Frontend
|
||||
// ---------------------------------------
|
||||
|
||||
if (resource === "accounts") {
|
||||
const [tenant] = await server.db
|
||||
.select({
|
||||
accountChart: tenants.accountChart,
|
||||
})
|
||||
.from(tenants)
|
||||
.where(eq(tenants.id, Number(req.user.tenant_id)))
|
||||
.limit(1)
|
||||
|
||||
const activeAccountChart = tenant?.accountChart || "skr03"
|
||||
let data
|
||||
if (sort && (accounts as any)[sort]) {
|
||||
const col = (accounts as any)[sort]
|
||||
data = ascQuery === "true"
|
||||
? await server.db
|
||||
.select()
|
||||
.from(accounts)
|
||||
.where(eq(accounts.accountChart, activeAccountChart))
|
||||
.orderBy(asc(col))
|
||||
: await server.db
|
||||
.select()
|
||||
.from(accounts)
|
||||
.where(eq(accounts.accountChart, activeAccountChart))
|
||||
.orderBy(desc(col))
|
||||
} else {
|
||||
data = await server.db
|
||||
.select()
|
||||
.from(accounts)
|
||||
.where(eq(accounts.accountChart, activeAccountChart))
|
||||
}
|
||||
|
||||
return sortData(
|
||||
data,
|
||||
sort as any,
|
||||
ascQuery === "true"
|
||||
)
|
||||
}
|
||||
|
||||
let query = server.db.select().from(table)
|
||||
|
||||
// ---------------------------------------
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import { FastifyInstance } from 'fastify'
|
||||
import { StaffTimeEntryConnect } from '../../types/staff'
|
||||
import { asc, eq } from "drizzle-orm";
|
||||
import { stafftimenetryconnects } from "../../../db/schema";
|
||||
|
||||
export default async function staffTimeConnectRoutes(server: FastifyInstance) {
|
||||
|
||||
@@ -8,16 +10,21 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
|
||||
'/staff/time/:id/connects',
|
||||
async (req, reply) => {
|
||||
const { id } = req.params
|
||||
const { started_at, stopped_at, project_id, customer_id, task_id, ticket_id, notes } = req.body
|
||||
const { started_at, stopped_at, project_id, notes } = req.body
|
||||
const parsedProjectId = project_id ? Number(project_id) : null
|
||||
|
||||
const { data, error } = await server.supabase
|
||||
.from('staff_time_entry_connects')
|
||||
.insert([{ time_entry_id: id, started_at, stopped_at, project_id, customer_id, task_id, ticket_id, notes }])
|
||||
.select()
|
||||
.maybeSingle()
|
||||
const data = await server.db
|
||||
.insert(stafftimenetryconnects)
|
||||
.values({
|
||||
stafftimeentry: id,
|
||||
started_at: new Date(started_at),
|
||||
stopped_at: new Date(stopped_at),
|
||||
project_id: parsedProjectId,
|
||||
notes
|
||||
})
|
||||
.returning()
|
||||
|
||||
if (error) return reply.code(400).send({ error: error.message })
|
||||
return reply.send(data)
|
||||
return reply.send(data[0])
|
||||
}
|
||||
)
|
||||
|
||||
@@ -26,13 +33,12 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
|
||||
'/staff/time/:id/connects',
|
||||
async (req, reply) => {
|
||||
const { id } = req.params
|
||||
const { data, error } = await server.supabase
|
||||
.from('staff_time_entry_connects')
|
||||
.select('*')
|
||||
.eq('time_entry_id', id)
|
||||
.order('started_at', { ascending: true })
|
||||
const data = await server.db
|
||||
.select()
|
||||
.from(stafftimenetryconnects)
|
||||
.where(eq(stafftimenetryconnects.stafftimeentry, id))
|
||||
.orderBy(asc(stafftimenetryconnects.started_at))
|
||||
|
||||
if (error) return reply.code(400).send({ error: error.message })
|
||||
return reply.send(data)
|
||||
}
|
||||
)
|
||||
@@ -42,15 +48,20 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
|
||||
'/staff/time/connects/:connectId',
|
||||
async (req, reply) => {
|
||||
const { connectId } = req.params
|
||||
const { data, error } = await server.supabase
|
||||
.from('staff_time_entry_connects')
|
||||
.update({ ...req.body, updated_at: new Date().toISOString() })
|
||||
.eq('id', connectId)
|
||||
.select()
|
||||
.maybeSingle()
|
||||
const patchData = { ...req.body } as any
|
||||
if (patchData.started_at) patchData.started_at = new Date(patchData.started_at)
|
||||
if (patchData.stopped_at) patchData.stopped_at = new Date(patchData.stopped_at)
|
||||
if (patchData.project_id !== undefined) {
|
||||
patchData.project_id = patchData.project_id ? Number(patchData.project_id) : null
|
||||
}
|
||||
|
||||
if (error) return reply.code(400).send({ error: error.message })
|
||||
return reply.send(data)
|
||||
const data = await server.db
|
||||
.update(stafftimenetryconnects)
|
||||
.set({ ...patchData, updated_at: new Date() })
|
||||
.where(eq(stafftimenetryconnects.id, connectId))
|
||||
.returning()
|
||||
|
||||
return reply.send(data[0])
|
||||
}
|
||||
)
|
||||
|
||||
@@ -59,12 +70,10 @@ export default async function staffTimeConnectRoutes(server: FastifyInstance) {
|
||||
'/staff/time/connects/:connectId',
|
||||
async (req, reply) => {
|
||||
const { connectId } = req.params
|
||||
const { error } = await server.supabase
|
||||
.from('staff_time_entry_connects')
|
||||
.delete()
|
||||
.eq('id', connectId)
|
||||
await server.db
|
||||
.delete(stafftimenetryconnects)
|
||||
.where(eq(stafftimenetryconnects.id, connectId))
|
||||
|
||||
if (error) return reply.code(400).send({ error: error.message })
|
||||
return reply.send({ success: true })
|
||||
}
|
||||
)
|
||||
|
||||
@@ -1,18 +1,26 @@
|
||||
import { FastifyInstance } from "fastify"
|
||||
import jwt from "jsonwebtoken"
|
||||
import { secrets } from "../utils/secrets"
|
||||
import { createHash, randomBytes } from "node:crypto"
|
||||
|
||||
import {
|
||||
authTenantUsers,
|
||||
authUsers,
|
||||
authProfiles,
|
||||
tenants
|
||||
tenants,
|
||||
m2mApiKeys
|
||||
} from "../../db/schema"
|
||||
|
||||
import {and, eq, inArray} from "drizzle-orm"
|
||||
import {and, desc, eq, inArray} from "drizzle-orm"
|
||||
|
||||
|
||||
export default async function tenantRoutes(server: FastifyInstance) {
|
||||
const generateApiKey = () => {
|
||||
const raw = randomBytes(32).toString("base64url")
|
||||
return `fedeo_m2m_${raw}`
|
||||
}
|
||||
const hashApiKey = (apiKey: string) =>
|
||||
createHash("sha256").update(apiKey, "utf8").digest("hex")
|
||||
|
||||
|
||||
// -------------------------------------------------------------
|
||||
@@ -73,7 +81,7 @@ export default async function tenantRoutes(server: FastifyInstance) {
|
||||
httpOnly: true,
|
||||
sameSite: process.env.NODE_ENV === "production" ? "none" : "lax",
|
||||
secure: process.env.NODE_ENV === "production",
|
||||
maxAge: 60 * 60 * 3,
|
||||
maxAge: 60 * 60 * 6,
|
||||
})
|
||||
|
||||
return { token }
|
||||
@@ -241,4 +249,172 @@ export default async function tenantRoutes(server: FastifyInstance) {
|
||||
}
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------
|
||||
// M2M API KEYS
|
||||
// -------------------------------------------------------------
|
||||
server.get("/tenant/api-keys", async (req, reply) => {
|
||||
try {
|
||||
const tenantId = req.user?.tenant_id
|
||||
if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })
|
||||
|
||||
const keys = await server.db
|
||||
.select({
|
||||
id: m2mApiKeys.id,
|
||||
name: m2mApiKeys.name,
|
||||
tenant_id: m2mApiKeys.tenantId,
|
||||
user_id: m2mApiKeys.userId,
|
||||
active: m2mApiKeys.active,
|
||||
key_prefix: m2mApiKeys.keyPrefix,
|
||||
created_at: m2mApiKeys.createdAt,
|
||||
updated_at: m2mApiKeys.updatedAt,
|
||||
expires_at: m2mApiKeys.expiresAt,
|
||||
last_used_at: m2mApiKeys.lastUsedAt,
|
||||
})
|
||||
.from(m2mApiKeys)
|
||||
.where(eq(m2mApiKeys.tenantId, tenantId))
|
||||
.orderBy(desc(m2mApiKeys.createdAt))
|
||||
|
||||
return keys
|
||||
} catch (err) {
|
||||
console.error("/tenant/api-keys GET ERROR:", err)
|
||||
return reply.code(500).send({ error: "Internal Server Error" })
|
||||
}
|
||||
})
|
||||
|
||||
server.post("/tenant/api-keys", async (req, reply) => {
|
||||
try {
|
||||
const tenantId = req.user?.tenant_id
|
||||
const creatorUserId = req.user?.user_id
|
||||
if (!tenantId || !creatorUserId) {
|
||||
return reply.code(401).send({ error: "Unauthorized" })
|
||||
}
|
||||
|
||||
const { name, user_id, expires_at } = req.body as {
|
||||
name: string
|
||||
user_id: string
|
||||
expires_at?: string | null
|
||||
}
|
||||
|
||||
if (!name || !user_id) {
|
||||
return reply.code(400).send({ error: "name and user_id are required" })
|
||||
}
|
||||
|
||||
const userMembership = await server.db
|
||||
.select()
|
||||
.from(authTenantUsers)
|
||||
.where(and(
|
||||
eq(authTenantUsers.tenant_id, tenantId),
|
||||
eq(authTenantUsers.user_id, user_id)
|
||||
))
|
||||
.limit(1)
|
||||
|
||||
if (!userMembership[0]) {
|
||||
return reply.code(400).send({ error: "user_id is not assigned to this tenant" })
|
||||
}
|
||||
|
||||
const plainApiKey = generateApiKey()
|
||||
const keyPrefix = plainApiKey.slice(0, 16)
|
||||
const keyHash = hashApiKey(plainApiKey)
|
||||
|
||||
const inserted = await server.db
|
||||
.insert(m2mApiKeys)
|
||||
.values({
|
||||
tenantId,
|
||||
userId: user_id,
|
||||
createdBy: creatorUserId,
|
||||
name,
|
||||
keyPrefix,
|
||||
keyHash,
|
||||
expiresAt: expires_at ? new Date(expires_at) : null,
|
||||
})
|
||||
.returning({
|
||||
id: m2mApiKeys.id,
|
||||
name: m2mApiKeys.name,
|
||||
tenant_id: m2mApiKeys.tenantId,
|
||||
user_id: m2mApiKeys.userId,
|
||||
key_prefix: m2mApiKeys.keyPrefix,
|
||||
created_at: m2mApiKeys.createdAt,
|
||||
expires_at: m2mApiKeys.expiresAt,
|
||||
active: m2mApiKeys.active,
|
||||
})
|
||||
|
||||
return reply.code(201).send({
|
||||
...inserted[0],
|
||||
api_key: plainApiKey, // only returned once
|
||||
})
|
||||
} catch (err) {
|
||||
console.error("/tenant/api-keys POST ERROR:", err)
|
||||
return reply.code(500).send({ error: "Internal Server Error" })
|
||||
}
|
||||
})
|
||||
|
||||
server.patch("/tenant/api-keys/:id", async (req, reply) => {
|
||||
try {
|
||||
const tenantId = req.user?.tenant_id
|
||||
if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })
|
||||
|
||||
const { id } = req.params as { id: string }
|
||||
const { name, active, expires_at } = req.body as {
|
||||
name?: string
|
||||
active?: boolean
|
||||
expires_at?: string | null
|
||||
}
|
||||
|
||||
const updateData: any = {
|
||||
updatedAt: new Date()
|
||||
}
|
||||
if (name !== undefined) updateData.name = name
|
||||
if (active !== undefined) updateData.active = active
|
||||
if (expires_at !== undefined) updateData.expiresAt = expires_at ? new Date(expires_at) : null
|
||||
|
||||
const updated = await server.db
|
||||
.update(m2mApiKeys)
|
||||
.set(updateData)
|
||||
.where(and(
|
||||
eq(m2mApiKeys.id, id),
|
||||
eq(m2mApiKeys.tenantId, tenantId)
|
||||
))
|
||||
.returning({
|
||||
id: m2mApiKeys.id,
|
||||
name: m2mApiKeys.name,
|
||||
tenant_id: m2mApiKeys.tenantId,
|
||||
user_id: m2mApiKeys.userId,
|
||||
active: m2mApiKeys.active,
|
||||
key_prefix: m2mApiKeys.keyPrefix,
|
||||
updated_at: m2mApiKeys.updatedAt,
|
||||
expires_at: m2mApiKeys.expiresAt,
|
||||
last_used_at: m2mApiKeys.lastUsedAt,
|
||||
})
|
||||
|
||||
if (!updated[0]) {
|
||||
return reply.code(404).send({ error: "API key not found" })
|
||||
}
|
||||
|
||||
return updated[0]
|
||||
} catch (err) {
|
||||
console.error("/tenant/api-keys PATCH ERROR:", err)
|
||||
return reply.code(500).send({ error: "Internal Server Error" })
|
||||
}
|
||||
})
|
||||
|
||||
server.delete("/tenant/api-keys/:id", async (req, reply) => {
|
||||
try {
|
||||
const tenantId = req.user?.tenant_id
|
||||
if (!tenantId) return reply.code(401).send({ error: "Unauthorized" })
|
||||
|
||||
const { id } = req.params as { id: string }
|
||||
await server.db
|
||||
.delete(m2mApiKeys)
|
||||
.where(and(
|
||||
eq(m2mApiKeys.id, id),
|
||||
eq(m2mApiKeys.tenantId, tenantId)
|
||||
))
|
||||
|
||||
return { success: true }
|
||||
} catch (err) {
|
||||
console.error("/tenant/api-keys DELETE ERROR:", err)
|
||||
return reply.code(500).send({ error: "Internal Server Error" })
|
||||
}
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
@@ -10,6 +10,8 @@ import {
|
||||
plants,
|
||||
products,
|
||||
inventoryitems,
|
||||
customerinventoryitems,
|
||||
customerspaces,
|
||||
// NEU HINZUGEFÜGT (Basierend auf deinem DataStore)
|
||||
tasks,
|
||||
contacts,
|
||||
@@ -34,6 +36,8 @@ const ENTITY_CONFIG: Record<string, { table: any, labelField: any, rootLabel: st
|
||||
'plants': { table: plants, labelField: plants.name, rootLabel: 'Objekte', idField: 'id' },
|
||||
'products': { table: products, labelField: products.name, rootLabel: 'Artikel', idField: 'id' },
|
||||
'inventoryitems': { table: inventoryitems, labelField: inventoryitems.name, rootLabel: 'Inventarartikel', idField: 'id' },
|
||||
'customerinventoryitems': { table: customerinventoryitems, labelField: customerinventoryitems.name, rootLabel: 'Kundeninventar', idField: 'id' },
|
||||
'customerspaces': { table: customerspaces, labelField: customerspaces.name, rootLabel: 'Kundenlagerplätze', idField: 'id' },
|
||||
|
||||
// --- NEU BASIEREND AUF DATASTORE ---
|
||||
'tasks': { table: tasks, labelField: tasks.name, rootLabel: 'Aufgaben', idField: 'id' },
|
||||
|
||||
3512
backend/src/utils/deBankBics.ts
Normal file
3512
backend/src/utils/deBankBics.ts
Normal file
File diff suppressed because it is too large
Load Diff
3515
backend/src/utils/deBankCodes.ts
Normal file
3515
backend/src/utils/deBankCodes.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,5 @@
|
||||
|
||||
import {diffTranslations} from "./diffTranslations";
|
||||
import {diffTranslations, getDiffLabel} from "./diffTranslations";
|
||||
|
||||
export type DiffChange = {
|
||||
key: string;
|
||||
@@ -43,8 +43,6 @@ export function diffObjects(
|
||||
const oldVal = obj1?.[key];
|
||||
const newVal = obj2?.[key];
|
||||
|
||||
console.log(oldVal, key, newVal);
|
||||
|
||||
// Wenn beides null/undefined → ignorieren
|
||||
if (
|
||||
(oldVal === null || oldVal === undefined || oldVal === "" || JSON.stringify(oldVal) === "[]") &&
|
||||
@@ -72,12 +70,11 @@ export function diffObjects(
|
||||
if (type === "unchanged") continue;
|
||||
|
||||
const translation = diffTranslations[key];
|
||||
let label = key;
|
||||
let label = getDiffLabel(key);
|
||||
let resolvedOld = oldVal;
|
||||
let resolvedNew = newVal;
|
||||
|
||||
if (translation) {
|
||||
label = translation.label;
|
||||
if (translation.resolve) {
|
||||
const { oldVal: resOld, newVal: resNew } = translation.resolve(
|
||||
oldVal,
|
||||
|
||||
@@ -6,6 +6,149 @@ type ValueResolver = (
|
||||
ctx?: Record<string, any>
|
||||
) => { oldVal: any; newVal: any };
|
||||
|
||||
const TOKEN_TRANSLATIONS: Record<string, string> = {
|
||||
account: "Konto",
|
||||
active: "Aktiv",
|
||||
address: "Adresse",
|
||||
amount: "Betrag",
|
||||
archived: "Archiviert",
|
||||
article: "Artikel",
|
||||
bank: "Bank",
|
||||
barcode: "Barcode",
|
||||
birthday: "Geburtstag",
|
||||
category: "Kategorie",
|
||||
city: "Ort",
|
||||
color: "Farbe",
|
||||
comment: "Kommentar",
|
||||
company: "Firma",
|
||||
contact: "Kontakt",
|
||||
contract: "Vertrag",
|
||||
cost: "Kosten",
|
||||
country: "Land",
|
||||
created: "Erstellt",
|
||||
customer: "Kunde",
|
||||
date: "Datum",
|
||||
default: "Standard",
|
||||
deleted: "Gelöscht",
|
||||
delivery: "Lieferung",
|
||||
description: "Beschreibung",
|
||||
document: "Dokument",
|
||||
driver: "Fahrer",
|
||||
due: "Fällig",
|
||||
duration: "Dauer",
|
||||
email: "E-Mail",
|
||||
employee: "Mitarbeiter",
|
||||
enabled: "Aktiviert",
|
||||
end: "Ende",
|
||||
event: "Ereignis",
|
||||
file: "Datei",
|
||||
first: "Vorname",
|
||||
fixed: "Festgeschrieben",
|
||||
group: "Gruppe",
|
||||
hour: "Stunde",
|
||||
iban: "IBAN",
|
||||
id: "ID",
|
||||
incoming: "Eingang",
|
||||
invoice: "Rechnung",
|
||||
item: "Eintrag",
|
||||
language: "Sprache",
|
||||
last: "Nachname",
|
||||
license: "Kennzeichen",
|
||||
link: "Link",
|
||||
list: "Liste",
|
||||
location: "Standort",
|
||||
manufacturer: "Hersteller",
|
||||
markup: "Verkaufsaufschlag",
|
||||
message: "Nachricht",
|
||||
mobile: "Mobil",
|
||||
name: "Name",
|
||||
note: "Notiz",
|
||||
notes: "Notizen",
|
||||
number: "Nummer",
|
||||
order: "Bestellung",
|
||||
own: "Eigen",
|
||||
payment: "Zahlung",
|
||||
phone: "Telefon",
|
||||
plant: "Objekt",
|
||||
postal: "Post",
|
||||
price: "Preis",
|
||||
percentage: "%",
|
||||
product: "Produkt",
|
||||
profile: "Profil",
|
||||
project: "Projekt",
|
||||
purchase: "Kauf",
|
||||
quantity: "Menge",
|
||||
rate: "Satz",
|
||||
reference: "Referenz",
|
||||
requisition: "Anfrage",
|
||||
resource: "Ressource",
|
||||
role: "Rolle",
|
||||
serial: "Serien",
|
||||
service: "Leistung",
|
||||
selling: "Verkauf",
|
||||
sellign: "Verkauf",
|
||||
space: "Lagerplatz",
|
||||
start: "Start",
|
||||
statement: "Buchung",
|
||||
status: "Status",
|
||||
street: "Straße",
|
||||
surcharge: "Aufschlag",
|
||||
tax: "Steuer",
|
||||
tel: "Telefon",
|
||||
tenant: "Mandant",
|
||||
time: "Zeit",
|
||||
title: "Titel",
|
||||
total: "Gesamt",
|
||||
type: "Typ",
|
||||
unit: "Einheit",
|
||||
updated: "Aktualisiert",
|
||||
user: "Benutzer",
|
||||
ustid: "USt-ID",
|
||||
value: "Wert",
|
||||
vendor: "Lieferant",
|
||||
vehicle: "Fahrzeug",
|
||||
weekly: "Wöchentlich",
|
||||
working: "Arbeits",
|
||||
zip: "Postleitzahl",
|
||||
composed: "Zusammensetzung",
|
||||
material: "Material",
|
||||
worker: "Arbeit",
|
||||
};
|
||||
|
||||
function tokenizeKey(key: string): string[] {
|
||||
return key
|
||||
.replace(/([a-z0-9])([A-Z])/g, "$1_$2")
|
||||
.replace(/[^a-zA-Z0-9]+/g, "_")
|
||||
.split("_")
|
||||
.filter(Boolean)
|
||||
.map((p) => p.toLowerCase());
|
||||
}
|
||||
|
||||
function capitalize(word: string) {
|
||||
if (!word) return word;
|
||||
return word.charAt(0).toUpperCase() + word.slice(1);
|
||||
}
|
||||
|
||||
function fallbackLabelFromKey(key: string): string {
|
||||
const parts = tokenizeKey(key);
|
||||
if (!parts.length) return key;
|
||||
|
||||
if (parts.length > 1 && parts[parts.length - 1] === "id") {
|
||||
const base = parts.slice(0, -1).map((p) => TOKEN_TRANSLATIONS[p] || capitalize(p)).join(" ");
|
||||
return `${base} ID`.trim();
|
||||
}
|
||||
|
||||
return parts
|
||||
.map((p) => TOKEN_TRANSLATIONS[p] || capitalize(p))
|
||||
.join(" ")
|
||||
.replace(/\s+/g, " ")
|
||||
.trim();
|
||||
}
|
||||
|
||||
export function getDiffLabel(key: string): string {
|
||||
return diffTranslations[key]?.label || fallbackLabelFromKey(key);
|
||||
}
|
||||
|
||||
export const diffTranslations: Record<
|
||||
string,
|
||||
{ label: string; resolve?: ValueResolver }
|
||||
@@ -44,7 +187,7 @@ export const diffTranslations: Record<
|
||||
}),
|
||||
},
|
||||
resources: {
|
||||
label: "Resourcen",
|
||||
label: "Ressourcen",
|
||||
resolve: (o, n) => ({
|
||||
oldVal: Array.isArray(o) ? o.map((i: any) => i.title).join(", ") : "-",
|
||||
newVal: Array.isArray(n) ? n.map((i: any) => i.title).join(", ") : "-",
|
||||
@@ -86,10 +229,18 @@ export const diffTranslations: Record<
|
||||
approved: { label: "Genehmigt" },
|
||||
manufacturer: { label: "Hersteller" },
|
||||
purchasePrice: { label: "Kaufpreis" },
|
||||
markupPercentage: { label: "Verkaufsaufschlag in %" },
|
||||
markup_percentage: { label: "Verkaufsaufschlag in %" },
|
||||
sellingPrice: { label: "Verkaufspreis" },
|
||||
selling_price: { label: "Verkaufspreis" },
|
||||
sellingPriceComposed: { label: "Verkaufspreis Zusammensetzung" },
|
||||
purchaseDate: { label: "Kaufdatum" },
|
||||
serialNumber: { label: "Seriennummer" },
|
||||
customerInventoryId: { label: "Kundeninventar-ID" },
|
||||
customerinventoryitems: { label: "Kundeninventar" },
|
||||
usePlanning: { label: "In Plantafel verwenden" },
|
||||
currentSpace: { label: "Lagerplatz" },
|
||||
customerspace: { label: "Kundenlagerplatz" },
|
||||
|
||||
customer: {
|
||||
label: "Kunde",
|
||||
@@ -108,6 +259,7 @@ export const diffTranslations: Record<
|
||||
|
||||
description: { label: "Beschreibung" },
|
||||
categorie: { label: "Kategorie" },
|
||||
category: { label: "Kategorie" },
|
||||
|
||||
profile: {
|
||||
label: "Mitarbeiter",
|
||||
@@ -147,6 +299,8 @@ export const diffTranslations: Record<
|
||||
},
|
||||
|
||||
projecttype: { label: "Projekttyp" },
|
||||
contracttype: { label: "Vertragstyp" },
|
||||
billingInterval: { label: "Abrechnungsintervall" },
|
||||
|
||||
fixed: {
|
||||
label: "Festgeschrieben",
|
||||
|
||||
@@ -301,7 +301,7 @@ export async function buildExportZip(
|
||||
else if(account.taxType === '7I') buschluessel = "18";
|
||||
else buschluessel = "-";
|
||||
|
||||
let amountGross = account.amountGross ? account.amountGross : (account.amountNet || 0) + (account.amountTax || 0);
|
||||
let amountGross =/* account.amountGross ? account.amountGross : */(account.amountNet || 0) + (account.amountTax || 0);
|
||||
let shSelector = Math.sign(amountGross) === -1 ? "H" : "S";
|
||||
let text = `ER ${ii.reference}: ${escapeString(ii.description)}`.substring(0,59);
|
||||
const vend = ii.vendor; // durch Mapping verfügbar
|
||||
@@ -325,27 +325,27 @@ export async function buildExportZip(
|
||||
if(alloc.createddocument && alloc.createddocument.customer) {
|
||||
const cd = alloc.createddocument;
|
||||
const cust = cd.customer;
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"H";;;;;${cust?.customerNumber};${datevKonto};"3";${dayjs(cd.documentDate).format("DDMM")};"${cd.documentNumber}";;;"${`ZE${alloc.description}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${cust?.name}";"Kundennummer";"${cust?.customerNumber}";"Belegnummer";"${cd.documentNumber}";"Leistungsdatum";"${dayjs(cd.deliveryDate).format("DD.MM.YYYY")}";"Belegdatum";"${dayjs(cd.documentDate).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"H";;;;;${cust?.customerNumber};${datevKonto};"3";${dayjs(cd.documentDate).format("DDMM")};"${cd.documentNumber}";;;"${`ZE${alloc.description}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${cust?.name}";"Kundennummer";"${cust?.customerNumber}";"Belegnummer";"${cd.documentNumber}";"Leistungsdatum";"${dayjs(cd.deliveryDate).format("DD.MM.YYYY")}";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
|
||||
} else if(alloc.incominginvoice && alloc.incominginvoice.vendor) {
|
||||
const ii = alloc.incominginvoice;
|
||||
const vend = ii.vendor;
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${vend?.vendorNumber};"";${dayjs(ii.date).format("DDMM")};"${ii.reference}";;;"${`ZA${alloc.description} ${bsText} `.substring(0,59)}";;;;;;;"Geschäftspartner";"${vend?.name}";"Kundennummer";"${vend?.vendorNumber}";"Belegnummer";"${ii.reference}";"Leistungsdatum";"${dayjs(ii.date).format("DD.MM.YYYY")}";"Belegdatum";"${dayjs(ii.date).format("DD.MM.YYYY")}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${vend?.vendorNumber};"";${dayjs(ii.date).format("DDMM")};"${ii.reference}";;;"${`ZA${alloc.description} ${bsText} `.substring(0,59)}";;;;;;;"Geschäftspartner";"${vend?.name}";"Kundennummer";"${vend?.vendorNumber}";"Belegnummer";"${ii.reference}";"Leistungsdatum";"${dayjs(ii.date).format("DD.MM.YYYY")}";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
|
||||
} else if(alloc.account) {
|
||||
const acc = alloc.account;
|
||||
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${acc.number};"";${dateVal};"";;;"${`${vorzeichen} ${acc.number} - ${escapeString(acc.label)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${bs.credName || ''}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${acc.number};"";${dateVal};"";;;"${`${vorzeichen} ${acc.number} - ${escapeString(acc.label)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${bs.credName || ''}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
|
||||
} else if(alloc.vendor) {
|
||||
const vend = alloc.vendor;
|
||||
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${vend.vendorNumber};"";${dateVal};"";;;"${`${vorzeichen} ${vend.vendorNumber} - ${escapeString(vend.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${vend.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${vend.vendorNumber};"";${dateVal};"";;;"${`${vorzeichen} ${vend.vendorNumber} - ${escapeString(vend.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${vend.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
|
||||
} else if(alloc.customer) {
|
||||
const cust = alloc.customer;
|
||||
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${cust.customerNumber};"";${dateVal};"";;;"${`${vorzeichen} ${cust.customerNumber} - ${escapeString(cust.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${cust.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${cust.customerNumber};"";${dateVal};"";;;"${`${vorzeichen} ${cust.customerNumber} - ${escapeString(cust.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${cust.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
|
||||
} else if(alloc.ownaccount) {
|
||||
const own = alloc.ownaccount;
|
||||
let vorzeichen = Math.sign(alloc.amount) > 0 ? "ZE" : "ZA";
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${own.number};"";${dateVal};"";;;"${`${vorzeichen} ${own.number} - ${escapeString(own.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${own.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;;`);
|
||||
bookingLines.push(`${displayCurrency(alloc.amount,true)};"${shSelector}";;;;;${datevKonto};${own.number};"";${dateVal};"";;;"${`${vorzeichen} ${own.number} - ${escapeString(own.name)}${escapeString(alloc.description)}${bsText}`.substring(0,59)}";;;;;;;"Geschäftspartner";"${own.name}";"Kundennummer";"";"Belegnummer";"";"Leistungsdatum";"";"Belegdatum";"${dateFull}";;;;;;;;;;"";;;;;;;;Bank-Id;${alloc.bankstatement.id};;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0;;;;"";;;;;;`);
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
@@ -1,12 +1,25 @@
|
||||
import xmlbuilder from "xmlbuilder";
|
||||
import {randomUUID} from "node:crypto";
|
||||
import dayjs from "dayjs";
|
||||
import { and, eq, inArray } from "drizzle-orm";
|
||||
import { createddocuments, tenants } from "../../../db/schema";
|
||||
|
||||
export const createSEPAExport = async (server,idsToExport, tenant_id) => {
|
||||
const {data,error} = await server.supabase.from("createddocuments").select().eq("tenant", tenant_id).in("id", idsToExport)
|
||||
const {data:tenantData,error:tenantError} = await server.supabase.from("tenants").select().eq("id", tenant_id).single()
|
||||
const data = await server.db
|
||||
.select()
|
||||
.from(createddocuments)
|
||||
.where(and(
|
||||
eq(createddocuments.tenant, tenant_id),
|
||||
inArray(createddocuments.id, idsToExport)
|
||||
))
|
||||
|
||||
const tenantRows = await server.db
|
||||
.select()
|
||||
.from(tenants)
|
||||
.where(eq(tenants.id, tenant_id))
|
||||
.limit(1)
|
||||
const tenantData = tenantRows[0]
|
||||
console.log(tenantData)
|
||||
console.log(tenantError)
|
||||
|
||||
console.log(data)
|
||||
|
||||
|
||||
@@ -1,11 +1,8 @@
|
||||
import {FastifyInstance} from "fastify";
|
||||
// import { PNG } from 'pngjs'
|
||||
// import { ready as zplReady } from 'zpl-renderer-js'
|
||||
// import { Utils } from '@mmote/niimbluelib'
|
||||
// import { createCanvas } from 'canvas'
|
||||
// import bwipjs from 'bwip-js'
|
||||
// import Sharp from 'sharp'
|
||||
// import fs from 'fs'
|
||||
import { FastifyInstance } from "fastify"
|
||||
import { PNG } from "pngjs"
|
||||
import { Utils } from "@mmote/niimbluelib"
|
||||
import bwipjs from "bwip-js"
|
||||
import Sharp from "sharp"
|
||||
|
||||
import { tenants } from "../../db/schema"
|
||||
import { eq } from "drizzle-orm"
|
||||
@@ -15,7 +12,6 @@ export const useNextNumberRangeNumber = async (
|
||||
tenantId: number,
|
||||
numberRange: string
|
||||
) => {
|
||||
// 1️⃣ Tenant laden
|
||||
const [tenant] = await server.db
|
||||
.select()
|
||||
.from(tenants)
|
||||
@@ -33,23 +29,20 @@ export const useNextNumberRangeNumber = async (
|
||||
|
||||
const current = numberRanges[numberRange]
|
||||
|
||||
// 2️⃣ Used Number generieren
|
||||
const usedNumber =
|
||||
(current.prefix || "") +
|
||||
current.nextNumber +
|
||||
(current.suffix || "")
|
||||
|
||||
// 3️⃣ nextNumber erhöhen
|
||||
const updatedRanges = {
|
||||
// @ts-ignore
|
||||
...numberRanges,
|
||||
[numberRange]: {
|
||||
...current,
|
||||
nextNumber: current.nextNumber + 1
|
||||
}
|
||||
nextNumber: current.nextNumber + 1,
|
||||
},
|
||||
}
|
||||
|
||||
// 4️⃣ Tenant aktualisieren
|
||||
await server.db
|
||||
.update(tenants)
|
||||
.set({ numberRanges: updatedRanges })
|
||||
@@ -58,24 +51,17 @@ export const useNextNumberRangeNumber = async (
|
||||
return { usedNumber }
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
export async function encodeBase64ToNiimbot(base64Png, printDirection = 'top') {
|
||||
// 1️⃣ PNG dekodieren
|
||||
const buffer = Buffer.from(base64Png, 'base64')
|
||||
const png = PNG.sync.read(buffer) // liefert {width, height, data: Uint8Array(RGBA)}
|
||||
export async function encodeBase64ToNiimbot(base64Png: string, printDirection: "top" | "left" = "top") {
|
||||
const buffer = Buffer.from(base64Png, "base64")
|
||||
const png = PNG.sync.read(buffer)
|
||||
|
||||
const { width, height, data } = png
|
||||
console.log(width, height, data)
|
||||
const cols = printDirection === 'left' ? height : width
|
||||
const rows = printDirection === 'left' ? width : height
|
||||
const rowsData = []
|
||||
const cols = printDirection === "left" ? height : width
|
||||
const rows = printDirection === "left" ? width : height
|
||||
const rowsData: any[] = []
|
||||
|
||||
console.log(cols)
|
||||
if (cols % 8 !== 0) throw new Error("Column count must be multiple of 8")
|
||||
|
||||
if (cols % 8 !== 0) throw new Error('Column count must be multiple of 8')
|
||||
|
||||
// 2️⃣ Zeilenweise durchgehen und Bits bilden
|
||||
for (let row = 0; row < rows; row++) {
|
||||
let isVoid = true
|
||||
let blackPixelsCount = 0
|
||||
@@ -84,8 +70,8 @@ export async function encodeBase64ToNiimbot(base64Png, printDirection = 'top') {
|
||||
for (let colOct = 0; colOct < cols / 8; colOct++) {
|
||||
let pixelsOctet = 0
|
||||
for (let colBit = 0; colBit < 8; colBit++) {
|
||||
const x = printDirection === 'left' ? row : colOct * 8 + colBit
|
||||
const y = printDirection === 'left' ? height - 1 - (colOct * 8 + colBit) : row
|
||||
const x = printDirection === "left" ? row : colOct * 8 + colBit
|
||||
const y = printDirection === "left" ? height - 1 - (colOct * 8 + colBit) : row
|
||||
const idx = (y * width + x) * 4
|
||||
const lum = 0.299 * data[idx] + 0.587 * data[idx + 1] + 0.114 * data[idx + 2]
|
||||
const isBlack = lum < 128
|
||||
@@ -99,7 +85,7 @@ export async function encodeBase64ToNiimbot(base64Png, printDirection = 'top') {
|
||||
}
|
||||
|
||||
const newPart = {
|
||||
dataType: isVoid ? 'void' : 'pixels',
|
||||
dataType: isVoid ? "void" : "pixels",
|
||||
rowNumber: row,
|
||||
repeat: 1,
|
||||
rowData: isVoid ? undefined : rowData,
|
||||
@@ -111,14 +97,15 @@ export async function encodeBase64ToNiimbot(base64Png, printDirection = 'top') {
|
||||
} else {
|
||||
const last = rowsData[rowsData.length - 1]
|
||||
let same = newPart.dataType === last.dataType
|
||||
if (same && newPart.dataType === 'pixels') {
|
||||
if (same && newPart.dataType === "pixels") {
|
||||
same = Utils.u8ArraysEqual(newPart.rowData, last.rowData)
|
||||
}
|
||||
if (same) last.repeat++
|
||||
else rowsData.push(newPart)
|
||||
|
||||
if (row % 200 === 199) {
|
||||
rowsData.push({
|
||||
dataType: 'check',
|
||||
dataType: "check",
|
||||
rowNumber: row,
|
||||
repeat: 0,
|
||||
rowData: undefined,
|
||||
@@ -131,44 +118,69 @@ export async function encodeBase64ToNiimbot(base64Png, printDirection = 'top') {
|
||||
return { cols, rows, rowsData }
|
||||
}
|
||||
|
||||
export async function generateLabel(context,width,height) {
|
||||
// Canvas für Hintergrund & Text
|
||||
const canvas = createCanvas(width, height)
|
||||
const ctx = canvas.getContext('2d')
|
||||
function escapeXml(value: string) {
|
||||
return String(value)
|
||||
.replace(/&/g, "&")
|
||||
.replace(/</g, "<")
|
||||
.replace(/>/g, ">")
|
||||
.replace(/\"/g, """)
|
||||
.replace(/'/g, "'")
|
||||
}
|
||||
|
||||
// Hintergrund weiß
|
||||
ctx.fillStyle = '#FFFFFF'
|
||||
ctx.fillRect(0, 0, width, height)
|
||||
export async function generateLabel(context: any = {}, width = 584, height = 354) {
|
||||
const normalizedWidth = Math.ceil(Number(width) / 8) * 8
|
||||
const normalizedHeight = Math.max(1, Number(height) || 203)
|
||||
|
||||
// Überschrift
|
||||
ctx.fillStyle = '#000000'
|
||||
ctx.font = '32px Arial'
|
||||
ctx.fillText(context.text, 20, 40)
|
||||
const idFont = Math.max(24, Math.round(normalizedHeight * 0.125))
|
||||
const nameFont = Math.max(17, Math.round(normalizedHeight * 0.078))
|
||||
const customerFont = Math.max(14, Math.round(normalizedHeight * 0.06))
|
||||
const serialFont = Math.max(12, Math.round(normalizedHeight * 0.052))
|
||||
|
||||
const labelId = context.customerInventoryId || context.datamatrix || context.id || "N/A"
|
||||
const labelName = context.name || context.text || "Kundeninventarartikel"
|
||||
const customerName = context.customerName || ""
|
||||
const serial = context.serialNumber ? `SN: ${context.serialNumber}` : ""
|
||||
const nameLine1 = String(labelName).slice(0, 30)
|
||||
const nameLine2 = String(labelName).slice(30, 60)
|
||||
|
||||
// 3) DataMatrix
|
||||
const dataMatrixPng = await bwipjs.toBuffer({
|
||||
bcid: 'datamatrix',
|
||||
text: context.datamatrix,
|
||||
scale: 6,
|
||||
bcid: "datamatrix",
|
||||
text: String(labelId),
|
||||
scale: normalizedWidth >= 560 ? 7 : 5,
|
||||
includetext: false,
|
||||
})
|
||||
const dataMatrixMeta = await Sharp(dataMatrixPng).metadata()
|
||||
const dataMatrixWidth = dataMatrixMeta.width || 0
|
||||
const dataMatrixHeight = dataMatrixMeta.height || 0
|
||||
const dmLeft = Math.max(8, normalizedWidth - dataMatrixWidth - 28)
|
||||
const dmTop = Math.max(8, Math.floor((normalizedHeight - dataMatrixHeight) / 2))
|
||||
const textMaxWidth = Math.max(120, dmLeft - 20)
|
||||
|
||||
// Basisbild aus Canvas
|
||||
const base = await Sharp(canvas.toBuffer())
|
||||
.png()
|
||||
.toBuffer()
|
||||
const textSvg = `
|
||||
<svg width="${normalizedWidth}" height="${normalizedHeight}" xmlns="http://www.w3.org/2000/svg">
|
||||
<rect width="100%" height="100%" fill="white"/>
|
||||
<text x="12" y="${Math.round(normalizedHeight * 0.15)}" font-size="${idFont}" font-family="Arial, Helvetica, sans-serif" font-weight="700" fill="black">${escapeXml(String(labelId).slice(0, 26))}</text>
|
||||
<text x="12" y="${Math.round(normalizedHeight * 0.29)}" font-size="${nameFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(nameLine1)}</text>
|
||||
<text x="12" y="${Math.round(normalizedHeight * 0.37)}" font-size="${nameFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(nameLine2)}</text>
|
||||
<text x="12" y="${Math.round(normalizedHeight * 0.49)}" font-size="${customerFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(String(customerName).slice(0, 40))}</text>
|
||||
<text x="12" y="${Math.round(normalizedHeight * 0.58)}" font-size="${serialFont}" font-family="Arial, Helvetica, sans-serif" fill="black">${escapeXml(String(serial).slice(0, 42))}</text>
|
||||
<rect x="0" y="0" width="${textMaxWidth}" height="${normalizedHeight}" fill="none"/>
|
||||
</svg>`.trim()
|
||||
|
||||
// Alles zusammen compositen
|
||||
const final = await Sharp(base)
|
||||
const final = await Sharp({
|
||||
create: {
|
||||
width: normalizedWidth,
|
||||
height: normalizedHeight,
|
||||
channels: 3,
|
||||
background: { r: 255, g: 255, b: 255 },
|
||||
},
|
||||
})
|
||||
.composite([
|
||||
{ input: dataMatrixPng, top: 60, left: 20 },
|
||||
{ input: Buffer.from(textSvg), top: 0, left: 0 },
|
||||
{ input: dataMatrixPng, top: dmTop, left: dmLeft },
|
||||
])
|
||||
.png()
|
||||
.toBuffer()
|
||||
|
||||
fs.writeFileSync('label.png', final)
|
||||
|
||||
// Optional: Base64 zurückgeben (z.B. für API)
|
||||
const base64 = final.toString('base64')
|
||||
|
||||
return base64
|
||||
}*/
|
||||
return final.toString("base64")
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ import { s3 } from "./s3";
|
||||
import { secrets } from "./secrets";
|
||||
|
||||
// Drizzle schema
|
||||
import { vendors, accounts } from "../../db/schema";
|
||||
import { vendors, accounts, tenants } from "../../db/schema";
|
||||
import {eq} from "drizzle-orm";
|
||||
|
||||
let openai: OpenAI | null = null;
|
||||
@@ -86,12 +86,13 @@ const InstructionFormat = z.object({
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// MAIN FUNCTION – REPLACES SUPABASE VERSION
|
||||
// MAIN FUNCTION
|
||||
// ---------------------------------------------------------
|
||||
export const getInvoiceDataFromGPT = async function (
|
||||
server: FastifyInstance,
|
||||
file: any,
|
||||
tenantId: number
|
||||
tenantId: number,
|
||||
learningContext?: string
|
||||
) {
|
||||
await initOpenAi();
|
||||
|
||||
@@ -162,13 +163,22 @@ export const getInvoiceDataFromGPT = async function (
|
||||
.from(vendors)
|
||||
.where(eq(vendors.tenant,tenantId));
|
||||
|
||||
const [tenant] = await server.db
|
||||
.select({ accountChart: tenants.accountChart })
|
||||
.from(tenants)
|
||||
.where(eq(tenants.id, tenantId))
|
||||
.limit(1)
|
||||
|
||||
const activeAccountChart = tenant?.accountChart || "skr03"
|
||||
|
||||
const accountList = await server.db
|
||||
.select({
|
||||
id: accounts.id,
|
||||
label: accounts.label,
|
||||
number: accounts.number,
|
||||
})
|
||||
.from(accounts);
|
||||
.from(accounts)
|
||||
.where(eq(accounts.accountChart, activeAccountChart));
|
||||
|
||||
// ---------------------------------------------------------
|
||||
// 4) GPT ANALYSIS
|
||||
@@ -188,8 +198,13 @@ export const getInvoiceDataFromGPT = async function (
|
||||
"You extract structured invoice data.\n\n" +
|
||||
`VENDORS: ${JSON.stringify(vendorList)}\n` +
|
||||
`ACCOUNTS: ${JSON.stringify(accountList)}\n\n` +
|
||||
(learningContext
|
||||
? `HISTORICAL_PATTERNS: ${learningContext}\n\n`
|
||||
: "") +
|
||||
"Match issuer by name to vendor.id.\n" +
|
||||
"Match invoice items to account id based on label/number.\n" +
|
||||
"Use historical patterns as soft hints for vendor/account/payment mapping.\n" +
|
||||
"Do not invent values when the invoice text contradicts the hints.\n" +
|
||||
"Convert dates to YYYY-MM-DD.\n" +
|
||||
"Keep invoice items in original order.\n",
|
||||
},
|
||||
|
||||
@@ -1,4 +1,42 @@
|
||||
import { FastifyInstance } from "fastify"
|
||||
import { historyitems } from "../../db/schema";
|
||||
|
||||
const HISTORY_ENTITY_LABELS: Record<string, string> = {
|
||||
customers: "Kunden",
|
||||
members: "Mitglieder",
|
||||
vendors: "Lieferanten",
|
||||
projects: "Projekte",
|
||||
plants: "Objekte",
|
||||
contacts: "Kontakte",
|
||||
inventoryitems: "Inventarartikel",
|
||||
customerinventoryitems: "Kundeninventar",
|
||||
products: "Artikel",
|
||||
profiles: "Mitarbeiter",
|
||||
absencerequests: "Abwesenheiten",
|
||||
events: "Termine",
|
||||
tasks: "Aufgaben",
|
||||
vehicles: "Fahrzeuge",
|
||||
costcentres: "Kostenstellen",
|
||||
ownaccounts: "zusätzliche Buchungskonten",
|
||||
documentboxes: "Dokumentenboxen",
|
||||
hourrates: "Stundensätze",
|
||||
services: "Leistungen",
|
||||
roles: "Rollen",
|
||||
checks: "Überprüfungen",
|
||||
spaces: "Lagerplätze",
|
||||
customerspaces: "Kundenlagerplätze",
|
||||
trackingtrips: "Fahrten",
|
||||
createddocuments: "Dokumente",
|
||||
inventoryitemgroups: "Inventarartikelgruppen",
|
||||
bankstatements: "Buchungen",
|
||||
incominginvoices: "Eingangsrechnungen",
|
||||
files: "Dateien",
|
||||
memberrelations: "Mitgliedsverhältnisse",
|
||||
}
|
||||
|
||||
export function getHistoryEntityLabel(entity: string) {
|
||||
return HISTORY_ENTITY_LABELS[entity] || entity
|
||||
}
|
||||
|
||||
export async function insertHistoryItem(
|
||||
server: FastifyInstance,
|
||||
@@ -13,15 +51,18 @@ export async function insertHistoryItem(
|
||||
text?: string
|
||||
}
|
||||
) {
|
||||
const entityLabel = getHistoryEntityLabel(params.entity)
|
||||
const textMap = {
|
||||
created: `Neuer Eintrag in ${params.entity} erstellt`,
|
||||
updated: `Eintrag in ${params.entity} geändert`,
|
||||
archived: `Eintrag in ${params.entity} archiviert`,
|
||||
deleted: `Eintrag in ${params.entity} gelöscht`
|
||||
created: `Neuer Eintrag in ${entityLabel} erstellt`,
|
||||
updated: `Eintrag in ${entityLabel} geändert`,
|
||||
unchanged: `Eintrag in ${entityLabel} unverändert`,
|
||||
archived: `Eintrag in ${entityLabel} archiviert`,
|
||||
deleted: `Eintrag in ${entityLabel} gelöscht`
|
||||
}
|
||||
|
||||
const columnMap: Record<string, string> = {
|
||||
customers: "customer",
|
||||
members: "customer",
|
||||
vendors: "vendor",
|
||||
projects: "project",
|
||||
plants: "plant",
|
||||
@@ -41,10 +82,15 @@ export async function insertHistoryItem(
|
||||
roles: "role",
|
||||
checks: "check",
|
||||
spaces: "space",
|
||||
customerspaces: "customerspace",
|
||||
customerinventoryitems: "customerinventoryitem",
|
||||
trackingtrips: "trackingtrip",
|
||||
createddocuments: "createddocument",
|
||||
inventoryitemgroups: "inventoryitemgroup",
|
||||
bankstatements: "bankstatement"
|
||||
bankstatements: "bankstatement",
|
||||
incominginvoices: "incomingInvoice",
|
||||
files: "file",
|
||||
memberrelations: "memberrelation",
|
||||
}
|
||||
|
||||
const fkColumn = columnMap[params.entity]
|
||||
@@ -53,18 +99,20 @@ export async function insertHistoryItem(
|
||||
return
|
||||
}
|
||||
|
||||
const stringifyHistoryValue = (value: any) => {
|
||||
if (value === undefined || value === null) return null
|
||||
return typeof value === "string" ? value : JSON.stringify(value)
|
||||
}
|
||||
|
||||
const entry = {
|
||||
tenant: params.tenant_id,
|
||||
created_by: params.created_by,
|
||||
createdBy: params.created_by,
|
||||
text: params.text || textMap[params.action],
|
||||
action: params.action,
|
||||
[fkColumn]: params.entityId,
|
||||
oldVal: params.oldVal ? JSON.stringify(params.oldVal) : null,
|
||||
newVal: params.newVal ? JSON.stringify(params.newVal) : null
|
||||
oldVal: stringifyHistoryValue(params.oldVal),
|
||||
newVal: stringifyHistoryValue(params.newVal)
|
||||
}
|
||||
|
||||
const { error } = await server.supabase.from("historyitems").insert([entry])
|
||||
if (error) { // @ts-ignore
|
||||
console.log(error)
|
||||
}
|
||||
await server.db.insert(historyitems).values(entry as any)
|
||||
}
|
||||
|
||||
@@ -2,6 +2,9 @@ import {PDFDocument, StandardFonts, rgb} from "pdf-lib"
|
||||
import dayjs from "dayjs"
|
||||
import {renderAsCurrency, splitStringBySpace} from "./stringRendering";
|
||||
import {FastifyInstance} from "fastify";
|
||||
import { GetObjectCommand } from "@aws-sdk/client-s3";
|
||||
import { s3 } from "./s3";
|
||||
import { secrets } from "./secrets";
|
||||
|
||||
const getCoordinatesForPDFLib = (x:number ,y:number, page:any) => {
|
||||
/*
|
||||
@@ -25,9 +28,21 @@ const getCoordinatesForPDFLib = (x:number ,y:number, page:any) => {
|
||||
|
||||
const getBackgroundSourceBuffer = async (server:FastifyInstance, path:string) => {
|
||||
|
||||
const {data:backgroundPDFData,error:backgroundPDFError} = await server.supabase.storage.from("files").download(path)
|
||||
console.log(path)
|
||||
|
||||
return backgroundPDFData.arrayBuffer()
|
||||
const { Body } = await s3.send(
|
||||
new GetObjectCommand({
|
||||
Bucket: secrets.S3_BUCKET,
|
||||
Key: path
|
||||
})
|
||||
)
|
||||
|
||||
const chunks: Buffer[] = []
|
||||
for await (const chunk of Body as any) {
|
||||
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk))
|
||||
}
|
||||
|
||||
return Buffer.concat(chunks)
|
||||
}
|
||||
|
||||
const getDuration = (time) => {
|
||||
|
||||
@@ -3,10 +3,14 @@ import {
|
||||
bankaccounts,
|
||||
bankrequisitions,
|
||||
bankstatements,
|
||||
entitybankaccounts,
|
||||
contacts,
|
||||
contracts,
|
||||
contracttypes,
|
||||
costcentres,
|
||||
createddocuments,
|
||||
customerinventoryitems,
|
||||
customerspaces,
|
||||
customers,
|
||||
files,
|
||||
filetags,
|
||||
@@ -16,6 +20,7 @@ import {
|
||||
inventoryitemgroups,
|
||||
inventoryitems,
|
||||
letterheads,
|
||||
memberrelations,
|
||||
ownaccounts,
|
||||
plants,
|
||||
productcategories,
|
||||
@@ -43,10 +48,21 @@ export const resourceConfig = {
|
||||
numberRangeHolder: "projectNumber"
|
||||
},
|
||||
customers: {
|
||||
searchColumns: ["name", "customerNumber", "firstname", "lastname", "notes"],
|
||||
searchColumns: ["name", "nameAddition", "customerNumber", "firstname", "lastname", "notes"],
|
||||
mtmLoad: ["contacts","projects","plants","createddocuments","contracts","customerinventoryitems","customerspaces"],
|
||||
table: customers,
|
||||
numberRangeHolder: "customerNumber",
|
||||
},
|
||||
members: {
|
||||
searchColumns: ["name", "nameAddition", "customerNumber", "firstname", "lastname", "notes"],
|
||||
mtmLoad: ["contacts","projects","plants","createddocuments","contracts"],
|
||||
table: customers,
|
||||
numberRangeHolder: "customerNumber",
|
||||
relationKey: "customer",
|
||||
},
|
||||
memberrelations: {
|
||||
table: memberrelations,
|
||||
searchColumns: ["type", "billingInterval"],
|
||||
},
|
||||
contacts: {
|
||||
searchColumns: ["firstName", "lastName", "email", "phone", "notes"],
|
||||
@@ -55,9 +71,13 @@ export const resourceConfig = {
|
||||
},
|
||||
contracts: {
|
||||
table: contracts,
|
||||
searchColumns: ["name", "notes", "contractNumber", "paymentType", "sepaRef", "bankingName"],
|
||||
searchColumns: ["name", "notes", "contractNumber", "paymentType", "billingInterval", "sepaRef", "bankingName"],
|
||||
numberRangeHolder: "contractNumber",
|
||||
mtoLoad: ["customer"],
|
||||
mtoLoad: ["customer", "contracttype"],
|
||||
},
|
||||
contracttypes: {
|
||||
table: contracttypes,
|
||||
searchColumns: ["name", "description", "paymentType", "billingInterval"],
|
||||
},
|
||||
plants: {
|
||||
table: plants,
|
||||
@@ -86,6 +106,12 @@ export const resourceConfig = {
|
||||
table: inventoryitems,
|
||||
numberRangeHolder: "articleNumber",
|
||||
},
|
||||
customerinventoryitems: {
|
||||
table: customerinventoryitems,
|
||||
numberRangeHolder: "customerInventoryId",
|
||||
mtoLoad: ["customer", "customerspace", "product", "vendor"],
|
||||
searchColumns: ["name", "customerInventoryId", "serialNumber", "description", "manufacturer", "manufacturerNumber"],
|
||||
},
|
||||
inventoryitemgroups: {
|
||||
table: inventoryitemgroups
|
||||
},
|
||||
@@ -120,6 +146,13 @@ export const resourceConfig = {
|
||||
searchColumns: ["name","space_number","type","info_data"],
|
||||
numberRangeHolder: "spaceNumber",
|
||||
},
|
||||
customerspaces: {
|
||||
table: customerspaces,
|
||||
searchColumns: ["name","space_number","type","info_data","description"],
|
||||
numberRangeHolder: "space_number",
|
||||
mtoLoad: ["customer"],
|
||||
mtmLoad: ["customerinventoryitems"],
|
||||
},
|
||||
ownaccounts: {
|
||||
table: ownaccounts,
|
||||
searchColumns: ["name","description","number"],
|
||||
@@ -170,6 +203,10 @@ export const resourceConfig = {
|
||||
bankrequisitions: {
|
||||
table: bankrequisitions,
|
||||
},
|
||||
entitybankaccounts: {
|
||||
table: entitybankaccounts,
|
||||
searchColumns: ["description"],
|
||||
},
|
||||
serialexecutions: {
|
||||
table: serialExecutions
|
||||
}
|
||||
|
||||
@@ -14,8 +14,6 @@ export let secrets = {
|
||||
PORT: number
|
||||
HOST: string
|
||||
DATABASE_URL: string
|
||||
SUPABASE_URL: string
|
||||
SUPABASE_SERVICE_ROLE_KEY: string
|
||||
S3_BUCKET: string
|
||||
ENCRYPTION_KEY: string
|
||||
MAILER_SMTP_HOST: string
|
||||
|
||||
@@ -1,37 +1,70 @@
|
||||
version: "3"
|
||||
|
||||
services:
|
||||
web:
|
||||
image: reg.federspiel.software/fedeo/software:beta
|
||||
frontend:
|
||||
image: git.federspiel.tech/flfeders/fedeo/frontend:dev
|
||||
restart: always
|
||||
environment:
|
||||
- INFISICAL_CLIENT_ID=abc
|
||||
- INFISICAL_CLIENT_SECRET=abc
|
||||
- NUXT_PUBLIC_API_BASE=https://app.fedeo.de/backend
|
||||
- NUXT_PUBLIC_PDF_LICENSE=eyJkYXRhIjoiZXlKMElqb2laR1YyWld4dmNHVnlJaXdpWVhaMUlqb3hOemt3TmpNNU9UazVMQ0prYlNJNkltRndjQzVtWldSbGJ5NWtaU0lzSW00aU9pSXpOemt3Wm1Vek5UazBZbVU0TlRRNElpd2laWGh3SWpveE56a3dOak01T1RrNUxDSmtiWFFpT2lKemNHVmphV1pwWXlJc0luQWlPaUoyYVdWM1pYSWlmUT09Iiwic2lnbmF0dXJlIjoicWU4K0ZxQUJDNUp5bEJUU094Vkd5RTJMbk9UNmpyc2EyRStsN2tNNWhkM21KK2ZvVjYwaTFKeFdhZGtqSDRNWXZxQklMc0dpdWh5d2pMbUFjRHZuWGxOcTRMcXFLRm53dzVtaG1LK3lTeDRXbzVaS1loK1VZdFBzWUZjV3oyUHVGMmJraGJrVjJ6RzRlTGtRU09wdmJKY3JUZU1rN0N1VkN6Q1UraHF5T0ZVVXllWnRmaHlmcWswZEFFL0RMR1hvTDFSQXFjNkNkYU9FTDRTdC9Idy9DQnFieTE2aisvT3RxQUlLcy9NWTR6SVk3RTI3bWo4RUx5VjhXNkdXNXhqc0VUVzNKN0RRMUVlb3RhVlNLT29kc3pVRlhUYzVlbHVuSm04ZlcwM1ErMUhtSnpmWGoyS1dwM1dnamJDazZYSHozamFML2lOdUYvZFZNaWYvc2FoR3NnPT0ifQ==
|
||||
networks:
|
||||
- traefik
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.docker.network=traefik"
|
||||
- "traefik.port=3000"
|
||||
# Middlewares
|
||||
- "traefik.http.middlewares.fedeo-frontend-redirect-web-secure.redirectscheme.scheme=https"
|
||||
# Web Entrypoint
|
||||
- "traefik.http.routers.fedeo-frontend.middlewares=fedeo-frontend-redirect-web-secure"
|
||||
- "traefik.http.routers.fedeo-frontend.rule=Host(`app.fedeo.de`) && PathPrefix(`/`)"
|
||||
- "traefik.http.routers.fedeo-frontend.entrypoints=web"
|
||||
# Web Secure Entrypoint
|
||||
- "traefik.http.routers.fedeo-frontend-secure.rule=Host(`app.fedeo.de`) && PathPrefix(`/`)"
|
||||
- "traefik.http.routers.fedeo-frontend-secure.entrypoints=web-secured" #
|
||||
- "traefik.http.routers.fedeo-frontend-secure.tls.certresolver=mytlschallenge"
|
||||
backend:
|
||||
image: reg.federspiel.software/fedeo/backend:main
|
||||
image: git.federspiel.tech/flfeders/fedeo/backend:dev
|
||||
restart: always
|
||||
environment:
|
||||
- NUXT_PUBLIC_API_BASE=
|
||||
- NUXT_PUBLIC_PDF_LICENSE=
|
||||
db:
|
||||
image: postgres
|
||||
restart: always
|
||||
shm_size: 128mb
|
||||
environment:
|
||||
POSTGRES_PASSWORD: abc
|
||||
POSTGRES_USER: sandelcom
|
||||
POSTGRES_DB: sensorfy
|
||||
volumes:
|
||||
- ./pg-data:/var/lib/postgresql/data
|
||||
ports:
|
||||
- "5432:5432"
|
||||
- INFISICAL_CLIENT_ID=a6838bd6-9983-4bf4-9be2-ace830b9abdf
|
||||
- INFISICAL_CLIENT_SECRET=4e3441acc0adbffd324aa50e668a95a556a3f55ec6bb85954e176e35a3392003
|
||||
- NODE_ENV=production
|
||||
networks:
|
||||
- traefik
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.docker.network=traefik"
|
||||
- "traefik.port=3100"
|
||||
# Middlewares
|
||||
- "traefik.http.middlewares.fedeo-backend-redirect-web-secure.redirectscheme.scheme=https"
|
||||
- "traefik.http.middlewares.fedeo-backend-strip.stripprefix.prefixes=/backend"
|
||||
# Web Entrypoint
|
||||
- "traefik.http.routers.fedeo-backend.middlewares=fedeo-backend-redirect-web-secure"
|
||||
- "traefik.http.routers.fedeo-backend.rule=Host(`app.fedeo.de`) && PathPrefix(`/backend`)"
|
||||
- "traefik.http.routers.fedeo-backend.entrypoints=web"
|
||||
# Web Secure Entrypoint
|
||||
- "traefik.http.routers.fedeo-backend-secure.rule=Host(`app.fedeo.de`) && PathPrefix(`/backend`)"
|
||||
- "traefik.http.routers.fedeo-backend-secure.entrypoints=web-secured" #
|
||||
- "traefik.http.routers.fedeo-backend-secure.tls.certresolver=mytlschallenge"
|
||||
- "traefik.http.routers.fedeo-backend-secure.middlewares=fedeo-backend-strip"
|
||||
# db:
|
||||
# image: postgres
|
||||
# restart: always
|
||||
# shm_size: 128mb
|
||||
# environment:
|
||||
# POSTGRES_PASSWORD: abc
|
||||
# POSTGRES_USER: sandelcom
|
||||
# POSTGRES_DB: sensorfy
|
||||
# volumes:
|
||||
# - ./pg-data:/var/lib/postgresql/data
|
||||
# ports:
|
||||
# - "5432:5432"
|
||||
traefik:
|
||||
image: traefik:v2.2
|
||||
image: traefik:v2.11
|
||||
restart: unless-stopped
|
||||
container_name: traefik
|
||||
command:
|
||||
- "--api.insecure=false"
|
||||
- "--api.dashboard=true"
|
||||
- "--api.dashboard=false"
|
||||
- "--api.debug=false"
|
||||
- "--providers.docker=true"
|
||||
- "--providers.docker.exposedbydefault=false"
|
||||
@@ -43,19 +76,18 @@ services:
|
||||
- "--accesslog.bufferingsize=5000"
|
||||
- "--accesslog.fields.defaultMode=keep"
|
||||
- "--accesslog.fields.headers.defaultMode=keep"
|
||||
- "--certificatesresolvers.mytlschallenge.acme.tlschallenge=true" # <== Enable TLS-ALPN-01 to generate and renew ACME certs
|
||||
- "--certificatesresolvers.mytlschallenge.acme.email=info@sandelcom.de" # <== Setting email for certs
|
||||
- "--certificatesresolvers.mytlschallenge.acme.storage=/letsencrypt/acme.json" # <== Defining acme file to store cert information
|
||||
- "--certificatesresolvers.mytlschallenge.acme.tlschallenge=true" #
|
||||
- "--certificatesresolvers.mytlschallenge.acme.email=moin@fedeo.de"
|
||||
- "--certificatesresolvers.mytlschallenge.acme.storage=/letsencrypt/acme.json"
|
||||
ports:
|
||||
- 80:80
|
||||
- 8080:8080
|
||||
- 443:443
|
||||
volumes:
|
||||
- "./traefik/letsencrypt:/letsencrypt" # <== Volume for certs (TLS)
|
||||
- "/var/run/docker.sock:/var/run/docker.sock:ro"
|
||||
- "./traefik/logs:/logs"
|
||||
labels:
|
||||
#### Labels define the behavior and rules of the traefik proxy for this container ####
|
||||
- "traefik.enable=true" # <== Enable traefik on itself to view dashboard and assign subdomain to view it
|
||||
- "traefik.http.routers.api.rule=Host(`srv1.drinkingteam.de`)" # <== Setting the domain for the dashboard
|
||||
- "traefik.http.routers.api.service=api@internal" # <== Enabling the api to be a service to access
|
||||
networks:
|
||||
- traefik
|
||||
networks:
|
||||
traefik:
|
||||
external: false
|
||||
182
frontend/components/BankAccountAssignInput.vue
Normal file
182
frontend/components/BankAccountAssignInput.vue
Normal file
@@ -0,0 +1,182 @@
|
||||
<script setup>
// BankAccountAssignInput: v-model driven multi-assign input for bank accounts.
// Users can assign existing accounts by IBAN lookup, or create a new account
// inline (with optional IBAN -> BIC/bank-name resolution) and assign it.
const props = defineProps({
  // v-model value: list of assigned bank-account ids.
  modelValue: {
    type: Array,
    default: () => []
  },
  // Disables searching, assigning, removing and creating when true.
  disabled: {
    type: Boolean,
    default: false
  }
})

const emit = defineEmits(["update:modelValue"])
const toast = useToast()

// All selectable bank-account entities (loaded once during setup).
const accounts = ref([])
// IBAN typed into the "assign existing account" search field.
const ibanSearch = ref("")
// Visibility flag for the inline "create new account" modal.
const showCreate = ref(false)
// True while the IBAN-resolution request is in flight (drives button spinner).
const resolvingIban = ref(false)

// Form model for the inline "create new bank account" modal.
const createPayload = ref({
  iban: "",
  bic: "",
  bankName: "",
  description: ""
})
|
||||
|
||||
// Canonical IBAN form: drop every whitespace run and upper-case the rest.
// Falsy inputs (null, undefined, "", 0) normalize to the empty string.
const normalizeIban = (value) => {
  const raw = value ? String(value) : ""
  return raw.split(/\s+/).join("").toUpperCase()
}
|
||||
|
||||
// Fetch all bank-account entities and cache them locally for IBAN lookups.
const loadAccounts = async () => {
  const fetched = await useEntities("entitybankaccounts").select()
  accounts.value = fetched
}
|
||||
|
||||
// Defensive view on the v-model value: always yields an array of ids,
// even when the parent passed something non-array.
const assignedIds = computed(() => {
  if (Array.isArray(props.modelValue)) {
    return props.modelValue
  }
  return []
})
|
||||
|
||||
// Full account records for the currently assigned ids.
const assignedAccounts = computed(() => {
  const idSet = new Set(assignedIds.value)
  return accounts.value.filter((account) => idSet.has(account.id))
})
|
||||
|
||||
// Push a new id list to the parent (v-model update).
const updateAssigned = (nextIds) => {
  emit("update:modelValue", nextIds)
}
|
||||
|
||||
// Assign an existing account whose IBAN matches the search field.
// Toasts when nothing matches or the account is already assigned;
// clears the search field on success.
const assignByIban = async () => {
  const needle = normalizeIban(ibanSearch.value)
  if (!needle) return

  const account = accounts.value.find((candidate) => normalizeIban(candidate.iban) === needle)
  if (!account) {
    toast.add({ title: "Kein Bankkonto mit dieser IBAN gefunden.", color: "rose" })
    return
  }

  if (assignedIds.value.includes(account.id)) {
    toast.add({ title: "Dieses Bankkonto ist bereits zugewiesen.", color: "amber" })
    return
  }

  updateAssigned(assignedIds.value.concat(account.id))
  ibanSearch.value = ""
}
|
||||
|
||||
// Unassign a single account id.
const removeAssigned = (id) => {
  const remaining = assignedIds.value.filter((assignedId) => assignedId !== id)
  updateAssigned(remaining)
}
|
||||
|
||||
// Validate the create form, persist a new bank account, then assign it.
// On success the account list is reloaded, the new id appended to the
// v-model value, the form reset and the modal closed.
const createAndAssign = async () => {
  if (!createPayload.value.iban || !createPayload.value.bic || !createPayload.value.bankName) {
    toast.add({ title: "IBAN, BIC und Bankinstitut sind Pflichtfelder.", color: "rose" })
    return
  }

  const created = await useEntities("entitybankaccounts").create(createPayload.value, true)
  // Fix: the original dereferenced created.id unconditionally and reset the
  // form even when the backend returned nothing — guard the failure path.
  if (!created || !created.id) {
    toast.add({ title: "Bankkonto konnte nicht erstellt werden.", color: "rose" })
    return
  }

  await loadAccounts()
  updateAssigned([...assignedIds.value, created.id])
  createPayload.value = { iban: "", bic: "", bankName: "", description: "" }
  showCreate.value = false
}
|
||||
|
||||
// Try to auto-fill BIC and bank name from the entered IBAN via the backend
// resolver. Failures are deliberately swallowed: the user can always fill
// in the fields by hand. The loading flag is cleared in all cases.
const resolveCreatePayloadFromIban = async () => {
  const iban = normalizeIban(createPayload.value.iban)
  if (!iban) return

  resolvingIban.value = true
  try {
    const resolved = await useFunctions().useBankingResolveIban(iban)
    if (!resolved) return

    createPayload.value.iban = resolved.iban || iban
    if (resolved.bic) createPayload.value.bic = resolved.bic
    if (resolved.bankName) createPayload.value.bankName = resolved.bankName
  } catch (e) {
    // best effort only — manual entry remains possible
  } finally {
    resolvingIban.value = false
  }
}
|
||||
|
||||
// Initial fetch of the selectable accounts (fire-and-forget during setup).
loadAccounts()
</script>
|
||||
|
||||
<template>
  <!-- Assigned accounts as removable badges, plus IBAN search / create entry -->
  <div class="flex flex-col gap-2 w-full">
    <div class="flex flex-wrap gap-2" v-if="assignedAccounts.length > 0">
      <UBadge
        v-for="account in assignedAccounts"
        :key="account.id"
        color="primary"
        variant="subtle"
      >
        {{ account.displayLabel || account.iban }}
        <!-- Remove button is hidden while the whole input is disabled -->
        <UButton
          v-if="!disabled"
          variant="ghost"
          color="gray"
          size="2xs"
          icon="i-heroicons-x-mark"
          class="ml-1"
          @click="removeAssigned(account.id)"
        />
      </UBadge>
    </div>

    <!-- Assign an existing account by IBAN, or open the create modal -->
    <InputGroup class="w-full">
      <UInput
        v-model="ibanSearch"
        class="flex-auto"
        placeholder="IBAN eingeben und zuweisen"
        :disabled="disabled"
        @keydown.enter.prevent="assignByIban"
      />
      <UButton :disabled="disabled" @click="assignByIban">
        Zuweisen
      </UButton>
      <UButton :disabled="disabled" color="gray" variant="outline" @click="showCreate = true">
        Neu
      </UButton>
    </InputGroup>
  </div>

  <!-- Modal: create a new bank account and assign it immediately -->
  <UModal v-model="showCreate">
    <UCard>
      <template #header>Neue Bankverbindung erstellen</template>
      <div class="space-y-3">
        <UFormGroup label="IBAN">
          <InputGroup>
            <!-- Blur/Enter also triggers IBAN resolution for convenience -->
            <UInput
              v-model="createPayload.iban"
              @blur="resolveCreatePayloadFromIban"
              @keydown.enter.prevent="resolveCreatePayloadFromIban"
            />
            <UButton
              color="gray"
              variant="outline"
              :loading="resolvingIban"
              @click="resolveCreatePayloadFromIban"
            >
              Ermitteln
            </UButton>
          </InputGroup>
        </UFormGroup>
        <UFormGroup label="BIC">
          <UInput v-model="createPayload.bic" />
        </UFormGroup>
        <UFormGroup label="Bankinstitut">
          <UInput v-model="createPayload.bankName" />
        </UFormGroup>
        <UFormGroup label="Beschreibung (optional)">
          <UInput v-model="createPayload.description" />
        </UFormGroup>
      </div>
      <template #footer>
        <div class="flex justify-end gap-2">
          <UButton color="gray" variant="outline" @click="showCreate = false">Abbrechen</UButton>
          <UButton @click="createAndAssign">Erstellen und zuweisen</UButton>
        </div>
      </template>
    </UCard>
  </UModal>
</template>
|
||||
@@ -151,8 +151,6 @@ const fileNames = computed(() => {
|
||||
:disabled="uploadInProgress || selectedFiles.length === 0"
|
||||
>Hochladen</UButton>
|
||||
</template>
|
||||
|
||||
{{props.fileData}}
|
||||
</UCard>
|
||||
</div>
|
||||
</UModal>
|
||||
|
||||
@@ -69,20 +69,31 @@ generateOldItemData()
|
||||
const saveAllowed = computed(() => {
|
||||
if (!item.value) return false
|
||||
|
||||
const isFilledValue = (value) => {
|
||||
if (Array.isArray(value)) return value.length > 0
|
||||
if (typeof value === "string") return value.trim().length > 0
|
||||
return value !== null && value !== undefined && value !== false
|
||||
}
|
||||
|
||||
let allowedCount = 0
|
||||
// Nur Input-Felder berücksichtigen
|
||||
const relevantColumns = dataType.templateColumns.filter(i => i.inputType)
|
||||
const relevantColumns = dataType.templateColumns.filter(i => {
|
||||
if (!i.inputType) return false
|
||||
if (i.showFunction && !i.showFunction(item.value)) return false
|
||||
if (i.disabledFunction && i.disabledFunction(item.value)) return false
|
||||
return true
|
||||
})
|
||||
|
||||
relevantColumns.forEach(datapoint => {
|
||||
if(datapoint.required) {
|
||||
if(datapoint.key.includes(".")){
|
||||
const [parentKey, childKey] = datapoint.key.split('.')
|
||||
// Prüfung: Existiert Parent UND ist Child "truthy" (nicht null/undefined/empty)
|
||||
if(item.value[parentKey] && item.value[parentKey][childKey]) {
|
||||
if(item.value[parentKey] && isFilledValue(item.value[parentKey][childKey])) {
|
||||
allowedCount += 1
|
||||
}
|
||||
} else {
|
||||
if(item.value[datapoint.key]) {
|
||||
if(isFilledValue(item.value[datapoint.key])) {
|
||||
allowedCount += 1
|
||||
}
|
||||
}
|
||||
@@ -427,6 +438,11 @@ const updateItem = async () => {
|
||||
/>
|
||||
</template>
|
||||
</UPopover>
|
||||
<BankAccountAssignInput
|
||||
v-else-if="datapoint.inputType === 'bankaccountassign'"
|
||||
v-model="item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]]"
|
||||
:disabled="datapoint.disabledFunction ? datapoint.disabledFunction(item) : false"
|
||||
/>
|
||||
<Tiptap
|
||||
v-else-if="datapoint.inputType === 'editor'"
|
||||
@updateContent="(i) => contentChanged(i,datapoint)"
|
||||
@@ -527,6 +543,11 @@ const updateItem = async () => {
|
||||
/>
|
||||
</template>
|
||||
</UPopover>
|
||||
<BankAccountAssignInput
|
||||
v-else-if="datapoint.inputType === 'bankaccountassign'"
|
||||
v-model="item[datapoint.key]"
|
||||
:disabled="datapoint.disabledFunction ? datapoint.disabledFunction(item) : false"
|
||||
/>
|
||||
<Tiptap
|
||||
v-else-if="datapoint.inputType === 'editor'"
|
||||
@updateContent="(i) => contentChanged(i,datapoint)"
|
||||
@@ -652,6 +673,11 @@ const updateItem = async () => {
|
||||
/>
|
||||
</template>
|
||||
</UPopover>
|
||||
<BankAccountAssignInput
|
||||
v-else-if="datapoint.inputType === 'bankaccountassign'"
|
||||
v-model="item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]]"
|
||||
:disabled="datapoint.disabledFunction ? datapoint.disabledFunction(item) : false"
|
||||
/>
|
||||
<Tiptap
|
||||
v-else-if="datapoint.inputType === 'editor'"
|
||||
@updateContent="(i) => contentChanged(i,datapoint)"
|
||||
@@ -752,6 +778,11 @@ const updateItem = async () => {
|
||||
/>
|
||||
</template>
|
||||
</UPopover>
|
||||
<BankAccountAssignInput
|
||||
v-else-if="datapoint.inputType === 'bankaccountassign'"
|
||||
v-model="item[datapoint.key]"
|
||||
:disabled="datapoint.disabledFunction ? datapoint.disabledFunction(item) : false"
|
||||
/>
|
||||
<Tiptap
|
||||
v-else-if="datapoint.inputType === 'editor'"
|
||||
@updateContent="(i) => contentChanged(i,datapoint)"
|
||||
|
||||
@@ -69,6 +69,12 @@ const profileStore = useProfileStore()
|
||||
const tempStore = useTempStore()
|
||||
|
||||
const dataType = dataStore.dataTypes[type]
|
||||
const canCreate = computed(() => {
|
||||
if (type === "members") {
|
||||
return has("members-create") || has("customers-create")
|
||||
}
|
||||
return has(`${type}-create`)
|
||||
})
|
||||
|
||||
const selectedColumns = ref(tempStore.columns[type] ? tempStore.columns[type] : dataType.templateColumns.filter(i => !i.disabledInTable))
|
||||
const columns = computed(() => dataType.templateColumns.filter((column) => !column.disabledInTable && selectedColumns.value.find(i => i.key === column.key)))
|
||||
@@ -138,7 +144,7 @@ const filteredRows = computed(() => {
|
||||
/>
|
||||
|
||||
<UButton
|
||||
v-if="platform !== 'mobile' && has(`${type}-create`)/*&& useRole().checkRight(`${type}-create`)*/"
|
||||
v-if="platform !== 'mobile' && canCreate/*&& useRole().checkRight(`${type}-create`)*/"
|
||||
@click="router.push(`/standardEntity/${type}/create`)"
|
||||
class="ml-3"
|
||||
>+ {{dataType.labelSingle}}</UButton>
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
import EntityShowSubTimes from "~/components/EntityShowSubTimes.vue";
|
||||
import WikiEntityWidget from "~/components/wiki/WikiEntityWidget.vue";
|
||||
import LabelPrintModal from "~/components/LabelPrintModal.vue";
|
||||
|
||||
const props = defineProps({
|
||||
type: {
|
||||
@@ -69,7 +70,7 @@ const getAvailableQueryStringData = (keys) => {
|
||||
|
||||
if(props.item.customer) {
|
||||
addParam("customer", props.item.customer.id)
|
||||
} else if(type === "customers") {
|
||||
} else if(type === "customers" || type === "members") {
|
||||
addParam("customer", props.item.id)
|
||||
}
|
||||
|
||||
@@ -136,6 +137,18 @@ const changePinned = async () => {
|
||||
|
||||
}
|
||||
|
||||
const openCustomerInventoryLabelPrint = () => {
|
||||
modal.open(LabelPrintModal, {
|
||||
context: {
|
||||
id: props.item.id,
|
||||
customerInventoryId: props.item.customerInventoryId,
|
||||
name: props.item.name,
|
||||
customerName: props.item.customer?.name,
|
||||
serialNumber: props.item.serialNumber
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
</script>
|
||||
|
||||
<template>
|
||||
@@ -193,6 +206,14 @@ const changePinned = async () => {
|
||||
color="yellow"
|
||||
@click="changePinned"
|
||||
></UButton>
|
||||
<UButton
|
||||
v-if="type === 'customerinventoryitems'"
|
||||
icon="i-heroicons-printer"
|
||||
variant="outline"
|
||||
@click="openCustomerInventoryLabelPrint"
|
||||
>
|
||||
Label
|
||||
</UButton>
|
||||
<UButton
|
||||
@click="router.push(`/standardEntity/${type}/edit/${item.id}`)"
|
||||
>
|
||||
@@ -214,6 +235,14 @@ const changePinned = async () => {
|
||||
>{{item ? `${dataType.labelSingle}${props.item[dataType.templateColumns.find(i => i.title).key] ? ': ' + props.item[dataType.templateColumns.find(i => i.title).key] : ''}`: '' }}</h1>
|
||||
</template>
|
||||
<template #right>
|
||||
<UButton
|
||||
v-if="type === 'customerinventoryitems'"
|
||||
icon="i-heroicons-printer"
|
||||
variant="outline"
|
||||
@click="openCustomerInventoryLabelPrint"
|
||||
>
|
||||
Label
|
||||
</UButton>
|
||||
<UButton
|
||||
@click="router.push(`/standardEntity/${type}/edit/${item.id}`)"
|
||||
>
|
||||
|
||||
@@ -31,6 +31,7 @@ const dataStore = useDataStore()
|
||||
const tempStore = useTempStore()
|
||||
|
||||
const router = useRouter()
|
||||
const createRoute = computed(() => type.value === "tasks" ? `/tasks/create?${props.queryStringData}` : `/standardEntity/${type.value}/create?${props.queryStringData}`)
|
||||
|
||||
let dataType = null
|
||||
|
||||
@@ -80,7 +81,7 @@ setup()
|
||||
</template>
|
||||
<Toolbar>
|
||||
<UButton
|
||||
@click="router.push(`/standardEntity/${type}/create?${props.queryStringData}`)"
|
||||
@click="router.push(createRoute)"
|
||||
>
|
||||
+ {{dataType.labelSingle}}
|
||||
</UButton>
|
||||
|
||||
@@ -60,7 +60,6 @@ const router = useRouter()
|
||||
const createddocuments = ref([])
|
||||
|
||||
const setup = async () => {
|
||||
//createddocuments.value = (await useSupabaseSelect("createddocuments")).filter(i => !i.archived)
|
||||
createddocuments.value = (await useEntities("createddocuments").select()).filter(i => !i.archived)
|
||||
}
|
||||
setup()
|
||||
|
||||
@@ -21,13 +21,20 @@ const props = defineProps({
|
||||
const dataStore = useDataStore()
|
||||
|
||||
const dataType = dataStore.dataTypes[props.topLevelType]
|
||||
const historyType = computed(() => {
|
||||
const holder = dataType?.historyItemHolder
|
||||
if (!holder) return props.topLevelType
|
||||
|
||||
const normalized = String(holder).toLowerCase()
|
||||
return normalized.endsWith("s") ? normalized : `${normalized}s`
|
||||
})
|
||||
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<UCard class="mt-5 scroll" :style="props.platform !== 'mobile' ? 'height: 80vh' : ''">
|
||||
<HistoryDisplay
|
||||
:type="props.topLevelType"
|
||||
:type="historyType"
|
||||
v-if="props.item.id"
|
||||
:element-id="props.item.id"
|
||||
render-headline
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
<script setup>
|
||||
import dayjs from "dayjs";
|
||||
|
||||
const props = defineProps({
|
||||
queryStringData: {
|
||||
@@ -28,6 +29,33 @@ const dataType = dataStore.dataTypes[props.topLevelType]
|
||||
// const selectedColumns = ref(tempStore.columns[props.topLevelType] ? tempStore.columns[props.topLevelType] : dataType.templateColumns.filter(i => !i.disabledInTable))
|
||||
// const columns = computed(() => dataType.templateColumns.filter((column) => !column.disabledInTable && selectedColumns.value.find(i => i.key === column.key)))
|
||||
|
||||
const getDatapointValue = (datapoint) => {
|
||||
if (datapoint.key.includes(".")) {
|
||||
const [parentKey, childKey] = datapoint.key.split(".")
|
||||
return props.item?.[parentKey]?.[childKey]
|
||||
}
|
||||
return props.item?.[datapoint.key]
|
||||
}
|
||||
|
||||
const renderDatapointValue = (datapoint) => {
|
||||
const value = getDatapointValue(datapoint)
|
||||
if (value === null || value === undefined || value === "") return "-"
|
||||
|
||||
if (datapoint.inputType === "date") {
|
||||
return dayjs(value).isValid() ? dayjs(value).format("DD.MM.YYYY") : String(value)
|
||||
}
|
||||
|
||||
if (datapoint.inputType === "datetime") {
|
||||
return dayjs(value).isValid() ? dayjs(value).format("DD.MM.YYYY HH:mm") : String(value)
|
||||
}
|
||||
|
||||
if (datapoint.inputType === "bool" || typeof value === "boolean") {
|
||||
return value ? "Ja" : "Nein"
|
||||
}
|
||||
|
||||
return `${value}${datapoint.unit ? datapoint.unit : ""}`
|
||||
}
|
||||
|
||||
</script>
|
||||
|
||||
<template>
|
||||
@@ -53,8 +81,7 @@ const dataType = dataStore.dataTypes[props.topLevelType]
|
||||
<td>
|
||||
<component v-if="datapoint.component" :is="datapoint.component" :row="props.item" :in-show="true"></component>
|
||||
<div v-else>
|
||||
<span v-if="datapoint.key.includes('.')">{{props.item[datapoint.key.split('.')[0]][datapoint.key.split('.')[1]]}}{{datapoint.unit}}</span>
|
||||
<span v-else>{{props.item[datapoint.key]}} {{datapoint.unit}}</span>
|
||||
<span>{{ renderDatapointValue(datapoint) }}</span>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
<script setup>
|
||||
import dayjs from "dayjs";
|
||||
const supabase = useSupabaseClient()
|
||||
const route = useRoute()
|
||||
const router = useRouter()
|
||||
const profileStore = useProfileStore()
|
||||
|
||||
|
||||
const props = defineProps({
|
||||
@@ -28,8 +25,6 @@ const statementallocations = ref([])
|
||||
const incominginvoices = ref([])
|
||||
|
||||
const setup = async () => {
|
||||
//statementallocations.value = (await supabase.from("statementallocations").select("*, bs_id(*)").eq("account", route.params.id).eq("tenant",profileStore.currentTenant).order("created_at",{ascending: true})).data
|
||||
//incominginvoices.value = (await useSupabaseSelect("incominginvoices", "*, vendor(*)")).filter(i => i.accounts.find(x => x.account == route.params.id))
|
||||
}
|
||||
|
||||
setup()
|
||||
|
||||
@@ -24,7 +24,7 @@ const emit = defineEmits(["updateNeeded"]);
|
||||
|
||||
const router = useRouter()
|
||||
const profileStore = useProfileStore()
|
||||
const supabase = useSupabaseClient()
|
||||
const auth = useAuthStore()
|
||||
|
||||
const renderedPhases = computed(() => {
|
||||
if(props.topLevelType === "projects" && props.item.phases) {
|
||||
@@ -58,6 +58,7 @@ const renderedPhases = computed(() => {
|
||||
})
|
||||
|
||||
const changeActivePhase = async (key) => {
|
||||
console.log(props.item)
|
||||
let item = await useEntities("projects").selectSingle(props.item.id,'*')
|
||||
|
||||
let phaseLabel = ""
|
||||
@@ -68,26 +69,17 @@ const changeActivePhase = async (key) => {
|
||||
if(p.key === key) {
|
||||
p.active = true
|
||||
p.activated_at = dayjs().format()
|
||||
p.activated_by = profileStore.activeProfile.id
|
||||
p.activated_by = auth.user.id
|
||||
phaseLabel = p.label
|
||||
}
|
||||
|
||||
return p
|
||||
})
|
||||
|
||||
console.log(item)
|
||||
|
||||
const res = await useEntities("projects").update(item.id, {phases:item.phases,active_phase: item.phases.find(i => i.active).label})
|
||||
|
||||
//const {error:updateError} = await supabase.from("projects").update({phases: item.phases}).eq("id",item.id)
|
||||
|
||||
|
||||
|
||||
/*const {error} = await supabase.from("historyitems").insert({
|
||||
createdBy: profileStore.activeProfile.id,
|
||||
tenant: profileStore.currentTenant,
|
||||
text: `Aktive Phase zu "${phaseLabel}" gewechselt`,
|
||||
project: item.id
|
||||
})*/
|
||||
|
||||
emit("updateNeeded")
|
||||
|
||||
}
|
||||
@@ -152,7 +144,7 @@ const changeActivePhase = async (key) => {
|
||||
|
||||
<div>
|
||||
<p v-if="item.activated_at" class="dark:text-white text-black">Aktiviert am: {{dayjs(item.activated_at).format("DD.MM.YY HH:mm")}} Uhr</p>
|
||||
<p v-if="item.activated_by" class="dark:text-white text-black">Aktiviert durch: {{profileStore.getProfileById(item.activated_by).fullName}}</p>
|
||||
<p v-if="item.activated_by" class="dark:text-white text-black">Aktiviert durch: {{item.activated_by}}</p>
|
||||
<p v-if="item.description" class="dark:text-white text-black">Beschreibung: {{item.description}}</p>
|
||||
</div>
|
||||
</UCard>
|
||||
|
||||
@@ -1,10 +1,5 @@
|
||||
<script setup>
|
||||
import dayjs from "dayjs";
|
||||
const supabase = useSupabaseClient()
|
||||
const route = useRoute()
|
||||
const router = useRouter()
|
||||
const profileStore = useProfileStore()
|
||||
|
||||
|
||||
const props = defineProps({
|
||||
queryStringData: {
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
<script setup>
|
||||
const getShowRoute = (entityType, id) => entityType === "tasks" ? `/tasks/show/${id}` : `/standardEntity/${entityType}/show/${id}`
|
||||
|
||||
defineShortcuts({
|
||||
/*'/': () => {
|
||||
//console.log(searchinput)
|
||||
@@ -8,7 +10,7 @@
|
||||
'Enter': {
|
||||
usingInput: true,
|
||||
handler: () => {
|
||||
router.push(`/standardEntity/${props.type}/show/${props.rows.value[selectedItem.value].id}`)
|
||||
router.push(getShowRoute(props.type, props.rows[selectedItem.value].id))
|
||||
}
|
||||
},
|
||||
'arrowdown': () => {
|
||||
@@ -57,7 +59,7 @@
|
||||
|
||||
const selectedItem = ref(0)
|
||||
const sort = ref({
|
||||
column: dataType.supabaseSortColumn || "date",
|
||||
column: dataType.sortColumn || "date",
|
||||
direction: 'desc'
|
||||
})
|
||||
|
||||
@@ -75,7 +77,7 @@
|
||||
:columns="props.columns"
|
||||
class="w-full"
|
||||
:ui="{ divide: 'divide-gray-200 dark:divide-gray-800' }"
|
||||
@select="(i) => router.push(`/standardEntity/${type}/show/${i.id}`) "
|
||||
@select="(i) => router.push(getShowRoute(type, i.id))"
|
||||
:empty-state="{ icon: 'i-heroicons-circle-stack-20-solid', label: `Keine ${dataType.label} anzuzeigen` }"
|
||||
>
|
||||
<!-- <template
|
||||
|
||||
@@ -46,6 +46,7 @@
|
||||
const dataStore = useDataStore()
|
||||
|
||||
const router = useRouter()
|
||||
const getShowRoute = (entityType, id) => entityType === "tasks" ? `/tasks/show/${id}` : `/standardEntity/${entityType}/show/${id}`
|
||||
|
||||
const dataType = dataStore.dataTypes[props.type]
|
||||
|
||||
@@ -59,7 +60,7 @@
|
||||
<a
|
||||
v-for="item in props.rows"
|
||||
class="my-1"
|
||||
@click="router.push(`/standardEntity/${type}/show/${item.id}`)"
|
||||
@click="router.push(getShowRoute(type, item.id))"
|
||||
>
|
||||
<p class="truncate text-left text-primary text-xl">{{dataType.templateColumns.find(i => i.title).key ? item[dataType.templateColumns.find(i => i.title).key] : null}}</p>
|
||||
<p class="text-sm">
|
||||
|
||||
@@ -1,14 +1,17 @@
|
||||
<script setup>
|
||||
|
||||
const supabase = useSupabaseClient()
|
||||
const profileStore = useProfileStore()
|
||||
|
||||
const globalMessages = ref([])
|
||||
|
||||
const setup = async () => {
|
||||
let {data} = await supabase.from("globalmessages").select("*, profiles(id)")
|
||||
let data = []
|
||||
try {
|
||||
data = await useNuxtApp().$api("/api/resource/globalmessages")
|
||||
} catch (e) {
|
||||
data = []
|
||||
}
|
||||
|
||||
data = data.filter((message) => message.profiles.length === 0)
|
||||
data = (data || []).filter((message) => !message.profiles || message.profiles.length === 0)
|
||||
|
||||
globalMessages.value = data
|
||||
|
||||
@@ -29,10 +32,17 @@ const showMessage = (message) => {
|
||||
showMessageModal.value = true
|
||||
}
|
||||
const markMessageAsRead = async () => {
|
||||
await supabase.from("globalmessagesseen").insert({
|
||||
try {
|
||||
await useNuxtApp().$api("/api/resource/globalmessagesseen", {
|
||||
method: "POST",
|
||||
body: {
|
||||
profile: profileStore.activeProfile.id,
|
||||
message: messageToShow.value.id,
|
||||
}
|
||||
})
|
||||
} catch (e) {
|
||||
// noop: endpoint optional in newer backend versions
|
||||
}
|
||||
showMessageModal.value = false
|
||||
setup()
|
||||
|
||||
|
||||
@@ -3,10 +3,7 @@ const { isHelpSlideoverOpen } = useDashboard()
|
||||
const { metaSymbol } = useShortcuts()
|
||||
|
||||
const shortcuts = ref(false)
|
||||
const dataStore = useDataStore()
|
||||
const profileStore = useProfileStore()
|
||||
const query = ref('')
|
||||
const supabase = useSupabaseClient()
|
||||
const toast = useToast()
|
||||
const router = useRouter()
|
||||
|
||||
|
||||
@@ -3,11 +3,13 @@ import dayjs from "dayjs"
|
||||
const props = defineProps({
|
||||
type: {
|
||||
type: String,
|
||||
required: true
|
||||
required: false,
|
||||
default: null
|
||||
},
|
||||
elementId: {
|
||||
type: String,
|
||||
required: true
|
||||
required: false,
|
||||
default: null
|
||||
},
|
||||
renderHeadline: {
|
||||
type: Boolean,
|
||||
@@ -25,13 +27,11 @@ const items = ref([])
|
||||
const platform = ref("default")
|
||||
|
||||
const setup = async () => {
|
||||
|
||||
|
||||
if(props.type && props.elementId){
|
||||
items.value = await useNuxtApp().$api(`/api/resource/${props.type}/${props.elementId}/history`)
|
||||
} /*else {
|
||||
|
||||
}*/
|
||||
} else {
|
||||
items.value = await useNuxtApp().$api(`/api/history`)
|
||||
}
|
||||
}
|
||||
|
||||
setup()
|
||||
@@ -43,6 +43,10 @@ const addHistoryItemData = ref({
|
||||
})
|
||||
|
||||
const addHistoryItem = async () => {
|
||||
if (!props.type || !props.elementId) {
|
||||
toast.add({ title: "Im zentralen Logbuch können keine direkten Einträge erstellt werden." })
|
||||
return
|
||||
}
|
||||
|
||||
const res = await useNuxtApp().$api(`/api/resource/${props.type}/${props.elementId}/history`, {
|
||||
method: "POST",
|
||||
|
||||
@@ -44,7 +44,9 @@ async function loadLabel() {
|
||||
labelData.value = await $api(`/api/print/label`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
context: props.context || null
|
||||
context: props.context || null,
|
||||
width: 584,
|
||||
height: 354
|
||||
})
|
||||
})
|
||||
} catch (err) {
|
||||
@@ -78,11 +80,17 @@ onMounted(() => {
|
||||
|
||||
})
|
||||
|
||||
watch(() => labelPrinter.connected, (connected) => {
|
||||
if (connected && !labelData.value) {
|
||||
loadLabel()
|
||||
}
|
||||
})
|
||||
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<UModal>
|
||||
<UCard>
|
||||
<UModal :ui="{ width: 'sm:max-w-5xl' }">
|
||||
<UCard class="w-[92vw] max-w-5xl">
|
||||
|
||||
<template #header>
|
||||
<div class="flex items-center justify-between">
|
||||
@@ -91,11 +99,11 @@ onMounted(() => {
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<div v-if="!loading && labelPrinter.connected">
|
||||
<div v-if="!loading && labelPrinter.connected" class="w-full">
|
||||
<img
|
||||
:src="`data:image/png;base64,${labelData.base64}`"
|
||||
alt="Label Preview"
|
||||
class="max-w-full max-h-64 object-contain"
|
||||
class="w-full max-h-[70vh] object-contain"
|
||||
/>
|
||||
</div>
|
||||
<div v-else-if="loading && !labelPrinter.connected">
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user