WIP: auth refactor; migrate database layer from Knex to Kysely

This commit is contained in:
Linus Miller 2025-12-19 08:24:41 +01:00
parent 8f6591b679
commit d6a5445ab5
25 changed files with 455 additions and 766 deletions

View File

@ -5,7 +5,7 @@ meta {
}
get {
url: http://localhost:4040/api/users
url: {{base_url}}/api/users
body: none
auth: none
}

View File

@ -0,0 +1,34 @@
meta {
name: /auth/register
type: http
seq: 3
}
post {
url: {{base_url}}/auth/register
body: json
auth: inherit
}
body:json {
{
"email": "linus.k.miller@gmail.com",
"password": "rasmus",
"inviteEmail": "linus.k.miller@gmail.com",
"inviteToken": "1502f035584e09870aab05611161a636f88fb08ccba745850a0430f2bb5b3d8c"
}
}
body:form-urlencoded {
email: linus.k.miller@gmail.com
password: rasmus
}
body:multipart-form {
linus.k.miller@gmail.com:
}
settings {
encodeUrl: true
timeout: 0
}

View File

@ -25,10 +25,7 @@ services:
- POSTGRES_USER=brf_books
- POSTGRES_PASSWORD=brf_books
volumes:
- ./docker/postgres/01-auth_schema.sql:/docker-entrypoint-initdb.d/01-auth_schema.sql
- ./docker/postgres/02-accounting_schema.sql:/docker-entrypoint-initdb.d/02-accounting_schema.sql
- ./docker/postgres/03-auth_data.sql:/docker-entrypoint-initdb.d/03-auth_data.sql
- ./docker/postgres/04-accounting_data.sql:/docker-entrypoint-initdb.d/04-accounting_data.sql
- ./docker/postgres:/docker-entrypoint-initdb.d
- postgres:/var/lib/postgresql/data
redis:

View File

@ -2,12 +2,15 @@
-- PostgreSQL database dump
--
-- Dumped from database version 16.0
-- Dumped by pg_dump version 16.0
\restrict kmfsZ1NUIbedynsFb23ZupLqit5AgAIEj3QsIeG1L5YkBtJbYtar24uoNvU1ZrF
-- Dumped from database version 18.1
-- Dumped by pg_dump version 18.1
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET transaction_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
@ -34,6 +37,25 @@ CREATE TABLE public.admission (
);
--
-- Name: admissions_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE public.admissions_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: admissions_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.admissions_id_seq OWNED BY public.admission.id;
--
-- Name: admissions_roles; Type: TABLE; Schema: public; Owner: -
--
@ -45,7 +67,7 @@ CREATE TABLE public.admissions_roles (
--
-- Name: "emailToken"; Type: TABLE; Schema: public; Owner: -
-- Name: emailToken; Type: TABLE; Schema: public; Owner: -
--
CREATE TABLE public."emailToken" (
@ -60,7 +82,7 @@ CREATE TABLE public."emailToken" (
--
-- Name: "emailToken_id_seq"; Type: SEQUENCE; Schema: public; Owner: -
-- Name: emailToken_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE public."emailToken_id_seq"
@ -72,52 +94,12 @@ CREATE SEQUENCE public."emailToken_id_seq"
--
-- Name: "emailToken_id_seq"; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
-- Name: emailToken_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public."emailToken_id_seq" OWNED BY public."emailToken".id;
--
-- Name: error; Type: TABLE; Schema: public; Owner: -
--
CREATE TABLE public.error (
id integer NOT NULL,
"statusCode" integer,
type text,
message text,
details json,
stack text,
method text,
path text,
headers json,
ip text,
"reqId" text,
"createdAt" timestamp with time zone
);
--
-- Name: error_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE public.error_id_seq
AS integer
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: error_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.error_id_seq OWNED BY public.error.id;
--
-- Name: invite; Type: TABLE; Schema: public; Owner: -
--
@ -165,7 +147,7 @@ CREATE TABLE public.invites_roles (
--
-- Name: "passwordToken"; Type: TABLE; Schema: public; Owner: -
-- Name: passwordToken; Type: TABLE; Schema: public; Owner: -
--
CREATE TABLE public."passwordToken" (
@ -179,7 +161,7 @@ CREATE TABLE public."passwordToken" (
--
-- Name: "passwordToken_id_seq"; Type: SEQUENCE; Schema: public; Owner: -
-- Name: passwordToken_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE public."passwordToken_id_seq"
@ -191,31 +173,12 @@ CREATE SEQUENCE public."passwordToken_id_seq"
--
-- Name: "passwordToken_id_seq"; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
-- Name: passwordToken_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public."passwordToken_id_seq" OWNED BY public."passwordToken".id;
--
-- Name: admissions_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE public.admissions_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: admissions_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.admissions_id_seq OWNED BY public.admission.id;
--
-- Name: role; Type: TABLE; Schema: public; Owner: -
--
@ -257,16 +220,16 @@ CREATE TABLE public."user" (
id integer NOT NULL,
email character varying(254) NOT NULL,
password character varying(256) NOT NULL,
"createdAt" timestamp with time zone DEFAULT now() NOT NULL,
"lastLoginAt" timestamp with time zone,
"loginAttempts" integer DEFAULT 0,
"lastLoginAttemptAt" timestamp with time zone,
"lastActivityAt" timestamp with time zone,
"emailVerifiedAt" timestamp with time zone,
"bannedAt" timestamp with time zone,
"bannedById" integer,
"blockedAt" timestamp with time zone,
"blockedById" integer,
"emailVerifiedAt" timestamp with time zone
"createdAt" timestamp with time zone DEFAULT now() NOT NULL
);
@ -307,19 +270,12 @@ ALTER TABLE ONLY public.admission ALTER COLUMN id SET DEFAULT nextval('public.ad
--
-- Name: "emailToken" id; Type: DEFAULT; Schema: public; Owner: -
-- Name: emailToken id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public."emailToken" ALTER COLUMN id SET DEFAULT nextval('public."emailToken_id_seq"'::regclass);
--
-- Name: error id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.error ALTER COLUMN id SET DEFAULT nextval('public.error_id_seq'::regclass);
--
-- Name: invite id; Type: DEFAULT; Schema: public; Owner: -
--
@ -328,7 +284,7 @@ ALTER TABLE ONLY public.invite ALTER COLUMN id SET DEFAULT nextval('public.invit
--
-- Name: "passwordToken" id; Type: DEFAULT; Schema: public; Owner: -
-- Name: passwordToken id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public."passwordToken" ALTER COLUMN id SET DEFAULT nextval('public."passwordToken_id_seq"'::regclass);
@ -356,6 +312,14 @@ ALTER TABLE ONLY public.admission
ADD CONSTRAINT admission_pkey PRIMARY KEY (id);
--
-- Name: admission admission_regex_key; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.admission
ADD CONSTRAINT admission_regex_key UNIQUE (regex);
--
-- Name: admissions_roles admissions_roles_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
@ -365,7 +329,7 @@ ALTER TABLE ONLY public.admissions_roles
--
-- Name: "emailToken" email_token_pkey; Type: CONSTRAINT; Schema: public; Owner: -
-- Name: emailToken email_token_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public."emailToken"
@ -373,21 +337,13 @@ ALTER TABLE ONLY public."emailToken"
--
-- Name: "emailToken" email_token_unique; Type: CONSTRAINT; Schema: public; Owner: -
-- Name: emailToken email_token_unique; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public."emailToken"
ADD CONSTRAINT email_token_unique UNIQUE ("userId", email);
--
-- Name: error error_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.error
ADD CONSTRAINT error_pkey PRIMARY KEY (id);
--
-- Name: invite invite_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
@ -405,7 +361,7 @@ ALTER TABLE ONLY public.invites_roles
--
-- Name: "passwordToken" "passwordToken_pkey"; Type: CONSTRAINT; Schema: public; Owner: -
-- Name: passwordToken passwordToken_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public."passwordToken"
@ -453,63 +409,63 @@ ALTER TABLE ONLY public.users_roles
--
-- Name: "fki_admission_createdById_fkey"; Type: INDEX; Schema: public; Owner: -
-- Name: fki_admission_createdById_fkey; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX "fki_admission_createdById_fkey" ON public.admission USING btree ("createdById");
--
-- Name: "fki_admission_modifiedById_fkey"; Type: INDEX; Schema: public; Owner: -
-- Name: fki_admission_modifiedById_fkey; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX "fki_admission_modifiedById_fkey" ON public.admission USING btree ("modifiedById");
--
-- Name: "fki_invite_modifiedById_fkey"; Type: INDEX; Schema: public; Owner: -
-- Name: fki_invite_modifiedById_fkey; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX "fki_invite_modifiedById_fkey" ON public.invite USING btree ("modifiedById");
--
-- Name: "fki_role_createdById_fkey"; Type: INDEX; Schema: public; Owner: -
-- Name: fki_role_createdById_fkey; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX "fki_role_createdById_fkey" ON public.role USING btree ("createdById");
--
-- Name: "fki_role_modifiedById_fkey"; Type: INDEX; Schema: public; Owner: -
-- Name: fki_role_modifiedById_fkey; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX "fki_role_modifiedById_fkey" ON public.role USING btree ("modifiedById");
--
-- Name: "fki_user_bannedById_fkey"; Type: INDEX; Schema: public; Owner: -
-- Name: fki_user_bannedById_fkey; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX "fki_user_bannedById_fkey" ON public."user" USING btree ("bannedById");
--
-- Name: "fki_user_blockedById_fkey"; Type: INDEX; Schema: public; Owner: -
-- Name: fki_user_blockedById_fkey; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX "fki_user_blockedById_fkey" ON public."user" USING btree ("blockedById");
--
-- Name: "fki_users_roles_roleId_fkey"; Type: INDEX; Schema: public; Owner: -
-- Name: fki_users_roles_roleId_fkey; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX "fki_users_roles_roleId_fkey" ON public.users_roles USING btree ("roleId");
--
-- Name: admission "admission_createdById_fkey"; Type: FK CONSTRAINT; Schema: public; Owner: -
-- Name: admission admission_createdById_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.admission
@ -517,7 +473,7 @@ ALTER TABLE ONLY public.admission
--
-- Name: admission "admission_modifiedById_fkey"; Type: FK CONSTRAINT; Schema: public; Owner: -
-- Name: admission admission_modifiedById_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.admission
@ -525,7 +481,7 @@ ALTER TABLE ONLY public.admission
--
-- Name: admissions_roles "admissions_roles_admissionId_fkey"; Type: FK CONSTRAINT; Schema: public; Owner: -
-- Name: admissions_roles admissions_roles_admissionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.admissions_roles
@ -533,7 +489,7 @@ ALTER TABLE ONLY public.admissions_roles
--
-- Name: admissions_roles "admissions_roles_roleId_fkey"; Type: FK CONSTRAINT; Schema: public; Owner: -
-- Name: admissions_roles admissions_roles_roleId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.admissions_roles
@ -541,7 +497,7 @@ ALTER TABLE ONLY public.admissions_roles
--
-- Name: "emailToken" "emailToken_userId_fkey"; Type: FK CONSTRAINT; Schema: public; Owner: -
-- Name: emailToken emailToken_userId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public."emailToken"
@ -549,7 +505,7 @@ ALTER TABLE ONLY public."emailToken"
--
-- Name: invite "invite_consumedById_fkey"; Type: FK CONSTRAINT; Schema: public; Owner: -
-- Name: invite invite_consumedById_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.invite
@ -557,7 +513,7 @@ ALTER TABLE ONLY public.invite
--
-- Name: invite "invite_createdById_fkey"; Type: FK CONSTRAINT; Schema: public; Owner: -
-- Name: invite invite_createdById_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.invite
@ -565,7 +521,7 @@ ALTER TABLE ONLY public.invite
--
-- Name: invite "invite_modifiedById_fkey"; Type: FK CONSTRAINT; Schema: public; Owner: -
-- Name: invite invite_modifiedById_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.invite
@ -573,7 +529,7 @@ ALTER TABLE ONLY public.invite
--
-- Name: invites_roles "invites_roles_inviteId_fkey"; Type: FK CONSTRAINT; Schema: public; Owner: -
-- Name: invites_roles invites_roles_inviteId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.invites_roles
@ -605,7 +561,7 @@ ALTER TABLE ONLY public.role
--
-- Name: role "role_modifiedById_fkey"; Type: FK CONSTRAINT; Schema: public; Owner: -
-- Name: role role_modifiedById_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.role
@ -613,7 +569,7 @@ ALTER TABLE ONLY public.role
--
-- Name: user "user_bannedById_fkey"; Type: FK CONSTRAINT; Schema: public; Owner: -
-- Name: user user_bannedById_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public."user"
@ -621,7 +577,7 @@ ALTER TABLE ONLY public."user"
--
-- Name: user "user_blockedById_fkey"; Type: FK CONSTRAINT; Schema: public; Owner: -
-- Name: user user_blockedById_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public."user"
@ -629,7 +585,7 @@ ALTER TABLE ONLY public."user"
--
-- Name: users_roles "users_roles_roleId_fkey"; Type: FK CONSTRAINT; Schema: public; Owner: -
-- Name: users_roles users_roles_roleId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.users_roles
@ -637,7 +593,7 @@ ALTER TABLE ONLY public.users_roles
--
-- Name: users_roles "users_roles_userId_fkey"; Type: FK CONSTRAINT; Schema: public; Owner: -
-- Name: users_roles users_roles_userId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.users_roles
@ -648,3 +604,5 @@ ALTER TABLE ONLY public.users_roles
-- PostgreSQL database dump complete
--
\unrestrict kmfsZ1NUIbedynsFb23ZupLqit5AgAIEj3QsIeG1L5YkBtJbYtar24uoNvU1ZrF

View File

@ -0,0 +1,86 @@
--
-- PostgreSQL database dump
--
\restrict L31sa9yPB4GMmh0f4VYcX32P22LWelqHYoqB6dSuAh5ONY1bK2J71n300uCZbI9
-- Dumped from database version 18.1
-- Dumped by pg_dump version 18.1
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET transaction_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
SET default_tablespace = '';
SET default_table_access_method = heap;
--
-- Name: error; Type: TABLE; Schema: public; Owner: -
--
CREATE TABLE public.error (
id integer NOT NULL,
"statusCode" integer,
type text,
message text,
details json,
stack text,
method text,
path text,
headers json,
ip text,
"reqId" text,
"createdAt" timestamp with time zone
);
--
-- Name: error_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE public.error_id_seq
AS integer
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: error_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.error_id_seq OWNED BY public.error.id;
--
-- Name: error id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.error ALTER COLUMN id SET DEFAULT nextval('public.error_id_seq'::regclass);
--
-- Name: error error_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.error
ADD CONSTRAINT error_pkey PRIMARY KEY (id);
--
-- PostgreSQL database dump complete
--
\unrestrict L31sa9yPB4GMmh0f4VYcX32P22LWelqHYoqB6dSuAh5ONY1bK2J71n300uCZbI9

View File

@ -2,7 +2,7 @@
-- PostgreSQL database dump
--
\restrict FugYqvehfvYcZV6n0VXYfKK3pEfWehcjXHsTSddhC5Qcn0530oCENplg6a2CdZd
\restrict kNYhdwOhwE9I3bgAzdljyYgB5xyEpjhiaSCeYZfp84v3ey1GpvsdxX4U8Y8fQM3
-- Dumped from database version 18.1
-- Dumped by pg_dump version 18.1
@ -19,25 +19,6 @@ SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- Name: truncate_tables(character varying); Type: FUNCTION; Schema: public; Owner: -
--
CREATE FUNCTION public.truncate_tables(username character varying) RETURNS void
LANGUAGE plpgsql
AS $$
DECLARE
statements CURSOR FOR
SELECT tablename FROM pg_tables
WHERE tableowner = username AND schemaname = 'public';
BEGIN
FOR stmt IN statements LOOP
EXECUTE 'TRUNCATE TABLE ' || quote_ident(stmt.tablename) || ' CASCADE;';
END LOOP;
END;
$$;
SET default_tablespace = '';
SET default_table_access_method = heap;
@ -372,26 +353,6 @@ CREATE TABLE public.supplier (
);
--
-- Name: supplier_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE public.supplier_id_seq
AS integer
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: supplier_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.supplier_id_seq OWNED BY public.supplier.id;
--
-- Name: supplierType; Type: TABLE; Schema: public; Owner: -
--
@ -422,6 +383,26 @@ CREATE SEQUENCE public."supplierType_id_seq"
ALTER SEQUENCE public."supplierType_id_seq" OWNED BY public."supplierType".id;
--
-- Name: supplier_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE public.supplier_id_seq
AS integer
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: supplier_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.supplier_id_seq OWNED BY public.supplier.id;
--
-- Name: transaction; Type: TABLE; Schema: public; Owner: -
--
@ -682,6 +663,14 @@ ALTER TABLE ONLY public.object
ADD CONSTRAINT object_pkey PRIMARY KEY (id);
--
-- Name: supplierType supplierType_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public."supplierType"
ADD CONSTRAINT "supplierType_pkey" PRIMARY KEY (id);
--
-- Name: supplier supplier_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
@ -698,14 +687,6 @@ ALTER TABLE ONLY public.supplier
ADD CONSTRAINT "supplier_taxId_key" UNIQUE ("taxId");
--
-- Name: supplierType supplierType_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public."supplierType"
ADD CONSTRAINT "supplierType_pkey" PRIMARY KEY (id);
--
-- Name: transaction transaction_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
@ -811,7 +792,7 @@ ALTER TABLE ONLY public."transactionsToObjects"
--
-- Name: "transactionsToObjects" transactionsToObjects_transactionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
-- Name: transactionsToObjects transactionsToObjects_transactionId_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public."transactionsToObjects"
@ -822,5 +803,5 @@ ALTER TABLE ONLY public."transactionsToObjects"
-- PostgreSQL database dump complete
--
\unrestrict FugYqvehfvYcZV6n0VXYfKK3pEfWehcjXHsTSddhC5Qcn0530oCENplg6a2CdZd
\unrestrict kNYhdwOhwE9I3bgAzdljyYgB5xyEpjhiaSCeYZfp84v3ey1GpvsdxX4U8Y8fQM3

78
docker/postgres/dump.py Executable file
View File

@ -0,0 +1,78 @@
#!/usr/bin/python3
import os
import argparse
from subprocess import run
from datetime import datetime
auth_tables = [
'admission',
'admissions_roles',
'emailToken',
'invite',
'invites_roles',
'passwordToken',
'role',
'user',
'users_roles',
]
accounting_tables = [
'account',
'accountBalance',
'aliasesToSupplier',
'dimension',
'entry',
'file',
'filesToInvoice',
'financialYear',
'invoice',
'journal',
'object',
'supplier',
'supplierType',
'transaction',
'transactionsToObjects',
]
parser = argparse.ArgumentParser()
parser.add_argument('-D', '--dir', help='The local location', default='./docker/postgres')
parser.add_argument('-s', '--schema', help='Dump schema')
parser.add_argument('-d', '--data', help='Dump')
parser.add_argument('--auth', action='store_true')
parser.add_argument('--accounting', action='store_true')
parser.add_argument('tables', type=str, nargs='*', help='The tables to dump')
args = parser.parse_args()
command = ['docker-compose', 'exec', '-T', 'postgres', 'pg_dump', '-U', 'brf_books', '-d', 'brf_books', '-O']
for enabled, tables in [
(args.tables, args.tables),
(args.auth, auth_tables),
(args.accounting, accounting_tables),
]:
if enabled:
for table in tables:
command.extend(['-t', f'"{table}"'])
if args.schema:
print('dumping schema...')
with open(os.path.join(args.dir, args.schema), 'w') as f:
run(
[ *command, '-s'],
stdout=f,
stderr=None
)
print(' done!')
if args.data:
print('dumping data...')
with open(os.path.join(args.dir, args.data), 'w') as f:
run(
[ *command, '-a'],
stdout=f,
stderr=None
)
print(' done!')

View File

@ -1,29 +0,0 @@
#!/bin/sh
script_dir=$(dirname $(readlink -f "$0"))
SCHEMA=true
DATA=true
while getopts "as" opt; do
case $opt in
"a")
SCHEMA=false
;;
"s")
DATA=false
;;
esac
done
if [ $SCHEMA = "true" ]; then
echo -n "dumping schema..."
docker-compose exec -T postgres pg_dump -U brf_books -d brf_books -s -O > $script_dir/01-schema.sql
echo " done!"
fi
if [ $DATA = "true" ]; then
echo -n "dumping data..."
docker-compose exec -T postgres pg_dump -U brf_books -d brf_books -a -O > $script_dir/02-data.sql
echo " done!"
fi

View File

@ -1,29 +1,13 @@
import _ from 'lodash'
import env from '../env.ts'
const domain = 'brf.lkm.nu'
type SiteConfig = {
title: string
name: string
port: string | null
hostname: string | null
domain: string
protocol: string
localHost: string
host: string | null
url: string
emails: Record<string, string>
}
const defaults: SiteConfig = {
export default {
title: 'BRF',
name: 'brf',
port: null,
hostname: null,
domain,
protocol: 'http',
localHost: `http://localhost:${env.PORT}`,
port: env.PORT,
hostname: env.HOSTNAME,
domain: env.DOMAIN,
protocol: env.PROTOCOL,
localHost: `http://localhost:${env.FASTIFY_PORT}`,
get host() {
return this.port ? `${this.hostname}:${this.port}` : this.hostname
},
@ -35,22 +19,3 @@ const defaults: SiteConfig = {
info: 'contact@bitmill.io',
},
}
export default _.merge(
defaults,
{
development: {
hostname: 'localhost',
port: env.PORT,
},
production: {
hostname: domain,
protocol: 'https',
emails: {
robot: `no-reply@${domain}`,
info: `info@${domain}`,
},
},
}[env.NODE_ENV as 'development' | 'production'],
) as SiteConfig

View File

@ -1,18 +1,15 @@
import type { FastifyPluginCallback, FastifyRequest } from 'fastify'
import fp from 'fastify-plugin'
import UserQueries from '../services/users/queries.ts'
import { jsonArrayFrom } from 'kysely/helpers/postgres'
import changePassword from './auth/routes/change_password.ts'
import resetPassword from './auth/routes/reset_password.ts'
import login from './auth/routes/login.ts'
import logout from './auth/routes/logout.ts'
import register from './auth/routes/register.ts'
import verifyEmail from './auth/routes/verify_email.ts'
import knex from '../lib/knex.ts'
import emitter from '../lib/emitter.ts'
import type { User } from '../services/users/types.ts'
const userQueries = UserQueries({ knex, emitter })
const userPromiseSymbol = Symbol('user')
const serialize = (user: User) => Promise.resolve(user.id)
@ -20,6 +17,8 @@ const serialize = (user: User) => Promise.resolve(user.id)
const auth: FastifyPluginCallback<{ prefix?: string }> = (fastify, options, done) => {
const prefix = options.prefix || ''
const { db } = fastify
fastify.decorate('auth', (request, reply, done) => {
if (!request.session.userId) return reply.status(401).send()
@ -49,10 +48,20 @@ const auth: FastifyPluginCallback<{ prefix?: string }> = (fastify, options, done
fastify.decorateRequest('getUser', function getUser(this: FastifyRequest & { [userPromiseSymbol]?: Promise<ANY> }) {
if (!this.session || !this.session.userId) {
return Promise.resolve(null)
} else if (!this[userPromiseSymbol]) {
this[userPromiseSymbol] = userQueries.findById(this.session.userId).catch((err) => {
this.log.error(err)
})
}
if (!this[userPromiseSymbol]) {
this[userPromiseSymbol] = db
.selectFrom('user')
.selectAll()
.select((eb) =>
jsonArrayFrom(
eb.selectFrom('role as r').innerJoin('users_roles as ur', 'ur.roleId', 'r.id').select(['id', 'name']),
).as('roles'),
)
.where('id', '=', this.session.userId)
.executeTakeFirstOrThrow()
.then(({ password: _, ...rest }) => rest)
}
return this[userPromiseSymbol]
@ -79,10 +88,6 @@ const auth: FastifyPluginCallback<{ prefix?: string }> = (fastify, options, done
fastify.get(prefix + '/logout', logout)
fastify.post(prefix + '/register', register)
fastify.addHook('onClose', () => {
knex.destroy()
})
done()
}

View File

@ -2,21 +2,12 @@ import _ from 'lodash'
import * as z from 'zod'
import type { RouteHandler } from 'fastify'
import config from '../../../config.ts'
import emitter from '../../../lib/emitter.ts'
import knex from '../../../lib/knex.ts'
import AdmissionQueries from '../../../services/admissions/queries.ts'
import InviteQueries from '../../../services/invites/queries.ts'
import UserQueries from '../../../services/users/queries.ts'
import verifyEmailTemplate from '../../../templates/emails/verify_email.ts'
import sendMail from '../../../lib/send_mail.ts'
import StatusError from '../../../lib/status_error.ts'
import { generateToken, hashPassword } from '../helpers.ts'
import type { Invite } from '../../../services/invites/types.ts'
import type { Role } from '../../../services/roles/types.ts'
const admissionQueries = AdmissionQueries({ knex, emitter })
const inviteQueries = InviteQueries({ knex, emitter })
const userQueries = UserQueries({ knex, emitter })
import type { Invite } from '../../../../shared/types.db.ts'
import { sql, type Selectable } from 'kysely'
const { errors, timeouts } = config.auth
@ -44,23 +35,50 @@ const ResponseSchema = {
'4XX': { $ref: 'status-error' },
}
type InviteMinimal = Pick<Selectable<Invite>, 'id' | 'email' | 'createdAt' | 'consumedAt'> & { roleIds?: number[] }
const register: RouteHandler<{ Body: z.infer<typeof BodySchema> }> = async function (request, reply) {
const { db } = request.server
try {
// TODO validate and ensure same as confirm
const email = request.body.email.trim().toLowerCase()
let invite: Invite | null = null
let invite: InviteMinimal | null | undefined = null
if (request.body.inviteEmail && request.body.inviteToken) {
invite = await inviteQueries.findOne({ email: request.body.inviteEmail, token: request.body.inviteToken })
const latestInvite = await inviteQueries.findOne({
email: request.body.inviteEmail,
sort: [{ column: 'createdAt', order: 'desc' }],
})
let latestInvite: { id: number } | null | undefined = null
;[invite, latestInvite] = await Promise.all([
db
.selectFrom('invite as i')
.innerJoin('invites_roles as ir', 'ir.inviteId', 'i.id')
.select([
'id',
'email',
'createdAt',
'consumedAt',
(eb) => eb.fn.agg<number[]>('array_agg', ['ir.roleId']).as('roleIds'),
])
.where((eb) =>
eb.and({
email: request.body.inviteEmail,
token: request.body.inviteToken,
}),
)
.groupBy('id')
.executeTakeFirst(),
db
.selectFrom('invite')
.select('id')
.where('email', '=', request.body.inviteEmail)
.orderBy('createdAt', 'desc')
.executeTakeFirst(),
])
if (!invite) {
throw new StatusError(...errors.tokenNotFound)
} else if (invite.id !== latestInvite.id) {
} else if (invite.id !== latestInvite!.id) {
throw new StatusError(...errors.tokenNotLatest)
} else if (Date.now() > new Date(invite.createdAt).getTime() + timeouts.invite) {
throw new StatusError(...errors.tokenExpired)
@ -69,73 +87,84 @@ const register: RouteHandler<{ Body: z.infer<typeof BodySchema> }> = async funct
}
}
const admissions = await admissionQueries.findMatches(email)
const admissions = await db
.selectFrom('admission as a')
.innerJoin('admissions_roles as ar', 'ar.admissionId', 'a.id')
.select(['regex', (eb) => eb.fn.agg('array_agg', ['ar.roleId']).as('roleIds')])
.groupBy('regex')
.execute()
.then((admissions) => admissions.filter((admission) => new RegExp(admission.regex).test(email)))
const roles = Array.from(
new Set(
[
invite?.roles ? invite.roles.map((role) => role.id) : [],
...admissions.map((admission) => (admission.roles ? admission.roles.map((role: Role) => role.id) : [])),
].flat(),
),
const roleIds: number[] = Array.from(
new Set([invite?.roleIds || [], ...admissions.map((admission) => admission.roleIds || [])].flat() as number[]),
)
if (!roles.length) {
if (!roleIds.length) {
throw new StatusError(...errors.notAuthorized)
}
const user = await knex.transaction(async (trx) => {
const user = await userQueries.create(
{
email,
password: await hashPassword(request.body.password),
roles,
emailVerifiedAt: invite && invite.email === email ? (knex.fn.now() as unknown as string) : null,
},
trx,
)
const trx = await db.startTransaction().execute()
if (invite) {
await inviteQueries.consume(invite.id, user.id, trx)
}
const user = await trx
.insertInto('user')
.values({
email,
password: await hashPassword(request.body.password),
emailVerifiedAt: invite?.email === email ? sql`now()` : null,
})
.returningAll()
.executeTakeFirstOrThrow()
if (!user.emailVerifiedAt) {
const token = generateToken()
await trx
.insertInto('users_roles')
.values(roleIds.map((roleId) => ({ roleId, userId: user.id })))
.execute()
await trx('email_token').insert({
user_id: user.id,
if (invite) {
trx
.updateTable('invite')
.set({ consumedAt: sql`now()`, consumedById: user.id })
.execute()
}
if (!user.emailVerifiedAt) {
const token = generateToken()
await trx
.insertInto('emailToken')
.values({
userId: user.id,
email: user.email,
token,
})
.execute()
const link = `${new URL('/auth/verify-email', config.site.url)}?email=${user.email}&token=${token}`
const link = `${new URL('/auth/verify-email', config.site.url)}?email=${user.email}&token=${token}`
await sendMail({
to: user.email,
subject: `Verify ${config.site.title} account`,
html: await verifyEmailTemplate({ link }).text(),
})
}
await sendMail({
to: user.email,
subject: `Verify ${config.site.title} account`,
html: await verifyEmailTemplate({ link }).text(),
})
}
return userQueries.findById(user.id)
})
await trx.commit().execute()
const roles = await db.selectFrom('role').select(['id', 'name']).where('id', 'in', roleIds).execute()
reply.type('application/json')
return reply.status(201).send(_.omit(user, 'password'))
return reply.status(201).send(Object.assign(_.omit(user, 'password'), { roles }))
} catch (err) {
this.log.error(err)
// @ts-ignore
if (err.code == 23505) {
err = new StatusError(...errors.duplicateEmail)
}
if (err instanceof StatusError) {
const statusError = new StatusError(...errors.duplicateEmail)
return reply
.status(err.status || 500)
.status(statusError.status || 500)
.type('application/json')
.send(err.toJSON())
.send(statusError.toJSON())
} else {
throw err
}

View File

@ -15,6 +15,7 @@ import results from './api/results.ts'
import roles from './api/roles.ts'
import suppliers from './api/suppliers.ts'
import transactions from './api/transactions.ts'
import users from './api/users.ts'
const apiRoutes: FastifyPluginCallbackTypebox = (fastify, _, done) => {
fastify.register(accounts, { prefix: '/accounts' })
@ -31,6 +32,7 @@ const apiRoutes: FastifyPluginCallbackTypebox = (fastify, _, done) => {
fastify.register(roles, { prefix: '/roles' })
fastify.register(suppliers, { prefix: '/suppliers' })
fastify.register(transactions, { prefix: '/transactions' })
fastify.register(users, { prefix: '/users' })
done()
}

View File

@ -8,7 +8,7 @@ import { AdmissionSchema, RoleSchema } from '../../schemas/db.ts'
const admissionsPlugin: FastifyPluginCallbackZod = (fastify, _options, done) => {
const { db } = fastify
fastify.addHook('onRequest', fastify.auth)
// fastify.addHook('onRequest', fastify.auth)
fastify.route({
url: '/',
@ -90,7 +90,7 @@ const admissionsPlugin: FastifyPluginCallbackZod = (fastify, _options, done) =>
method: 'DELETE',
schema: {
params: z.object({
id: z.number(),
id: z.coerce.number(),
}),
response: {
204: {},

View File

@ -1,44 +1,10 @@
import * as z from 'zod'
import type { FastifyPluginCallbackZod } from 'fastify-type-provider-zod'
import knex from '../../lib/knex.ts'
import emitter from '../../lib/emitter.ts'
import Queries from '../../services/users/queries.ts'
const usersPlugin: FastifyPluginCallbackZod<{ addParentSchema: (schema: ANY) => void }> = (
fastify,
{ addParentSchema },
done,
) => {
const queries = Queries({ emitter, knex })
import { UserSchema } from '../../schemas/db.ts'
addParentSchema({
$id: 'user',
type: 'object',
properties: {
id: { type: 'integer' },
email: { type: 'string' },
password: { type: 'string' },
createdAt: { type: 'string' },
lastLoginAt: { type: 'string' },
lastLoginAttemptAt: { type: 'string' },
loginAttempts: { type: 'integer' },
bannedAt: { type: 'string' },
bannedById: { type: 'integer' },
blockedAt: { type: 'string' },
blockedById: { type: 'integer' },
emailVerifiedAt: { type: 'string' },
roles: { type: ['array', 'null'], items: { $ref: 'role' } },
},
})
addParentSchema({
$id: 'user-short',
type: 'object',
properties: {
id: { type: 'integer' },
email: { type: 'string' },
createdAt: { type: 'string' },
},
})
const usersPlugin: FastifyPluginCallbackZod = (fastify, _options, done) => {
const { db } = fastify
fastify.addHook('onRequest', fastify.auth)
@ -47,11 +13,27 @@ const usersPlugin: FastifyPluginCallbackZod<{ addParentSchema: (schema: ANY) =>
method: 'GET',
schema: {
response: {
200: { type: 'array', items: { $ref: 'user' } },
200: z.array(UserSchema.omit({ password: true })),
},
},
handler(request) {
return queries.find(request.query)
handler() {
return db
.selectFrom('user')
.select([
'id',
'email',
'lastLoginAt',
'loginAttempts',
'lastLoginAttemptAt',
'lastActivityAt',
'bannedAt',
'bannedById',
'blockedAt',
'blockedById',
'emailVerifiedAt',
'createdAt',
])
.execute()
},
})

View File

@ -71,3 +71,19 @@ export const SupplierTypeSchema = z.object({
id: z.number().int().optional(),
name: z.string(),
})
// Full `user` row schema mirroring the auth DB column set.
// NOTE: includes `password` — strip it (e.g. `.omit({ password: true })`)
// before sending rows to clients.
export const UserSchema = z.object({
  id: z.number().int().optional(),
  email: z.string(),
  password: z.string(),
  createdAt: z.date().optional(),
  // `.nullish()` ≡ `.nullable().optional()` — accepts T | null | undefined.
  lastLoginAt: z.date().nullish(),
  loginAttempts: z.number().int().nullish(),
  lastLoginAttemptAt: z.date().nullish(),
  lastActivityAt: z.date().nullish(),
  bannedAt: z.date().nullish(),
  bannedById: z.number().int().nullish(),
  blockedAt: z.date().nullish(),
  blockedById: z.number().int().nullish(),
  emailVerifiedAt: z.date().nullish(),
})

View File

@ -1,95 +0,0 @@
import type EventEmitter from 'node:events'
import type { Knex } from 'knex'
import _ from 'lodash'
import RestQueriesFactory from '../../lib/knex_rest_queries.ts'
import type { NewAdmission } from './types.ts'
// Column list returned by admission queries (camelCase; assumes the knex
// instance maps these to snake_case DB columns — TODO confirm).
export const columns = ['id', 'regex', 'createdAt', 'createdById', 'modifiedAt', 'modifiedById']
// Correlated subselects appended to each admission row:
// - roles: json_agg of {id, name} roles linked through admissions_roles, ordered by name
// - createdBy: row_to_json of the {id, email} user behind admission.created_by_id
export const Selects = (knex: Knex) => ({
  roles: knex
    .select(knex.raw('json_agg(roles)'))
    .from(
      knex
        .select('id', 'name')
        .from('role')
        .innerJoin('admissions_roles', 'role.id', 'admissions_roles.role_id')
        .where('admissions_roles.admission_id', knex.ref('admission.id'))
        .orderBy('name')
        .as('roles'),
    ),
  createdBy: knex
    .select(knex.raw('row_to_json("user")'))
    .from(knex.select('id', 'email').from('user').where('id', knex.ref('admission.created_by_id')).as('user')),
})
/**
 * Admission query service: generic REST queries for the `admission` table
 * plus role linking and regex matching against candidate emails.
 */
export default ({ knex, emitter }: { emitter: EventEmitter; knex: Knex }) => {
  const selects = Selects(knex)
  const queries = RestQueriesFactory({
    knex,
    emitter,
    table: 'admission',
    columns,
    selects,
  })
  return {
    ...queries,
    // Insert the admission row and its admissions_roles links atomically.
    create(json: NewAdmission, client = knex) {
      return client.transaction(async (tx) => {
        const created = await queries.create(_.omit(json, 'roles'), tx)
        const links = json.roles.map((roleId) => ({
          admission_id: created.id,
          role_id: roleId,
        }))
        await tx.table('admissions_roles').insert(links)
        return queries.findById(created.id, tx)
      })
    },
    // Update the row, then reconcile role links: drop stale ones, insert missing.
    update(id: number, json: NewAdmission, client = knex) {
      return client.transaction(async (tx) => {
        await queries.update(id, _.omit(json, ['roles']), tx)
        // TODO decide how to handle this, ie should an empty array delete all?
        if (json.roles?.length) {
          await tx.table('admissions_roles').delete().where('admission_id', id).whereNotIn('role_id', json.roles)
          await tx.raw(
            `INSERT INTO admissions_roles(admission_id, role_id)
            SELECT :admissionId, role_ids FROM unnest(:roleIds::int[]) AS role_ids WHERE NOT EXISTS
            (SELECT 1 FROM admissions_roles WHERE admission_id = :admissionId AND role_id = role_ids)`,
            { admissionId: id, roleIds: json.roles },
          )
        }
        return queries.findById(id, tx)
      })
    },
    // Return every admission whose stored regex matches the given email.
    async findMatches(email: string, client = knex) {
      let admissions = await client('admission').select([...columns, selects])
      if (admissions) {
        admissions = admissions.filter((row) => new RegExp(row.regex).test(email))
        // if (!admissions.length) admissions = undefined
      }
      return admissions
    },
  }
}

View File

@ -1,13 +0,0 @@
// Persisted admission row: a regex rule matched against registration emails
// (see `findMatches` in the admissions query service).
export type Admission = {
  id: number
  regex: string
  createdAt: string
  // NOTE(review): typed string here, but every other *ById field in these
  // services is number — likely should be number; verify before relying on it.
  createdById: string
  modifiedAt: string | null
  modifiedById: number | null
}

// Create/update payload; `roles` holds role ids to link via admissions_roles.
export type NewAdmission = {
  regex: string
  roles: number[]
}

View File

@ -1,92 +0,0 @@
import type EventEmitter from 'node:events'
import crypto from 'node:crypto'
import type { Knex } from 'knex'
import _ from 'lodash'
import type { NewInvite } from './types.ts'
import RestQueriesFactory from '../../lib/knex_rest_queries.ts'
// Column list returned by invite queries.
export const columns = [
  'id',
  'email',
  'token',
  'createdAt',
  'createdById',
  'modifiedAt',
  'modifiedById',
  'consumedAt',
  'consumedById',
]
// Correlated subselects appended to each invite row:
// - roles: json_agg of {id, name} roles linked through invites_roles
// - createdBy / consumedBy: row_to_json of the {id, email} user rows
export const Selects = (knex: Knex) => ({
  roles: knex
    .select(knex.raw('json_agg(roles)'))
    .from(
      knex
        .select('id', 'name')
        .from('role')
        .innerJoin('invites_roles', 'role.id', 'invites_roles.role_id')
        .where('invites_roles.invite_id', knex.ref('invite.id'))
        .as('roles'),
    ),
  createdBy: knex
    .select(knex.raw('row_to_json(users)'))
    .from(knex.select('id', 'email').from('user').where('id', knex.ref('invite.created_by_id')).as('users')),
  consumedBy: knex
    .select(knex.raw('row_to_json(users)'))
    .from(knex.select('id', 'email').from('user').where('id', knex.ref('invite.consumed_by_id')).as('users')),
})
/**
 * Generate a cryptographically random hex token of exactly `length` characters.
 *
 * randomBytes yields two hex chars per byte, so round the byte count up and
 * trim to `length` — the previous `length / 2` silently produced length-1
 * characters for odd lengths (e.g. generateToken(7) returned 6 chars).
 */
function generateToken(length = 64) {
  return crypto
    .randomBytes(Math.ceil(length / 2))
    .toString('hex')
    .slice(0, length)
}
/**
 * Invite query service: generic REST queries for the `invite` table plus
 * token generation on create and consumption bookkeeping.
 */
export default ({ knex, emitter }: { knex: Knex; emitter: EventEmitter }) => {
  const queries = RestQueriesFactory({
    emitter,
    knex,
    table: 'invite',
    columns,
    selects: Selects(knex),
  })
  // Stamp an invite as consumed by the given user. Resolves true on success,
  // throws if the id matched no row.
  function consume(id: number, consumedById: number, client = knex) {
    return client('invite')
      .update({ consumed_at: knex.fn.now(), consumed_by_id: consumedById })
      .where('id', id)
      .then((result: ANY) => {
        if (result === 0) throw new Error('No invite was consumed')
        return !!result
      })
  }
  // Create an invite with a fresh token and link its roles. Runs inside the
  // caller's transaction when one is passed, otherwise opens (and owns) its own.
  async function create(json: NewInvite, client: Knex | Knex.Transaction = knex) {
    const token = generateToken()
    const ownsTrx = !client.isTransaction
    const trx = (ownsTrx ? await client.transaction() : client) as Knex.Transaction
    try {
      const invite = await queries.create({ ..._.omit(json, 'roles'), token }, trx)
      const roles = json.roles.map((role: number) => ({
        invite_id: invite.id,
        role_id: role,
      }))
      await trx.table('invites_roles').insert(roles)
      // Read back BEFORE committing: knex rejects queries issued on a
      // transaction that has already completed (the previous code committed
      // first and then queried `trx`, which throws at runtime).
      const created = await queries.findById(invite.id, trx)
      if (ownsTrx) await trx.commit()
      return created
    } catch (err) {
      // Roll back only the transaction we opened ourselves; a caller-supplied
      // transaction is the caller's to settle.
      if (ownsTrx) await trx.rollback()
      throw err
    }
  }
  return {
    ...queries,
    consume,
    create,
  }
}

View File

@ -1,20 +0,0 @@
import type { Role } from '../roles/types.ts'
// Persisted invite row, including the roles granted upon registration.
export type Invite = {
  id: number
  email: string
  // One-time hex token the invitee presents when registering.
  token: string
  createdAt: string
  createdById: number
  modifiedAt: string | null
  modifiedById: number | null
  // Set once the invite is redeemed; null while still open.
  consumedAt: string | null
  consumedById: number | null
  roles: Role[]
}

// Creation payload; `roles` are role ids to attach via invites_roles.
export type NewInvite = {
  email: string
  createdById: number
  roles: number[]
}

View File

@ -1,40 +0,0 @@
import type { Knex } from 'knex'
import _ from 'lodash'
import RestQueriesFactory from '../../lib/knex_rest_queries.ts'
import emitter from '../../lib/emitter.ts'
// Column list returned by role queries.
export const columns = ['id', 'name', 'createdAt', 'createdById', 'modifiedById', 'modifiedAt']
// Correlated subselects: row_to_json of the {id, email, createdAt} user that
// created / last modified the role.
export const Selects = (knex: Knex) => ({
  createdBy: knex
    .select(knex.raw('row_to_json(users)'))
    .from(knex.select('id', 'email', 'createdAt').from('user').where('id', knex.ref('role.created_by_id')).as('users')),
  modifiedBy: knex
    .select(knex.raw('row_to_json(users)'))
    .from(
      knex.select('id', 'email', 'createdAt').from('user').where('id', knex.ref('role.modified_by_id')).as('users'),
    ),
})
/**
 * Role query service: generic REST queries for the `role` table plus bulk
 * lookups by id and by name.
 */
export default ({ knex }: { knex: Knex }) => {
  // Fetch roles by primary key. Widened to accept numeric ids as well —
  // Role.id is a number, so callers shouldn't be forced to stringify.
  function findByIds(ids: (string | number)[], client = knex) {
    // The old trailing `.then(_.identity)` was a no-op: knex builders are
    // already thenable and resolve to the row array.
    return client.table('role').select(columns).whereIn('id', ids)
  }
  // Fetch roles by unique name.
  function findByNames(names: string[], client = knex) {
    return client.table('role').select(columns).whereIn('name', names)
  }
  return {
    ...RestQueriesFactory({
      emitter,
      knex,
      table: 'role',
      columns,
      selects: Selects(knex),
    }),
    findByIds,
    findByNames,
  }
}

View File

@ -1,8 +0,0 @@
// Persisted role row.
export type Role = {
  id: number
  name: string
  createdAt: string
  createdById: number
  // NOTE(review): spelled `modifedAt` (missing an `i`) while the query layer
  // selects `modifiedAt` — almost certainly a typo, but renaming the field
  // would break existing references; flagging instead of fixing here.
  modifedAt: string | null
  // NOTE(review): typed string|null although createdById is number — verify.
  modifiedById: string | null
}

View File

@ -1,127 +0,0 @@
import EventEmitter from 'events'
import type { Knex } from 'knex'
import _ from 'lodash'
import type { User, NewUser } from './types.ts'
import RestQueriesFactory from '../../lib/knex_rest_queries.ts'
// Column list returned by user queries. Includes `password` — strip it
// before returning user rows over the API.
export const columns = [
  'bannedAt',
  'bannedById',
  'blockedAt',
  'blockedById',
  'createdAt',
  'email',
  'emailVerifiedAt',
  'id',
  'lastActivityAt',
  'lastLoginAt',
  'lastLoginAttemptAt',
  'loginAttempts',
  'password',
]
// Correlated subselect: json_agg of the {id, name} roles joined via users_roles.
// NOTE(review): users_roles uses camelCase join columns (roleId/userId) unlike
// the snake_case admissions_roles/invites_roles tables — confirm against schema.
export const Selects = (knex: Knex) => ({
  roles: knex
    .select(knex.raw('json_agg(roles)'))
    .from(
      knex
        .select('id', 'name')
        .from('role')
        .innerJoin('users_roles', 'role.id', 'users_roles.roleId')
        .where('users_roles.userId', knex.ref('user.id'))
        .as('roles'),
    ),
})
interface Options {
  emitter: EventEmitter
  knex: Knex
}

/**
 * User query service: generic REST queries for the `user` table plus role
 * linking, login/activity bookkeeping, and transactional create/update.
 */
export default ({ emitter, knex }: Options) => {
  const userQueries = RestQueriesFactory({
    knex,
    emitter,
    omit: ['replace'],
    table: 'user',
    columns,
    selects: Selects(knex),
  })

  // Create a user and link any given role ids, all in one transaction.
  // Runs inside the caller's transaction when one is passed, otherwise owns its own.
  async function create(json: NewUser, client: Knex | Knex.Transaction = knex) {
    const ownsTrx = !client.isTransaction
    const trx = (ownsTrx ? await client.transaction() : client) as Knex.Transaction
    try {
      const user = await userQueries.create(json, trx)
      if (json.roles) {
        await trx('users_roles').insert(json.roles.map((role) => ({ userId: user.id, roleId: role })))
      }
      // Read back BEFORE committing: knex rejects queries issued on a
      // transaction that has already completed (the old code committed first
      // and then queried `trx`).
      const result = await userQueries.findById(user.id, trx)
      if (ownsTrx) await trx.commit()
      return result
    } catch (err) {
      // Roll back only the transaction we opened ourselves.
      if (ownsTrx) await trx.rollback()
      throw err
    }
  }

  function onActivity(id: number, client = knex) {
    // NOTE(review): passes the literal string 'now()' through update(); verify
    // the query layer converts it to SQL now() rather than storing the string.
    return update(id, { lastActivityAt: 'now()' }, client)
  }

  // Successful login: stamp it and reset the failed-attempt counters.
  function onLogin(id: number, client = knex) {
    return update(id, { lastLoginAt: 'now()', loginAttempts: 0, lastLoginAttemptAt: null }, client)
  }

  // Failed login: stamp the attempt and bump login_attempts atomically in SQL.
  function onLoginAttempt(id: number, client = knex) {
    return client('user')
      .update({ last_login_attempt_at: knex.fn.now(), login_attempts: knex.raw('login_attempts + 1') })
      .where('id', id)
  }

  // PUT semantics are delegated to update (the factory's replace is omitted).
  function replace(id: number, json: Partial<User>, client = knex) {
    return update(id, json, client)
  }

  // Update user fields and reconcile users_roles links in one transaction.
  function update(id: number, json: Partial<User>, client = knex) {
    return client.transaction(async (trx) => {
      await userQueries.update(id, _.omit(json, ['roles']), trx)
      const roles = json.roles
      if (roles && roles.length) {
        const roleIds = roles.map((role) => role.id)
        // Delete links not in the new set, then insert the missing ones.
        // Fixed: the NOT EXISTS guard previously checked `user_id`, which does
        // not exist on users_roles (the column is "userId").
        await trx.raw(
          `WITH deleted_rows AS (
            DELETE FROM users_roles
            WHERE "userId" = :userId AND "roleId" NOT IN
            (SELECT "roleIds" FROM unnest(:roleIds::int[]) AS "roleIds")
            RETURNING "roleId"
          ), inserted_rows AS (
            INSERT INTO users_roles("userId", "roleId")
            SELECT :userId, "roleIds" FROM unnest(:roleIds::int[]) AS "roleIds" WHERE NOT EXISTS
            (SELECT 1 FROM users_roles WHERE "userId" = :userId AND "roleId" = "roleIds")
            RETURNING "roleId", "userId"
          )
          SELECT "roleId", "userId" FROM inserted_rows;`,
          // Named :placeholders require an object binding — the old positional
          // array `[id, roleIds]` never bound :userId/:roleIds.
          { userId: id, roleIds },
        )
      }
      // Read inside the transaction so the freshly written role links are
      // visible (the old call used the base connection before commit).
      return userQueries.findById(id, trx)
    })
  }

  return {
    ...userQueries,
    create,
    onActivity,
    onLogin,
    onLoginAttempt,
    replace,
    update,
  }
}

View File

@ -1,20 +0,0 @@
import type { Role } from '../roles/types.ts'
// Persisted user row as returned by the user query service.
export type User = {
  bannedAt: string | null
  bannedById: number | null
  blockedAt: string | null
  blockedById: number | null
  createdAt: string
  email: string
  emailVerifiedAt: string | null
  id: number
  lastActivityAt: string | null
  lastLoginAt: string | null
  lastLoginAttemptAt: string | null
  // Consecutive failed attempts: bumped by onLoginAttempt, reset by onLogin.
  loginAttempts: number
  // NOTE(review): presumably a password hash, never plaintext — confirm; in
  // any case strip this field before sending users to clients.
  password: string
  roles: Role[]
}

// Creation payload; `roles` are role ids linked via users_roles on create.
export type NewUser = Pick<User, 'email' | 'password' | 'emailVerifiedAt'> & { roles: number[] }