initial commit
commit 033262103f

5 .env.development Normal file
@@ -0,0 +1,5 @@
PGHOST=localhost
PGPORT=5432
PGDATABASE=brf_books
PGUSER=brf_books
PGPASSWORD=brf_books

134 .gitignore vendored Normal file
@@ -0,0 +1,134 @@
/dump

# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# Snowpack dependency directory (https://snowpack.dev/)
web_modules/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional stylelint cache
.stylelintcache

# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variable files
.env.secrets*
# .env
# .env.development.local
# .env.test.local
# .env.production.local
# .env.local

# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache

# Next.js build output
.next
out

# Nuxt.js build / generate output
.nuxt
dist

# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public

# vuepress build output
.vuepress/dist

# vuepress v2.x temp and cache directory
.temp
.cache

# Docusaurus cache and generated files
.docusaurus

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# TernJS port file
.tern-port

# Stores VSCode versions used for testing VSCode extensions
.vscode-test

# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

1 .husky/pre-commit Normal file
@@ -0,0 +1 @@
pnpm exec lint-staged

9 .lintstagedrc Normal file
@@ -0,0 +1,9 @@
{
  "*.{cjs,cts,js,jsx,mjs,mts,ts,tsx}": [
    "oxlint --fix",
    "prettier --write"
  ],
  "*.{css,scss,json}": [
    "prettier --write"
  ]
}

18 .prettierignore Normal file
@@ -0,0 +1,18 @@
dist
dump
*.*
!*.cjs
!*.css
!*.cts
!*.html
!*.js
!*.json
!*.jsx
!*.mjs
!*.mts
!*.scss
!*.ts
!*.tsx
!*.yaml
!*.yml
pnpm-lock.yaml

17 .prettierrc Normal file
@@ -0,0 +1,17 @@
{
  "jsxSingleQuote": true,
  "printWidth": 120,
  "semi": false,
  "singleQuote": true,
  "trailingComma": "all",
  "embeddedLanguageFormatting": "off",

  "overrides": [
    {
      "files": "*.scss",
      "options": {
        "trailingComma": "none"
      }
    }
  ]
}

45 Dockerfile Normal file
@@ -0,0 +1,45 @@
FROM node:23-alpine AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable pnpm

ARG UID=1000
ARG GID=1000

RUN apk add --no-cache git shadow

RUN groupmod -g $GID node
# this does not seem to be having full effect. eg /home/node gets 1337:1000 ownership despite group node having id 1337
RUN usermod -u $UID -g node node

USER node
RUN mkdir /home/node/startbit
RUN git config --global --add safe.directory /home/node/startbit
WORKDIR /home/node/startbit

COPY --chown=node pnpm-lock.yaml package.json ./
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
COPY --chown=node \
  .env.secrets \
  vite.config.js ./

# -------- development -------- #
FROM base AS development

ENV NODE_ENV=development

CMD pnpm run start:watch

# -------- production --------- #
FROM base AS production

COPY --chown=node client client
COPY --chown=node server server

ENV NODE_ENV=production

RUN pnpm run build

COPY --chown=node .git .git

CMD pnpm run start server/index.ts

13 bin/parse_file.ts Normal file
@@ -0,0 +1,13 @@
import fs from 'fs/promises'
import parseStream from '../server/lib/parse_stream.ts'
import knex from '../server/lib/knex.ts'

for await (const file of process.argv.slice(2)) {
  const fh = await fs.open(file)

  console.log(`- parsing file: ${file}`)

  await parseStream(fh.readableWebStream())
}

knex.destroy()

37 client/test/jsdom_polyfills.ts Normal file
@@ -0,0 +1,37 @@
// modified version of: https://github.com/modosc/global-jsdom/blob/d1dd3cdeeeddd4d0653496a728e0f81e18776654/packages/global-jsdom/esm/index.mjs

import JSDOM from 'jsdom'

const html = '<!doctype html><html><head><meta charset="utf-8"></head><body></body></html>'

const jsdom = new JSDOM.JSDOM(html, {
  // set a default url if we don't get one - otherwise things explode when we
  // copy localstorage keys
  url: 'http://localhost:3000',
  // enable pretendtobevisual by default since react needs
  // window.requestanimationframe, see https://github.com/jsdom/jsdom#pretending-to-be-a-visual-browser
  pretendToBeVisual: true,
})
const { window } = jsdom
const { document } = window

const KEYS = Object.getOwnPropertyNames(window).filter((k) => !k.startsWith('_') && !(k in globalThis))

KEYS.forEach((key) => (globalThis[key] = window[key]))

globalThis.document = document
globalThis.window = window

window.console = globalThis.console

ElementInternals.prototype.setFormValue = () => {}
ElementInternals.prototype.setValidity = () => {}

// since we can't modify the constructor we instantiate states in a getter
Object.defineProperty(ElementInternals.prototype, 'states', {
  get() {
    if (!this._states) this._states = new Set()

    return this._states
  },
})

4 client/test/register_tsx_hook.ts Normal file
@@ -0,0 +1,4 @@
// eslint-disable-next-line n/no-unsupported-features/node-builtins
import { register } from 'node:module'

register('./tsx_hook.ts', import.meta.url)

25 client/test/tsx_hook.ts Normal file
@@ -0,0 +1,25 @@
import esbuild from 'esbuild'
import { type LoadHook } from 'node:module'

export const load: LoadHook = async function (url, context, nextLoad) {
  if (url.endsWith('.tsx')) {
    const format = 'module'

    const { source } = await nextLoad(url, { ...context, format })

    const transformedSource = await esbuild.transform(source, {
      loader: 'tsx',
      jsx: 'transform',
      jsxFactory: 'h',
    })

    return {
      format,
      shortCircuit: true,
      source: transformedSource.code,
    }
  }

  return nextLoad(url)
}

17 client/test/utils.ts Normal file
@@ -0,0 +1,17 @@
export function render(str: string, container?: HTMLElement) {
  if (!container) {
    container ||= document.createElement('div')

    document.body.appendChild(container)
  }

  container.innerHTML = str

  return container
}

export function cleanup() {
  while (document.body.firstChild) {
    document.body.firstChild.remove()
  }
}

33 docker-compose.base.yml Normal file
@@ -0,0 +1,33 @@
services:
  caddy:
    image: caddy:2-alpine
    volumes:
      - ./docker/caddy/Caddyfile:/etc/caddy/Caddyfile
      - caddy_config:/config
      - caddy_data:/data
    ports:
      - 80:80
      - 443:443

  node:
    build:
      context: .
      args:
        DOCKER_BUILDKIT: 1
        UID: ${UID:-1337}
        GID: ${GID:-1337}

  postgres:
    image: postgres:18-alpine
    restart: always
    environment:
      - POSTGRES_DB=brf_books
      - POSTGRES_USER=brf_books
      - POSTGRES_PASSWORD=brf_books
    volumes:
      - ./docker/postgres/01-schema.sql:/docker-entrypoint-initdb.d/01-schema.sql
      - ./docker/postgres/02-data.sql:/docker-entrypoint-initdb.d/02-data.sql
      - postgres:/var/lib/postgresql/data

  redis:
    image: redis:7-alpine

10 docker-compose.local.yml Normal file
@@ -0,0 +1,10 @@
volumes:
  postgres:

services:
  postgres:
    extends:
      file: docker-compose.base.yml
      service: postgres
    ports:
      - ${PG_EXTERNAL_PORT-5432}:5432

8 docker-compose.yml Normal file
@@ -0,0 +1,8 @@
volumes:
  postgres:

services:
  postgres:
    extends:
      file: docker-compose.base.yml
      service: postgres

15 docker/caddy/Caddyfile Normal file
@@ -0,0 +1,15 @@
brf.local {
	tls internal
	root * /usr/share/caddy

	@ws {
		header Connection *Upgrade*
		header Upgrade websocket
	}

	reverse_proxy @ws node:24678
	reverse_proxy node:1337 {
		lb_try_duration 30s
		lb_try_interval 1s
	}
}

490 docker/postgres/01-schema.sql Normal file
@@ -0,0 +1,490 @@
--
-- PostgreSQL database dump
--

\restrict EQkcX1mt4Oqej8UZL2Z1qyvnzIAf3O4TYefngsqIN91Lr6JLNTawzD4OIRSJr2s

-- Dumped from database version 18.1
-- Dumped by pg_dump version 18.1

SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET transaction_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;

--
-- Name: truncate_tables(character varying); Type: FUNCTION; Schema: public; Owner: -
--

CREATE FUNCTION public.truncate_tables(username character varying) RETURNS void
    LANGUAGE plpgsql
    AS $$
DECLARE
    statements CURSOR FOR
        SELECT tablename FROM pg_tables
        WHERE tableowner = username AND schemaname = 'public';
BEGIN
    FOR stmt IN statements LOOP
        EXECUTE 'TRUNCATE TABLE ' || quote_ident(stmt.tablename) || ' CASCADE;';
    END LOOP;
END;
$$;


SET default_tablespace = '';

SET default_table_access_method = heap;

--
-- Name: account; Type: TABLE; Schema: public; Owner: -
--

CREATE TABLE public.account (
    id integer NOT NULL,
    number smallint NOT NULL,
    financial_year_id integer NOT NULL,
    description text NOT NULL,
    sru smallint
);


--
-- Name: account_balance; Type: TABLE; Schema: public; Owner: -
--

CREATE TABLE public.account_balance (
    account_number integer NOT NULL,
    financial_year_id integer NOT NULL,
    "in" numeric(12,2) DEFAULT 0 NOT NULL,
    "out" numeric(12,2) DEFAULT 0 NOT NULL,
    in_quantity integer,
    out_quantity integer
);


--
-- Name: account_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--

CREATE SEQUENCE public.account_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;


--
-- Name: account_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--

ALTER SEQUENCE public.account_id_seq OWNED BY public.account.id;


--
-- Name: dimension; Type: TABLE; Schema: public; Owner: -
--

CREATE TABLE public.dimension (
    id integer NOT NULL,
    number smallint NOT NULL,
    name text
);


--
-- Name: dimension_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--

CREATE SEQUENCE public.dimension_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;


--
-- Name: dimension_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--

ALTER SEQUENCE public.dimension_id_seq OWNED BY public.dimension.id;


--
-- Name: entry; Type: TABLE; Schema: public; Owner: -
--

CREATE TABLE public.entry (
    id integer NOT NULL,
    financial_year_id integer NOT NULL,
    journal_id integer NOT NULL,
    number integer NOT NULL,
    transaction_date date NOT NULL,
    description text,
    entry_date date NOT NULL,
    signature text
);


--
-- Name: entry_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--

CREATE SEQUENCE public.entry_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;


--
-- Name: entry_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--

ALTER SEQUENCE public.entry_id_seq OWNED BY public.entry.id;


--
-- Name: financial_year; Type: TABLE; Schema: public; Owner: -
--

CREATE TABLE public.financial_year (
    id integer NOT NULL,
    start_date date NOT NULL,
    end_date date NOT NULL
);


--
-- Name: financial_year_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--

CREATE SEQUENCE public.financial_year_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;


--
-- Name: financial_year_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--

ALTER SEQUENCE public.financial_year_id_seq OWNED BY public.financial_year.id;


--
-- Name: journal; Type: TABLE; Schema: public; Owner: -
--

CREATE TABLE public.journal (
    id integer NOT NULL,
    identifier text NOT NULL,
    description text
);


--
-- Name: journal_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--

CREATE SEQUENCE public.journal_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;


--
-- Name: journal_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--

ALTER SEQUENCE public.journal_id_seq OWNED BY public.journal.id;


--
-- Name: object; Type: TABLE; Schema: public; Owner: -
--

CREATE TABLE public.object (
    id integer NOT NULL,
    dimension_id integer NOT NULL,
    number smallint NOT NULL,
    name text
);


--
-- Name: object_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--

CREATE SEQUENCE public.object_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;


--
-- Name: object_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--

ALTER SEQUENCE public.object_id_seq OWNED BY public.object.id;


--
-- Name: transaction; Type: TABLE; Schema: public; Owner: -
--

CREATE TABLE public.transaction (
    id integer NOT NULL,
    entry_id integer NOT NULL,
    account_number smallint NOT NULL,
    amount numeric(12,2) NOT NULL,
    object_id integer,
    description text,
    transaction_date date,
    quantity numeric(12,2),
    signature text
);


--
-- Name: transaction_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--

CREATE SEQUENCE public.transaction_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;


--
-- Name: transaction_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--

ALTER SEQUENCE public.transaction_id_seq OWNED BY public.transaction.id;


--
-- Name: transactions_to_objects; Type: TABLE; Schema: public; Owner: -
--

CREATE TABLE public.transactions_to_objects (
    transaction_id integer NOT NULL,
    object_id integer NOT NULL
);


--
-- Name: account id; Type: DEFAULT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.account ALTER COLUMN id SET DEFAULT nextval('public.account_id_seq'::regclass);


--
-- Name: dimension id; Type: DEFAULT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.dimension ALTER COLUMN id SET DEFAULT nextval('public.dimension_id_seq'::regclass);


--
-- Name: entry id; Type: DEFAULT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.entry ALTER COLUMN id SET DEFAULT nextval('public.entry_id_seq'::regclass);


--
-- Name: financial_year id; Type: DEFAULT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.financial_year ALTER COLUMN id SET DEFAULT nextval('public.financial_year_id_seq'::regclass);


--
-- Name: journal id; Type: DEFAULT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.journal ALTER COLUMN id SET DEFAULT nextval('public.journal_id_seq'::regclass);


--
-- Name: object id; Type: DEFAULT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.object ALTER COLUMN id SET DEFAULT nextval('public.object_id_seq'::regclass);


--
-- Name: transaction id; Type: DEFAULT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.transaction ALTER COLUMN id SET DEFAULT nextval('public.transaction_id_seq'::regclass);


--
-- Name: account_balance account_balance_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.account_balance
    ADD CONSTRAINT account_balance_pkey PRIMARY KEY (account_number, financial_year_id);


--
-- Name: account account_number_financial_year_id_key; Type: CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.account
    ADD CONSTRAINT account_number_financial_year_id_key UNIQUE (number, financial_year_id);


--
-- Name: account account_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.account
    ADD CONSTRAINT account_pkey PRIMARY KEY (id);


--
-- Name: dimension dimension_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.dimension
    ADD CONSTRAINT dimension_pkey PRIMARY KEY (id);


--
-- Name: entry entry_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.entry
    ADD CONSTRAINT entry_pkey PRIMARY KEY (id);


--
-- Name: financial_year financial_year_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.financial_year
    ADD CONSTRAINT financial_year_pkey PRIMARY KEY (id);


--
-- Name: financial_year financial_year_start_date_end_date_key; Type: CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.financial_year
    ADD CONSTRAINT financial_year_start_date_end_date_key UNIQUE (start_date, end_date);


--
-- Name: journal journal_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.journal
    ADD CONSTRAINT journal_pkey PRIMARY KEY (id);


--
-- Name: object object_dimension_id_number_key; Type: CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.object
    ADD CONSTRAINT object_dimension_id_number_key UNIQUE (dimension_id, number);


--
-- Name: object object_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.object
    ADD CONSTRAINT object_pkey PRIMARY KEY (id);


--
-- Name: transaction transaction_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.transaction
    ADD CONSTRAINT transaction_pkey PRIMARY KEY (id);


--
-- Name: transactions_to_objects transactions_to_objects_transaction_id_object_id_key; Type: CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.transactions_to_objects
    ADD CONSTRAINT transactions_to_objects_transaction_id_object_id_key UNIQUE (transaction_id, object_id);


--
-- Name: object object_dimension_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.object
    ADD CONSTRAINT object_dimension_id_fkey FOREIGN KEY (dimension_id) REFERENCES public.dimension(id);


--
-- Name: transaction transaction_entry_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.transaction
    ADD CONSTRAINT transaction_entry_id_fkey FOREIGN KEY (entry_id) REFERENCES public.entry(id);


--
-- Name: transaction transaction_object_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.transaction
    ADD CONSTRAINT transaction_object_id_fkey FOREIGN KEY (object_id) REFERENCES public.object(id);


--
-- Name: transactions_to_objects transactions_to_objects_object_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.transactions_to_objects
    ADD CONSTRAINT transactions_to_objects_object_id_fkey FOREIGN KEY (object_id) REFERENCES public.object(id);


--
-- Name: transactions_to_objects transactions_to_objects_transaction_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--

ALTER TABLE ONLY public.transactions_to_objects
    ADD CONSTRAINT transactions_to_objects_transaction_id_fkey FOREIGN KEY (transaction_id) REFERENCES public.transaction(id);


--
-- PostgreSQL database dump complete
--

\unrestrict EQkcX1mt4Oqej8UZL2Z1qyvnzIAf3O4TYefngsqIN91Lr6JLNTawzD4OIRSJr2s

0 docker/postgres/02-data.sql Normal file

29 docker/postgres/dump.sh Executable file
@@ -0,0 +1,29 @@
#!/bin/sh

script_dir=$(dirname $(readlink -f "$0"))

SCHEMA=true
DATA=true

while getopts "as" opt; do
  case $opt in
    "a")
      SCHEMA=false
      ;;
    "s")
      DATA=false
      ;;
  esac
done

if [ $SCHEMA = "true" ]; then
  echo -n "dumping schema..."
  docker-compose exec -T postgres pg_dump -U brf_books -d brf_books -s -O > $script_dir/01-schema.sql
  echo " done!"
fi

if [ $DATA = "true" ]; then
  echo -n "dumping data..."
  docker-compose exec -T postgres pg_dump -U brf_books -d brf_books -a -O > $script_dir/02-data.sql
  echo " done!"
fi

34 global.d.ts vendored Normal file
@@ -0,0 +1,34 @@
import 'fastify'

import { ViteDevServer } from 'vite'

declare global {
  type ANY = any

  const __STATE__: any
}

declare module '*.scss'

declare module '*.module.scss' {
  const classes: Record<string, string>
  export default classes
}

declare module 'fastify' {
  interface FastifyInstance {
    auth: string
    devServer: ViteDevServer
  }

  interface FastifyRequest {
    login: string
    user: Promise<string>
  }

  interface FastifyReply {
    ctx: { [key: string]: any }
  }
}

export {}

48 package.json Normal file
@@ -0,0 +1,48 @@
{
  "name": "brf_books",
  "version": "0.0.1",
  "main": "server/index.js",
  "type": "module",
  "repository": "git@git.bitmill.io:bitmill/brf_books.git",
  "author": "Linus Miller <linus.miller@bitmill.io>",
  "license": "MIT",
  "private": true,
  "scripts": {
    "build": "pnpm run build:client && pnpm run build:server",
    "build:client": "vite build",
    "build:server": "vite build --ssr",
    "clean": "rm -r dist",
    "format": "prettier --write .",
    "lint": "oxlint",
    "prepare": "husky",
    "start": "node server/index.ts",
    "start:watch": "node --watch-path server --enable-source-maps server/index.ts",
    "test": "pnpm run test:client && pnpm run test:server",
    "test:client": "node --no-warnings --import=./client/test/jsdom_polyfills.ts --import=./client/test/register_tsx_hook.ts --test ./client/**/*.test.ts{,x}",
    "test:server": "node --no-warnings --test ./server/**/*.test.ts",
    "types": "tsc"
  },
  "dependencies": {
    "@bmp/highlight-stack": "^0.1.2",
    "knex": "^3.1.0",
    "pg": "^8.16.3",
    "pino-abstract-transport": "^3.0.0"
  },
  "devDependencies": {
    "@babel/core": "^7.26.10",
    "@preact/preset-vite": "^2.10.1",
    "@testing-library/preact": "^3.2.4",
    "@types/lodash": "^4.17.16",
    "@types/node": "^24.10.1",
    "esbuild": "^0.27.0",
    "globals": "^16.0.0",
    "husky": "^9.1.7",
    "jsdom": "^27.2.0",
    "lint-staged": "^16.2.7",
    "oxlint": "^1.29.0",
    "prettier": "^3.5.3",
    "sass": "^1.85.1",
    "typescript": "^5.8.2",
    "vite": "^7.2.4"
  }
}

3495 pnpm-lock.yaml generated Normal file
File diff suppressed because it is too large

42 server/env.ts Normal file
@@ -0,0 +1,42 @@
const evals = ['false', 'true', 'null', 'undefined']
const numberRegex = /^\d+$/

export function read(columns, defaults = {}) {
  columns = [...new Set(columns.concat(Object.keys(defaults)))].sort()

  const missing = []

  const result = columns.reduce((result, variable) => {
    /* eslint-disable-next-line n/no-process-env */
    let value = process.env[variable] || defaults[variable]

    if (value === undefined) {
      missing.push(variable)
    } else {
      if (typeof value === 'string') {
        if (numberRegex.test(value)) {
          value = Number.parseInt(value)
        } else if (evals.includes(value)) {
          // eslint-disable-next-line no-eval
          value = eval(value)
        }
      }

      result[variable] = value
    }

    return result
  }, {})

  if (missing.length) {
    throw new Error(`Missing required env variables: ${missing.join(', ')}`)
  }

  return result
}

export default read(['PGDATABASE', 'PGHOST', 'PGPASSWORD', 'PGPORT', 'PGUSER'], {
  PGPASSWORD: null,
  PGPORT: null,
  PGUSER: null,
})

33 server/lib/knex.ts Normal file
@@ -0,0 +1,33 @@
import _ from 'lodash'
// eslint-disable-next-line import/no-named-as-default
import knex from 'knex'
import pg from 'pg'
import env from '../env.ts'

const queryProto = pg.Query.prototype

const handleRowDescription = queryProto.handleRowDescription

// camelCase the column names of every result row coming back from postgres
queryProto.handleRowDescription = function (msg) {
  msg.fields.forEach((field) => {
    field.name = _.camelCase(field.name)
  })

  return handleRowDescription.call(this, msg)
}

export default knex({
  client: 'pg',

  wrapIdentifier: (value, origImpl) => (value === '*' ? value : origImpl(_.snakeCase(value))),

  connection: {
    database: env.PGDATABASE,
    host: env.PGHOST,
    password: env.PGPASSWORD,
    port: env.PGPORT,
    user: env.PGUSER,
  },

  acquireConnectionTimeout: 30000,
})

223 server/lib/parse_line.test.ts Normal file
@@ -0,0 +1,223 @@
import { test, type TestContext } from 'node:test'
import {
  parseDim,
  parseIB,
  parseKonto,
  parseObjekt,
  parseRAR,
  parseSRU,
  parseVer,
  parseTrans,
  parseUB,
} from './parse_line.ts'

test('parseDim', (t: TestContext) => {
  const line = '#DIM 6 "Projekt"'
  const expected = {
    number: 6,
    name: 'Projekt',
  }
  t.assert.deepStrictEqual(parseDim(line), expected)
})

test('parseIB', (t: TestContext) => {
  let line = '#IB 0 1790 11946.07 0'
  let expected = {
    accountNumber: 1790,
    yearNumber: 0,
    balance: 11946.07,
    quantity: 0,
  }
  t.assert.deepStrictEqual(parseIB(line), expected)

  line = '#IB 0 2083 -57106301 0'
  expected = {
    accountNumber: 2083,
    yearNumber: 0,
    balance: -57106301,
    quantity: 0,
  }
  t.assert.deepStrictEqual(parseIB(line), expected)

  line = '#IB 0 1111 -5710630.1 3'
  expected = {
    accountNumber: 1111,
    yearNumber: 0,
    balance: -5710630.1,
    quantity: 3,
  }
  t.assert.deepStrictEqual(parseIB(line), expected)
})

test('parseKonto', (t: TestContext) => {
  const line = '#KONTO 1249 "Ack avskr bilar/transportmedel"'
  const expected = {
    number: 1249,
    description: 'Ack avskr bilar/transportmedel',
  }
  t.assert.deepStrictEqual(parseKonto(line), expected)
})

test('parseObjekt', (t: TestContext) => {
  let line = '#OBJEKT 1 "3" "3"'
  let expected = {
    dimensionNumber: 1,
    number: 3,
    name: '3',
  }
  t.assert.deepStrictEqual(parseObjekt(line), expected)

  line = '#OBJEKT 6 "338" "338"'
  expected = {
    dimensionNumber: 6,
    number: 338,
    name: '338',
  }
  t.assert.deepStrictEqual(parseObjekt(line), expected)

  line = '#OBJEKT 6 "4" "Entreer"'
  expected = {
    dimensionNumber: 6,
    number: 4,
    name: 'Entreer',
  }
  t.assert.deepStrictEqual(parseObjekt(line), expected)
})

test('parseRAR', (t: TestContext) => {
  let line = '#RAR 0 20160101 20161231'
  let expected = {
    yearNumber: 0,
    startDate: '20160101',
    endDate: '20161231',
  }
  t.assert.deepStrictEqual(parseRAR(line), expected)

  line = '#RAR -1 20150101 20151231'
  expected = {
    yearNumber: -1,
    startDate: '20150101',
    endDate: '20151231',
  }
  t.assert.deepStrictEqual(parseRAR(line), expected)
})

test('parseSRU', (t: TestContext) => {
  const line = '#SRU 1240 7215'
  const expected = {
    number: 1240,
    sru: 7215,
  }
  t.assert.deepStrictEqual(parseSRU(line), expected)
})

test('parseTrans', (t: TestContext) => {
  // Fortnox 3.57.11
  let line = '#TRANS 1790 {1 "1"} 7509 "" "Faktura 9631584500436 172-57 - Perspektiv Bredband AB" 0'
  let expected = {
    accountNumber: 1790,
    objectList: [[1, 1]],
    amount: 7509,
    transactionDate: null,
    description: 'Faktura 9631584500436 172-57 - Perspektiv Bredband AB',
    quantity: 0,
    signature: null,
  }
  t.assert.deepStrictEqual(parseTrans(line), expected)

  // Fortnox 3.57.11
  line = '#TRANS 2351 {} -3140000 "" "" 0'
  expected = {
    accountNumber: 2351,
    objectList: null,
    amount: -3140000,
    transactionDate: null,
    description: null,
    quantity: 0,
    signature: null,
  }
  t.assert.deepStrictEqual(parseTrans(line), expected)

  // Edison Ekonomi Byrå 6.1
  line = '#TRANS 6310 {} 9076.00 20160131 "jan - mars"'
  expected = {
    accountNumber: 6310,
    objectList: null,
    amount: 9076.0,
    transactionDate: '20160131',
    description: 'jan - mars',
    quantity: null,
    signature: null,
  }
  t.assert.deepStrictEqual(parseTrans(line), expected)

  line = '#TRANS 4341 {1 "1" 6 "338"} 25000 "" "Faktura 748 172-114 - Bredablick Fastighetspartner AB" 0'
  expected = {
    accountNumber: 4341,
    objectList: [
      [1, 1],
      [6, 338],
    ],
    amount: 25000,
    transactionDate: null,
    description: 'Faktura 748 172-114 - Bredablick Fastighetspartner AB',
    quantity: 0,
    signature: null,
  }
  t.assert.deepStrictEqual(parseTrans(line), expected)

  // line = '#TRANS 7010 {"1" "456" "7" "47"} 13200.00'
  // expected = {
  //   accountNumber: 7010,
  //   objectList: [[1, 1], [6, 338]],
  //   amount: 13200,
  //   transactionDate: null,
  //   description: null,
  //   quantity: null,
  //   signature: null,
  // }
  // t.assert.deepStrictEqual(parseTrans(line), expected)
})

test('parseUB', (t: TestContext) => {
  let line = '#UB 0 1110 77246210 0'
  let expected = {
    accountNumber: 1110,
    yearNumber: 0,
    balance: 77246210,
    quantity: 0,
  }
  t.assert.deepStrictEqual(parseUB(line), expected)

  line = '#UB 0 1229 -37390.26 0'
  expected = {
    accountNumber: 1229,
    yearNumber: 0,
    balance: -37390.26,
    quantity: 0,
  }
  t.assert.deepStrictEqual(parseUB(line), expected)

  line = '#UB 0 1730 26327.8 0'
  expected = {
    yearNumber: 0,
    accountNumber: 1730,
    balance: 26327.8,
    quantity: 0,
  }
  t.assert.deepStrictEqual(parseUB(line), expected)
})

test('parseVer', (t: TestContext) => {
  // #VER A 1 20151231 "BR/RR 2015" 20231213

  const line = '#VER A 1 20151231 "BR/RR 2015" 20231213'
  const expected = {
    journal: 'A',
    number: 1,
    transactionDate: '20151231',
    description: 'BR/RR 2015',
    entryDate: '20231213',
    signature: null,
  }

  t.assert.deepStrictEqual(parseVer(line), expected)
})

177 server/lib/parse_line.ts Normal file
@@ -0,0 +1,177 @@
// #DIM dimensionsnr namn (dimension number, name)
// #DIM 6 "Projekt"
const rDim = /#DIM\s+(-?\d+)\s+"([^"]*)"$/

export function parseDim(line: string) {
  const [, number, name] = line.match(rDim)

  return {
    number: parseInt(number),
    name,
  }
}

// #IB årsnr konto saldo kvantitet (year number, account, balance, quantity)
// #IB 0 1790 11946.07 0
// #IB 0 2083 -57106301 0
const rIB = /^#IB\s+(-?\d+)\s+(\d{4,4})\s+(-?\d+(?:\.\d{1,2})?)\s+(-?\d+)$/

export function parseIB(line: string) {
  const result = line.match(rIB)

  if (!result) {
    console.error(line)
    throw Error('parsing error')
  }

  const [, yearNumber, accountNumber, balance, quantity] = result

  return {
    accountNumber: parseInt(accountNumber),
    yearNumber: parseInt(yearNumber),
    balance: parseFloat(balance),
    quantity: parseInt(quantity),
  }
}

// #KONTO 1249 "Ack avskr bilar/transportmedel"
const rKonto = /^#KONTO\s+(\d{4,4})\s+"([^"]*)"$/

export function parseKonto(line: string) {
  const result = line.match(rKonto)

  if (!result) {
    console.error(line)
    throw Error('parsing error')
  }

  const [, number, description] = result

  return {
    number: parseInt(number),
    description,
  }
}

// #OBJEKT dimensionsnr objektnr objektnamn (dimension number, object number, object name)
// #OBJEKT 6 "4" "Entreer"
const rObjekt = /^#OBJEKT\s+(\d+)\s+"?(\d+)"?\s"([^"]*)"$/

export function parseObjekt(line: string) {
  const [, dimensionNumber, number, name] = line.match(rObjekt)

  return {
    dimensionNumber: parseInt(dimensionNumber),
    number: parseInt(number),
    name,
  }
}

// #RAR årsnr start slut (year number, start date, end date)
// #RAR 0 20160101 20161231
// #RAR -1 20150101 20151231
const rRAR = /^#RAR\s+(-?\d+)\s+(\d{8,8})\s+(\d{8,8})$/

export function parseRAR(line: string) {
  const [, yearNumber, startDate, endDate] = line.match(rRAR)

  return {
    yearNumber: parseInt(yearNumber),
    startDate,
    endDate,
  }
}

// #SRU konto SRU-kod (account, SRU code)
// #SRU 1240 7215
const rSRU = /^#SRU\s+(\d{4,4})\s+(\d{4,4})$/

export function parseSRU(line: string) {
  const [, number, sru] = line.match(rSRU)

  return {
    number: parseInt(number),
    sru: parseInt(sru),
  }
}

// #TRANS kontonr {objektlista} belopp transdat transtext kvantitet sign
// (account number, {object list}, amount, transaction date, text, quantity, signature)
// #TRANS 1790 {1 "1"} 7509 "" "Faktura 9631584500436 172-57 - Perspektiv Bredband AB" 0
// #TRANS 6310 {} 9076.00 20160131 "jan - mars"
const rTrans =
  /^#TRANS\s+(\d{4,4})\s+\{([^}]+)?\}\s+(-?\d+(?:\.\d+)?)\s+"?([^"]*)"?\s+"?([^"]*)"?(?:\s+(\d+))?(?:\s+(.*))?$/
const rObjectList = /"?(\d+)"?\s+"?(\d+)"/g

export function parseTrans(line: string) {
  const result = line.match(rTrans)

  if (!result) {
    console.error(line)
    throw new Error('parsing error')
  }

  const [, accountNumber, objectListString, amount, transactionDate, description, quantity, signature] = result

  let objectList = null

  if (objectListString) {
    objectList = []
    const result = objectListString.matchAll(rObjectList)

    for (const match of result) {
      const [, dimension, object] = match

      objectList.push([dimension, object].map((val) => parseInt(val)))
    }
  }

  return {
    accountNumber: parseInt(accountNumber),
    objectList,
    amount: parseFloat(amount),
    transactionDate: transactionDate || null,
    description: description || null,
    quantity: quantity ? parseInt(quantity) : null,
    signature: signature || null,
  }
}

// #UB årsnr konto saldo kvantitet (year number, account, balance, quantity)
// #UB 0 1110 77246210 0
// #UB 0 1229 -37390.26 0
const rUB = /^#UB\s+(-?\d+)\s+(\d{4,4})\s+(-?\d+(?:\.\d{1,2})?)\s+(-?\d+)$/

export function parseUB(line: string) {
  const result = line.match(rUB)

  if (!result) {
    console.error(line)
    throw Error('parsing error')
  }

  const [, yearNumber, accountNumber, balance, quantity] = result

  return {
    accountNumber: parseInt(accountNumber),
    yearNumber: parseInt(yearNumber),
    balance: parseFloat(balance),
    quantity: parseInt(quantity),
  }
}

// #VER serie vernr verdatum vertext regdatum sign
// (journal series, entry number, date, text, registration date, signature)
// #VER A 1 20151231 "BR/RR 2015" 20231213
const rVer = /#VER\s+(\w+)\s+(\d+)\s+(\d{8,8})\s+"([^"]*)"\s+(\d{8,8})(?:\s+(.*))?/

export function parseVer(line: string) {
  const [, journal, number, transactionDate, description, entryDate, signature] = line.match(rVer)

  return {
    journal,
    number: parseInt(number),
    transactionDate,
    description,
    entryDate,
    signature: signature || null,
  }
}

285 server/lib/parse_stream.ts Normal file
@@ -0,0 +1,285 @@
import { type ReadableStream } from 'node:stream/web'
import knex from './knex.ts'

import split, { type Decoder } from './split.ts'
import {
  parseDim,
  parseIB,
  parseKonto,
  parseObjekt,
  parseRAR,
  parseSRU,
  parseTrans,
  parseUB,
  parseVer,
} from './parse_line.ts'

// The input is assumed to be PC8/CP437-encoded (as SIE files usually are), so map the
// Swedish letters by hand and fall back to the raw char code for everything else.
const defaultDecoder = {
  decode(chunk: Uint8Array) {
    return Array.from(chunk, (uint) => {
      switch (uint) {
        case 132:
          return 'ä'
        case 134:
          return 'å'
        case 148:
          return 'ö'
        case 142:
          return 'Ä'
        case 143:
          return 'Å'
        case 153:
          return 'Ö'
        default:
          return String.fromCharCode(uint)
      }
    }).join('')
  },
}

export default async function parseStream(stream: ReadableStream, decoder: Decoder = defaultDecoder) {
  const journals = new Map()

  let currentEntryId: number
  const details: Record<string, string> = {}
  let currentYear = null

  const trx = await knex.transaction()

  async function getJournalId(identifier: string) {
    if (journals.has(identifier)) {
      return journals.get(identifier)
    }

    let journal = await trx('journal').first('*').where('identifier', identifier)

    if (!journal) {
      journal = (await trx('journal').insert({ identifier }).returning('*'))[0]
    }

    journals.set(identifier, journal.id)

    return journal.id
  }

  for await (let line of stream.pipeThrough(split(null, { decoder }))) {
    line = line.trim()

    if (line[0] !== '#') {
      continue
    }

    const splitLine = line.split(/\s+/)
    const lineType = splitLine[0]

    switch (lineType) {
      case '#DIM': {
        const { number, name } = parseDim(line)

        const existingDimension = await trx('dimension').first('*').where('number', number)

        if (!existingDimension) {
          await trx('dimension').insert({ number, name })
        } else if (existingDimension.name !== name) {
          await trx('dimension').update({ name }).where('number', number)
        }

        break
      }
      case '#IB': {
        const { yearNumber, accountNumber, balance, quantity } = parseIB(line)

        if (yearNumber !== 0) continue

        const existingAccountBalance = await trx('accountBalance')
          .first('*')
          .where({ financialYearId: currentYear.id, accountNumber })

        if (!existingAccountBalance) {
          await trx('accountBalance').insert({
            financialYearId: currentYear.id,
            accountNumber,
            in: balance,
            inQuantity: quantity,
          })
        } else {
          await trx('accountBalance')
            .update({
              in: balance,
              inQuantity: quantity,
            })
            .where({
              financialYearId: currentYear.id,
              accountNumber,
            })
        }

        break
      }
      case '#KONTO': {
        const { number, description } = parseKonto(line)

        const existingAccount = await trx('account')
          .first('*')
          .where('number', number)
          .orderBy('financialYearId', 'desc')

        if (!existingAccount) {
          await trx('account').insert({
            financialYearId: currentYear!.id,
            number,
            description,
          })
        } else if (existingAccount.description !== description) {
          await trx('account').update({ description }).where('id', existingAccount.id)
        }

        break
      }
      case '#OBJEKT': {
        const { dimensionNumber, number, name } = parseObjekt(line)

        const dimension = await trx('dimension').first('*').where('number', dimensionNumber)

        if (!dimension) throw new Error(`Dimension "${dimensionNumber}" does not exist`)

        const existingObject = await trx('object').first('*').where({ dimensionId: dimension.id, number })

        if (!existingObject) {
          await trx('object').insert({ dimensionId: dimension.id, number, name })
        } else if (existingObject.name !== name) {
          await trx('object').update({ name }).where({ dimensionId: dimension.id, number })
        }

        break
      }
      case '#RAR': {
        const { yearNumber, startDate, endDate } = parseRAR(line)

        if (yearNumber !== 0) continue

        currentYear = (await trx('financial_year').insert({ startDate, endDate }).returning('*'))[0]

        break
      }
      case '#SRU': {
        const { number, sru } = parseSRU(line)

        const existingAccount = await trx('account')
          .first('*')
          .where('number', number)
          .orderBy('financialYearId', 'desc')

        if (existingAccount) {
          if (existingAccount.sru !== sru) {
            await trx('account').update({ sru: sru }).where('id', existingAccount.id)
          }
        } else {
          await trx('account').insert({
            financialYearId: currentYear!.id,
            number,
            description: existingAccount.description,
            sru,
          })
        }

        break
      }
      case '#TRANS': {
        const { objectList, ...transaction } = parseTrans(line)

        let objectId: number

        const transactionId = (
          await trx('transaction')
            .insert({
              entryId: currentEntryId,
              objectId,
              ...transaction,
            })
            .returning('id')
        )[0].id

        if (objectList) {
          for (const [dimensionNumber, objectNumber] of objectList) {
            const objectId = (
              await trx('object')
                .first('object.id')
                .innerJoin('dimension', 'object.dimension_id', 'dimension.id')
                .where({
                  'object.number': objectNumber,
                  'dimension.number': dimensionNumber,
                })
            )?.id

            if (!objectId) {
              throw new Error(`Object {${dimensionNumber} ${objectNumber}} does not exist!`)
            }

            await trx('transactionsToObjects').insert({
              transactionId,
              objectId,
            })
          }
        }

        break
      }
      case '#UB': {
        const { yearNumber, accountNumber, balance, quantity } = parseUB(line)

        if (yearNumber !== 0) continue

        const existingAccountBalance = await trx('accountBalance')
          .first('*')
          .where({ financialYearId: currentYear.id, accountNumber })

        if (!existingAccountBalance) {
          await trx('accountBalance').insert({
            financialYearId: currentYear.id,
            accountNumber,
            out: balance,
            outQuantity: quantity,
          })
        } else {
          await trx('accountBalance')
            .update({
              out: balance,
              outQuantity: quantity,
            })
            .where({
              financialYearId: currentYear.id,
              accountNumber,
            })
        }

        break
      }
      case '#VER': {
        const { journal, ...rest } = parseVer(line)

        let journalId = await getJournalId(journal)

        currentEntryId = (
          await trx('entry')
            .insert({
              journalId,
              financialYearId: currentYear!.id,
              ...rest,
            })
            .returning('id')
        )[0].id

        break
      }
      default:
        details[lineType] = splitLine.slice(1).join(' ')
    }
  }

  await trx.commit()

  console.dir(details)

  console.info(`DONE!: ${currentYear.startDate} - ${currentYear.endDate}`)
}

36 server/lib/split.ts Normal file
@@ -0,0 +1,36 @@
const defaultMatcher = /\r?\n/

export type Decoder = {
  decode: (chunk?: ArrayBufferView) => string
}

interface Options {
  decoder?: Decoder
}

export default function split(matcher: RegExp | null, { decoder }: Options = {}) {
  matcher ??= defaultMatcher

  let rest: string | null = null

  return new TransformStream({
    start() {},
    transform(chunk, controller) {
      if (chunk) {
        if (decoder) {
          chunk = decoder.decode(chunk)
        }

        const lines = ((rest == null ? '' : rest) + chunk).split(matcher)

        rest = lines.pop()

        for (const line of lines) {
          controller.enqueue(line)
        }
      } else {
        controller.terminate()
      }
    },
  })
}

21 server/lib/status_error.ts Normal file
@@ -0,0 +1,21 @@
import http from 'http'

export default class StatusError extends Error {
  status: number

  constructor(status = 500, message = http.STATUS_CODES[status] || 'Unknown Error', options?: Record<string, ANY>) {
    super(message, options)

    this.name = this.constructor.name
    this.status = status
  }

  toJSON() {
    return {
      name: this.name,
      message: this.message,
      status: this.status,
      stack: this.stack,
    }
  }
}

16 tsconfig.json Normal file
@@ -0,0 +1,16 @@
{
  "compilerOptions": {
    "noEmit": true,
    "target": "esnext",
    "jsx": "react-jsx",
    "jsxImportSource": "preact",
    "module": "nodenext",
    "moduleResolution": "nodenext",
    "allowImportingTsExtensions": true,
    "rewriteRelativeImportExtensions": true,
    "verbatimModuleSyntax": true,
    "erasableSyntaxOnly": true,
    "allowArbitraryExtensions": true
  },
  "include": ["./global.d.ts", "./client/", "./server/"]
}
||||
34
vite.config.js
Normal file
34
vite.config.js
Normal file
@ -0,0 +1,34 @@
|
||||
import { defineConfig } from 'vite'
|
||||
import { preact } from '@preact/preset-vite'
|
||||
|
||||
export default defineConfig(({ isSsrBuild }) => {
|
||||
const entry = process.env.VITE_ENTRY || 'public'
|
||||
|
||||
return {
|
||||
plugins: [preact()],
|
||||
root: new URL('./client', import.meta.url).pathname,
|
||||
publicDir: new URL('./client/static', import.meta.url).pathname,
|
||||
build: {
|
||||
outDir: new URL('./dist', import.meta.url).pathname,
|
||||
emptyOutDir: false,
|
||||
sourcemap: true,
|
||||
assetsInlineLimit: 0,
|
||||
copyPublicDir: !isSsrBuild && entry === 'public',
|
||||
manifest: isSsrBuild ? null : `manifest.${entry}.json`,
|
||||
rollupOptions: {
|
||||
input: {
|
||||
[entry]: new URL(`./client/${entry}/${isSsrBuild ? 'server' : 'client'}.ts`, import.meta.url).pathname,
|
||||
},
|
||||
},
|
||||
},
|
||||
server: {
|
||||
allowedHosts: ['startbit.local'],
|
||||
port: 1338,
|
||||
hmr: process.env.VITE_HMR_PROXY
|
||||
? {
|
||||
clientPort: 443,
|
||||
}
|
||||
: false,
|
||||
},
|
||||
}
|
||||
})
|
||||