Merge pull request #381 from 18alantom/disen-node-two
incr: disentangle node, part 2
This commit is contained in:
commit
9bb5299b97
2
.git-blame-ignore-revs
Normal file
@ -0,0 +1,2 @@
|
||||
# Rename 'frappe' to 'fyo' outside src
|
||||
32d282dc9c6f129807a1cf53eae47fc3602aa976
|
1
.prettierignore
Normal file
@ -0,0 +1 @@
|
||||
**/types.ts
|
91
META.md
Normal file
@ -0,0 +1,91 @@
This `md` lays out how this project is structured.

## Execution

Since it's an electron project, there are two points from where the execution
begins.

1. **Main Process**: Think of this as the _server_; the file where this begins
   is `books/main.ts`.
2. **Renderer Process**: Think of this as the _client_; the file where this
   begins is `books/src/main.js`.

_Note: For more insight into how electron execution is structured, check out electron's
[Process Model](https://www.electronjs.org/docs/latest/tutorial/process-model)._

The project is architected in a _client-server_ manner. If the _client_ side
requires resources from the _server_ side, it does so by making use of
`ipcRenderer.send` or `ipcRenderer.invoke`, i.e. when the front end is being
run on electron.

The `ipcRenderer` calls are made only in the `fyo/demux/*.ts` files, i.e. these
are the only files on the _client_ side that are aware of the platform the
_client_ is being run on (`electron` or the browser). So all platform specific
calls should go through these _demux_ files.
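
To make that concrete, here is a minimal sketch of what a demux-style module could look like; the `window.ipc` wrapper, the channel name, and the fallback URL are illustrative assumptions, not the actual `fyo/demux` implementation:

```ts
// Hypothetical demux sketch: `window.ipc`, the channel name and the fallback
// URL are illustrative, not the project's actual implementation.
interface IpcLike {
  invoke(channel: string, ...args: unknown[]): Promise<unknown>;
}

declare global {
  interface Window {
    ipc?: IpcLike; // assumed to be exposed by an electron preload script
  }
}

export async function getCountryCode(): Promise<string> {
  if (window.ipc) {
    // Running under electron: ask the main process (the "server") over IPC.
    return (await window.ipc.invoke('get-country-code')) as string;
  }

  // Running in a plain browser: no ipcRenderer, so fall back to HTTP.
  const res = await fetch('/api/country-code');
  return (await res.json()) as string;
}
```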

## Code Structure

Code is structured so as to maintain a clear separation between what each set
of files under a given subdirectory does, and also to maintain a clear
separation between the _client_ and the _server_.

The _client_ code should not call _server_ code directly (i.e. by importing
it) and vice-versa. This is to keep the _client_ code platform agnostic.

Some of the code is side agnostic, i.e. it can be called from the _client_ or
the _server_. This is only code that doesn't make platform specific calls, for
example using `node`'s `fs` or the browser's `window`. Ideally this code won't
have any imports.

### Special Folders

Here's a list of subdirectories and their purposes; for more details on
individual ones, check the `README.md` in those subdirectories:

| Folder         | Side       | Description                                                                                                                  |
| -------------- | ---------- | ---------------------------------------------------------------------------------------------------------------------------- |
| `main`         | _server_   | Electron main process specific code called from `books/main.ts`                                                              |
| `schemas`      | _server_   | Collection of database schemas in a `json` format and the code to combine them                                               |
| `backend`      | _server_   | Database management and CRUD calls                                                                                           |
| `scripts`      | _server_   | Code that is not called when the project is running, but separately to run some task, for instance to generate translations  |
| `build`        | _server_   | Build specific files not used unless building the project                                                                    |
| `translations` | _server_   | Collection of csv files containing translations                                                                              |
| `src`          | _client_   | Code that mainly deals with the view layer (all `.vue` files are stored here)                                                 |
| `reports`      | _client\*_ | Collection of logic code and view layer config files for displaying reports                                                  |
| `models`       | _client\*_ | Collection of `Model.ts` files that manage the data and some business logic on the client side                               |
| `fyo`          | _client\*_ | Code for the underlying library that manages the client side                                                                 |
| `utils`        | _agnostic_ | Collection of code used by either side                                                                                       |
| `dummy`        | _agnostic_ | Code used to generate dummy data for testing or demo purposes                                                                |

#### _client\*_

The code in these folders is called during runtime from the _client_ side, but
since it contains business logic, it is tested using `mocha` on the _server_
side. This is a bit stupid and so will be fixed later.

Due to this, the code in these files should not call _client_ side code
directly. If client side code is to be called, it should be done only by using
dynamic imports, i.e. `await import('...')`, along pathways that won't run in
a test.
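
For illustration, a sketch of the dynamic-import pattern; the module path, `showToast`, and the `isTest` flag are hypothetical names used only for this example:

```ts
// Hypothetical sketch: the module path, `showToast`, and `isTest` are
// illustrative names, not the project's actual API.
export async function notifySaved(isTest: boolean): Promise<void> {
  if (isTest) {
    // Under mocha (run on the server side) the view layer never loads,
    // so this pathway is skipped entirely.
    return;
  }

  // The client-only module is imported dynamically, so it is never pulled
  // into the server-side test run.
  const { showToast } = await import('../src/utils/ui');
  showToast('Saved');
}
```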

### Special Files

Other than these, there are two special types of files:

#### `**/types.ts`

These contain all the type information; these files are side agnostic and
should only import code from other type files.

The type information contained depends on the folder it is under, i.e. where
the code associated with the types is written.

If you're trying to understand the code in this project, I'd suggest not
ignoring these files.
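
As an illustration of the convention (the names and the import path below are made up for this example, not actual project types), a `types.ts` might look like this:

```ts
// Hypothetical types.ts sketch: names and path are illustrative. Only other
// type files are imported, which keeps the module side agnostic.
import type { RawValue } from '../schemas/types';

export interface ExchangeRateQuery {
  fromCurrency: string;
  toCurrency: string;
  date?: string;
}

export type SingleValueMap = Record<string, RawValue | undefined>;
```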

#### `**/test/*.spec.ts`

These contain tests; as of now all tests run on the _server_ side using `mocha`.

The test files are located in `**/test` folders, which are nested under the
directories of whatever they are testing. No code from these files is called
during runtime.
16
README.md
@ -106,13 +106,15 @@ If you want to contribute code then you can fork this repo, make changes and rai
|
||||
|
||||
## Translation Contributors
|
||||
|
||||
| Language | Contributors |
|
||||
|----|---|
|
||||
| French | [DeepL](https://www.deepl.com/) |
|
||||
| German | [DeepL](https://www.deepl.com/), [barredterra](https://github.com/barredterra) |
|
||||
| Portuguese | [DeepL](https://www.deepl.com/) |
|
||||
| Arabic | [taha2002](https://github.com/taha2002) |
|
||||
| Catalan | Dídac E. Jiménez |
|
||||
| Language | Contributors |
|
||||
| ---------- | ------------------------------------------------------------------------------ |
|
||||
| French | [DeepL](https://www.deepl.com/) |
|
||||
| German | [DeepL](https://www.deepl.com/), [barredterra](https://github.com/barredterra) |
|
||||
| Portuguese | [DeepL](https://www.deepl.com/) |
|
||||
| Arabic | [taha2002](https://github.com/taha2002) |
|
||||
| Catalan | Dídac E. Jiménez |
|
||||
| Dutch | [FastAct](https://github.com/FastAct) |
|
||||
| Spanish | [talmax1124](https://github.com/talmax1124) |
|
||||
|
||||
## License
|
||||
|
||||
|
@ -1,31 +0,0 @@
|
||||
import frappe from 'frappe';
|
||||
import { DateTime } from 'luxon';
|
||||
|
||||
export async function getExchangeRate({ fromCurrency, toCurrency, date }) {
|
||||
if (!date) {
|
||||
date = DateTime.local().toISODate();
|
||||
}
|
||||
if (!fromCurrency || !toCurrency) {
|
||||
throw new frappe.errors.NotFoundError(
|
||||
'Please provide `fromCurrency` and `toCurrency` to get exchange rate.'
|
||||
);
|
||||
}
|
||||
let cacheKey = `currencyExchangeRate:${date}:${fromCurrency}:${toCurrency}`;
|
||||
let exchangeRate = parseFloat(localStorage.getItem(cacheKey));
|
||||
if (!exchangeRate) {
|
||||
try {
|
||||
let res = await fetch(
|
||||
` https://api.vatcomply.com/rates?date=${date}&base=${fromCurrency}&symbols=${toCurrency}`
|
||||
);
|
||||
let data = await res.json();
|
||||
exchangeRate = data.rates[toCurrency];
|
||||
localStorage.setItem(cacheKey, exchangeRate);
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
throw new Error(
|
||||
`Could not fetch exchange rate for ${fromCurrency} -> ${toCurrency}`
|
||||
);
|
||||
}
|
||||
}
|
||||
return exchangeRate;
|
||||
}
|
@ -1,368 +0,0 @@
|
||||
import { showMessageDialog } from '@/utils';
|
||||
import frappe, { t } from 'frappe';
|
||||
import { DateTime } from 'luxon';
|
||||
import { exportCsv, saveExportData } from '../reports/commonExporter';
|
||||
import { getSavePath } from '../src/utils';
|
||||
|
||||
// prettier-ignore
|
||||
export const stateCodeMap = {
|
||||
'JAMMU AND KASHMIR': '1',
|
||||
'HIMACHAL PRADESH': '2',
|
||||
'PUNJAB': '3',
|
||||
'CHANDIGARH': '4',
|
||||
'UTTARAKHAND': '5',
|
||||
'HARYANA': '6',
|
||||
'DELHI': '7',
|
||||
'RAJASTHAN': '8',
|
||||
'UTTAR PRADESH': '9',
|
||||
'BIHAR': '10',
|
||||
'SIKKIM': '11',
|
||||
'ARUNACHAL PRADESH': '12',
|
||||
'NAGALAND': '13',
|
||||
'MANIPUR': '14',
|
||||
'MIZORAM': '15',
|
||||
'TRIPURA': '16',
|
||||
'MEGHALAYA': '17',
|
||||
'ASSAM': '18',
|
||||
'WEST BENGAL': '19',
|
||||
'JHARKHAND': '20',
|
||||
'ODISHA': '21',
|
||||
'CHATTISGARH': '22',
|
||||
'MADHYA PRADESH': '23',
|
||||
'GUJARAT': '24',
|
||||
'DADRA AND NAGAR HAVELI AND DAMAN AND DIU': '26',
|
||||
'MAHARASHTRA': '27',
|
||||
'KARNATAKA': '29',
|
||||
'GOA': '30',
|
||||
'LAKSHADWEEP': '31',
|
||||
'KERALA': '32',
|
||||
'TAMIL NADU': '33',
|
||||
'PUDUCHERRY': '34',
|
||||
'ANDAMAN AND NICOBAR ISLANDS': '35',
|
||||
'TELANGANA': '36',
|
||||
'ANDHRA PRADESH': '37',
|
||||
'LADAKH': '38',
|
||||
};
|
||||
|
||||
const GST = {
|
||||
'GST-0': 0,
|
||||
'GST-0.25': 0.25,
|
||||
'GST-3': 3,
|
||||
'GST-5': 5,
|
||||
'GST-6': 6,
|
||||
'GST-12': 12,
|
||||
'GST-18': 18,
|
||||
'GST-28': 28,
|
||||
'IGST-0': 0,
|
||||
'IGST-0.25': 0.25,
|
||||
'IGST-3': 3,
|
||||
'IGST-5': 5,
|
||||
'IGST-6': 6,
|
||||
'IGST-12': 12,
|
||||
'IGST-18': 18,
|
||||
'IGST-28': 28,
|
||||
};
|
||||
|
||||
const CSGST = {
|
||||
'GST-0': 0,
|
||||
'GST-0.25': 0.125,
|
||||
'GST-3': 1.5,
|
||||
'GST-5': 2.5,
|
||||
'GST-6': 3,
|
||||
'GST-12': 6,
|
||||
'GST-18': 9,
|
||||
'GST-28': 14,
|
||||
};
|
||||
|
||||
const IGST = {
|
||||
'IGST-0.25': 0.25,
|
||||
'IGST-3': 3,
|
||||
'IGST-5': 5,
|
||||
'IGST-6': 6,
|
||||
'IGST-12': 12,
|
||||
'IGST-18': 18,
|
||||
'IGST-28': 28,
|
||||
};
|
||||
|
||||
export async function generateGstr1Json(getReportData) {
|
||||
const { gstin } = frappe.AccountingSettings;
|
||||
if (!gstin) {
|
||||
showMessageDialog({
|
||||
message: t`Export Failed`,
|
||||
description: t`Please set GSTIN in General Settings.`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const {
|
||||
rows,
|
||||
filters: { transferType, toDate },
|
||||
} = getReportData();
|
||||
|
||||
const { filePath, canceled } = await getSavePath('gstr-1', 'json');
|
||||
if (canceled || !filePath) return;
|
||||
|
||||
const gstData = {
|
||||
version: 'GST3.0.4',
|
||||
hash: 'hash',
|
||||
gstin: gstin,
|
||||
// fp is the the MMYYYY for the last month of the report
|
||||
// for example if you are extracting report for 1st July 2020 to 31st September 2020 then
|
||||
// fb = 092020
|
||||
fp: DateTime.fromISO(toDate).toFormat('MMyyyy'),
|
||||
};
|
||||
|
||||
if (transferType === 'B2B') {
|
||||
gstData.b2b = await generateB2bData(rows);
|
||||
} else if (transferType === 'B2CL') {
|
||||
gstData.b2cl = await generateB2clData(rows);
|
||||
} else if (transferType === 'B2CS') {
|
||||
gstData.b2cs = await generateB2csData(rows);
|
||||
}
|
||||
|
||||
const jsonData = JSON.stringify(gstData);
|
||||
await saveExportData(jsonData, filePath);
|
||||
}
|
||||
|
||||
async function generateB2bData(rows) {
|
||||
const b2b = [];
|
||||
|
||||
for (let row of rows) {
|
||||
const customer = {
|
||||
ctin: row.gstin,
|
||||
inv: [],
|
||||
};
|
||||
|
||||
const invRecord = {
|
||||
inum: row.invNo,
|
||||
idt: DateTime.fromFormat(row.invDate, 'yyyy-MM-dd').toFormat(
|
||||
'dd-MM-yyyy'
|
||||
),
|
||||
val: row.invAmt,
|
||||
pos: row.gstin && row.gstin.substring(0, 2),
|
||||
rchrg: row.reverseCharge,
|
||||
inv_typ: 'R',
|
||||
itms: [],
|
||||
};
|
||||
|
||||
let items = await frappe.db
|
||||
.knex('SalesInvoiceItem')
|
||||
.where('parent', invRecord.inum);
|
||||
|
||||
items.forEach((item) => {
|
||||
const itemRecord = {
|
||||
num: item.hsnCode,
|
||||
itm_det: {
|
||||
txval: frappe.pesa(item.baseAmount).float,
|
||||
rt: GST[item.tax],
|
||||
csamt: 0,
|
||||
camt: frappe
|
||||
.pesa(CSGST[item.tax] || 0)
|
||||
.mul(item.baseAmount)
|
||||
.div(100).float,
|
||||
samt: frappe
|
||||
.pesa(CSGST[item.tax] || 0)
|
||||
.mul(item.baseAmount)
|
||||
.div(100).float,
|
||||
iamt: frappe
|
||||
.pesa(IGST[item.tax] || 0)
|
||||
.mul(item.baseAmount)
|
||||
.div(100).float,
|
||||
},
|
||||
};
|
||||
|
||||
invRecord.itms.push(itemRecord);
|
||||
});
|
||||
|
||||
const customerRecord = b2b.find((b) => b.ctin === row.gstin);
|
||||
|
||||
if (customerRecord) {
|
||||
customerRecord.inv.push(invRecord);
|
||||
} else {
|
||||
customer.inv.push(invRecord);
|
||||
b2b.push(customer);
|
||||
}
|
||||
}
|
||||
|
||||
return b2b;
|
||||
}
|
||||
|
||||
async function generateB2clData(invoices) {
|
||||
const b2cl = [];
|
||||
|
||||
for (let invoice of invoices) {
|
||||
const stateInvoiceRecord = {
|
||||
pos: stateCodeMap[invoice.place.toUpperCase()],
|
||||
inv: [],
|
||||
};
|
||||
|
||||
const invRecord = {
|
||||
inum: invoice.invNo,
|
||||
idt: DateTime.fromFormat(invoice.invDate, 'yyyy-MM-dd').toFormat(
|
||||
'dd-MM-yyyy'
|
||||
),
|
||||
val: invoice.invAmt,
|
||||
itms: [],
|
||||
};
|
||||
|
||||
let items = await frappe.db
|
||||
.knex('SalesInvoiceItem')
|
||||
.where('parent', invRecord.inum);
|
||||
|
||||
items.forEach((item) => {
|
||||
const itemRecord = {
|
||||
num: item.hsnCode,
|
||||
itm_det: {
|
||||
txval: frappe.pesa(item.baseAmount).float,
|
||||
rt: GST[item.tax],
|
||||
csamt: 0,
|
||||
iamt: frappe
|
||||
.pesa(invoice.rate || 0)
|
||||
.mul(item.baseAmount)
|
||||
.div(100).float,
|
||||
},
|
||||
};
|
||||
|
||||
invRecord.itms.push(itemRecord);
|
||||
});
|
||||
|
||||
const stateRecord = b2cl.find((b) => b.pos === stateCodeMap[invoice.place]);
|
||||
|
||||
if (stateRecord) {
|
||||
stateRecord.inv.push(invRecord);
|
||||
} else {
|
||||
stateInvoiceRecord.inv.push(invRecord);
|
||||
b2cl.push(stateInvoiceRecord);
|
||||
}
|
||||
}
|
||||
|
||||
return b2cl;
|
||||
}
|
||||
|
||||
async function generateB2csData(invoices) {
|
||||
const b2cs = [];
|
||||
|
||||
for (let invoice of invoices) {
|
||||
const pos = invoice.place.toUpperCase();
|
||||
|
||||
const invRecord = {
|
||||
sply_ty: invoice.inState ? 'INTRA' : 'INTER',
|
||||
pos: stateCodeMap[pos],
|
||||
// "OE" - Abbreviation for errors and omissions excepted.
|
||||
typ: 'OE',
|
||||
txval: invoice.taxVal,
|
||||
rt: invoice.rate,
|
||||
iamt: !invoice.inState ? (invoice.taxVal * invoice.rate) / 100 : 0,
|
||||
camt: invoice.inState ? invoice.cgstAmt : 0,
|
||||
samt: invoice.inState ? invoice.sgstAmt : 0,
|
||||
csamt: 0,
|
||||
};
|
||||
|
||||
b2cs.push(invRecord);
|
||||
}
|
||||
|
||||
return b2cs;
|
||||
}
|
||||
|
||||
export async function generateGstr2Csv(getReportData) {
|
||||
const { gstin } = frappe.AccountingSettings;
|
||||
if (!gstin) {
|
||||
showMessageDialog({
|
||||
message: t`Export Failed`,
|
||||
description: t`Please set GSTIN in General Settings.`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const {
|
||||
rows,
|
||||
columns,
|
||||
filters: { transferType, toDate },
|
||||
} = getReportData();
|
||||
|
||||
const { filePath, canceled } = await getSavePath('gstr-2', 'csv');
|
||||
if (canceled || !filePath) return;
|
||||
|
||||
let gstData;
|
||||
if (transferType === 'B2B') {
|
||||
gstData = await generateB2bCsvGstr2(rows, columns);
|
||||
}
|
||||
|
||||
await exportCsv(gstData.rows, gstData.columns, filePath);
|
||||
}
|
||||
|
||||
async function generateB2bCsvGstr2(rows, columns) {
|
||||
const csvColumns = [
|
||||
{
|
||||
label: t`GSTIN of Supplier`,
|
||||
fieldname: 'gstin',
|
||||
},
|
||||
{
|
||||
label: t`Invoice Number`,
|
||||
fieldname: 'invNo',
|
||||
},
|
||||
{
|
||||
label: t`Invoice Date`,
|
||||
fieldname: 'invDate',
|
||||
},
|
||||
{
|
||||
label: t`Invoice Value`,
|
||||
fieldname: 'invAmt',
|
||||
},
|
||||
{
|
||||
label: t`Place of supply`,
|
||||
fieldname: 'place',
|
||||
},
|
||||
{
|
||||
label: t`Reverse Charge`,
|
||||
fieldname: 'reverseCharge',
|
||||
},
|
||||
{
|
||||
label: t`Rate`,
|
||||
fieldname: 'rate',
|
||||
},
|
||||
{
|
||||
label: t`Taxable Value`,
|
||||
fieldname: 'taxVal',
|
||||
},
|
||||
{
|
||||
label: t`Intergrated Tax Paid`,
|
||||
fieldname: 'igstAmt',
|
||||
},
|
||||
{
|
||||
label: t`Central Tax Paid`,
|
||||
fieldname: 'cgstAmt',
|
||||
},
|
||||
{
|
||||
label: t`State/UT Tax Paid`,
|
||||
fieldname: 'sgstAmt',
|
||||
},
|
||||
];
|
||||
|
||||
return {
|
||||
columns: csvColumns || [],
|
||||
rows: rows || [],
|
||||
};
|
||||
}
|
||||
|
||||
export async function generateGstr1Csv(getReportData) {
|
||||
const { gstin } = frappe.AccountingSettings;
|
||||
if (!gstin) {
|
||||
showMessageDialog({
|
||||
message: t`Export Failed`,
|
||||
description: t`Please set GSTIN in General Settings.`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const {
|
||||
rows,
|
||||
columns,
|
||||
filters: { transferType, toDate },
|
||||
} = getReportData();
|
||||
|
||||
const { filePath, canceled } = await getSavePath('gstr-1', 'csv');
|
||||
if (canceled || !filePath) return;
|
||||
|
||||
await exportCsv(rows, columns, filePath);
|
||||
}
|
@ -1,79 +0,0 @@
|
||||
import frappe from 'frappe';
|
||||
import standardCOA from '../fixtures/verified/standardCOA.json';
|
||||
import { getCOAList } from '../src/utils';
|
||||
const accountFields = ['accountType', 'accountNumber', 'rootType', 'isGroup'];
|
||||
|
||||
function getAccountName(accountName, accountNumber) {
|
||||
if (accountNumber) {
|
||||
return `${accountName} - ${accountNumber}`;
|
||||
}
|
||||
return accountName;
|
||||
}
|
||||
|
||||
async function importAccounts(children, parentAccount, rootType, rootAccount) {
|
||||
for (let rootName in children) {
|
||||
if (accountFields.includes(rootName)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const child = children[rootName];
|
||||
|
||||
if (rootAccount) {
|
||||
rootType = child.rootType;
|
||||
}
|
||||
|
||||
const { accountType, accountNumber } = child;
|
||||
const accountName = getAccountName(rootName, accountNumber);
|
||||
const isGroup = identifyIsGroup(child);
|
||||
const doc = frappe.newDoc({
|
||||
doctype: 'Account',
|
||||
name: accountName,
|
||||
parentAccount,
|
||||
isGroup,
|
||||
rootType,
|
||||
balance: 0,
|
||||
accountType,
|
||||
});
|
||||
|
||||
await doc.insert();
|
||||
await importAccounts(child, accountName, rootType);
|
||||
}
|
||||
}
|
||||
|
||||
function identifyIsGroup(child) {
|
||||
if (child.isGroup) {
|
||||
return child.isGroup;
|
||||
}
|
||||
|
||||
const keys = Object.keys(child);
|
||||
const children = keys.filter((key) => !accountFields.includes(key));
|
||||
|
||||
if (children.length) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
export async function getCountryCOA(chartOfAccounts) {
|
||||
const coaList = getCOAList();
|
||||
const coa = coaList.find(({ name }) => name === chartOfAccounts);
|
||||
const conCode = coa.countryCode;
|
||||
if (!conCode) {
|
||||
return standardCOA;
|
||||
}
|
||||
|
||||
try {
|
||||
const countryCoa = (
|
||||
await import('../fixtures/verified/' + conCode + '.json')
|
||||
).default;
|
||||
return countryCoa.tree;
|
||||
} catch (e) {
|
||||
return standardCOA;
|
||||
}
|
||||
}
|
||||
|
||||
export default async function importCharts(chartOfAccounts) {
|
||||
const chart = await getCountryCOA(chartOfAccounts);
|
||||
await importAccounts(chart, '', '', true);
|
||||
}
|
@ -1,164 +0,0 @@
|
||||
import frappe from 'frappe';
|
||||
|
||||
export default class LedgerPosting {
|
||||
constructor({ reference, party, date, description }) {
|
||||
this.reference = reference;
|
||||
this.party = party;
|
||||
this.date = date;
|
||||
this.description = description;
|
||||
this.entries = [];
|
||||
this.entryMap = {};
|
||||
this.reverted = 0;
|
||||
// To change balance while entering ledger entries
|
||||
this.accountEntries = [];
|
||||
}
|
||||
|
||||
async debit(account, amount, referenceType, referenceName) {
|
||||
const entry = this.getEntry(account, referenceType, referenceName);
|
||||
entry.debit = entry.debit.add(amount);
|
||||
await this.setAccountBalanceChange(account, 'debit', amount);
|
||||
}
|
||||
|
||||
async credit(account, amount, referenceType, referenceName) {
|
||||
const entry = this.getEntry(account, referenceType, referenceName);
|
||||
entry.credit = entry.credit.add(amount);
|
||||
await this.setAccountBalanceChange(account, 'credit', amount);
|
||||
}
|
||||
|
||||
async setAccountBalanceChange(accountName, type, amount) {
|
||||
const debitAccounts = ['Asset', 'Expense'];
|
||||
const { rootType } = await frappe.getDoc('Account', accountName);
|
||||
if (debitAccounts.indexOf(rootType) === -1) {
|
||||
const change = type == 'credit' ? amount : amount.neg();
|
||||
this.accountEntries.push({
|
||||
name: accountName,
|
||||
balanceChange: change,
|
||||
});
|
||||
} else {
|
||||
const change = type == 'debit' ? amount : amount.neg();
|
||||
this.accountEntries.push({
|
||||
name: accountName,
|
||||
balanceChange: change,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
getEntry(account, referenceType, referenceName) {
|
||||
if (!this.entryMap[account]) {
|
||||
const entry = {
|
||||
account: account,
|
||||
party: this.party || '',
|
||||
date: this.date || this.reference.date,
|
||||
referenceType: referenceType || this.reference.doctype,
|
||||
referenceName: referenceName || this.reference.name,
|
||||
description: this.description,
|
||||
reverted: this.reverted,
|
||||
debit: frappe.pesa(0),
|
||||
credit: frappe.pesa(0),
|
||||
};
|
||||
|
||||
this.entries.push(entry);
|
||||
this.entryMap[account] = entry;
|
||||
}
|
||||
|
||||
return this.entryMap[account];
|
||||
}
|
||||
|
||||
async post() {
|
||||
this.validateEntries();
|
||||
await this.insertEntries();
|
||||
}
|
||||
|
||||
async postReverse() {
|
||||
this.validateEntries();
|
||||
|
||||
let data = await frappe.db.getAll({
|
||||
doctype: 'AccountingLedgerEntry',
|
||||
fields: ['name'],
|
||||
filters: {
|
||||
referenceName: this.reference.name,
|
||||
reverted: 0,
|
||||
},
|
||||
});
|
||||
|
||||
for (let entry of data) {
|
||||
let entryDoc = await frappe.getDoc('AccountingLedgerEntry', entry.name);
|
||||
entryDoc.reverted = 1;
|
||||
await entryDoc.update();
|
||||
}
|
||||
|
||||
let temp;
|
||||
for (let entry of this.entries) {
|
||||
temp = entry.debit;
|
||||
entry.debit = entry.credit;
|
||||
entry.credit = temp;
|
||||
entry.reverted = 1;
|
||||
}
|
||||
for (let entry of this.accountEntries) {
|
||||
entry.balanceChange = entry.balanceChange.neg();
|
||||
}
|
||||
await this.insertEntries();
|
||||
}
|
||||
|
||||
makeRoundOffEntry() {
|
||||
let { debit, credit } = this.getTotalDebitAndCredit();
|
||||
let difference = debit.sub(credit);
|
||||
let absoluteValue = difference.abs();
|
||||
let allowance = 0.5;
|
||||
if (absoluteValue.eq(0)) {
|
||||
return;
|
||||
}
|
||||
|
||||
let roundOffAccount = this.getRoundOffAccount();
|
||||
if (absoluteValue.lte(allowance)) {
|
||||
if (difference.gt(0)) {
|
||||
this.credit(roundOffAccount, absoluteValue);
|
||||
} else {
|
||||
this.debit(roundOffAccount, absoluteValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
validateEntries() {
|
||||
let { debit, credit } = this.getTotalDebitAndCredit();
|
||||
if (debit.neq(credit)) {
|
||||
throw new frappe.errors.ValidationError(
|
||||
`Total Debit: ${frappe.format(
|
||||
debit,
|
||||
'Currency'
|
||||
)} must be equal to Total Credit: ${frappe.format(credit, 'Currency')}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
getTotalDebitAndCredit() {
|
||||
let debit = frappe.pesa(0);
|
||||
let credit = frappe.pesa(0);
|
||||
|
||||
for (let entry of this.entries) {
|
||||
debit = debit.add(entry.debit);
|
||||
credit = credit.add(entry.credit);
|
||||
}
|
||||
|
||||
return { debit, credit };
|
||||
}
|
||||
|
||||
async insertEntries() {
|
||||
for (let entry of this.entries) {
|
||||
let entryDoc = frappe.newDoc({
|
||||
doctype: 'AccountingLedgerEntry',
|
||||
});
|
||||
Object.assign(entryDoc, entry);
|
||||
await entryDoc.insert();
|
||||
}
|
||||
for (let entry of this.accountEntries) {
|
||||
let entryDoc = await frappe.getDoc('Account', entry.name);
|
||||
entryDoc.balance = entryDoc.balance.add(entry.balanceChange);
|
||||
await entryDoc.update();
|
||||
}
|
||||
}
|
||||
|
||||
getRoundOffAccount() {
|
||||
return frappe.AccountingSettings.roundOffAccount;
|
||||
}
|
||||
}
|
@ -1,20 +0,0 @@
|
||||
import { t } from 'frappe';
|
||||
|
||||
export const ledgerLink = {
|
||||
label: t`Ledger Entries`,
|
||||
condition: (doc) => doc.submitted,
|
||||
action: (doc, router) => {
|
||||
router.push({
|
||||
name: 'Report',
|
||||
params: {
|
||||
reportName: 'general-ledger',
|
||||
defaultFilters: {
|
||||
referenceType: doc.doctype,
|
||||
referenceName: doc.name,
|
||||
},
|
||||
},
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
export default { ledgerLink };
|
122
backend/database/bespoke.ts
Normal file
@ -0,0 +1,122 @@
|
||||
import DatabaseCore from './core';
|
||||
import { BespokeFunction } from './types';
|
||||
|
||||
export class BespokeQueries {
|
||||
[key: string]: BespokeFunction;
|
||||
|
||||
static async getLastInserted(
|
||||
db: DatabaseCore,
|
||||
schemaName: string
|
||||
): Promise<number> {
|
||||
const lastInserted = (await db.knex!.raw(
|
||||
'select cast(name as int) as num from ?? order by num desc limit 1',
|
||||
[schemaName]
|
||||
)) as { num: number }[];
|
||||
|
||||
const num = lastInserted?.[0]?.num;
|
||||
if (num === undefined) {
|
||||
return 0;
|
||||
}
|
||||
return num;
|
||||
}
|
||||
|
||||
static async getTopExpenses(
|
||||
db: DatabaseCore,
|
||||
fromDate: string,
|
||||
toDate: string
|
||||
) {
|
||||
const expenseAccounts = db
|
||||
.knex!.select('name')
|
||||
.from('Account')
|
||||
.where('rootType', 'Expense');
|
||||
|
||||
const topExpenses = await db
|
||||
.knex!.select({
|
||||
total: db.knex!.raw('sum(cast(debit as real) - cast(credit as real))'),
|
||||
})
|
||||
.select('account')
|
||||
.from('AccountingLedgerEntry')
|
||||
.where('reverted', false)
|
||||
.where('account', 'in', expenseAccounts)
|
||||
.whereBetween('date', [fromDate, toDate])
|
||||
.groupBy('account')
|
||||
.orderBy('total', 'desc')
|
||||
.limit(5);
|
||||
return topExpenses;
|
||||
}
|
||||
|
||||
static async getTotalOutstanding(
|
||||
db: DatabaseCore,
|
||||
schemaName: string,
|
||||
fromDate: string,
|
||||
toDate: string
|
||||
) {
|
||||
return await db.knex!(schemaName)
|
||||
.sum({ total: 'baseGrandTotal' })
|
||||
.sum({ outstanding: 'outstandingAmount' })
|
||||
.where('submitted', true)
|
||||
.where('cancelled', false)
|
||||
.whereBetween('date', [fromDate, toDate])
|
||||
.first();
|
||||
}
|
||||
|
||||
static async getCashflow(db: DatabaseCore, fromDate: string, toDate: string) {
|
||||
const cashAndBankAccounts = db.knex!('Account')
|
||||
.select('name')
|
||||
.where('accountType', 'in', ['Cash', 'Bank'])
|
||||
.andWhere('isGroup', false);
|
||||
const dateAsMonthYear = db.knex!.raw(`strftime('%Y-%m', ??)`, 'date');
|
||||
return await db.knex!('AccountingLedgerEntry')
|
||||
.where('reverted', false)
|
||||
.sum({
|
||||
inflow: 'debit',
|
||||
outflow: 'credit',
|
||||
})
|
||||
.select({
|
||||
yearmonth: dateAsMonthYear,
|
||||
})
|
||||
.where('account', 'in', cashAndBankAccounts)
|
||||
.whereBetween('date', [fromDate, toDate])
|
||||
.groupBy(dateAsMonthYear);
|
||||
}
|
||||
|
||||
static async getIncomeAndExpenses(
|
||||
db: DatabaseCore,
|
||||
fromDate: string,
|
||||
toDate: string
|
||||
) {
|
||||
const income = await db.knex!.raw(
|
||||
`
|
||||
select sum(cast(credit as real) - cast(debit as real)) as balance, strftime('%Y-%m', date) as yearmonth
|
||||
from AccountingLedgerEntry
|
||||
where
|
||||
reverted = false and
|
||||
date between date(?) and date(?) and
|
||||
account in (
|
||||
select name
|
||||
from Account
|
||||
where rootType = 'Income'
|
||||
)
|
||||
group by yearmonth`,
|
||||
[fromDate, toDate]
|
||||
);
|
||||
|
||||
const expense = await db.knex!.raw(
|
||||
`
|
||||
select sum(cast(debit as real) - cast(credit as real)) as balance, strftime('%Y-%m', date) as yearmonth
|
||||
from AccountingLedgerEntry
|
||||
where
|
||||
reverted = false and
|
||||
date between date(?) and date(?) and
|
||||
account in (
|
||||
select name
|
||||
from Account
|
||||
where rootType = 'Expense'
|
||||
)
|
||||
group by yearmonth`,
|
||||
[fromDate, toDate]
|
||||
);
|
||||
|
||||
return { income, expense };
|
||||
}
|
||||
}
|
943
backend/database/core.ts
Normal file
@ -0,0 +1,943 @@
|
||||
import {
|
||||
CannotCommitError,
|
||||
DatabaseError,
|
||||
DuplicateEntryError,
|
||||
LinkValidationError,
|
||||
NotFoundError,
|
||||
ValueError,
|
||||
} from 'fyo/utils/errors';
|
||||
import { knex, Knex } from 'knex';
|
||||
import {
|
||||
Field,
|
||||
FieldTypeEnum,
|
||||
RawValue,
|
||||
Schema,
|
||||
SchemaMap,
|
||||
TargetField,
|
||||
} from '../../schemas/types';
|
||||
import {
|
||||
getIsNullOrUndef,
|
||||
getRandomString,
|
||||
getValueMapFromList,
|
||||
} from '../../utils';
|
||||
import { DatabaseBase, GetAllOptions, QueryFilter } from '../../utils/db/types';
|
||||
import { getDefaultMetaFieldValueMap, sqliteTypeMap, SYSTEM } from '../helpers';
|
||||
import {
|
||||
ColumnDiff,
|
||||
FieldValueMap,
|
||||
GetQueryBuilderOptions,
|
||||
SingleValue,
|
||||
} from './types';
|
||||
|
||||
/**
|
||||
* # DatabaseCore
|
||||
* This is the ORM, the DatabaseCore interface (function signatures) should be
|
||||
* replicated by the frontend demuxes and all the backend muxes.
|
||||
*
|
||||
* ## Db Core Call Sequence
|
||||
*
|
||||
* 1. Init core: `const db = new DatabaseCore(dbPath)`.
|
||||
* 2. Connect db: `db.connect()`. This will allow for raw queries to be executed.
|
||||
* 3. Set schemas: `db.setSchemaMap(schemaMap)`. This will allow for ORM functions to be executed.
|
||||
* 4. Migrate: `await db.migrate()`. This will create absent tables and update the tables' shape.
|
||||
* 5. ORM function execution: `db.get(...)`, `db.insert(...)`, etc.
|
||||
* 6. Close connection: `await db.close()`.
|
||||
*
|
||||
* Note: Meta values: created, modified, createdBy, modifiedBy are set by DatabaseCore
|
||||
* only for schemas that are SingleValue. Else they have to be passed by the caller in
|
||||
* the `fieldValueMap`.
|
||||
*/
|
||||
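/**
 * A minimal usage sketch of the call sequence above (added for illustration,
 * not part of the original file; `schemaMap` is assumed to come from the
 * schemas folder, e.g. `getSchemas()`):
 *
 *   const db = new DatabaseCore('/path/to/books.db');
 *   await db.connect();
 *   db.setSchemaMap(schemaMap);
 *   await db.migrate();
 *   const settings = await db.get('SystemSettings');
 *   await db.close();
 */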
|
||||
export default class DatabaseCore extends DatabaseBase {
|
||||
knex?: Knex;
|
||||
typeMap = sqliteTypeMap;
|
||||
dbPath: string;
|
||||
schemaMap: SchemaMap = {};
|
||||
connectionParams: Knex.Config;
|
||||
|
||||
constructor(dbPath?: string) {
|
||||
super();
|
||||
this.dbPath = dbPath ?? ':memory:';
|
||||
this.connectionParams = {
|
||||
client: 'better-sqlite3',
|
||||
connection: {
|
||||
filename: this.dbPath,
|
||||
},
|
||||
useNullAsDefault: true,
|
||||
asyncStackTraces: process.env.NODE_ENV === 'development',
|
||||
};
|
||||
}
|
||||
|
||||
static async getCountryCode(dbPath: string): Promise<string> {
|
||||
let countryCode = 'in';
|
||||
const db = new DatabaseCore(dbPath);
|
||||
await db.connect();
|
||||
|
||||
let query: { value: string }[] = [];
|
||||
try {
|
||||
query = await db.knex!('SingleValue').where({
|
||||
fieldname: 'countryCode',
|
||||
parent: 'SystemSettings',
|
||||
});
|
||||
} catch {
|
||||
// Database not initialized and no countryCode passed
|
||||
}
|
||||
|
||||
if (query.length > 0) {
|
||||
countryCode = query[0].value as string;
|
||||
}
|
||||
|
||||
await db.close();
|
||||
return countryCode;
|
||||
}
|
||||
|
||||
setSchemaMap(schemaMap: SchemaMap) {
|
||||
this.schemaMap = schemaMap;
|
||||
}
|
||||
|
||||
async connect() {
|
||||
this.knex = knex(this.connectionParams);
|
||||
this.knex.on('query-error', (error) => {
|
||||
error.type = this.#getError(error);
|
||||
});
|
||||
await this.knex.raw('PRAGMA foreign_keys=ON');
|
||||
}
|
||||
|
||||
async close() {
|
||||
await this.knex!.destroy();
|
||||
}
|
||||
|
||||
async commit() {
|
||||
/**
|
||||
* this auto commits, commit is not required
|
||||
* will later wrap the outermost functions in
|
||||
* transactions.
|
||||
*/
|
||||
try {
|
||||
// await this.knex!.raw('commit');
|
||||
} catch (err) {
|
||||
const type = this.#getError(err as Error);
|
||||
if (type !== CannotCommitError) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async migrate() {
|
||||
for (const schemaName in this.schemaMap) {
|
||||
const schema = this.schemaMap[schemaName] as Schema;
|
||||
if (schema.isSingle) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (await this.#tableExists(schemaName)) {
|
||||
await this.#alterTable(schemaName);
|
||||
} else {
|
||||
await this.#createTable(schemaName);
|
||||
}
|
||||
}
|
||||
|
||||
await this.commit();
|
||||
await this.#initializeSingles();
|
||||
}
|
||||
|
||||
async exists(schemaName: string, name?: string): Promise<boolean> {
|
||||
const schema = this.schemaMap[schemaName] as Schema;
|
||||
if (schema.isSingle) {
|
||||
return this.#singleExists(schemaName);
|
||||
}
|
||||
|
||||
let row = [];
|
||||
try {
|
||||
const qb = this.knex!(schemaName);
|
||||
if (name !== undefined) {
|
||||
qb.where({ name });
|
||||
}
|
||||
row = await qb.limit(1);
|
||||
} catch (err) {
|
||||
if (this.#getError(err as Error) !== NotFoundError) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
return row.length > 0;
|
||||
}
|
||||
|
||||
async insert(
|
||||
schemaName: string,
|
||||
fieldValueMap: FieldValueMap
|
||||
): Promise<FieldValueMap> {
|
||||
// insert parent
|
||||
if (this.schemaMap[schemaName]!.isSingle) {
|
||||
await this.#updateSingleValues(schemaName, fieldValueMap);
|
||||
} else {
|
||||
await this.#insertOne(schemaName, fieldValueMap);
|
||||
}
|
||||
|
||||
// insert children
|
||||
await this.#insertOrUpdateChildren(schemaName, fieldValueMap, false);
|
||||
return fieldValueMap;
|
||||
}
|
||||
|
||||
async get(
|
||||
schemaName: string,
|
||||
name: string = '',
|
||||
fields?: string | string[]
|
||||
): Promise<FieldValueMap> {
|
||||
const schema = this.schemaMap[schemaName] as Schema;
|
||||
if (!schema.isSingle && !name) {
|
||||
throw new ValueError('name is mandatory');
|
||||
}
|
||||
|
||||
/**
|
||||
* If schema is single return all the values
|
||||
* of the single type schema, in this case field
|
||||
* is ignored.
|
||||
*/
|
||||
let fieldValueMap: FieldValueMap = {};
|
||||
if (schema.isSingle) {
|
||||
return await this.#getSingle(schemaName);
|
||||
}
|
||||
|
||||
if (typeof fields === 'string') {
|
||||
fields = [fields];
|
||||
}
|
||||
|
||||
if (fields === undefined) {
|
||||
fields = schema.fields.map((f) => f.fieldname);
|
||||
}
|
||||
|
||||
/**
|
||||
* Separate table fields and non table fields
|
||||
*/
|
||||
const allTableFields: TargetField[] = this.#getTableFields(schemaName);
|
||||
const allTableFieldNames: string[] = allTableFields.map((f) => f.fieldname);
|
||||
const tableFields: TargetField[] = allTableFields.filter((f) =>
|
||||
fields!.includes(f.fieldname)
|
||||
);
|
||||
const nonTableFieldNames: string[] = fields.filter(
|
||||
(f) => !allTableFieldNames.includes(f)
|
||||
);
|
||||
|
||||
/**
|
||||
* If schema is not single then return specific fields
|
||||
* if child fields are selected, all child fields are returned.
|
||||
*/
|
||||
if (nonTableFieldNames.length) {
|
||||
fieldValueMap =
|
||||
(await this.#getOne(schemaName, name, nonTableFieldNames)) ?? {};
|
||||
}
|
||||
|
||||
if (tableFields.length) {
|
||||
await this.#loadChildren(name, fieldValueMap, tableFields);
|
||||
}
|
||||
return fieldValueMap;
|
||||
}
|
||||
|
||||
async getAll(
|
||||
schemaName: string,
|
||||
options: GetAllOptions = {}
|
||||
): Promise<FieldValueMap[]> {
|
||||
const schema = this.schemaMap[schemaName] as Schema;
|
||||
if (schema === undefined) {
|
||||
throw new NotFoundError(`schema ${schemaName} not found`);
|
||||
}
|
||||
|
||||
const hasCreated = !!schema.fields.find((f) => f.fieldname === 'created');
|
||||
|
||||
const {
|
||||
fields = ['name'],
|
||||
filters,
|
||||
offset,
|
||||
limit,
|
||||
groupBy,
|
||||
orderBy = hasCreated ? 'created' : undefined,
|
||||
order = 'desc',
|
||||
} = options;
|
||||
|
||||
return (await this.#getQueryBuilder(
|
||||
schemaName,
|
||||
typeof fields === 'string' ? [fields] : fields,
|
||||
filters ?? {},
|
||||
{
|
||||
offset,
|
||||
limit,
|
||||
groupBy,
|
||||
orderBy,
|
||||
order,
|
||||
}
|
||||
)) as FieldValueMap[];
|
||||
}
|
||||
|
||||
async getSingleValues(
|
||||
...fieldnames: ({ fieldname: string; parent?: string } | string)[]
|
||||
): Promise<SingleValue<RawValue>> {
|
||||
const fieldnameList = fieldnames.map((fieldname) => {
|
||||
if (typeof fieldname === 'string') {
|
||||
return { fieldname };
|
||||
}
|
||||
return fieldname;
|
||||
});
|
||||
|
||||
let builder = this.knex!('SingleValue');
|
||||
builder = builder.where(fieldnameList[0]);
|
||||
|
||||
fieldnameList.slice(1).forEach(({ fieldname, parent }) => {
|
||||
if (typeof parent === 'undefined') {
|
||||
builder = builder.orWhere({ fieldname });
|
||||
} else {
|
||||
builder = builder.orWhere({ fieldname, parent });
|
||||
}
|
||||
});
|
||||
|
||||
let values: { fieldname: string; parent: string; value: RawValue }[] = [];
|
||||
try {
|
||||
values = await builder.select('fieldname', 'value', 'parent');
|
||||
} catch (err) {
|
||||
if (this.#getError(err as Error) === NotFoundError) {
|
||||
return [];
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
|
||||
return values;
|
||||
}
|
||||
|
||||
async rename(schemaName: string, oldName: string, newName: string) {
|
||||
/**
|
||||
* Rename is expensive mostly won't allow it.
|
||||
* TODO: rename all links
|
||||
* TODO: rename in childtables
|
||||
*/
|
||||
await this.knex!(schemaName)
|
||||
.update({ name: newName })
|
||||
.where('name', oldName);
|
||||
await this.commit();
|
||||
}
|
||||
|
||||
async update(schemaName: string, fieldValueMap: FieldValueMap) {
|
||||
// update parent
|
||||
if (this.schemaMap[schemaName]!.isSingle) {
|
||||
await this.#updateSingleValues(schemaName, fieldValueMap);
|
||||
} else {
|
||||
await this.#updateOne(schemaName, fieldValueMap);
|
||||
}
|
||||
|
||||
// insert or update children
|
||||
await this.#insertOrUpdateChildren(schemaName, fieldValueMap, true);
|
||||
}
|
||||
|
||||
async delete(schemaName: string, name: string) {
|
||||
const schema = this.schemaMap[schemaName] as Schema;
|
||||
if (schema.isSingle) {
|
||||
await this.#deleteSingle(schemaName, name);
|
||||
return;
|
||||
}
|
||||
|
||||
await this.#deleteOne(schemaName, name);
|
||||
|
||||
// delete children
|
||||
const tableFields = this.#getTableFields(schemaName);
|
||||
|
||||
for (const field of tableFields) {
|
||||
await this.#deleteChildren(field.target, name);
|
||||
}
|
||||
}
|
||||
|
||||
async #tableExists(schemaName: string) {
|
||||
return await this.knex!.schema.hasTable(schemaName);
|
||||
}
|
||||
|
||||
async #singleExists(singleSchemaName: string) {
|
||||
const res = await this.knex!('SingleValue')
|
||||
.count('parent as count')
|
||||
.where('parent', singleSchemaName)
|
||||
.first();
|
||||
return (res?.count ?? 0) > 0;
|
||||
}
|
||||
|
||||
async #removeColumns(schemaName: string, targetColumns: string[]) {
|
||||
const fields = this.schemaMap[schemaName]?.fields
|
||||
.filter((f) => f.fieldtype !== FieldTypeEnum.Table)
|
||||
.map((f) => f.fieldname);
|
||||
const tableRows = await this.getAll(schemaName, { fields });
|
||||
this.prestigeTheTable(schemaName, tableRows);
|
||||
}
|
||||
|
||||
#getError(err: Error) {
|
||||
let errorType = DatabaseError;
|
||||
if (err.message.includes('SQLITE_ERROR: no such table')) {
|
||||
errorType = NotFoundError;
|
||||
}
|
||||
if (err.message.includes('FOREIGN KEY')) {
|
||||
errorType = LinkValidationError;
|
||||
}
|
||||
if (err.message.includes('SQLITE_ERROR: cannot commit')) {
|
||||
errorType = CannotCommitError;
|
||||
}
|
||||
if (err.message.includes('SQLITE_CONSTRAINT: UNIQUE constraint failed:')) {
|
||||
errorType = DuplicateEntryError;
|
||||
}
|
||||
return errorType;
|
||||
}
|
||||
|
||||
async prestigeTheTable(schemaName: string, tableRows: FieldValueMap[]) {
|
||||
const max = 200;
|
||||
|
||||
// Alter table hack for sqlite in case of schema change.
|
||||
const tempName = `__${schemaName}`;
|
||||
await this.knex!.schema.dropTableIfExists(tempName);
|
||||
|
||||
await this.knex!.raw('PRAGMA foreign_keys=OFF');
|
||||
await this.#createTable(schemaName, tempName);
|
||||
|
||||
if (tableRows.length > 200) {
|
||||
const fi = Math.floor(tableRows.length / max);
|
||||
for (let i = 0; i <= fi; i++) {
|
||||
const rowSlice = tableRows.slice(i * max, (i + 1) * max);
|
||||
if (rowSlice.length === 0) {
|
||||
break;
|
||||
}
|
||||
await this.knex!.batchInsert(tempName, rowSlice);
|
||||
}
|
||||
} else {
|
||||
await this.knex!.batchInsert(tempName, tableRows);
|
||||
}
|
||||
|
||||
await this.knex!.schema.dropTable(schemaName);
|
||||
await this.knex!.schema.renameTable(tempName, schemaName);
|
||||
await this.knex!.raw('PRAGMA foreign_keys=ON');
|
||||
}
|
||||
|
||||
async #getTableColumns(schemaName: string): Promise<string[]> {
|
||||
const info: FieldValueMap[] = await this.knex!.raw(
|
||||
`PRAGMA table_info(${schemaName})`
|
||||
);
|
||||
return info.map((d) => d.name as string);
|
||||
}
|
||||
|
||||
async truncate(tableNames?: string[]) {
|
||||
if (tableNames === undefined) {
|
||||
const q = (await this.knex!.raw(`
|
||||
select name from sqlite_schema
|
||||
where type='table'
|
||||
and name not like 'sqlite_%'`)) as { name: string }[];
|
||||
tableNames = q.map((i) => i.name);
|
||||
}
|
||||
|
||||
for (const name of tableNames) {
|
||||
await this.knex!(name).del();
|
||||
}
|
||||
}
|
||||
|
||||
async #getForeignKeys(schemaName: string): Promise<string[]> {
|
||||
const foreignKeyList: FieldValueMap[] = await this.knex!.raw(
|
||||
`PRAGMA foreign_key_list(${schemaName})`
|
||||
);
|
||||
return foreignKeyList.map((d) => d.from as string);
|
||||
}
|
||||
|
||||
#getQueryBuilder(
|
||||
schemaName: string,
|
||||
fields: string[],
|
||||
filters: QueryFilter,
|
||||
options: GetQueryBuilderOptions
|
||||
): Knex.QueryBuilder {
|
||||
const builder = this.knex!.select(fields).from(schemaName);
|
||||
|
||||
this.#applyFiltersToBuilder(builder, filters);
|
||||
|
||||
if (options.orderBy) {
|
||||
builder.orderBy(options.orderBy, options.order);
|
||||
}
|
||||
|
||||
if (options.groupBy) {
|
||||
builder.groupBy(options.groupBy);
|
||||
}
|
||||
|
||||
if (options.offset) {
|
||||
builder.offset(options.offset);
|
||||
}
|
||||
|
||||
if (options.limit) {
|
||||
builder.limit(options.limit);
|
||||
}
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
#applyFiltersToBuilder(builder: Knex.QueryBuilder, filters: QueryFilter) {
|
||||
// {"status": "Open"} => `status = "Open"`
|
||||
|
||||
// {"status": "Open", "name": ["like", "apple%"]}
|
||||
// => `status="Open" and name like "apple%"
|
||||
|
||||
// {"date": [">=", "2017-09-09", "<=", "2017-11-01"]}
|
||||
// => `date >= 2017-09-09 and date <= 2017-11-01`
|
||||
|
||||
const filtersArray = [];
|
||||
|
||||
for (const field in filters) {
|
||||
const value = filters[field];
|
||||
let operator: string | number = '=';
|
||||
let comparisonValue = value as string | number | (string | number)[];
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
operator = (value[0] as string).toLowerCase();
|
||||
comparisonValue = value[1] as string | number | (string | number)[];
|
||||
|
||||
if (operator === 'includes') {
|
||||
operator = 'like';
|
||||
}
|
||||
|
||||
if (
|
||||
operator === 'like' &&
|
||||
!(comparisonValue as (string | number)[]).includes('%')
|
||||
) {
|
||||
comparisonValue = `%${comparisonValue}%`;
|
||||
}
|
||||
}
|
||||
|
||||
filtersArray.push([field, operator, comparisonValue]);
|
||||
|
||||
if (Array.isArray(value) && value.length > 2) {
|
||||
// multiple conditions
|
||||
const operator = value[2];
|
||||
const comparisonValue = value[3];
|
||||
filtersArray.push([field, operator, comparisonValue]);
|
||||
}
|
||||
}
|
||||
|
||||
filtersArray.map((filter) => {
|
||||
const field = filter[0] as string;
|
||||
const operator = filter[1];
|
||||
const comparisonValue = filter[2];
|
||||
|
||||
if (operator === '=') {
|
||||
builder.where(field, comparisonValue);
|
||||
} else {
|
||||
builder.where(field, operator as string, comparisonValue as string);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async #getColumnDiff(schemaName: string): Promise<ColumnDiff> {
|
||||
const tableColumns = await this.#getTableColumns(schemaName);
|
||||
const validFields = this.schemaMap[schemaName]!.fields;
|
||||
const diff: ColumnDiff = { added: [], removed: [] };
|
||||
|
||||
for (const field of validFields) {
|
||||
const hasDbType = this.typeMap.hasOwnProperty(field.fieldtype);
|
||||
if (!tableColumns.includes(field.fieldname) && hasDbType) {
|
||||
diff.added.push(field);
|
||||
}
|
||||
}
|
||||
|
||||
const validFieldNames = validFields.map((field) => field.fieldname);
|
||||
for (const column of tableColumns) {
|
||||
if (!validFieldNames.includes(column)) {
|
||||
diff.removed.push(column);
|
||||
}
|
||||
}
|
||||
|
||||
return diff;
|
||||
}
|
||||
|
||||
async #getNewForeignKeys(schemaName: string): Promise<Field[]> {
|
||||
const foreignKeys = await this.#getForeignKeys(schemaName);
|
||||
const newForeignKeys: Field[] = [];
|
||||
const schema = this.schemaMap[schemaName] as Schema;
|
||||
for (const field of schema.fields) {
|
||||
if (
|
||||
field.fieldtype === 'Link' &&
|
||||
!foreignKeys.includes(field.fieldname)
|
||||
) {
|
||||
newForeignKeys.push(field);
|
||||
}
|
||||
}
|
||||
return newForeignKeys;
|
||||
}
|
||||
|
||||
#buildColumnForTable(table: Knex.AlterTableBuilder, field: Field) {
|
||||
if (field.fieldtype === FieldTypeEnum.Table) {
|
||||
// In case columnType is "Table"
|
||||
// childTable links are handled using the childTable's "parent" field
|
||||
return;
|
||||
}
|
||||
|
||||
const columnType = this.typeMap[field.fieldtype];
|
||||
if (!columnType) {
|
||||
return;
|
||||
}
|
||||
|
||||
const column = table[columnType](
|
||||
field.fieldname
|
||||
) as Knex.SqlLiteColumnBuilder;
|
||||
|
||||
// primary key
|
||||
if (field.fieldname === 'name') {
|
||||
column.primary();
|
||||
}
|
||||
|
||||
// default value
|
||||
if (field.default !== undefined) {
|
||||
column.defaultTo(field.default);
|
||||
}
|
||||
|
||||
// required
|
||||
if (field.required) {
|
||||
column.notNullable();
|
||||
}
|
||||
|
||||
// link
|
||||
if (
|
||||
field.fieldtype === FieldTypeEnum.Link &&
|
||||
(field as TargetField).target
|
||||
) {
|
||||
const targetSchemaName = (field as TargetField).target as string;
|
||||
const schema = this.schemaMap[targetSchemaName] as Schema;
|
||||
table
|
||||
.foreign(field.fieldname)
|
||||
.references('name')
|
||||
.inTable(schema.name)
|
||||
.onUpdate('CASCADE')
|
||||
.onDelete('RESTRICT');
|
||||
}
|
||||
}
|
||||
|
||||
async #alterTable(schemaName: string) {
|
||||
// get columns
|
||||
const diff: ColumnDiff = await this.#getColumnDiff(schemaName);
|
||||
const newForeignKeys: Field[] = await this.#getNewForeignKeys(schemaName);
|
||||
|
||||
return this.knex!.schema.table(schemaName, (table) => {
|
||||
if (diff.added.length) {
|
||||
for (const field of diff.added) {
|
||||
this.#buildColumnForTable(table, field);
|
||||
}
|
||||
}
|
||||
|
||||
if (diff.removed.length) {
|
||||
this.#removeColumns(schemaName, diff.removed);
|
||||
}
|
||||
}).then(() => {
|
||||
if (newForeignKeys.length) {
|
||||
return this.#addForeignKeys(schemaName, newForeignKeys);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async #createTable(schemaName: string, tableName?: string) {
|
||||
tableName ??= schemaName;
|
||||
const fields = this.schemaMap[schemaName]!.fields;
|
||||
return await this.#runCreateTableQuery(tableName, fields);
|
||||
}
|
||||
|
||||
#runCreateTableQuery(schemaName: string, fields: Field[]) {
|
||||
return this.knex!.schema.createTable(schemaName, (table) => {
|
||||
for (const field of fields) {
|
||||
this.#buildColumnForTable(table, field);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async #getNonExtantSingleValues(singleSchemaName: string) {
|
||||
const existingFields = (
|
||||
await this.knex!('SingleValue')
|
||||
.where({ parent: singleSchemaName })
|
||||
.select('fieldname')
|
||||
).map(({ fieldname }) => fieldname);
|
||||
|
||||
return this.schemaMap[singleSchemaName]!.fields.map(
|
||||
({ fieldname, default: value }) => ({
|
||||
fieldname,
|
||||
value: value as RawValue | undefined,
|
||||
})
|
||||
).filter(
|
||||
({ fieldname, value }) =>
|
||||
!existingFields.includes(fieldname) && value !== undefined
|
||||
);
|
||||
}
|
||||
|
||||
async #deleteOne(schemaName: string, name: string) {
|
||||
return await this.knex!(schemaName).where('name', name).delete();
|
||||
}
|
||||
|
||||
async #deleteSingle(schemaName: string, fieldname: string) {
|
||||
return await this.knex!('SingleValue')
|
||||
.where({ parent: schemaName, fieldname })
|
||||
.delete();
|
||||
}
|
||||
|
||||
#deleteChildren(schemaName: string, parentName: string) {
|
||||
return this.knex!(schemaName).where('parent', parentName).delete();
|
||||
}
|
||||
|
||||
#runDeleteOtherChildren(
|
||||
field: TargetField,
|
||||
parentName: string,
|
||||
added: string[]
|
||||
) {
|
||||
// delete other children
|
||||
return this.knex!(field.target)
|
||||
.where('parent', parentName)
|
||||
.andWhere('name', 'not in', added)
|
||||
.delete();
|
||||
}
|
||||
|
||||
#prepareChild(
|
||||
parentSchemaName: string,
|
||||
parentName: string,
|
||||
child: FieldValueMap,
|
||||
field: Field,
|
||||
idx: number
|
||||
) {
|
||||
if (!child.name) {
|
||||
child.name ??= getRandomString();
|
||||
}
|
||||
child.parent = parentName;
|
||||
child.parentSchemaName = parentSchemaName;
|
||||
child.parentFieldname = field.fieldname;
|
||||
child.idx ??= idx;
|
||||
}
|
||||
|
||||
async #addForeignKeys(schemaName: string, newForeignKeys: Field[]) {
|
||||
await this.knex!.raw('PRAGMA foreign_keys=OFF');
|
||||
await this.knex!.raw('BEGIN TRANSACTION');
|
||||
|
||||
const tempName = 'TEMP' + schemaName;
|
||||
|
||||
// create temp table
|
||||
await this.#createTable(schemaName, tempName);
|
||||
|
||||
try {
|
||||
// copy from old to new table
|
||||
await this.knex!(tempName).insert(this.knex!.select().from(schemaName));
|
||||
} catch (err) {
|
||||
await this.knex!.raw('ROLLBACK');
|
||||
await this.knex!.raw('PRAGMA foreign_keys=ON');
|
||||
|
||||
const rows = await this.knex!.select().from(schemaName);
|
||||
await this.prestigeTheTable(schemaName, rows);
|
||||
return;
|
||||
}
|
||||
|
||||
// drop old table
|
||||
await this.knex!.schema.dropTable(schemaName);
|
||||
|
||||
// rename new table
|
||||
await this.knex!.schema.renameTable(tempName, schemaName);
|
||||
|
||||
await this.knex!.raw('COMMIT');
|
||||
await this.knex!.raw('PRAGMA foreign_keys=ON');
|
||||
}
|
||||
|
||||
async #loadChildren(
|
||||
parentName: string,
|
||||
fieldValueMap: FieldValueMap,
|
||||
tableFields: TargetField[]
|
||||
) {
|
||||
for (const field of tableFields) {
|
||||
fieldValueMap[field.fieldname] = await this.getAll(field.target, {
|
||||
fields: ['*'],
|
||||
filters: { parent: parentName },
|
||||
orderBy: 'idx',
|
||||
order: 'asc',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async #getOne(schemaName: string, name: string, fields: string[]) {
|
||||
const fieldValueMap: FieldValueMap = await this.knex!.select(fields)
|
||||
.from(schemaName)
|
||||
.where('name', name)
|
||||
.first();
|
||||
return fieldValueMap;
|
||||
}
|
||||
|
||||
async #getSingle(schemaName: string): Promise<FieldValueMap> {
|
||||
const values = await this.getAll('SingleValue', {
|
||||
fields: ['fieldname', 'value'],
|
||||
filters: { parent: schemaName },
|
||||
orderBy: 'fieldname',
|
||||
order: 'asc',
|
||||
});
|
||||
|
||||
return getValueMapFromList(values, 'fieldname', 'value') as FieldValueMap;
|
||||
}
|
||||
|
||||
#insertOne(schemaName: string, fieldValueMap: FieldValueMap) {
|
||||
if (!fieldValueMap.name) {
|
||||
fieldValueMap.name = getRandomString();
|
||||
}
|
||||
|
||||
// Non Table Fields
|
||||
const fields = this.schemaMap[schemaName]!.fields.filter(
|
||||
(f) => f.fieldtype !== FieldTypeEnum.Table
|
||||
);
|
||||
|
||||
const validMap: FieldValueMap = {};
|
||||
for (const { fieldname } of fields) {
|
||||
validMap[fieldname] = fieldValueMap[fieldname];
|
||||
}
|
||||
|
||||
return this.knex!(schemaName).insert(validMap);
|
||||
}
|
||||
|
||||
async #updateSingleValues(
|
||||
singleSchemaName: string,
|
||||
fieldValueMap: FieldValueMap
|
||||
) {
|
||||
const fields = this.schemaMap[singleSchemaName]!.fields;
|
||||
|
||||
for (const field of fields) {
|
||||
const value = fieldValueMap[field.fieldname] as RawValue | undefined;
|
||||
if (value === undefined) {
|
||||
continue;
|
||||
}
|
||||
|
||||
await this.#updateSingleValue(singleSchemaName, field.fieldname, value);
|
||||
}
|
||||
}
|
||||
|
||||
async #updateSingleValue(
|
||||
singleSchemaName: string,
|
||||
fieldname: string,
|
||||
value: RawValue
|
||||
) {
|
||||
const names: { name: string }[] = await this.knex!('SingleValue')
|
||||
.select('name')
|
||||
.where({
|
||||
parent: singleSchemaName,
|
||||
fieldname,
|
||||
});
|
||||
const name = names?.[0]?.name as string | undefined;
|
||||
|
||||
if (name === undefined) {
|
||||
this.#insertSingleValue(singleSchemaName, fieldname, value);
|
||||
} else {
|
||||
return await this.knex!('SingleValue').where({ name }).update({
|
||||
value,
|
||||
modifiedBy: SYSTEM,
|
||||
modified: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async #insertSingleValue(
|
||||
singleSchemaName: string,
|
||||
fieldname: string,
|
||||
value: RawValue
|
||||
) {
|
||||
const updateMap = getDefaultMetaFieldValueMap();
|
||||
const fieldValueMap: FieldValueMap = Object.assign({}, updateMap, {
|
||||
parent: singleSchemaName,
|
||||
fieldname,
|
||||
value,
|
||||
name: getRandomString(),
|
||||
});
|
||||
return await this.knex!('SingleValue').insert(fieldValueMap);
|
||||
}
|
||||
|
||||
async #initializeSingles() {
|
||||
const singleSchemaNames = Object.keys(this.schemaMap).filter(
|
||||
(n) => this.schemaMap[n]!.isSingle
|
||||
);
|
||||
|
||||
for (const schemaName of singleSchemaNames) {
|
||||
if (await this.#singleExists(schemaName)) {
|
||||
await this.#updateNonExtantSingleValues(schemaName);
|
||||
continue;
|
||||
}
|
||||
|
||||
const fields = this.schemaMap[schemaName]!.fields;
|
||||
if (fields.every((f) => f.default === undefined)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const defaultValues: FieldValueMap = fields.reduce((acc, f) => {
|
||||
if (f.default !== undefined) {
|
||||
acc[f.fieldname] = f.default;
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, {} as FieldValueMap);
|
||||
|
||||
await this.#updateSingleValues(schemaName, defaultValues);
|
||||
}
|
||||
}
|
||||
|
||||
async #updateNonExtantSingleValues(schemaName: string) {
|
||||
const singleValues = await this.#getNonExtantSingleValues(schemaName);
|
||||
for (const sv of singleValues) {
|
||||
await this.#updateSingleValue(schemaName, sv.fieldname, sv.value!);
|
||||
}
|
||||
}
|
||||
|
||||
async #updateOne(schemaName: string, fieldValueMap: FieldValueMap) {
|
||||
const updateMap = { ...fieldValueMap };
|
||||
delete updateMap.name;
|
||||
const schema = this.schemaMap[schemaName] as Schema;
|
||||
for (const { fieldname, fieldtype } of schema.fields) {
|
||||
if (fieldtype !== FieldTypeEnum.Table) {
|
||||
continue;
|
||||
}
|
||||
|
||||
delete updateMap[fieldname];
|
||||
}
|
||||
|
||||
if (Object.keys(updateMap).length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
return await this.knex!(schemaName)
|
||||
.where('name', fieldValueMap.name as string)
|
||||
.update(updateMap);
|
||||
}
|
||||
|
||||
async #insertOrUpdateChildren(
|
||||
schemaName: string,
|
||||
fieldValueMap: FieldValueMap,
|
||||
isUpdate: boolean
|
||||
) {
|
||||
const parentName = fieldValueMap.name as string;
|
||||
const tableFields = this.#getTableFields(schemaName);
|
||||
|
||||
for (const field of tableFields) {
|
||||
const added: string[] = [];
|
||||
|
||||
const tableFieldValue = fieldValueMap[field.fieldname] as
|
||||
| FieldValueMap[]
|
||||
| undefined
|
||||
| null;
|
||||
if (getIsNullOrUndef(tableFieldValue)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const child of tableFieldValue!) {
|
||||
this.#prepareChild(schemaName, parentName, child, field, added.length);
|
||||
|
||||
if (
|
||||
isUpdate &&
|
||||
(await this.exists(field.target, child.name as string))
|
||||
) {
|
||||
await this.#updateOne(field.target, child);
|
||||
} else {
|
||||
await this.#insertOne(field.target, child);
|
||||
}
|
||||
|
||||
added.push(child.name as string);
|
||||
}
|
||||
|
||||
if (isUpdate) {
|
||||
await this.#runDeleteOtherChildren(field, parentName, added);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#getTableFields(schemaName: string): TargetField[] {
|
||||
return this.schemaMap[schemaName]!.fields.filter(
|
||||
(f) => f.fieldtype === FieldTypeEnum.Table
|
||||
) as TargetField[];
|
||||
}
|
||||
}
|
164
backend/database/manager.ts
Normal file
@ -0,0 +1,164 @@
|
||||
import { constants } from 'fs';
|
||||
import fs from 'fs/promises';
|
||||
import path from 'path';
|
||||
import { DatabaseDemuxBase, DatabaseMethod } from 'utils/db/types';
|
||||
import { getSchemas } from '../../schemas';
|
||||
import { databaseMethodSet } from '../helpers';
|
||||
import patches from '../patches';
|
||||
import { BespokeQueries } from './bespoke';
|
||||
import DatabaseCore from './core';
|
||||
import { runPatches } from './runPatch';
|
||||
import { BespokeFunction, Patch } from './types';
|
||||
|
||||
export class DatabaseManager extends DatabaseDemuxBase {
|
||||
db?: DatabaseCore;
|
||||
|
||||
get #isInitialized(): boolean {
|
||||
return this.db !== undefined && this.db.knex !== undefined;
|
||||
}
|
||||
|
||||
getSchemaMap() {
|
||||
return this.db?.schemaMap ?? {};
|
||||
}
|
||||
|
||||
async createNewDatabase(dbPath: string, countryCode: string) {
|
||||
await this.#unlinkIfExists(dbPath);
|
||||
return await this.connectToDatabase(dbPath, countryCode);
|
||||
}
|
||||
|
||||
async connectToDatabase(dbPath: string, countryCode?: string) {
|
||||
countryCode = await this._connect(dbPath, countryCode);
|
||||
await this.#migrate();
|
||||
return countryCode;
|
||||
}
|
||||
|
||||
async _connect(dbPath: string, countryCode?: string) {
|
||||
countryCode ??= await DatabaseCore.getCountryCode(dbPath);
|
||||
this.db = new DatabaseCore(dbPath);
|
||||
await this.db.connect();
|
||||
const schemaMap = getSchemas(countryCode);
|
||||
this.db.setSchemaMap(schemaMap);
|
||||
return countryCode;
|
||||
}
|
||||
|
||||
async #migrate(): Promise<void> {
|
||||
if (!this.#isInitialized) {
|
||||
return;
|
||||
}
|
||||
|
||||
const isFirstRun = await this.#getIsFirstRun();
|
||||
if (isFirstRun) {
|
||||
await this.db!.migrate();
|
||||
}
|
||||
|
||||
/**
|
||||
* This needs to be supplemented with transactions
|
||||
* TODO: Add transactions in core.ts
|
||||
*/
|
||||
const dbPath = this.db!.dbPath;
|
||||
const copyPath = await this.#makeTempCopy();
|
||||
|
||||
try {
|
||||
await this.#runPatchesAndMigrate();
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
await this.db!.close();
|
||||
await fs.copyFile(copyPath, dbPath);
|
||||
throw err;
|
||||
} finally {
|
||||
await fs.unlink(copyPath);
|
||||
}
|
||||
}
|
||||
|
||||
async #runPatchesAndMigrate() {
|
||||
const patchesToExecute = await this.#getPatchesToExecute();
|
||||
|
||||
patchesToExecute.sort((a, b) => (b.priority ?? 0) - (a.priority ?? 0));
|
||||
const preMigrationPatches = patchesToExecute.filter(
|
||||
(p) => p.patch.beforeMigrate
|
||||
);
|
||||
const postMigrationPatches = patchesToExecute.filter(
|
||||
(p) => !p.patch.beforeMigrate
|
||||
);
|
||||
|
||||
await runPatches(preMigrationPatches, this);
|
||||
await this.db!.migrate();
|
||||
await runPatches(postMigrationPatches, this);
|
||||
}
|
||||
|
||||
async #getPatchesToExecute(): Promise<Patch[]> {
|
||||
if (this.db === undefined) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const query: { name: string }[] = await this.db.knex!('PatchRun').select(
|
||||
'name'
|
||||
);
|
||||
const executedPatches = query.map((q) => q.name);
|
||||
return patches.filter((p) => !executedPatches.includes(p.name));
|
||||
}
|
||||
|
||||
async call(method: DatabaseMethod, ...args: unknown[]) {
|
||||
if (!this.#isInitialized) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!databaseMethodSet.has(method)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
const response = await this.db[method](...args);
|
||||
if (method === 'close') {
|
||||
delete this.db;
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
async callBespoke(method: string, ...args: unknown[]): Promise<unknown> {
|
||||
if (!this.#isInitialized) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!BespokeQueries.hasOwnProperty(method)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
const queryFunction: BespokeFunction = BespokeQueries[method];
|
||||
return await queryFunction(this.db!, ...args);
|
||||
}
|
||||
|
||||
async #unlinkIfExists(dbPath: string) {
|
||||
const exists = await fs
|
||||
.access(dbPath, constants.W_OK)
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
|
||||
if (exists) {
|
||||
await fs.unlink(dbPath);
|
||||
}
|
||||
}
|
||||
|
||||
async #getIsFirstRun(): Promise<boolean> {
|
||||
if (!this.#isInitialized) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const tableList: unknown[] = await this.db!.knex!.raw(
|
||||
"SELECT name FROM sqlite_master WHERE type='table'"
|
||||
);
|
||||
return tableList.length === 0;
|
||||
}
|
||||
|
||||
async #makeTempCopy() {
|
||||
const src = this.db!.dbPath;
|
||||
const dir = path.parse(src).dir;
|
||||
const dest = path.join(dir, '__premigratory_temp.db');
|
||||
await fs.copyFile(src, dest);
|
||||
return dest;
|
||||
}
|
||||
}
|
||||
|
||||
export default new DatabaseManager();
|
31
backend/database/runPatch.ts
Normal file
@ -0,0 +1,31 @@
|
||||
import { getDefaultMetaFieldValueMap } from '../helpers';
|
||||
import { DatabaseManager } from './manager';
|
||||
import { FieldValueMap, Patch } from './types';
|
||||
|
||||
export async function runPatches(patches: Patch[], dm: DatabaseManager) {
|
||||
const list: { name: string; success: boolean }[] = [];
|
||||
for (const patch of patches) {
|
||||
const success = await runPatch(patch, dm);
|
||||
list.push({ name: patch.name, success });
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
async function runPatch(patch: Patch, dm: DatabaseManager): Promise<boolean> {
|
||||
try {
|
||||
await patch.patch.execute(dm);
|
||||
} catch (err) {
|
||||
console.error('PATCH FAILED: ', patch.name);
|
||||
console.error(err);
|
||||
return false;
|
||||
}
|
||||
|
||||
await makeEntry(patch.name, dm);
|
||||
return true;
|
||||
}
|
||||
|
||||
async function makeEntry(patchName: string, dm: DatabaseManager) {
|
||||
const defaultFieldValueMap = getDefaultMetaFieldValueMap() as FieldValueMap;
|
||||
defaultFieldValueMap.name = patchName;
|
||||
await dm.db!.insert('PatchRun', defaultFieldValueMap);
|
||||
}
|
212
backend/database/tests/helpers.ts
Normal file
@ -0,0 +1,212 @@
|
||||
import assert from 'assert';
|
||||
import { cloneDeep } from 'lodash';
|
||||
import { SchemaMap, SchemaStub, SchemaStubMap } from 'schemas/types';
|
||||
import {
|
||||
addMetaFields,
|
||||
cleanSchemas,
|
||||
getAbstractCombinedSchemas,
|
||||
} from '../../../schemas';
|
||||
import SingleValue from '../../../schemas/core/SingleValue.json';
|
||||
|
||||
const Customer = {
|
||||
name: 'Customer',
|
||||
label: 'Customer',
|
||||
fields: [
|
||||
{
|
||||
fieldname: 'name',
|
||||
label: 'Name',
|
||||
fieldtype: 'Data',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
fieldname: 'email',
|
||||
label: 'Email',
|
||||
fieldtype: 'Data',
|
||||
placeholder: 'john@thoe.com',
|
||||
},
|
||||
{
|
||||
fieldname: 'phone',
|
||||
label: 'Phone',
|
||||
fieldtype: 'Data',
|
||||
placeholder: '9999999999',
|
||||
},
|
||||
],
|
||||
quickEditFields: ['email'],
|
||||
keywordFields: ['name'],
|
||||
};
|
||||
|
||||
const SalesInvoiceItem = {
|
||||
name: 'SalesInvoiceItem',
|
||||
label: 'Sales Invoice Item',
|
||||
isChild: true,
|
||||
fields: [
|
||||
{
|
||||
fieldname: 'item',
|
||||
label: 'Item',
|
||||
fieldtype: 'Data',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
fieldname: 'quantity',
|
||||
label: 'Quantity',
|
||||
fieldtype: 'Float',
|
||||
required: true,
|
||||
default: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'rate',
|
||||
label: 'Rate',
|
||||
fieldtype: 'Float',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
fieldname: 'amount',
|
||||
label: 'Amount',
|
||||
fieldtype: 'Float',
|
||||
readOnly: true,
|
||||
},
|
||||
],
|
||||
tableFields: ['item', 'quantity', 'rate', 'amount'],
|
||||
};
|
||||
|
||||
const SalesInvoice = {
|
||||
name: 'SalesInvoice',
|
||||
label: 'Sales Invoice',
|
||||
isSingle: false,
|
||||
isChild: false,
|
||||
isSubmittable: true,
|
||||
keywordFields: ['name', 'customer'],
|
||||
fields: [
|
||||
{
|
||||
label: 'Invoice No',
|
||||
fieldname: 'name',
|
||||
fieldtype: 'Data',
|
||||
required: true,
|
||||
readOnly: true,
|
||||
},
|
||||
{
|
||||
fieldname: 'date',
|
||||
label: 'Date',
|
||||
fieldtype: 'Date',
|
||||
},
|
||||
{
|
||||
fieldname: 'customer',
|
||||
label: 'Customer',
|
||||
fieldtype: 'Link',
|
||||
target: 'Customer',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
fieldname: 'account',
|
||||
label: 'Account',
|
||||
fieldtype: 'Data',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
fieldname: 'items',
|
||||
label: 'Items',
|
||||
fieldtype: 'Table',
|
||||
target: 'SalesInvoiceItem',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
fieldname: 'grandTotal',
|
||||
label: 'Grand Total',
|
||||
fieldtype: 'Currency',
|
||||
readOnly: true,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const SystemSettings = {
|
||||
name: 'SystemSettings',
|
||||
label: 'System Settings',
|
||||
isSingle: true,
|
||||
isChild: false,
|
||||
fields: [
|
||||
{
|
||||
fieldname: 'dateFormat',
|
||||
label: 'Date Format',
|
||||
fieldtype: 'Select',
|
||||
options: [
|
||||
{
|
||||
label: '23/03/2022',
|
||||
value: 'dd/MM/yyyy',
|
||||
},
|
||||
{
|
||||
label: '03/23/2022',
|
||||
value: 'MM/dd/yyyy',
|
||||
},
|
||||
],
|
||||
default: 'dd/MM/yyyy',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
fieldname: 'locale',
|
||||
label: 'Locale',
|
||||
fieldtype: 'Data',
|
||||
default: 'en-IN',
|
||||
},
|
||||
],
|
||||
quickEditFields: ['locale', 'dateFormat'],
|
||||
keywordFields: [],
|
||||
};
|
||||
|
||||
export function getBuiltTestSchemaMap(): SchemaMap {
|
||||
const testSchemaMap: SchemaStubMap = {
|
||||
SingleValue: SingleValue as SchemaStub,
|
||||
Customer: Customer as SchemaStub,
|
||||
SalesInvoice: SalesInvoice as SchemaStub,
|
||||
SalesInvoiceItem: SalesInvoiceItem as SchemaStub,
|
||||
SystemSettings: SystemSettings as SchemaStub,
|
||||
};
|
||||
|
||||
const schemaMapClone = cloneDeep(testSchemaMap);
|
||||
const abstractCombined = getAbstractCombinedSchemas(schemaMapClone);
|
||||
const cleanedSchemas = cleanSchemas(abstractCombined);
|
||||
return addMetaFields(cleanedSchemas);
|
||||
}
|
||||
|
||||
export function getBaseMeta() {
|
||||
return {
|
||||
createdBy: 'Administrator',
|
||||
modifiedBy: 'Administrator',
|
||||
created: new Date().toISOString(),
|
||||
modified: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
export async function assertThrows(
|
||||
func: () => Promise<unknown>,
|
||||
message?: string
|
||||
) {
|
||||
let threw = true;
|
||||
try {
|
||||
await func();
|
||||
threw = false;
|
||||
} catch {
|
||||
} finally {
|
||||
if (!threw) {
|
||||
throw new assert.AssertionError({
|
||||
message: `Missing expected exception: ${message}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function assertDoesNotThrow(
|
||||
func: () => Promise<unknown>,
|
||||
message?: string
|
||||
) {
|
||||
try {
|
||||
await func();
|
||||
} catch (err) {
|
||||
throw new assert.AssertionError({
|
||||
message: `Got unwanted exception: ${message}\nError: ${
|
||||
(err as Error).message
|
||||
}\n${(err as Error).stack}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export type BaseMetaKey = 'created' | 'modified' | 'createdBy' | 'modifiedBy';
|
624
backend/database/tests/testCore.spec.ts
Normal file
@ -0,0 +1,624 @@
|
||||
import * as assert from 'assert';
|
||||
import 'mocha';
|
||||
import { FieldTypeEnum, RawValue, Schema } from 'schemas/types';
|
||||
import { getMapFromList, getValueMapFromList, sleep } from 'utils';
|
||||
import { getDefaultMetaFieldValueMap, sqliteTypeMap } from '../../helpers';
|
||||
import DatabaseCore from '../core';
|
||||
import { FieldValueMap, SqliteTableInfo } from '../types';
|
||||
import {
|
||||
assertDoesNotThrow,
|
||||
assertThrows,
|
||||
BaseMetaKey,
|
||||
getBuiltTestSchemaMap,
|
||||
} from './helpers';
|
||||
|
||||
/**
|
||||
* Note: these tests have a strange structure where multiple tests are
|
||||
 * inside a `specify`, this is because `describe` doesn't support `async` or waiting
|
||||
* on promises.
|
||||
*
|
||||
 * Due to this, `async` db operations need to be handled in `specify`. And `specify`
|
||||
 * can't be nested the way `describe` can, hence the strange structure.
|
||||
*
|
||||
 * This also implies that assert calls should have descriptive messages.
|
||||
*/
|
||||
|
||||
describe('DatabaseCore: Connect Migrate Close', function () {
|
||||
const db = new DatabaseCore();
|
||||
specify('dbPath', function () {
|
||||
assert.strictEqual(db.dbPath, ':memory:');
|
||||
});
|
||||
|
||||
const schemaMap = getBuiltTestSchemaMap();
|
||||
db.setSchemaMap(schemaMap);
|
||||
specify('schemaMap', function () {
|
||||
assert.strictEqual(schemaMap, db.schemaMap);
|
||||
});
|
||||
|
||||
specify('connect', async function () {
|
||||
await assertDoesNotThrow(async () => await db.connect());
|
||||
assert.notStrictEqual(db.knex, undefined);
|
||||
});
|
||||
|
||||
specify('migrate and close', async function () {
|
||||
// Does not throw
|
||||
await db.migrate();
|
||||
// Does not throw
|
||||
await db.close();
|
||||
});
|
||||
});
|
||||
|
||||
describe('DatabaseCore: Migrate and Check Db', function () {
|
||||
let db: DatabaseCore;
|
||||
const schemaMap = getBuiltTestSchemaMap();
|
||||
|
||||
this.beforeEach(async function () {
|
||||
db = new DatabaseCore();
|
||||
await db.connect();
|
||||
db.setSchemaMap(schemaMap);
|
||||
});
|
||||
|
||||
this.afterEach(async function () {
|
||||
await db.close();
|
||||
});
|
||||
|
||||
specify(`Pre Migrate TableInfo`, async function () {
|
||||
for (const schemaName in schemaMap) {
|
||||
const columns = await db.knex?.raw('pragma table_info(??)', schemaName);
|
||||
assert.strictEqual(columns.length, 0, `column count ${schemaName}`);
|
||||
}
|
||||
});
|
||||
|
||||
specify('Post Migrate TableInfo', async function () {
|
||||
await db.migrate();
|
||||
for (const schemaName in schemaMap) {
|
||||
const schema = schemaMap[schemaName] as Schema;
|
||||
const fieldMap = getMapFromList(schema.fields, 'fieldname');
|
||||
const columns: SqliteTableInfo[] = await db.knex!.raw(
|
||||
'pragma table_info(??)',
|
||||
schemaName
|
||||
);
|
||||
|
||||
let columnCount = schema.fields.filter(
|
||||
(f) => f.fieldtype !== FieldTypeEnum.Table
|
||||
).length;
|
||||
|
||||
if (schema.isSingle) {
|
||||
columnCount = 0;
|
||||
}
|
||||
|
||||
assert.strictEqual(
|
||||
columns.length,
|
||||
columnCount,
|
||||
`${schemaName}:: column count: ${columns.length}, ${columnCount}`
|
||||
);
|
||||
|
||||
for (const column of columns) {
|
||||
const field = fieldMap[column.name];
|
||||
const dbColType = sqliteTypeMap[field.fieldtype];
|
||||
|
||||
assert.strictEqual(
|
||||
column.name,
|
||||
field.fieldname,
|
||||
`${schemaName}.${column.name}:: name check: ${column.name}, ${field.fieldname}`
|
||||
);
|
||||
|
||||
assert.strictEqual(
|
||||
column.type.toLowerCase(),
|
||||
dbColType,
|
||||
`${schemaName}.${column.name}:: type check: ${column.type}, ${dbColType}`
|
||||
);
|
||||
|
||||
if (field.required !== undefined) {
|
||||
assert.strictEqual(
|
||||
!!column.notnull,
|
||||
field.required,
|
||||
`${schemaName}.${column.name}:: notnull check: ${column.notnull}, ${field.required}`
|
||||
);
|
||||
} else {
|
||||
assert.strictEqual(
|
||||
column.notnull,
|
||||
0,
|
||||
`${schemaName}.${column.name}:: notnull check: ${column.notnull}, ${field.required}`
|
||||
);
|
||||
}
|
||||
|
||||
if (column.dflt_value === null) {
|
||||
assert.strictEqual(
|
||||
field.default,
|
||||
undefined,
|
||||
`${schemaName}.${column.name}:: dflt_value check: ${column.dflt_value}, ${field.default}`
|
||||
);
|
||||
} else {
|
||||
assert.strictEqual(
|
||||
column.dflt_value.slice(1, -1),
|
||||
String(field.default),
|
||||
`${schemaName}.${column.name}:: dflt_value check: ${column.dflt_value}, ${field.default}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('DatabaseCore: CRUD', function () {
|
||||
let db: DatabaseCore;
|
||||
const schemaMap = getBuiltTestSchemaMap();
|
||||
|
||||
this.beforeEach(async function () {
|
||||
db = new DatabaseCore();
|
||||
await db.connect();
|
||||
db.setSchemaMap(schemaMap);
|
||||
await db.migrate();
|
||||
});
|
||||
|
||||
this.afterEach(async function () {
|
||||
await db.close();
|
||||
});
|
||||
|
||||
specify('exists() before insertion', async function () {
|
||||
for (const schemaName in schemaMap) {
|
||||
const doesExist = await db.exists(schemaName);
|
||||
if (['SingleValue', 'SystemSettings'].includes(schemaName)) {
|
||||
assert.strictEqual(doesExist, true, `${schemaName} exists`);
|
||||
} else {
|
||||
assert.strictEqual(doesExist, false, `${schemaName} exists`);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
specify('CRUD single values', async function () {
|
||||
/**
|
||||
* Checking default values which are created when db.migrate
|
||||
* takes place.
|
||||
*/
|
||||
let rows: Record<string, RawValue>[] = await db.knex!.raw(
|
||||
'select * from SingleValue'
|
||||
);
|
||||
const defaultMap = getValueMapFromList(
|
||||
(schemaMap.SystemSettings as Schema).fields,
|
||||
'fieldname',
|
||||
'default'
|
||||
);
|
||||
for (const row of rows) {
|
||||
assert.strictEqual(
|
||||
row.value,
|
||||
defaultMap[row.fieldname as string],
|
||||
`${row.fieldname} default values equality`
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Insertion and updation for single values call the same function.
|
||||
*
|
||||
* Insert
|
||||
*/
|
||||
|
||||
let localeRow = rows.find((r) => r.fieldname === 'locale');
|
||||
const localeEntryName = localeRow?.name as string;
|
||||
const localeEntryCreated = localeRow?.created as string;
|
||||
|
||||
let locale = 'hi-IN';
|
||||
await db.insert('SystemSettings', { locale });
|
||||
rows = await db.knex!.raw('select * from SingleValue');
|
||||
localeRow = rows.find((r) => r.fieldname === 'locale');
|
||||
|
||||
assert.notStrictEqual(localeEntryName, undefined, 'localeEntryName');
|
||||
assert.strictEqual(rows.length, 2, 'rows length insert');
|
||||
assert.strictEqual(
|
||||
localeRow?.name as string,
|
||||
localeEntryName,
|
||||
`localeEntryName ${localeRow?.name}, ${localeEntryName}`
|
||||
);
|
||||
assert.strictEqual(
|
||||
localeRow?.value,
|
||||
locale,
|
||||
`locale ${localeRow?.value}, ${locale}`
|
||||
);
|
||||
assert.strictEqual(
|
||||
localeRow?.created,
|
||||
localeEntryCreated,
|
||||
`created ${localeRow?.created}, ${localeEntryCreated}`
|
||||
);
|
||||
|
||||
/**
|
||||
* Update
|
||||
*/
|
||||
locale = 'ca-ES';
|
||||
await db.update('SystemSettings', { locale });
|
||||
rows = await db.knex!.raw('select * from SingleValue');
|
||||
localeRow = rows.find((r) => r.fieldname === 'locale');
|
||||
|
||||
assert.notStrictEqual(localeEntryName, undefined, 'localeEntryName');
|
||||
assert.strictEqual(rows.length, 2, 'rows length update');
|
||||
assert.strictEqual(
|
||||
localeRow?.name as string,
|
||||
localeEntryName,
|
||||
`localeEntryName ${localeRow?.name}, ${localeEntryName}`
|
||||
);
|
||||
assert.strictEqual(
|
||||
localeRow?.value,
|
||||
locale,
|
||||
`locale ${localeRow?.value}, ${locale}`
|
||||
);
|
||||
assert.strictEqual(
|
||||
localeRow?.created,
|
||||
localeEntryCreated,
|
||||
`created ${localeRow?.created}, ${localeEntryCreated}`
|
||||
);
|
||||
|
||||
/**
|
||||
* Delete
|
||||
*/
|
||||
await db.delete('SystemSettings', 'locale');
|
||||
rows = await db.knex!.raw('select * from SingleValue');
|
||||
assert.strictEqual(rows.length, 1, 'delete one');
|
||||
await db.delete('SystemSettings', 'dateFormat');
|
||||
rows = await db.knex!.raw('select * from SingleValue');
|
||||
assert.strictEqual(rows.length, 0, 'delete two');
|
||||
|
||||
const dateFormat = 'dd/mm/yy';
|
||||
await db.insert('SystemSettings', { locale, dateFormat });
|
||||
rows = await db.knex!.raw('select * from SingleValue');
|
||||
assert.strictEqual(rows.length, 2, 'insert after delete');
|
||||
|
||||
/**
|
||||
* Read
|
||||
*
|
||||
* getSingleValues
|
||||
*/
|
||||
const svl = await db.getSingleValues('locale', 'dateFormat');
|
||||
assert.strictEqual(svl.length, 2, 'getSingleValues length');
|
||||
for (const sv of svl) {
|
||||
assert.strictEqual(
|
||||
sv.parent,
|
||||
'SystemSettings',
|
||||
`singleValue parent ${sv.parent}`
|
||||
);
|
||||
assert.strictEqual(
|
||||
sv.value,
|
||||
{ locale, dateFormat }[sv.fieldname],
|
||||
`singleValue value ${sv.value}`
|
||||
);
|
||||
|
||||
/**
|
||||
* get
|
||||
*/
|
||||
const svlMap = await db.get('SystemSettings');
|
||||
assert.strictEqual(Object.keys(svlMap).length, 2, 'get key length');
|
||||
assert.strictEqual(svlMap.locale, locale, 'get locale');
|
||||
assert.strictEqual(svlMap.dateFormat, dateFormat, 'get dateFormat');
|
||||
}
|
||||
});
|
||||
|
||||
specify('CRUD nondependent schema', async function () {
|
||||
const schemaName = 'Customer';
|
||||
let rows = await db.knex!(schemaName);
|
||||
assert.strictEqual(rows.length, 0, 'rows length before insertion');
|
||||
|
||||
/**
|
||||
* Insert
|
||||
*/
|
||||
const metaValues = getDefaultMetaFieldValueMap();
|
||||
const name = 'John Thoe';
|
||||
|
||||
await assertThrows(
|
||||
async () => await db.insert(schemaName, { name }),
|
||||
'insert() did not throw without meta values'
|
||||
);
|
||||
|
||||
const updateMap = Object.assign({}, metaValues, { name });
|
||||
await db.insert(schemaName, updateMap);
|
||||
rows = await db.knex!(schemaName);
|
||||
let firstRow = rows?.[0];
|
||||
assert.strictEqual(rows.length, 1, `rows length insert ${rows.length}`);
|
||||
assert.strictEqual(
|
||||
firstRow.name,
|
||||
name,
|
||||
`name check ${firstRow.name}, ${name}`
|
||||
);
|
||||
assert.strictEqual(firstRow.email, null, `email check ${firstRow.email}`);
|
||||
|
||||
for (const key in metaValues) {
|
||||
assert.strictEqual(
|
||||
firstRow[key],
|
||||
metaValues[key as BaseMetaKey],
|
||||
`${key} check`
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update
|
||||
*/
|
||||
const email = 'john@thoe.com';
|
||||
await sleep(1); // required for modified to change
|
||||
await db.update(schemaName, {
|
||||
name,
|
||||
email,
|
||||
modified: new Date().toISOString(),
|
||||
});
|
||||
rows = await db.knex!(schemaName);
|
||||
firstRow = rows?.[0];
|
||||
assert.strictEqual(rows.length, 1, `rows length update ${rows.length}`);
|
||||
assert.strictEqual(
|
||||
firstRow.name,
|
||||
name,
|
||||
`name check update ${firstRow.name}, ${name}`
|
||||
);
|
||||
assert.strictEqual(
|
||||
firstRow.email,
|
||||
email,
|
||||
`email check update ${firstRow.email}`
|
||||
);
|
||||
|
||||
const phone = '8149133530';
|
||||
await sleep(1);
|
||||
await db.update(schemaName, {
|
||||
name,
|
||||
phone,
|
||||
modified: new Date().toISOString(),
|
||||
});
|
||||
rows = await db.knex!(schemaName);
|
||||
firstRow = rows?.[0];
|
||||
assert.strictEqual(
|
||||
firstRow.email,
|
||||
email,
|
||||
`email check update ${firstRow.email}`
|
||||
);
|
||||
assert.strictEqual(
|
||||
firstRow.phone,
|
||||
phone,
|
||||
`phone check update ${firstRow.phone}`
|
||||
);
|
||||
|
||||
for (const key in metaValues) {
|
||||
const val = firstRow[key];
|
||||
const expected = metaValues[key as BaseMetaKey];
|
||||
if (key !== 'modified') {
|
||||
assert.strictEqual(val, expected, `${key} check ${val}, ${expected}`);
|
||||
} else {
|
||||
assert.notStrictEqual(
|
||||
val,
|
||||
expected,
|
||||
`${key} check ${val}, ${expected}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete
|
||||
*/
|
||||
await db.delete(schemaName, name);
|
||||
rows = await db.knex!(schemaName);
|
||||
assert.strictEqual(rows.length, 0, `rows length delete ${rows.length}`);
|
||||
|
||||
/**
|
||||
* Get
|
||||
*/
|
||||
let fvMap = await db.get(schemaName, name);
|
||||
assert.strictEqual(
|
||||
Object.keys(fvMap).length,
|
||||
0,
|
||||
`key count get ${JSON.stringify(fvMap)}`
|
||||
);
|
||||
|
||||
/**
|
||||
* > 1 entries
|
||||
*/
|
||||
|
||||
const cOne = { name: 'John Whoe', ...getDefaultMetaFieldValueMap() };
|
||||
const cTwo = { name: 'Jane Whoe', ...getDefaultMetaFieldValueMap() };
|
||||
|
||||
// Insert
|
||||
await db.insert(schemaName, cOne);
|
||||
assert.strictEqual(
|
||||
(await db.knex!(schemaName)).length,
|
||||
1,
|
||||
`rows length minsert`
|
||||
);
|
||||
await db.insert(schemaName, cTwo);
|
||||
rows = await db.knex!(schemaName);
|
||||
assert.strictEqual(rows.length, 2, `rows length minsert`);
|
||||
|
||||
const cs = [cOne, cTwo];
|
||||
for (const i in cs) {
|
||||
for (const k in cs[i]) {
|
||||
const val = cs[i][k as BaseMetaKey];
|
||||
assert.strictEqual(
|
||||
rows?.[i]?.[k],
|
||||
val,
|
||||
`equality check ${i} ${k} ${val} ${rows?.[i]?.[k]}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Update
|
||||
await db.update(schemaName, { name: cOne.name, email });
|
||||
const cOneEmail = await db.get(schemaName, cOne.name, 'email');
|
||||
assert.strictEqual(
|
||||
cOneEmail.email,
|
||||
email,
|
||||
`mi update check one ${cOneEmail}`
|
||||
);
|
||||
const cTwoEmail = await db.get(schemaName, cTwo.name, 'email');
|
||||
assert.strictEqual(
|
||||
cOneEmail.email,
|
||||
email,
|
||||
`mi update check two ${cTwoEmail}`
|
||||
);
|
||||
|
||||
// Rename
|
||||
const newName = 'Johnny Whoe';
|
||||
await db.rename(schemaName, cOne.name, newName);
|
||||
|
||||
fvMap = await db.get(schemaName, cOne.name);
|
||||
assert.strictEqual(
|
||||
Object.keys(fvMap).length,
|
||||
0,
|
||||
`mi rename check old ${JSON.stringify(fvMap)}`
|
||||
);
|
||||
|
||||
fvMap = await db.get(schemaName, newName);
|
||||
assert.strictEqual(
|
||||
fvMap.email,
|
||||
email,
|
||||
`mi rename check new ${JSON.stringify(fvMap)}`
|
||||
);
|
||||
|
||||
// Delete
|
||||
await db.delete(schemaName, newName);
|
||||
rows = await db.knex!(schemaName);
|
||||
assert.strictEqual(rows.length, 1, `mi delete length ${rows.length}`);
|
||||
assert.strictEqual(
|
||||
rows[0].name,
|
||||
cTwo.name,
|
||||
`mi delete name ${rows[0].name}`
|
||||
);
|
||||
});
|
||||
|
||||
specify('CRUD dependent schema', async function () {
|
||||
const Customer = 'Customer';
|
||||
const SalesInvoice = 'SalesInvoice';
|
||||
const SalesInvoiceItem = 'SalesInvoiceItem';
|
||||
|
||||
const customer: FieldValueMap = {
|
||||
name: 'John Whoe',
|
||||
email: 'john@whoe.com',
|
||||
...getDefaultMetaFieldValueMap(),
|
||||
};
|
||||
|
||||
const invoice: FieldValueMap = {
|
||||
name: 'SINV-1001',
|
||||
date: '2022-01-21',
|
||||
customer: customer.name,
|
||||
account: 'Debtors',
|
||||
submitted: false,
|
||||
cancelled: false,
|
||||
...getDefaultMetaFieldValueMap(),
|
||||
};
|
||||
|
||||
await assertThrows(
|
||||
async () => await db.insert(SalesInvoice, invoice),
|
||||
'foreign key constraint did not fail'
|
||||
);
|
||||
|
||||
await assertDoesNotThrow(async () => {
|
||||
await db.insert(Customer, customer);
|
||||
await db.insert(SalesInvoice, invoice);
|
||||
}, 'insertion failed');
|
||||
|
||||
await assertThrows(
|
||||
async () => await db.delete(Customer, customer.name as string),
|
||||
'foreign key constraint did not fail'
|
||||
);
|
||||
|
||||
await assertDoesNotThrow(async () => {
|
||||
await db.delete(SalesInvoice, invoice.name as string);
|
||||
await db.delete(Customer, customer.name as string);
|
||||
}, 'deletion failed');
|
||||
|
||||
await db.insert(Customer, customer);
|
||||
await db.insert(SalesInvoice, invoice);
|
||||
|
||||
let fvMap = await db.get(SalesInvoice, invoice.name as string);
|
||||
for (const key in invoice) {
|
||||
let expected = invoice[key];
|
||||
if (typeof expected === 'boolean') {
|
||||
expected = +expected;
|
||||
}
|
||||
|
||||
assert.strictEqual(
|
||||
fvMap[key],
|
||||
expected,
|
||||
`equality check ${key}: ${fvMap[key]}, ${invoice[key]}`
|
||||
);
|
||||
}
|
||||
|
||||
assert.strictEqual(
|
||||
(fvMap.items as unknown[])?.length,
|
||||
0,
|
||||
'empty items check'
|
||||
);
|
||||
|
||||
const items: FieldValueMap[] = [
|
||||
{
|
||||
item: 'Bottle Caps',
|
||||
quantity: 2,
|
||||
rate: 100,
|
||||
amount: 200,
|
||||
},
|
||||
];
|
||||
|
||||
await assertThrows(
|
||||
async () => await db.insert(SalesInvoice, { name: invoice.name, items }),
|
||||
'invoice insertion with ct did not fail'
|
||||
);
|
||||
await assertDoesNotThrow(
|
||||
async () => await db.update(SalesInvoice, { name: invoice.name, items }),
|
||||
'ct insertion failed'
|
||||
);
|
||||
|
||||
fvMap = await db.get(SalesInvoice, invoice.name as string);
|
||||
const ct = fvMap.items as FieldValueMap[];
|
||||
assert.strictEqual(ct.length, 1, `ct length ${ct.length}`);
|
||||
assert.strictEqual(ct[0].parent, invoice.name, `ct parent ${ct[0].parent}`);
|
||||
assert.strictEqual(
|
||||
ct[0].parentFieldname,
|
||||
'items',
|
||||
`ct parentFieldname ${ct[0].parentFieldname}`
|
||||
);
|
||||
assert.strictEqual(
|
||||
ct[0].parentSchemaName,
|
||||
SalesInvoice,
|
||||
`ct parentSchemaName ${ct[0].parentSchemaName}`
|
||||
);
|
||||
for (const key in items[0]) {
|
||||
assert.strictEqual(
|
||||
ct[0][key],
|
||||
items[0][key],
|
||||
`ct values ${key}: ${ct[0][key]}, ${items[0][key]}`
|
||||
);
|
||||
}
|
||||
|
||||
items.push({
|
||||
item: 'Mentats',
|
||||
quantity: 4,
|
||||
rate: 200,
|
||||
amount: 800,
|
||||
});
|
||||
await assertDoesNotThrow(
|
||||
async () => await db.update(SalesInvoice, { name: invoice.name, items }),
|
||||
'ct updation failed'
|
||||
);
|
||||
|
||||
let rows = await db.getAll(SalesInvoiceItem, {
|
||||
fields: ['item', 'quantity', 'rate', 'amount'],
|
||||
});
|
||||
assert.strictEqual(rows.length, 2, `ct length update ${rows.length}`);
|
||||
|
||||
for (const i in rows) {
|
||||
for (const key in rows[i]) {
|
||||
assert.strictEqual(
|
||||
rows[i][key],
|
||||
items[i][key],
|
||||
`ct values ${i},${key}: ${rows[i][key]}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
invoice.date = '2022-04-01';
|
||||
invoice.modified = new Date().toISOString();
|
||||
await db.update('SalesInvoice', {
|
||||
name: invoice.name,
|
||||
date: invoice.date,
|
||||
modified: invoice.modified,
|
||||
});
|
||||
|
||||
rows = await db.knex!(SalesInvoiceItem);
|
||||
assert.strictEqual(rows.length, 2, `postupdate ct empty ${rows.length}`);
|
||||
|
||||
await db.delete(SalesInvoice, invoice.name as string);
|
||||
rows = await db.getAll(SalesInvoiceItem);
|
||||
assert.strictEqual(rows.length, 0, `ct length delete ${rows.length}`);
|
||||
});
|
||||
});
|
57
backend/database/types.ts
Normal file
@ -0,0 +1,57 @@
|
||||
import { Field, RawValue } from '../../schemas/types';
|
||||
import DatabaseCore from './core';
|
||||
import { DatabaseManager } from './manager';
|
||||
|
||||
export interface GetQueryBuilderOptions {
|
||||
offset?: number;
|
||||
limit?: number;
|
||||
groupBy?: string;
|
||||
orderBy?: string;
|
||||
order?: 'desc' | 'asc';
|
||||
}
|
||||
|
||||
export type ColumnDiff = { added: Field[]; removed: string[] };
|
||||
export type FieldValueMap = Record<
|
||||
string,
|
||||
RawValue | undefined | FieldValueMap[]
|
||||
>;
|
||||
|
||||
export interface Patch {
|
||||
name: string;
|
||||
version: string;
|
||||
patch: {
|
||||
execute: (dm: DatabaseManager) => Promise<void>;
|
||||
beforeMigrate?: boolean;
|
||||
};
|
||||
priority?: number;
|
||||
}
|
||||
|
||||
export type KnexColumnType =
|
||||
| 'text'
|
||||
| 'integer'
|
||||
| 'float'
|
||||
| 'boolean'
|
||||
| 'date'
|
||||
| 'datetime'
|
||||
| 'time'
|
||||
| 'binary';
|
||||
|
||||
// Returned by pragma table_info
|
||||
export interface SqliteTableInfo {
|
||||
pk: number;
|
||||
cid: number;
|
||||
name: string;
|
||||
type: string;
|
||||
notnull: number; // 0 | 1
|
||||
dflt_value: string | null;
|
||||
}
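As a quick illustration (the values below are made up for this note, not taken from the changeset), a row of the kind `pragma table_info` yields for an optional text column could look like this:

// Illustrative only: a plausible `pragma table_info` row for an optional text column.
const exampleColumn: SqliteTableInfo = {
  cid: 1,
  name: 'email',
  type: 'text',
  notnull: 0, // 0 -> column may be NULL (field not required)
  dflt_value: null, // no default declared in the schema
  pk: 0,
};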
|
||||
|
||||
export type BespokeFunction = (
|
||||
db: DatabaseCore,
|
||||
...args: unknown[]
|
||||
) => Promise<unknown>;
|
||||
export type SingleValue<T> = {
|
||||
fieldname: string;
|
||||
parent: string;
|
||||
value: T;
|
||||
}[];
|
49
backend/helpers.ts
Normal file
@ -0,0 +1,49 @@
|
||||
import { DatabaseMethod } from 'utils/db/types';
|
||||
import { KnexColumnType } from './database/types';
|
||||
|
||||
export const sqliteTypeMap: Record<string, KnexColumnType> = {
|
||||
AutoComplete: 'text',
|
||||
Currency: 'text',
|
||||
Int: 'integer',
|
||||
Float: 'float',
|
||||
Percent: 'float',
|
||||
Check: 'boolean',
|
||||
Code: 'text',
|
||||
Date: 'date',
|
||||
Datetime: 'datetime',
|
||||
Time: 'time',
|
||||
Text: 'text',
|
||||
Data: 'text',
|
||||
Link: 'text',
|
||||
DynamicLink: 'text',
|
||||
Password: 'text',
|
||||
Select: 'text',
|
||||
File: 'binary',
|
||||
Attach: 'text',
|
||||
AttachImage: 'text',
|
||||
Color: 'text',
|
||||
};
|
||||
|
||||
export const SYSTEM = '__SYSTEM__';
|
||||
export const validTypes = Object.keys(sqliteTypeMap);
|
||||
export function getDefaultMetaFieldValueMap() {
|
||||
const now = new Date().toISOString();
|
||||
return {
|
||||
createdBy: SYSTEM,
|
||||
modifiedBy: SYSTEM,
|
||||
created: now,
|
||||
modified: now,
|
||||
};
|
||||
}
|
||||
|
||||
export const databaseMethodSet: Set<DatabaseMethod> = new Set([
|
||||
'insert',
|
||||
'get',
|
||||
'getAll',
|
||||
'getSingleValues',
|
||||
'rename',
|
||||
'update',
|
||||
'delete',
|
||||
'close',
|
||||
'exists',
|
||||
]);
|
13
backend/patches/index.ts
Normal file
@ -0,0 +1,13 @@
|
||||
import { Patch } from '../database/types';
|
||||
import testPatch from './testPatch';
|
||||
import updateSchemas from './updateSchemas';
|
||||
|
||||
export default [
|
||||
{ name: 'testPatch', version: '0.5.0-beta.0', patch: testPatch },
|
||||
{
|
||||
name: 'updateSchemas',
|
||||
version: '0.5.0-beta.0',
|
||||
patch: updateSchemas,
|
||||
priority: 100,
|
||||
},
|
||||
] as Patch[];
|
10
backend/patches/testPatch.ts
Normal file
@ -0,0 +1,10 @@
|
||||
import { DatabaseManager } from '../database/manager';
|
||||
|
||||
async function execute(dm: DatabaseManager) {
|
||||
/**
|
||||
* Execute function will receive the DatabaseManager which is to be used
|
||||
* to apply database patches.
|
||||
*/
|
||||
}
|
||||
|
||||
export default { execute, beforeMigrate: true };
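For a sense of what a non-empty patch body might contain, here is a minimal hypothetical sketch; the schema name and value are illustrative assumptions, and it only uses a call that appears elsewhere in this changeset (`dm.db!.update`):

import { DatabaseManager } from '../database/manager';

// Hypothetical patch body: overwrite a single value via the DatabaseManager.
// 'SystemSettings' and 'en-IN' are placeholders for illustration.
async function executeExample(dm: DatabaseManager) {
  await dm.db!.update('SystemSettings', { locale: 'en-IN' });
}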
|
400
backend/patches/updateSchemas.ts
Normal file
@ -0,0 +1,400 @@
|
||||
import fs from 'fs/promises';
|
||||
import { RawValueMap } from 'fyo/core/types';
|
||||
import { Knex } from 'knex';
|
||||
import path from 'path';
|
||||
import { changeKeys, deleteKeys, getIsNullOrUndef, invertMap } from 'utils';
|
||||
import { getCountryCodeFromCountry } from 'utils/misc';
|
||||
import { Version } from 'utils/version';
|
||||
import { ModelNameEnum } from '../../models/types';
|
||||
import { FieldTypeEnum, Schema, SchemaMap } from '../../schemas/types';
|
||||
import { DatabaseManager } from '../database/manager';
|
||||
|
||||
const ignoreColumns = ['keywords'];
|
||||
const columnMap = { creation: 'created', owner: 'createdBy' };
|
||||
const childTableColumnMap = {
|
||||
parenttype: 'parentSchemaName',
|
||||
parentfield: 'parentFieldname',
|
||||
};
|
||||
|
||||
const defaultNumberSeriesMap = {
|
||||
[ModelNameEnum.Payment]: 'PAY-',
|
||||
[ModelNameEnum.JournalEntry]: 'JE-',
|
||||
[ModelNameEnum.SalesInvoice]: 'SINV-',
|
||||
[ModelNameEnum.PurchaseInvoice]: 'PINV-',
|
||||
} as Record<ModelNameEnum, string>;
|
||||
|
||||
async function execute(dm: DatabaseManager) {
|
||||
const sourceKnex = dm.db!.knex!;
|
||||
const version = (
|
||||
await sourceKnex('SingleValue')
|
||||
.select('value')
|
||||
.where({ fieldname: 'version' })
|
||||
)?.[0]?.value;
|
||||
|
||||
/**
|
||||
* Versions after this should have the new schemas
|
||||
*/
|
||||
|
||||
if (version && Version.gt(version, '0.4.3-beta.0')) {
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize a different db to copy all the updated
|
||||
* data into.
|
||||
*/
|
||||
const countryCode = await getCountryCode(sourceKnex);
|
||||
const destDm = await getDestinationDM(dm.db!.dbPath, countryCode);
|
||||
|
||||
/**
|
||||
* Copy data from all the relevant tables
|
||||
   * the other tables will be empty because they are unused.
|
||||
*/
|
||||
try {
|
||||
await copyData(sourceKnex, destDm);
|
||||
} catch (err) {
|
||||
const destPath = destDm.db!.dbPath;
|
||||
await destDm.db!.close();
|
||||
await fs.unlink(destPath);
|
||||
throw err;
|
||||
}
|
||||
|
||||
/**
|
||||
   * Version will update when migration completes; this
|
||||
* is set to prevent this patch from running again.
|
||||
*/
|
||||
await destDm.db!.update(ModelNameEnum.SystemSettings, {
|
||||
version: '0.5.0-beta.0',
|
||||
});
|
||||
|
||||
/**
|
||||
* Replace the database with the new one.
|
||||
*/
|
||||
await replaceDatabaseCore(dm, destDm);
|
||||
}
|
||||
|
||||
async function replaceDatabaseCore(
|
||||
dm: DatabaseManager,
|
||||
destDm: DatabaseManager
|
||||
) {
|
||||
const newDbPath = destDm.db!.dbPath; // new db with new schema
|
||||
const oldDbPath = dm.db!.dbPath; // old db to be replaced
|
||||
|
||||
await dm.db!.close();
|
||||
await destDm.db!.close();
|
||||
await fs.unlink(oldDbPath);
|
||||
await fs.rename(newDbPath, oldDbPath);
|
||||
await dm._connect(oldDbPath);
|
||||
}
|
||||
|
||||
async function copyData(sourceKnex: Knex, destDm: DatabaseManager) {
|
||||
const destKnex = destDm.db!.knex!;
|
||||
const schemaMap = destDm.getSchemaMap();
|
||||
await destKnex!.raw('PRAGMA foreign_keys=OFF');
|
||||
await copySingleValues(sourceKnex, destKnex, schemaMap);
|
||||
await copyParty(sourceKnex, destKnex, schemaMap[ModelNameEnum.Party]!);
|
||||
await copyItem(sourceKnex, destKnex, schemaMap[ModelNameEnum.Item]!);
|
||||
await copyChildTables(sourceKnex, destKnex, schemaMap);
|
||||
await copyOtherTables(sourceKnex, destKnex, schemaMap);
|
||||
await copyTransactionalTables(sourceKnex, destKnex, schemaMap);
|
||||
await copyLedgerEntries(
|
||||
sourceKnex,
|
||||
destKnex,
|
||||
schemaMap[ModelNameEnum.AccountingLedgerEntry]!
|
||||
);
|
||||
await copyNumberSeries(
|
||||
sourceKnex,
|
||||
destKnex,
|
||||
schemaMap[ModelNameEnum.NumberSeries]!
|
||||
);
|
||||
await destKnex!.raw('PRAGMA foreign_keys=ON');
|
||||
}
|
||||
|
||||
async function copyNumberSeries(
|
||||
sourceKnex: Knex,
|
||||
destKnex: Knex,
|
||||
schema: Schema
|
||||
) {
|
||||
const values = (await sourceKnex(
|
||||
ModelNameEnum.NumberSeries
|
||||
)) as RawValueMap[];
|
||||
|
||||
const refMap = invertMap(defaultNumberSeriesMap);
|
||||
|
||||
for (const value of values) {
|
||||
if (value.referenceType) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const name = value.name as string;
|
||||
const referenceType = refMap[name];
|
||||
if (!referenceType) {
|
||||
delete value.name;
|
||||
continue;
|
||||
}
|
||||
|
||||
const indices = await sourceKnex.raw(
|
||||
`
|
||||
select cast(substr(name, ??) as int) as idx
|
||||
from ??
|
||||
order by idx desc
|
||||
limit 1`,
|
||||
[name.length + 1, referenceType]
|
||||
);
|
||||
|
||||
value.start = 1001;
|
||||
value.current = indices[0]?.idx ?? value.current ?? value.start;
|
||||
value.referenceType = referenceType;
|
||||
}
|
||||
|
||||
await copyValues(
|
||||
destKnex,
|
||||
ModelNameEnum.NumberSeries,
|
||||
values.filter((v) => v.name),
|
||||
[],
|
||||
{},
|
||||
schema
|
||||
);
|
||||
}
|
||||
|
||||
async function copyLedgerEntries(
|
||||
sourceKnex: Knex,
|
||||
destKnex: Knex,
|
||||
schema: Schema
|
||||
) {
|
||||
const values = (await sourceKnex(
|
||||
ModelNameEnum.AccountingLedgerEntry
|
||||
)) as RawValueMap[];
|
||||
await copyValues(
|
||||
destKnex,
|
||||
ModelNameEnum.AccountingLedgerEntry,
|
||||
values,
|
||||
['description', 'againstAccount', 'balance'],
|
||||
{},
|
||||
schema
|
||||
);
|
||||
}
|
||||
|
||||
async function copyOtherTables(
|
||||
sourceKnex: Knex,
|
||||
destKnex: Knex,
|
||||
schemaMap: SchemaMap
|
||||
) {
|
||||
const schemaNames = [
|
||||
ModelNameEnum.Account,
|
||||
ModelNameEnum.Currency,
|
||||
ModelNameEnum.Address,
|
||||
ModelNameEnum.Color,
|
||||
ModelNameEnum.Tax,
|
||||
ModelNameEnum.PatchRun,
|
||||
];
|
||||
|
||||
for (const sn of schemaNames) {
|
||||
const values = (await sourceKnex(sn)) as RawValueMap[];
|
||||
await copyValues(destKnex, sn, values, [], {}, schemaMap[sn]);
|
||||
}
|
||||
}
|
||||
|
||||
async function copyTransactionalTables(
|
||||
sourceKnex: Knex,
|
||||
destKnex: Knex,
|
||||
schemaMap: SchemaMap
|
||||
) {
|
||||
const schemaNames = [
|
||||
ModelNameEnum.JournalEntry,
|
||||
ModelNameEnum.Payment,
|
||||
ModelNameEnum.SalesInvoice,
|
||||
ModelNameEnum.PurchaseInvoice,
|
||||
];
|
||||
|
||||
for (const sn of schemaNames) {
|
||||
const values = (await sourceKnex(sn)) as RawValueMap[];
|
||||
values.forEach((v) => {
|
||||
if (!v.submitted) {
|
||||
v.submitted = 0;
|
||||
}
|
||||
|
||||
if (!v.cancelled) {
|
||||
v.cancelled = 0;
|
||||
}
|
||||
|
||||
if (!v.numberSeries) {
|
||||
v.numberSeries = defaultNumberSeriesMap[sn];
|
||||
}
|
||||
|
||||
if (v.customer) {
|
||||
v.party = v.customer;
|
||||
}
|
||||
|
||||
if (v.supplier) {
|
||||
v.party = v.supplier;
|
||||
}
|
||||
});
|
||||
await copyValues(
|
||||
destKnex,
|
||||
sn,
|
||||
values,
|
||||
[],
|
||||
childTableColumnMap,
|
||||
schemaMap[sn]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async function copyChildTables(
|
||||
sourceKnex: Knex,
|
||||
destKnex: Knex,
|
||||
schemaMap: SchemaMap
|
||||
) {
|
||||
const childSchemaNames = Object.keys(schemaMap).filter(
|
||||
(sn) => schemaMap[sn]?.isChild
|
||||
);
|
||||
|
||||
for (const sn of childSchemaNames) {
|
||||
const values = (await sourceKnex(sn)) as RawValueMap[];
|
||||
await copyValues(
|
||||
destKnex,
|
||||
sn,
|
||||
values,
|
||||
[],
|
||||
childTableColumnMap,
|
||||
schemaMap[sn]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async function copyItem(sourceKnex: Knex, destKnex: Knex, schema: Schema) {
|
||||
const values = (await sourceKnex(ModelNameEnum.Item)) as RawValueMap[];
|
||||
values.forEach((value) => {
|
||||
value.for = 'Both';
|
||||
});
|
||||
|
||||
await copyValues(destKnex, ModelNameEnum.Item, values, [], {}, schema);
|
||||
}
|
||||
|
||||
async function copyParty(sourceKnex: Knex, destKnex: Knex, schema: Schema) {
|
||||
const values = (await sourceKnex(ModelNameEnum.Party)) as RawValueMap[];
|
||||
values.forEach((value) => {
|
||||
// customer will be mapped onto role
|
||||
if (Number(value.supplier) === 1) {
|
||||
value.customer = 'Supplier';
|
||||
} else {
|
||||
value.customer = 'Customer';
|
||||
}
|
||||
});
|
||||
|
||||
await copyValues(
|
||||
destKnex,
|
||||
ModelNameEnum.Party,
|
||||
values,
|
||||
['supplier', 'addressDisplay'],
|
||||
{ customer: 'role' },
|
||||
schema
|
||||
);
|
||||
}
|
||||
|
||||
async function copySingleValues(
|
||||
sourceKnex: Knex,
|
||||
destKnex: Knex,
|
||||
schemaMap: SchemaMap
|
||||
) {
|
||||
const singleSchemaNames = Object.keys(schemaMap).filter(
|
||||
(k) => schemaMap[k]?.isSingle
|
||||
);
|
||||
const singleValues = (await sourceKnex(ModelNameEnum.SingleValue).whereIn(
|
||||
'parent',
|
||||
singleSchemaNames
|
||||
)) as RawValueMap[];
|
||||
await copyValues(destKnex, ModelNameEnum.SingleValue, singleValues);
|
||||
}
|
||||
|
||||
async function copyValues(
|
||||
destKnex: Knex,
|
||||
destTableName: string,
|
||||
values: RawValueMap[],
|
||||
keysToDelete: string[] = [],
|
||||
keyMap: Record<string, string> = {},
|
||||
schema?: Schema
|
||||
) {
|
||||
keysToDelete = [...keysToDelete, ...ignoreColumns];
|
||||
keyMap = { ...keyMap, ...columnMap };
|
||||
|
||||
values = values.map((sv) => deleteKeys(sv, keysToDelete));
|
||||
values = values.map((sv) => changeKeys(sv, keyMap));
|
||||
|
||||
if (schema) {
|
||||
values.forEach((v) => notNullify(v, schema));
|
||||
}
|
||||
|
||||
if (schema) {
|
||||
const newKeys = schema?.fields.map((f) => f.fieldname);
|
||||
values.forEach((v) => deleteOldKeys(v, newKeys));
|
||||
}
|
||||
|
||||
await destKnex.batchInsert(destTableName, values, 100);
|
||||
}
|
||||
|
||||
async function getDestinationDM(sourceDbPath: string, countryCode: string) {
|
||||
/**
|
||||
* This is where all the stuff from the old db will be copied.
|
||||
   * The old db itself won't be altered, because updating its schema in place would cause data loss.
|
||||
*/
|
||||
|
||||
const dir = path.parse(sourceDbPath).dir;
|
||||
const dbPath = path.join(dir, '__update_schemas_temp.db');
|
||||
const dm = new DatabaseManager();
|
||||
await dm._connect(dbPath, countryCode);
|
||||
await dm.db!.migrate();
|
||||
await dm.db!.truncate();
|
||||
return dm;
|
||||
}
|
||||
|
||||
async function getCountryCode(knex: Knex) {
|
||||
/**
|
||||
   * Need to account for schema changes in 0.4.3-beta.0
|
||||
*/
|
||||
const country = (
|
||||
await knex('SingleValue').select('value').where({ fieldname: 'country' })
|
||||
)?.[0]?.value;
|
||||
|
||||
if (!country) {
|
||||
return '';
|
||||
}
|
||||
|
||||
return getCountryCodeFromCountry(country);
|
||||
}
|
||||
|
||||
function notNullify(map: RawValueMap, schema: Schema) {
|
||||
for (const field of schema.fields) {
|
||||
if (!field.required || !getIsNullOrUndef(map[field.fieldname])) {
|
||||
continue;
|
||||
}
|
||||
|
||||
switch (field.fieldtype) {
|
||||
case FieldTypeEnum.Float:
|
||||
case FieldTypeEnum.Int:
|
||||
case FieldTypeEnum.Check:
|
||||
map[field.fieldname] = 0;
|
||||
break;
|
||||
case FieldTypeEnum.Currency:
|
||||
map[field.fieldname] = '0.00000000000';
|
||||
break;
|
||||
case FieldTypeEnum.Table:
|
||||
continue;
|
||||
default:
|
||||
map[field.fieldname] = '';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function deleteOldKeys(map: RawValueMap, newKeys: string[]) {
|
||||
for (const key of Object.keys(map)) {
|
||||
if (newKeys.includes(key)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
delete map[key];
|
||||
}
|
||||
}
|
||||
|
||||
export default { execute, beforeMigrate: true };
|
11
dummy/README.md
Normal file
@ -0,0 +1,11 @@
|
||||
# Dummy
|
||||
|
||||
This will be used to generate dummy data for the purposes of tests and to create
|
||||
a demo instance.
|
||||
|
||||
There are a few `.json` files here (eg: `items.json`) which have been generated;
|
||||
these are not to be edited.
|
||||
|
||||
The generated data has some randomness. There is a `baseCount` arg to the
|
||||
exported `setupDummyInstance` function; the number of transactions generated is
|
||||
always more than this.
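
A minimal usage sketch, assuming a `Fyo` instance is already constructed and that `setupDummyInstance` is importable from this folder (the import path, db path, and counts below are illustrative assumptions):

import { Fyo } from 'fyo';
import { setupDummyInstance } from 'dummy'; // assumed path alias; adjust to your setup

async function makeDemo(fyo: Fyo) {
  // years = 1, baseCount = 250: more transactions than baseCount will be generated.
  const { companyName, instanceId } = await setupDummyInstance(
    '/tmp/demo.books.db', // illustrative dbPath
    fyo,
    1,
    250,
    (stage, percent) => console.log(stage, percent) // optional notifier
  );
  console.log(companyName, instanceId);
}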
|
64
dummy/helpers.ts
Normal file
@ -0,0 +1,64 @@
|
||||
import { DateTime } from 'luxon';
|
||||
|
||||
// prettier-ignore
|
||||
export const partyPurchaseItemMap: Record<string, string[]> = {
|
||||
'Janky Office Spaces': ['Office Rent', 'Office Cleaning'],
|
||||
"Josféña's 611s": ['611 Jeans - PCH', '611 Jeans - SHR'],
|
||||
'Lankness Feet Fomenters': ['Bominga Shoes', 'Jade Slippers'],
|
||||
'The Overclothes Company': ['Jacket - RAW', 'Cryo Gloves', 'Cool Cloth'],
|
||||
'Adani Electricity Mumbai Limited': ['Electricity'],
|
||||
'Only Fulls': ['Full Sleeve - BLK', 'Full Sleeve - COL'],
|
||||
'Just Epaulettes': ['Epaulettes - 4POR'],
|
||||
'Le Socials': ['Social Ads'],
|
||||
'Maxwell': ['Marketing - Video'],
|
||||
};
|
||||
|
||||
export const purchaseItemPartyMap: Record<string, string> = Object.keys(
|
||||
partyPurchaseItemMap
|
||||
).reduce((acc, party) => {
|
||||
for (const item of partyPurchaseItemMap[party]) {
|
||||
acc[item] = party;
|
||||
}
|
||||
return acc;
|
||||
}, {} as Record<string, string>);
|
||||
|
||||
export const flow = [
|
||||
0.35, // Jan
|
||||
0.25, // Feb
|
||||
0.15, // Mar
|
||||
0.15, // Apr
|
||||
0.25, // May
|
||||
0.05, // Jun
|
||||
0.05, // Jul
|
||||
0.15, // Aug
|
||||
0.25, // Sep
|
||||
0.35, // Oct
|
||||
0.45, // Nov
|
||||
0.55, // Dec
|
||||
];
|
||||
export function getFlowConstant(months: number) {
|
||||
// Jan to December
|
||||
const d = DateTime.now().minus({ months });
|
||||
return flow[d.month - 1];
|
||||
}
|
||||
|
||||
export function getRandomDates(count: number, months: number): Date[] {
|
||||
/**
|
||||
* Returns `count` number of dates for a month, `months` back from the
|
||||
* current date.
|
||||
*/
|
||||
let endDate = DateTime.now();
|
||||
if (months !== 0) {
|
||||
const back = endDate.minus({ months });
|
||||
endDate = DateTime.local(back.year, back.month, back.daysInMonth);
|
||||
}
|
||||
|
||||
const dates: Date[] = [];
|
||||
for (let i = 0; i < count; i++) {
|
||||
const day = Math.ceil(endDate.day * Math.random());
|
||||
const date = DateTime.local(endDate.year, endDate.month, day);
|
||||
dates.push(date.toJSDate());
|
||||
}
|
||||
|
||||
return dates;
|
||||
}
|
537
dummy/index.ts
Normal file
@ -0,0 +1,537 @@
|
||||
import { Fyo, t } from 'fyo';
|
||||
import { Doc } from 'fyo/model/doc';
|
||||
import { range, sample } from 'lodash';
|
||||
import { DateTime } from 'luxon';
|
||||
import { Invoice } from 'models/baseModels/Invoice/Invoice';
|
||||
import { Payment } from 'models/baseModels/Payment/Payment';
|
||||
import { PurchaseInvoice } from 'models/baseModels/PurchaseInvoice/PurchaseInvoice';
|
||||
import { SalesInvoice } from 'models/baseModels/SalesInvoice/SalesInvoice';
|
||||
import { ModelNameEnum } from 'models/types';
|
||||
import setupInstance from 'src/setup/setupInstance';
|
||||
import { getMapFromList } from 'utils';
|
||||
import { getFiscalYear } from 'utils/misc';
|
||||
import {
|
||||
flow,
|
||||
getFlowConstant,
|
||||
getRandomDates,
|
||||
purchaseItemPartyMap,
|
||||
} from './helpers';
|
||||
import items from './items.json';
|
||||
import logo from './logo';
|
||||
import parties from './parties.json';
|
||||
|
||||
type Notifier = (stage: string, percent: number) => void;
|
||||
|
||||
export async function setupDummyInstance(
|
||||
dbPath: string,
|
||||
fyo: Fyo,
|
||||
years: number = 1,
|
||||
baseCount: number = 1000,
|
||||
notifier?: Notifier
|
||||
) {
|
||||
fyo.purgeCache();
|
||||
notifier?.(fyo.t`Setting Up Instance`, -1);
|
||||
const options = {
|
||||
logo: null,
|
||||
companyName: "Flo's Clothes",
|
||||
country: 'India',
|
||||
fullname: 'Lin Florentine',
|
||||
email: 'lin@flosclothes.com',
|
||||
bankName: 'Supreme Bank',
|
||||
currency: 'INR',
|
||||
fiscalYearStart: getFiscalYear('04-01', true),
|
||||
fiscalYearEnd: getFiscalYear('04-01', false),
|
||||
chartOfAccounts: 'India - Chart of Accounts',
|
||||
};
|
||||
await setupInstance(dbPath, options, fyo);
|
||||
|
||||
years = Math.floor(years);
|
||||
notifier?.(fyo.t`Creating Items and Parties`, -1);
|
||||
await generateStaticEntries(fyo);
|
||||
await generateDynamicEntries(fyo, years, baseCount, notifier);
|
||||
await setOtherSettings(fyo);
|
||||
|
||||
const instanceId = (await fyo.getValue(
|
||||
ModelNameEnum.SystemSettings,
|
||||
'instanceId'
|
||||
)) as string;
|
||||
return { companyName: options.companyName, instanceId };
|
||||
}
|
||||
|
||||
async function setOtherSettings(fyo: Fyo) {
|
||||
const doc = await fyo.doc.getDoc(ModelNameEnum.PrintSettings);
|
||||
const address = fyo.doc.getNewDoc(ModelNameEnum.Address);
|
||||
await address.setAndSync({
|
||||
addressLine1: '1st Column, Fitzgerald Bridge',
|
||||
city: 'Pune',
|
||||
state: 'Maharashtra',
|
||||
pos: 'Maharashtra',
|
||||
postalCode: '411001',
|
||||
country: 'India',
|
||||
});
|
||||
|
||||
await doc.setAndSync({
|
||||
color: '#F687B3',
|
||||
template: 'Business',
|
||||
displayLogo: true,
|
||||
phone: '+91 8983-000418',
|
||||
logo,
|
||||
address: address.name,
|
||||
});
|
||||
|
||||
const acc = await fyo.doc.getDoc(ModelNameEnum.AccountingSettings);
|
||||
await acc.setAndSync({
|
||||
gstin: '27LIN180000A1Z5',
|
||||
});
|
||||
console.log(acc.gstin, await fyo.db.getSingleValues('gstin'));
|
||||
}
|
||||
|
||||
/**
|
||||
* warning: long functions ahead!
|
||||
*/
|
||||
|
||||
async function generateDynamicEntries(
|
||||
fyo: Fyo,
|
||||
years: number,
|
||||
baseCount: number,
|
||||
notifier?: Notifier
|
||||
) {
|
||||
const salesInvoices = await getSalesInvoices(fyo, years, baseCount, notifier);
|
||||
|
||||
notifier?.(fyo.t`Creating Purchase Invoices`, -1);
|
||||
const purchaseInvoices = await getPurchaseInvoices(fyo, years, salesInvoices);
|
||||
|
||||
notifier?.(fyo.t`Creating Journal Entries`, -1);
|
||||
const journalEntries = await getJournalEntries(fyo, salesInvoices);
|
||||
await syncAndSubmit(journalEntries, notifier);
|
||||
|
||||
const invoices = ([salesInvoices, purchaseInvoices].flat() as Invoice[]).sort(
|
||||
(a, b) => +(a.date as Date) - +(b.date as Date)
|
||||
);
|
||||
await syncAndSubmit(invoices, notifier);
|
||||
|
||||
const payments = await getPayments(fyo, invoices);
|
||||
await syncAndSubmit(payments, notifier);
|
||||
}
|
||||
|
||||
async function getJournalEntries(fyo: Fyo, salesInvoices: SalesInvoice[]) {
|
||||
const entries = [];
|
||||
const amount = salesInvoices
|
||||
.map((i) => i.items!)
|
||||
.flat()
|
||||
.reduce((a, b) => a.add(b.amount!), fyo.pesa(0))
|
||||
.percent(75)
|
||||
.clip(0);
|
||||
const lastInv = salesInvoices.sort((a, b) => +a.date! - +b.date!).at(-1)!
|
||||
.date!;
|
||||
const date = DateTime.fromJSDate(lastInv).minus({ months: 6 }).toJSDate();
|
||||
|
||||
// Bank Entry
|
||||
let doc = fyo.doc.getNewDoc(
|
||||
ModelNameEnum.JournalEntry,
|
||||
{
|
||||
date,
|
||||
entryType: 'Bank Entry',
|
||||
},
|
||||
false
|
||||
);
|
||||
await doc.append('accounts', {
|
||||
account: 'Supreme Bank',
|
||||
debit: amount,
|
||||
credit: fyo.pesa(0),
|
||||
});
|
||||
|
||||
await doc.append('accounts', {
|
||||
account: 'Secured Loans',
|
||||
credit: amount,
|
||||
debit: fyo.pesa(0),
|
||||
});
|
||||
entries.push(doc);
|
||||
|
||||
// Cash Entry
|
||||
doc = fyo.doc.getNewDoc(
|
||||
ModelNameEnum.JournalEntry,
|
||||
{
|
||||
date,
|
||||
entryType: 'Cash Entry',
|
||||
},
|
||||
false
|
||||
);
|
||||
await doc.append('accounts', {
|
||||
account: 'Cash',
|
||||
debit: amount.percent(30),
|
||||
credit: fyo.pesa(0),
|
||||
});
|
||||
|
||||
await doc.append('accounts', {
|
||||
account: 'Supreme Bank',
|
||||
credit: amount.percent(30),
|
||||
debit: fyo.pesa(0),
|
||||
});
|
||||
entries.push(doc);
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
async function getPayments(fyo: Fyo, invoices: Invoice[]) {
|
||||
const payments = [];
|
||||
for (const invoice of invoices) {
|
||||
// Defaulters
|
||||
if (invoice.isSales && Math.random() < 0.007) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const doc = fyo.doc.getNewDoc(ModelNameEnum.Payment, {}, false) as Payment;
|
||||
doc.party = invoice.party as string;
|
||||
doc.paymentType = invoice.isSales ? 'Receive' : 'Pay';
|
||||
doc.paymentMethod = 'Cash';
|
||||
doc.date = DateTime.fromJSDate(invoice.date as Date)
|
||||
.plus({ hours: 1 })
|
||||
.toJSDate();
|
||||
if (doc.paymentType === 'Receive') {
|
||||
doc.account = 'Debtors';
|
||||
doc.paymentAccount = 'Cash';
|
||||
} else {
|
||||
doc.account = 'Cash';
|
||||
doc.paymentAccount = 'Creditors';
|
||||
}
|
||||
doc.amount = invoice.outstandingAmount;
|
||||
|
||||
// Discount
|
||||
if (invoice.isSales && Math.random() < 0.05) {
|
||||
await doc.set('writeOff', invoice.outstandingAmount?.percent(15));
|
||||
}
|
||||
|
||||
doc.push('for', {
|
||||
referenceType: invoice.schemaName,
|
||||
referenceName: invoice.name,
|
||||
amount: invoice.outstandingAmount,
|
||||
});
|
||||
|
||||
if (doc.amount!.isZero()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
payments.push(doc);
|
||||
}
|
||||
|
||||
return payments;
|
||||
}
|
||||
|
||||
function getSalesInvoiceDates(years: number, baseCount: number): Date[] {
  const dates: Date[] = [];
  for (const months of range(0, years * 12)) {
    const flow = getFlowConstant(months);
    const count = Math.ceil(flow * baseCount * (Math.random() * 0.25 + 0.75));
    dates.push(...getRandomDates(count, months));
  }

  return dates;
}

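getFlowConstant and getRandomDates come from dummy/helpers and are not shown in this diff. Purely as an illustration of the shape implied by their call sites (a seasonality multiplier for a given month offset, and a batch of random dates inside that month), hypothetical stand-ins could look like the following; none of this is the repo's actual implementation:

// Hypothetical stand-ins, shapes inferred from the calls above.
function getFlowConstantSketch(monthsAgo: number): number {
  // Some months sell more than others; return a multiplier around 1.
  const seasonality = [1.4, 0.8, 1.0, 1.1, 0.9, 1.2, 1.0, 0.9, 1.1, 1.3, 1.5, 1.6];
  return seasonality[monthsAgo % 12];
}

function getRandomDatesSketch(count: number, monthsAgo: number): Date[] {
  // `count` random timestamps inside the month that was `monthsAgo` months ago.
  const start = DateTime.now().minus({ months: monthsAgo }).startOf('month');
  const days = start.daysInMonth ?? 28;
  return Array.from({ length: count }, () =>
    start.plus({ days: Math.floor(Math.random() * days) }).toJSDate()
  );
}
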
async function getSalesInvoices(
  fyo: Fyo,
  years: number,
  baseCount: number,
  notifier?: Notifier
) {
  const invoices: SalesInvoice[] = [];
  const salesItems = items.filter((i) => i.for !== 'Purchases');
  const customers = parties.filter((i) => i.role !== 'Supplier');

  /**
   * Get certain number of entries for each month of the count
   * of years.
   */
  const dates = getSalesInvoiceDates(years, baseCount);

  /**
   * For each date create a Sales Invoice.
   */

  for (const d in dates) {
    const date = dates[d];

    notifier?.(
      `Creating Sales Invoices, ${d} out of ${dates.length}`,
      parseInt(d) / dates.length
    );
    const customer = sample(customers);

    const doc = fyo.doc.getNewDoc(
      ModelNameEnum.SalesInvoice,
      {
        date,
      },
      false
    ) as SalesInvoice;

    await doc.set('party', customer!.name);
    if (!doc.account) {
      doc.account = 'Debtors';
    }
    /**
     * Add `numItems` number of items to the invoice.
     */
    const numItems = Math.ceil(Math.random() * 5);
    for (let i = 0; i < numItems; i++) {
      const item = sample(salesItems);
      if ((doc.items ?? []).find((i) => i.item === item!.name)) {
        continue;
      }

      let quantity = 1;

      /**
       * Increase quantity depending on the rate.
       */
      if (item!.rate < 100 && Math.random() < 0.4) {
        quantity = Math.ceil(Math.random() * 10);
      } else if (item!.rate < 1000 && Math.random() < 0.2) {
        quantity = Math.ceil(Math.random() * 4);
      } else if (Math.random() < 0.01) {
        quantity = Math.ceil(Math.random() * 3);
      }

      let fc = flow[date.getMonth()];
      if (baseCount < 500) {
        fc += 1;
      }
      const rate = fyo.pesa(item!.rate * (fc + 1)).clip(0);
      await doc.append('items', {});
      await doc.items!.at(-1)!.set({
        item: item!.name,
        rate,
        quantity,
        account: item!.incomeAccount,
        amount: rate.mul(quantity),
        tax: item!.tax,
        description: item!.description,
        hsnCode: item!.hsnCode,
      });
    }

    invoices.push(doc);
  }

  return invoices;
}

async function getPurchaseInvoices(
  fyo: Fyo,
  years: number,
  salesInvoices: SalesInvoice[]
): Promise<PurchaseInvoice[]> {
  return [
    await getSalesPurchaseInvoices(fyo, salesInvoices),
    await getNonSalesPurchaseInvoices(fyo, years),
  ].flat();
}

async function getSalesPurchaseInvoices(
  fyo: Fyo,
  salesInvoices: SalesInvoice[]
): Promise<PurchaseInvoice[]> {
  const invoices = [] as PurchaseInvoice[];
  /**
   * Group all sales invoices by their YYYY-MM.
   */
  const dateGrouped = salesInvoices
    .map((si) => {
      const date = DateTime.fromJSDate(si.date as Date);
      const key = `${date.year}-${String(date.month).padStart(2, '0')}`;
      return { key, si };
    })
    .reduce((acc, item) => {
      acc[item.key] ??= [];
      acc[item.key].push(item.si);
      return acc;
    }, {} as Record<string, SalesInvoice[]>);

  /**
   * Sort the YYYY-MM keys in ascending order.
   */
  const dates = Object.keys(dateGrouped)
    .map((k) => ({ key: k, date: new Date(k) }))
    .sort((a, b) => +a.date - +b.date);
  const purchaseQty: Record<string, number> = {};

  /**
   * For each date create a set of Purchase Invoices.
   */
  for (const { key, date } of dates) {
    /**
     * Group items by name to get the total quantity used in a month.
     */
    const itemGrouped = dateGrouped[key].reduce((acc, si) => {
      for (const item of si.items!) {
        if (item.item === 'Dry-Cleaning') {
          continue;
        }

        acc[item.item as string] ??= 0;
        acc[item.item as string] += item.quantity as number;
      }

      return acc;
    }, {} as Record<string, number>);

    /**
     * Set order quantity for the first of the month.
     */
    Object.keys(itemGrouped).forEach((name) => {
      const quantity = itemGrouped[name];
      purchaseQty[name] ??= 0;
      let prevQty = purchaseQty[name];

      if (prevQty <= quantity) {
        prevQty = quantity - prevQty;
      }

      purchaseQty[name] = Math.ceil(prevQty / 10) * 10;
    });

    const supplierGrouped = Object.keys(itemGrouped).reduce((acc, item) => {
      const supplier = purchaseItemPartyMap[item];
      acc[supplier] ??= [];
      acc[supplier].push(item);

      return acc;
    }, {} as Record<string, string[]>);

    /**
     * For each supplier create a Purchase Invoice
     */
    for (const supplier in supplierGrouped) {
      const doc = fyo.doc.getNewDoc(
        ModelNameEnum.PurchaseInvoice,
        {
          date,
        },
        false
      ) as PurchaseInvoice;

      await doc.set('party', supplier);
      if (!doc.account) {
        doc.account = 'Creditors';
      }

      /**
       * For each item create a row
       */
      for (const item of supplierGrouped[supplier]) {
        await doc.append('items', {});
        const quantity = purchaseQty[item];
        await doc.items!.at(-1)!.set({ item, quantity });
      }

      invoices.push(doc);
    }
  }

  return invoices;
}

async function getNonSalesPurchaseInvoices(
  fyo: Fyo,
  years: number
): Promise<PurchaseInvoice[]> {
  const purchaseItems = items.filter((i) => i.for !== 'Sales');
  const itemMap = getMapFromList(purchaseItems, 'name');
  const periodic: Record<string, number> = {
    'Marketing - Video': 2,
    'Social Ads': 1,
    Electricity: 1,
    'Office Cleaning': 1,
    'Office Rent': 1,
  };
  const invoices: PurchaseInvoice[] = [];

  for (const months of range(0, years * 12)) {
    /**
     * All purchases on the first of the month.
     */
    const temp = DateTime.now().minus({ months });
    const date = DateTime.local(temp.year, temp.month, 1).toJSDate();

    for (const name in periodic) {
      if (months % periodic[name] !== 0) {
        continue;
      }

      const doc = fyo.doc.getNewDoc(
        ModelNameEnum.PurchaseInvoice,
        {
          date,
        },
        false
      ) as PurchaseInvoice;

      const party = purchaseItemPartyMap[name];
      await doc.set('party', party);
      if (!doc.account) {
        doc.account = 'Creditors';
      }
      await doc.append('items', {});
      const row = doc.items!.at(-1)!;
      const item = itemMap[name];

      let quantity = 1;
      let rate = item.rate;
      if (name === 'Social Ads') {
        quantity = Math.ceil(Math.random() * 200);
      } else if (name !== 'Office Rent') {
        rate = rate * (Math.random() * 0.4 + 0.8);
      }

      await row.set({
        item: item.name,
        quantity,
        rate: fyo.pesa(rate).clip(0),
      });

      invoices.push(doc);
    }
  }

  return invoices;
}

async function generateStaticEntries(fyo: Fyo) {
  await generateItems(fyo);
  await generateParties(fyo);
}

async function generateItems(fyo: Fyo) {
  for (const item of items) {
    const doc = fyo.doc.getNewDoc('Item', item, false);
    await doc.sync();
  }
}

async function generateParties(fyo: Fyo) {
  for (const party of parties) {
    const doc = fyo.doc.getNewDoc('Party', party, false);
    await doc.sync();
  }
}

async function syncAndSubmit(docs: Doc[], notifier?: Notifier) {
  const nameMap: Record<string, string> = {
    [ModelNameEnum.PurchaseInvoice]: t`Invoices`,
    [ModelNameEnum.SalesInvoice]: t`Invoices`,
    [ModelNameEnum.Payment]: t`Payments`,
    [ModelNameEnum.JournalEntry]: t`Journal Entries`,
  };

  const total = docs.length;
  for (const i in docs) {
    const doc = docs[i];
    notifier?.(
      `Syncing ${nameMap[doc.schemaName]}, ${i} out of ${total}`,
      parseInt(i) / total
    );
    await doc.sync();
    await doc.submit();
  }
}

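Taken together, the helpers in this file build static records first and then the transactional documents in dependency order (journal entries, invoices, then payments), syncing and submitting each batch. A minimal sketch of driving them (the wrapper name and the console notifier are assumptions for illustration; only the helper names and the ordering mirror what the hunks above show, and the real entry point lives earlier in this file):

// Hypothetical driver, not the repo's setup function.
async function generateDummyData(fyo: Fyo, years: number, baseCount: number) {
  const notify = (message: string, percent: number) =>
    console.log(`${message} (${Math.round(percent * 100)}%)`);

  await generateStaticEntries(fyo); // Items and Parties must exist before invoices reference them.
  const salesInvoices = await getSalesInvoices(fyo, years, baseCount, notify);
  const purchaseInvoices = await getPurchaseInvoices(fyo, years, salesInvoices);
  const journalEntries = await getJournalEntries(fyo, salesInvoices);
  const invoices = [...salesInvoices, ...purchaseInvoices];

  await syncAndSubmit(journalEntries, notify);
  await syncAndSubmit(invoices, notify);
  await syncAndSubmit(await getPayments(fyo, invoices), notify);
}
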
1
dummy/items.json
Normal file
@ -0,0 +1 @@
|
||||
[{"name": "Dry-Cleaning", "description": null, "unit": "Unit", "itemType": "Service", "incomeAccount": "Service", "expenseAccount": "Cost of Goods Sold", "tax": "GST-18", "rate": 69, "hsnCode": 999712, "for": "Sales"}, {"name": "Electricity", "description": "Bzz Bzz", "unit": "Day", "itemType": "Service", "incomeAccount": "Service", "expenseAccount": "Utility Expenses", "tax": "GST-0", "rate": 6000, "hsnCode": 271600, "for": "Purchases"}, {"name": "Marketing - Video", "description": "One single video", "unit": "Unit", "itemType": "Product", "incomeAccount": "Sales", "expenseAccount": "Marketing Expenses", "tax": "GST-18", "rate": 15000, "hsnCode": 998371, "for": "Purchases"}, {"name": "Office Rent", "description": "Rent per day", "unit": "Day", "itemType": "Product", "incomeAccount": "Sales", "expenseAccount": "Office Rent", "tax": "GST-18", "rate": 50000, "hsnCode": 997212, "for": "Purchases"}, {"name": "Office Cleaning", "description": "Cleaning cost per day", "unit": "Day", "itemType": "Service", "incomeAccount": "Service", "expenseAccount": "Office Maintenance Expenses", "tax": "GST-18", "rate": 7500, "hsnCode": 998533, "for": "Purchases"}, {"name": "Social Ads", "description": "Cost per click", "unit": "Unit", "itemType": "Service", "incomeAccount": "Service", "expenseAccount": "Marketing Expenses", "tax": "GST-18", "rate": 50, "hsnCode": 99836, "for": "Purchases"}, {"name": "Cool Cloth", "description": "Some real \ud83c\udd92 cloth", "unit": "Meter", "itemType": "Product", "incomeAccount": "Sales", "expenseAccount": "Cost of Goods Sold", "tax": "GST-18", "rate": 4000, "hsnCode": 59111000, "for": "Both"}, {"name": "611 Jeans - PCH", "description": "Peach coloured 611s", "unit": "Unit", "itemType": "Product", "incomeAccount": "Sales", "expenseAccount": "Cost of Goods Sold", "tax": "GST-12", "rate": 4499, "hsnCode": 62034990, "for": "Both"}, {"name": "611 Jeans - SHR", "description": "Shark skin 611s", "unit": "Unit", "itemType": "Product", "incomeAccount": "Sales", "expenseAccount": "Cost of Goods Sold", "tax": "GST-12", "rate": 6499, "hsnCode": 62034990, "for": "Both"}, {"name": "Bominga Shoes", "description": null, "unit": "Unit", "itemType": "Product", "incomeAccount": "Sales", "expenseAccount": "Cost of Goods Sold", "tax": "GST-18", "rate": 4999, "hsnCode": 640291, "for": "Both"}, {"name": "Cryo Gloves", "description": "Keeps hands cool", "unit": "Unit", "itemType": "Product", "incomeAccount": "Sales", "expenseAccount": "Cost of Goods Sold", "tax": "GST-18", "rate": 3499, "hsnCode": 611693, "for": "Both"}, {"name": "Epaulettes - 4POR", "description": "Porcelain epaulettes", "unit": "Unit", "itemType": "Product", "incomeAccount": "Sales", "expenseAccount": "Cost of Goods Sold", "tax": "GST-18", "rate": 2499, "hsnCode": 62179090, "for": "Both"}, {"name": "Full Sleeve - BLK", "description": "Black sleeved", "unit": "Unit", "itemType": "Product", "incomeAccount": "Sales", "expenseAccount": "Cost of Goods Sold", "tax": "GST-12", "rate": 599, "hsnCode": 100820, "for": "Both"}, {"name": "Full Sleeve - COL", "description": "All color sleeved", "unit": "Unit", "itemType": "Product", "incomeAccount": "Sales", "expenseAccount": "Cost of Goods Sold", "tax": "GST-12", "rate": 499, "hsnCode": 100820, "for": "Both"}, {"name": "Jacket - RAW", "description": "Raw baby skinned jackets", "unit": "Unit", "itemType": "Product", "incomeAccount": "Sales", "expenseAccount": "Cost of Goods Sold", "tax": "GST-12", "rate": 8999, "hsnCode": 100820, "for": "Both"}, {"name": "Jade Slippers", "description": 
null, "unit": "Unit", "itemType": "Product", "incomeAccount": "Sales", "expenseAccount": "Cost of Goods Sold", "tax": "GST-18", "rate": 2999, "hsnCode": 640520, "for": "Both"}]
|
1
dummy/logo.ts
Normal file
File diff suppressed because one or more lines are too long
1
dummy/parties.json
Normal file
File diff suppressed because one or more lines are too long
49
dummy/tests/testDummy.spec.ts
Normal file
@ -0,0 +1,49 @@
import * as assert from 'assert';
import { DatabaseManager } from 'backend/database/manager';
import { assertDoesNotThrow } from 'backend/database/tests/helpers';
import { purchaseItemPartyMap } from 'dummy/helpers';
import { Fyo } from 'fyo';
import { DummyAuthDemux } from 'fyo/tests/helpers';
import 'mocha';
import { getTestDbPath } from 'tests/helpers';
import { setupDummyInstance } from '..';

describe('dummy', function () {
  const dbPath = getTestDbPath();

  let fyo: Fyo;

  this.beforeAll(function () {
    fyo = new Fyo({
      DatabaseDemux: DatabaseManager,
      AuthDemux: DummyAuthDemux,
      isTest: true,
      isElectron: false,
    });
  });

  this.afterAll(async function () {
    await fyo.close();
  });

  specify('setupDummyInstance', async function () {
    await assertDoesNotThrow(async () => {
      await setupDummyInstance(dbPath, fyo, 1, 25);
    }, 'setup instance failed');

    for (const item in purchaseItemPartyMap) {
      assert.strictEqual(
        await fyo.db.exists('Item', item),
        true,
        `not found ${item}`
      );

      const party = purchaseItemPartyMap[item];
      assert.strictEqual(
        await fyo.db.exists('Party', party),
        true,
        `not found ${party}`
      );
    }
  }).timeout(120_000);
});

@ -1,12 +1,11 @@
productName: Frappe Books
appId: io.frappe.books
afterSign: build/notarize.js
extraResources: [
  {
    from: 'log_creds.txt',
    to: '../creds/log_creds.txt',
  }
]
extraResources:
  [
    { from: 'log_creds.txt', to: '../creds/log_creds.txt' },
    { from: 'translations', to: '../translations' },
  ]
mac:
  type: distribution
  category: public.app-category.finance

@ -61,16 +61,13 @@
  },
  "Angola":
  {
    "code": "ao",
    "currency": "KZ",
    "currency_fraction": "Cêntimo",
    "currency_fraction_units": 100,
    "currency_symbol": "AOA",
    "currency_name": "Kwanza",
    "timezones":
    [
      "Africa/Luanda"
    ]
    "code": "ao",
    "currency": "AOA",
    "currency_fraction": "Cêntimo",
    "currency_fraction_units": 100,
    "currency_symbol": "Kz",
    "currency_name": "Kwanza",
    "timezones": ["Africa/Luanda"]
  },
  "Anguilla":
  {

@ -1,172 +0,0 @@
|
||||
import { t } from 'frappe';
|
||||
|
||||
export default {
|
||||
[t`Application of Funds (Assets)`]: {
|
||||
[t`Current Assets`]: {
|
||||
[t`Accounts Receivable`]: {
|
||||
[t`Debtors`]: {
|
||||
accountType: 'Receivable',
|
||||
},
|
||||
},
|
||||
[t`Bank Accounts`]: {
|
||||
accountType: 'Bank',
|
||||
isGroup: 1,
|
||||
},
|
||||
[t`Cash In Hand`]: {
|
||||
[t`Cash`]: {
|
||||
accountType: 'Cash',
|
||||
},
|
||||
accountType: 'Cash',
|
||||
},
|
||||
[t`Loans and Advances (Assets)`]: {
|
||||
isGroup: 1,
|
||||
},
|
||||
[t`Securities and Deposits`]: {
|
||||
[t`Earnest Money`]: {},
|
||||
},
|
||||
[t`Stock Assets`]: {
|
||||
[t`Stock In Hand`]: {
|
||||
accountType: 'Stock',
|
||||
},
|
||||
accountType: 'Stock',
|
||||
},
|
||||
[t`Tax Assets`]: {
|
||||
isGroup: 1,
|
||||
},
|
||||
},
|
||||
[t`Fixed Assets`]: {
|
||||
[t`Capital Equipments`]: {
|
||||
accountType: 'Fixed Asset',
|
||||
},
|
||||
[t`Electronic Equipments`]: {
|
||||
accountType: 'Fixed Asset',
|
||||
},
|
||||
[t`Furnitures and Fixtures`]: {
|
||||
accountType: 'Fixed Asset',
|
||||
},
|
||||
[t`Office Equipments`]: {
|
||||
accountType: 'Fixed Asset',
|
||||
},
|
||||
[t`Plants and Machineries`]: {
|
||||
accountType: 'Fixed Asset',
|
||||
},
|
||||
[t`Buildings`]: {
|
||||
accountType: 'Fixed Asset',
|
||||
},
|
||||
[t`Softwares`]: {
|
||||
accountType: 'Fixed Asset',
|
||||
},
|
||||
[t`Accumulated Depreciation`]: {
|
||||
accountType: 'Accumulated Depreciation',
|
||||
},
|
||||
},
|
||||
[t`Investments`]: {
|
||||
isGroup: 1,
|
||||
},
|
||||
[t`Temporary Accounts`]: {
|
||||
[t`Temporary Opening`]: {
|
||||
accountType: 'Temporary',
|
||||
},
|
||||
},
|
||||
rootType: 'Asset',
|
||||
},
|
||||
[t`Expenses`]: {
|
||||
[t`Direct Expenses`]: {
|
||||
[t`Stock Expenses`]: {
|
||||
[t`Cost of Goods Sold`]: {
|
||||
accountType: 'Cost of Goods Sold',
|
||||
},
|
||||
[t`Expenses Included In Valuation`]: {
|
||||
accountType: 'Expenses Included In Valuation',
|
||||
},
|
||||
[t`Stock Adjustment`]: {
|
||||
accountType: 'Stock Adjustment',
|
||||
},
|
||||
},
|
||||
},
|
||||
[t`Indirect Expenses`]: {
|
||||
[t`Administrative Expenses`]: {},
|
||||
[t`Commission on Sales`]: {},
|
||||
[t`Depreciation`]: {
|
||||
accountType: 'Depreciation',
|
||||
},
|
||||
[t`Entertainment Expenses`]: {},
|
||||
[t`Freight and Forwarding Charges`]: {
|
||||
accountType: 'Chargeable',
|
||||
},
|
||||
[t`Legal Expenses`]: {},
|
||||
[t`Marketing Expenses`]: {
|
||||
accountType: 'Chargeable',
|
||||
},
|
||||
[t`Miscellaneous Expenses`]: {
|
||||
accountType: 'Chargeable',
|
||||
},
|
||||
[t`Office Maintenance Expenses`]: {},
|
||||
[t`Office Rent`]: {},
|
||||
[t`Postal Expenses`]: {},
|
||||
[t`Print and Stationery`]: {},
|
||||
[t`Round Off`]: {
|
||||
accountType: 'Round Off',
|
||||
},
|
||||
[t`Salary`]: {},
|
||||
[t`Sales Expenses`]: {},
|
||||
[t`Telephone Expenses`]: {},
|
||||
[t`Travel Expenses`]: {},
|
||||
[t`Utility Expenses`]: {},
|
||||
[t`Write Off`]: {},
|
||||
[t`Exchange Gain/Loss`]: {},
|
||||
[t`Gain/Loss on Asset Disposal`]: {},
|
||||
},
|
||||
rootType: 'Expense',
|
||||
},
|
||||
[t`Income`]: {
|
||||
[t`Direct Income`]: {
|
||||
[t`Sales`]: {},
|
||||
[t`Service`]: {},
|
||||
},
|
||||
[t`Indirect Income`]: {
|
||||
isGroup: 1,
|
||||
},
|
||||
rootType: 'Income',
|
||||
},
|
||||
[t`Source of Funds (Liabilities)`]: {
|
||||
[t`Current Liabilities`]: {
|
||||
[t`Accounts Payable`]: {
|
||||
[t`Creditors`]: {
|
||||
accountType: 'Payable',
|
||||
},
|
||||
[t`Payroll Payable`]: {},
|
||||
},
|
||||
[t`Stock Liabilities`]: {
|
||||
[t`Stock Received But Not Billed`]: {
|
||||
accountType: 'Stock Received But Not Billed',
|
||||
},
|
||||
},
|
||||
[t`Duties and Taxes`]: {
|
||||
accountType: 'Tax',
|
||||
isGroup: 1,
|
||||
},
|
||||
[t`Loans (Liabilities)`]: {
|
||||
[t`Secured Loans`]: {},
|
||||
[t`Unsecured Loans`]: {},
|
||||
[t`Bank Overdraft Account`]: {},
|
||||
},
|
||||
},
|
||||
rootType: 'Liability',
|
||||
},
|
||||
[t`Equity`]: {
|
||||
[t`Capital Stock`]: {
|
||||
accountType: 'Equity',
|
||||
},
|
||||
[t`Dividends Paid`]: {
|
||||
accountType: 'Equity',
|
||||
},
|
||||
[t`Opening Balance Equity`]: {
|
||||
accountType: 'Equity',
|
||||
},
|
||||
[t`Retained Earnings`]: {
|
||||
accountType: 'Equity',
|
||||
},
|
||||
rootType: 'Equity',
|
||||
},
|
||||
};
|
@ -386,7 +386,7 @@
|
||||
},
|
||||
"Duties and Taxes": {
|
||||
"accountType": "Tax",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Reservations & Credit Notes": {
|
||||
"Credit Notes": {
|
||||
|
@ -11,7 +11,7 @@
|
||||
},
|
||||
"Banque": {
|
||||
"accountType": "Bank",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Comptes \u00e0 recevoir": {
|
||||
"Comptes clients": {
|
||||
@ -20,7 +20,7 @@
|
||||
"Provision pour cr\u00e9ances douteuses": {}
|
||||
},
|
||||
"Encaisse": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Frais pay\u00e9s d\u2019avance": {
|
||||
"Assurances pay\u00e9s d'avance": {},
|
||||
@ -29,7 +29,7 @@
|
||||
},
|
||||
"Petite caisse": {
|
||||
"accountType": "Cash",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Stocks": {
|
||||
"Mati\u00e8res premi\u00e8res": {},
|
||||
@ -175,7 +175,7 @@
|
||||
"Loyer - b\u00e2timent": {},
|
||||
"Loyer - entrep\u00f4t": {},
|
||||
"Op\u00e9rations indirectes de la main-d\u2019\u0153uvre directe": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"R\u00e9parations et entretien - b\u00e2timent": {},
|
||||
"R\u00e9parations et entretien - machinerie et \u00e9quipement": {},
|
||||
|
@ -9,12 +9,12 @@
|
||||
"Animales": {
|
||||
"accountNumber": "1.5.2.1",
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Plantas": {
|
||||
"accountNumber": "1.5.2.2",
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"accountNumber": "1.5.2",
|
||||
"accountType": "Stock"
|
||||
@ -22,7 +22,7 @@
|
||||
"Activos Biol\u00f3gicos al Costo": {
|
||||
"accountNumber": "1.5.1",
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"accountNumber": "1.5",
|
||||
"accountType": "Stock"
|
||||
@ -32,7 +32,7 @@
|
||||
"Cr\u00e9dito Fiscal (IVA Por Cobrar)": {
|
||||
"accountNumber": "1.1.2.1",
|
||||
"accountType": "Chargeable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"accountNumber": "1.1.2",
|
||||
"accountType": "Chargeable"
|
||||
@ -47,22 +47,22 @@
|
||||
"Activos Adicionales y Otros": {
|
||||
"accountNumber": "1.6.6",
|
||||
"accountType": "Chargeable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Cobrables Relacionados con Impuestos": {
|
||||
"accountNumber": "1.6.2",
|
||||
"accountType": "Chargeable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Contratos de Construccion": {
|
||||
"accountNumber": "1.6.4",
|
||||
"accountType": "Chargeable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Costos de Montaje": {
|
||||
"accountNumber": "1.6.5",
|
||||
"accountType": "Chargeable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Pagos Anticipados y Otros Activos Circulantes": {
|
||||
"Seguro Pagado Anticipadamente": {
|
||||
@ -75,7 +75,7 @@
|
||||
"Proveedores de Servicio": {
|
||||
"accountNumber": "1.6.3",
|
||||
"accountType": "Chargeable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"accountNumber": "1.6",
|
||||
"accountType": "Chargeable"
|
||||
@ -84,32 +84,32 @@
|
||||
"Activos Financieros Clasificados por Designaci\u00f3n": {
|
||||
"accountNumber": "1.4.6",
|
||||
"accountType": "Chargeable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Activos Financieros Derivados": {
|
||||
"accountNumber": "1.4.3",
|
||||
"accountType": "Chargeable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Inversion o Participaci\u00f3n Accionaria en Empresas Afiliadas": {
|
||||
"accountNumber": "1.4.1",
|
||||
"accountType": "Chargeable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Inversiones Burs\u00e1tiles e Instrumentos Financieros": {
|
||||
"accountNumber": "1.4.2",
|
||||
"accountType": "Chargeable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Otros Activos Financieros": {
|
||||
"accountNumber": "1.4.4",
|
||||
"accountType": "Chargeable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Provisi\u00f3n por Riesgo de Cr\u00e9dito (agregado) (Contra-activo)": {
|
||||
"accountNumber": "1.4.5",
|
||||
"accountType": "Round Off",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"accountNumber": "1.4",
|
||||
"accountType": "Chargeable"
|
||||
@ -117,13 +117,13 @@
|
||||
"Activos Intangibles": {
|
||||
"accountNumber": "1.3",
|
||||
"accountType": "Chargeable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Caja y Equivalentes": {
|
||||
"Caja": {
|
||||
"accountNumber": "1.9.1",
|
||||
"accountType": "Cash",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Equivalentes de Efectivo (Bancos)": {
|
||||
"Bancos Internacionales": {
|
||||
@ -146,7 +146,7 @@
|
||||
"Banco Industrial": {
|
||||
"accountNumber": "1.9.2.1.1",
|
||||
"accountType": "Bank",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Banco Internacional": {
|
||||
"accountNumber": "1.9.2.1.6",
|
||||
@ -189,12 +189,12 @@
|
||||
"Inversiones a Corto Plazo": {
|
||||
"accountNumber": "1.9.3",
|
||||
"accountType": "Bank",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Otros Equivalentes de Caja y Bancos": {
|
||||
"accountNumber": "1.9.4",
|
||||
"accountType": "Cash",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"accountNumber": "1.9",
|
||||
"accountType": "Bank"
|
||||
@ -203,12 +203,12 @@
|
||||
"Activos bajo Contrato": {
|
||||
"accountNumber": "1.8.2",
|
||||
"accountType": "Receivable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Ajustes": {
|
||||
"accountNumber": "1.8.4",
|
||||
"accountType": "Chargeable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Otras Cuentas por Cobrar": {
|
||||
"Cuentas Por Cobrar Compa\u00f1\u00edas Afiliadas": {
|
||||
@ -241,7 +241,7 @@
|
||||
"Ventas al Cr\u00e9dito": {
|
||||
"accountNumber": "1.8.1",
|
||||
"accountType": "Receivable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"accountNumber": "1.8",
|
||||
"accountType": "Receivable"
|
||||
@ -253,38 +253,38 @@
|
||||
"Art\u00edculos de Inventario Adicionales": {
|
||||
"accountNumber": "1.7.8",
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Combustibles": {
|
||||
"accountNumber": "1.7.5",
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Inventarios Pignorados Como Garant\u00eda de Pasivo": {
|
||||
"accountNumber": "1.7.10",
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Inventarios a Valor Razonable Menos Costos de Venta": {
|
||||
"accountNumber": "1.7.11",
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Materia Prima": {
|
||||
"accountNumber": "1.7.1",
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Mercader\u00eda (Mercanc\u00edas)": {
|
||||
"accountNumber": "1.7.2",
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Otros Inventarios": {
|
||||
"Merma o Ajuste de Inventario": {
|
||||
"accountNumber": "1.7.9.1",
|
||||
"accountType": "Stock Adjustment",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"accountNumber": "1.7.9",
|
||||
"accountType": "Stock"
|
||||
@ -292,13 +292,13 @@
|
||||
"Producto Terminado": {
|
||||
"accountNumber": "1.7.7",
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Repuestos": {
|
||||
"Respuestos en Transito": {
|
||||
"accountNumber": "1.7.4.0",
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"accountNumber": "1.7.4",
|
||||
"accountType": "Stock"
|
||||
@ -306,12 +306,12 @@
|
||||
"Suministros de Producci\u00f3n y Consumibles": {
|
||||
"accountNumber": "1.7.3",
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Trabajo en Progeso": {
|
||||
"accountNumber": "1.7.6",
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"accountNumber": "1.7",
|
||||
"accountType": "Stock"
|
||||
@ -324,7 +324,7 @@
|
||||
"Inversion Inmobiliaria Construida": {
|
||||
"accountNumber": "1.2.2",
|
||||
"accountType": "Chargeable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"accountNumber": "1.2",
|
||||
"accountType": "Chargeable"
|
||||
|
@ -29,16 +29,16 @@
|
||||
},
|
||||
"123. \u00c9p\u00fcletek, \u00e9p\u00fcletr\u00e9szek, tulajdoni h\u00e1nyadok ": {
|
||||
"accountType": "Fixed Asset",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"124. Egy\u00e9b ingatlanok": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"125. \u00dczemk\u00f6r\u00f6n k\u00edv\u00fcli ingatlanok, \u00e9p\u00fcletek ": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"126. Ingatlanokhoz kapcsol\u00f3d\u00f3 vagyoni \u00e9rt\u00e9k\u0171 jogok": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"127. Ingatlanok \u00e9rt\u00e9khelyesb\u00edt\u00e9se": {},
|
||||
"129. Kis \u00e9rt\u00e9k\u0171 ingatlanok": {}
|
||||
@ -148,7 +148,7 @@
|
||||
"239. Befejezetlen termel\u00e9s \u00e9s f\u00e9lk\u00e9sz term\u00e9kek \u00e9rt\u00e9kveszt\u00e9se \u00e9s annak vissza\u00edr\u00e1sa": {}
|
||||
},
|
||||
"24. N\u00d6VEND\u00c9K-, H\u00cdZ\u00d3- \u00c9S EGY\u00c9B \u00c1LLATOK": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"25. K\u00c9SZTERM\u00c9KEK": {
|
||||
"251-257. K\u00e9szterm\u00e9kek": {},
|
||||
@ -158,23 +158,23 @@
|
||||
"26-28. \u00c1RUK ": {
|
||||
"261. Kereskedelmi \u00e1ruk": {
|
||||
"accountType": "Stock",
|
||||
"isGroup": 0
|
||||
"isGroup": false
|
||||
},
|
||||
"262. Idegen helyen t\u00e1rolt, bizom\u00e1nyba \u00e1tadott \u00e1ruk": {
|
||||
"accountType": "Stock",
|
||||
"isGroup": 0
|
||||
"isGroup": false
|
||||
},
|
||||
"263. T\u00e1rgyi eszk\u00f6z\u00f6k k\u00f6z\u00fcl \u00e1tsorolt \u00e1ruk": {
|
||||
"accountType": "Stock",
|
||||
"isGroup": 0
|
||||
"isGroup": false
|
||||
},
|
||||
"264. Bels\u0151 (egys\u00e9gek, tev\u00e9kenys\u00e9gek k\u00f6z\u00f6tti) \u00e1tad\u00e1s-\u00e1tv\u00e9tel \u00fctk\u00f6z\u0151sz\u00e1mla": {
|
||||
"accountType": "Stock",
|
||||
"isGroup": 0
|
||||
"isGroup": false
|
||||
},
|
||||
"269. Kereskedelmi \u00e1ruk \u00e9rt\u00e9kveszt\u00e9se \u00e9s annak vissza\u00edr\u00e1sa": {
|
||||
"accountType": "Stock",
|
||||
"isGroup": 0
|
||||
"isGroup": false
|
||||
},
|
||||
"accountType": "Stock"
|
||||
},
|
||||
@ -183,7 +183,7 @@
|
||||
"279. K\u00f6zvet\u00edtett szolg\u00e1ltat\u00e1sok \u00e9rt\u00e9kveszt\u00e9se \u00e9s annak vissza\u00edr\u00e1sa": {}
|
||||
},
|
||||
"28. BET\u00c9TD\u00cdJAS G\u00d6NGY\u00d6LEGEK": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"rootType": "Asset"
|
||||
},
|
||||
@ -205,13 +205,13 @@
|
||||
"319. K\u00fclf\u00f6ldi k\u00f6vetel\u00e9sek \u00e9rt\u00e9kveszt\u00e9se \u00e9s annak vissza\u00edr\u00e1sa": {}
|
||||
},
|
||||
"32. K\u00d6VETEL\u00c9SEK KAPCSOLT V\u00c1LLALKOZ\u00c1SSAL SZEMBEN": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"33. K\u00d6VETEL\u00c9SEK EGY\u00c9B R\u00c9SZESED\u00c9SI VISZONYBAN L\u00c9V\u00d5 V\u00c1LLALKOZ\u00c1SSAL SZEMBEN ": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"34. V\u00c1LT\u00d3K\u00d6VETEL\u00c9SEK": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"35. ADOTT EL\u00d5LEGEK": {
|
||||
"351. Immateri\u00e1lis javakra adott el\u0151legek": {},
|
||||
@ -227,17 +227,17 @@
|
||||
"3613. Egy\u00e9b elsz\u00e1mol\u00e1sok a munkav\u00e1llal\u00f3kkal": {}
|
||||
},
|
||||
"362. K\u00f6lts\u00e9gvet\u00e9ssel szembeni k\u00f6vetel\u00e9sek": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"363. R\u00f6vid lej\u00e1ratra k\u00f6lcs\u00f6nadott p\u00e9nzeszk\u00f6z\u00f6k": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"364. R\u00e9szesed\u00e9sekkel, \u00e9rt\u00e9kpap\u00edrokkal kapcsolatos k\u00f6vetel\u00e9sek": {
|
||||
"3641. R\u00f6vid lej\u00e1rat\u00fa k\u00f6lcs\u00f6n\u00f6k": {},
|
||||
"3642. Tart\u00f3san adott k\u00f6lcs\u00f6n\u00f6kb\u0151l \u00e1tsorolt k\u00f6vetel\u00e9sek": {}
|
||||
},
|
||||
"365. V\u00e1s\u00e1rolt \u00e9s kapott k\u00f6vetel\u00e9sek ": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"366. R\u00e9szesed\u00e9sekkel, \u00e9rt\u00e9kpap\u00edrokkal kapcsolatos k\u00f6vetel\u00e9sek": {},
|
||||
"367. Hat\u00e1rid\u0151s, opci\u00f3s \u00e9s swap \u00fcgyletekkel kapcsolatos k\u00f6vetel\u00e9sek": {},
|
||||
@ -285,7 +285,7 @@
|
||||
"383. Csekkek": {},
|
||||
"384. Elsz\u00e1mol\u00e1si bet\u00e9tsz\u00e1mla ": {
|
||||
"accountType": "Bank",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"385. Elk\u00fcl\u00f6n\u00edtett bet\u00e9tsz\u00e1ml\u00e1k ": {
|
||||
"3851. Kamatoz\u00f3 bet\u00e9tsz\u00e1ml\u00e1k": {},
|
||||
@ -373,7 +373,7 @@
|
||||
"4452. Egy\u00e9b hossz\u00fa lej\u00e1rat\u00fa hitelek deviz\u00e1ban": {}
|
||||
},
|
||||
"446. Tart\u00f3s k\u00f6telezetts\u00e9gek kapcsolt v\u00e1llalkoz\u00e1ssal szemben ": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"447. Tart\u00f3s k\u00f6telezetts\u00e9gek egy\u00e9b r\u00e9szesed\u00e9si viszonyban l\u00e9v\u0151 v\u00e1llalkoz\u00e1ssal szemben": {},
|
||||
"448. P\u00e9nz\u00fcgyi l\u00edzing miatti k\u00f6telezetts\u00e9gek ": {},
|
||||
@ -449,7 +449,7 @@
|
||||
"463-9. C\u00e9gaut\u00f3ad\u00f3 ": {}
|
||||
},
|
||||
"464. G\u00e9pj\u00e1rm\u0171 ad\u00f3 (c\u00e9gaut\u00f3ad\u00f3) elsz\u00e1mol\u00e1sa": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"465. V\u00e1m- \u00e9s p\u00e9nz\u00fcgy\u0151rs\u00e9g elsz\u00e1mol\u00e1si sz\u00e1mla ": {
|
||||
"4651. V\u00e1mk\u00f6lts\u00e9gek \u00e9s egy\u00e9b v\u00e1mterhek elsz\u00e1mol\u00e1si sz\u00e1mla": {},
|
||||
@ -619,7 +619,7 @@
|
||||
"589. Aktiv\u00e1lt saj\u00e1t teljes\u00edtm\u00e9nyek \u00e1tvezet\u00e9si sz\u00e1mla": {}
|
||||
},
|
||||
"59. K\u00d6LTS\u00c9GNEM \u00c1TVEZET\u00c9SI SZ\u00c1MLA": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"rootType": "Expense"
|
||||
},
|
||||
@ -647,7 +647,7 @@
|
||||
"rootType": "Expense"
|
||||
},
|
||||
"7. SZ\u00c1MLAOSZT\u00c1LY TEV\u00c9KENYS\u00c9GEKK\u00d6LTS\u00c9GEI": {
|
||||
"isGroup": 1,
|
||||
"isGroup": true,
|
||||
"rootType": "Expense"
|
||||
},
|
||||
"8. SZ\u00c1MLAOSZT\u00c1LY \u00c9RT\u00c9KES\u00cdT\u00c9S ELSZ\u00c1MOLT \u00d6NK\u00d6LTS\u00c9GE \u00c9S R\u00c1FORD\u00cdT\u00c1SOK": {
|
||||
@ -812,7 +812,7 @@
|
||||
"9684. R\u00e9szesed\u00e9sek \u00e9rt\u00e9kveszt\u00e9s\u00e9nek vissza\u00edr\u00e1sa": {}
|
||||
},
|
||||
"969. K\u00fcl\u00f6nf\u00e9le egy\u00e9b bev\u00e9telek": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
}
|
||||
},
|
||||
"97. P\u00c9NZ\u00dcGYI M\u0170VELETEK BEV\u00c9TELEI": {
|
||||
|
@ -14,11 +14,11 @@
|
||||
"Bank ": {
|
||||
"Bank Other Currency": {
|
||||
"accountNumber": "1122.000",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Bank Rupiah": {
|
||||
"accountNumber": "1121.000",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"accountNumber": "1120.000",
|
||||
"accountType": "Bank"
|
||||
@ -70,7 +70,7 @@
|
||||
"Persediaan Barang": {
|
||||
"accountNumber": "1141.000",
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Uang Muka Pembelian": {
|
||||
"Uang Muka Pembelian": {
|
||||
@ -122,7 +122,7 @@
|
||||
"Investasi": {
|
||||
"Deposito": {
|
||||
"accountNumber": "1231.003",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Investai Saham": {
|
||||
"Investasi Saham": {
|
||||
|
@ -6,13 +6,13 @@
|
||||
"Current Assets": {
|
||||
"Accounts Receivable": {
|
||||
"Debtors": {
|
||||
"isGroup": 0,
|
||||
"isGroup": false,
|
||||
"accountType": "Receivable"
|
||||
}
|
||||
},
|
||||
"Bank Accounts": {
|
||||
"accountType": "Bank",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Cash In Hand": {
|
||||
"Cash": {
|
||||
@ -21,7 +21,7 @@
|
||||
"accountType": "Cash"
|
||||
},
|
||||
"Loans and Advances (Assets)": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Securities and Deposits": {
|
||||
"Earnest Money": {}
|
||||
@ -33,7 +33,7 @@
|
||||
"accountType": "Stock"
|
||||
},
|
||||
"Tax Assets": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
}
|
||||
},
|
||||
"Fixed Assets": {
|
||||
@ -60,7 +60,7 @@
|
||||
}
|
||||
},
|
||||
"Investments": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Temporary Accounts": {
|
||||
"Temporary Opening": {
|
||||
@ -126,7 +126,7 @@
|
||||
},
|
||||
"Indirect Income": {
|
||||
"accountType": "Income Account",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"rootType": "Income"
|
||||
},
|
||||
|
@ -110,7 +110,7 @@
|
||||
},
|
||||
"INVENTARIOS": {
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
}
|
||||
},
|
||||
"ACTIVO LARGO PLAZO": {
|
||||
|
@ -57,7 +57,7 @@
|
||||
},
|
||||
"Otros Equivalentes a Efectivo": {
|
||||
"accountType": "Cash",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
}
|
||||
},
|
||||
"Impuestos Acreditables": {
|
||||
@ -91,41 +91,41 @@
|
||||
},
|
||||
"Todos los Almacenes": {
|
||||
"accountType": "Stock",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"accountType": "Stock"
|
||||
},
|
||||
"Otras Cuentas por Cobrar": {
|
||||
"accountType": "Receivable",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
}
|
||||
},
|
||||
"Activo no Corriente": {
|
||||
"Activo por Impuestos Diferidos": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Activos Intangibles": {
|
||||
"Amortizacion de Activos Intangibles": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Concesiones": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Derechos de Autor": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Deterioro de Valor de Activos Intangibles": {},
|
||||
"Gastos de investigacion": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Licencias": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Marcas Registradas": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Patentes": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
}
|
||||
},
|
||||
"Amortizables": {
|
||||
@ -138,7 +138,7 @@
|
||||
"accountType": "Expenses Included In Valuation"
|
||||
},
|
||||
"Mejoras en Bienes Arrendados": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
}
|
||||
},
|
||||
"Bienes en Arrendamiento Financiero": {
|
||||
@ -147,28 +147,28 @@
|
||||
},
|
||||
"Cuentas por Cobrar a Largo Plazo": {
|
||||
"Creditos a Largo Plazo": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
}
|
||||
},
|
||||
"Inversiones Permanentes": {
|
||||
"Inversiones Permanentes 1": {
|
||||
"accountType": "Fixed Asset",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Negocios Conjuntos": {
|
||||
"accountType": "Fixed Asset",
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
}
|
||||
},
|
||||
"Inversiones a Largo Plazo": {
|
||||
"Depositos Bancarios a Plazo": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Intereses percibidos por adelantado": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Titulos y Acciones": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
}
|
||||
},
|
||||
"Propiedad Planta y Equipo": {
|
||||
@ -203,7 +203,7 @@
|
||||
}
|
||||
},
|
||||
"Donaciones": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Ganancias Acumuladas": {
|
||||
"Reservas": {
|
||||
@ -319,7 +319,7 @@
|
||||
},
|
||||
"Pasivo": {
|
||||
"Obligaciones por Arrendamiento Financiero a Largo Plazo": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Pasivo Corriente": {
|
||||
"Anticipos de Clientes": {},
|
||||
@ -345,11 +345,11 @@
|
||||
},
|
||||
"Gastos por Pagar": {
|
||||
"Prestaciones Sociales": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Salarios por Pagar": {},
|
||||
"Servicios Basicos 1": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
}
|
||||
},
|
||||
"Impuestos por Pagar": {
|
||||
@ -372,17 +372,17 @@
|
||||
}
|
||||
},
|
||||
"Otras Cuentas por Pagar": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Pasivos Financieros a Corto Plazo": {
|
||||
"Otras Deudas Bancarias": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Prestamos por Pagar a Corto Plazo": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Sobregiros Bancarios": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
}
|
||||
},
|
||||
"Provisiones por Pagar": {
|
||||
@ -473,20 +473,20 @@
|
||||
},
|
||||
"Pasivo No Corriente": {
|
||||
"Cuentas por Pagar a Largo Plaso": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Otras Cuentas por Pagar a Largo Plazo": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Otros Pasivos Financieros a Largo Plaso": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Prestamos a Largo Plazo": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
}
|
||||
},
|
||||
"Pasivo por Impuestos Diferidos": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"rootType": "Liability"
|
||||
}
|
||||
|
@ -3,7 +3,7 @@
|
||||
"name": "Netherlands - Grootboekschema",
|
||||
"tree": {
|
||||
"FABRIKAGEREKENINGEN": {
|
||||
"isGroup": 1,
|
||||
"isGroup": true,
|
||||
"rootType": "Expense"
|
||||
},
|
||||
"FINANCIELE REKENINGEN, KORTLOPENDE VORDERINGEN EN SCHULDEN": {
|
||||
@ -106,7 +106,7 @@
|
||||
"rootType": "Asset"
|
||||
},
|
||||
"INDIRECTE KOSTEN": {
|
||||
"isGroup": 1,
|
||||
"isGroup": true,
|
||||
"rootType": "Expense"
|
||||
},
|
||||
"KOSTENREKENINGEN": {
|
||||
|
@ -7,8 +7,8 @@
|
||||
}
|
||||
},
|
||||
"Bank Accounts": {
|
||||
"accountType": "Bank",
|
||||
"isGroup": 1
|
||||
"accountType": "Bank",
|
||||
"isGroup": true
|
||||
},
|
||||
"Cash In Hand": {
|
||||
"Cash": {
|
||||
@ -17,7 +17,7 @@
|
||||
"accountType": "Cash"
|
||||
},
|
||||
"Loans and Advances (Assets)": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Securities and Deposits": {
|
||||
"Earnest Money": {}
|
||||
@ -29,7 +29,7 @@
|
||||
"accountType": "Stock"
|
||||
},
|
||||
"Tax Assets": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
}
|
||||
},
|
||||
"Fixed Assets": {
|
||||
@ -59,7 +59,7 @@
|
||||
}
|
||||
},
|
||||
"Investments": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"Temporary Accounts": {
|
||||
"Temporary Opening": {
|
||||
@ -123,7 +123,7 @@
|
||||
"Service": {}
|
||||
},
|
||||
"Indirect Income": {
|
||||
"isGroup": 1
|
||||
"isGroup": true
|
||||
},
|
||||
"rootType": "Income"
|
||||
},
|
||||
@ -141,8 +141,8 @@
|
||||
}
|
||||
},
|
||||
"Duties and Taxes": {
|
||||
"accountType": "Tax",
|
||||
"isGroup": 1
|
||||
"accountType": "Tax",
|
||||
"isGroup": true
|
||||
},
|
||||
"Loans (Liabilities)": {
|
||||
"Secured Loans": {},
|
||||
|
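The chart-of-accounts hunks above consistently replace "isGroup": 1 / 0 with true / false. A plausible reason (an assumption, not stated anywhere in this diff) is that the tree nodes are now read through a typed interface with a boolean flag, roughly along these lines:

// Illustrative shape only; the real type lives in the COA loader, not in this diff.
interface COATreeNode {
  accountType?: string;
  accountNumber?: string;
  isGroup?: boolean; // previously serialized as 0 / 1 in the chart JSON files
  rootType?: 'Asset' | 'Liability' | 'Income' | 'Expense' | 'Equity';
  [childName: string]: COATreeNode | string | boolean | undefined;
}
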
@ -1,830 +0,0 @@
|
||||
import frappe from 'frappe';
|
||||
import Observable from 'frappe/utils/observable';
|
||||
import Knex from 'knex';
|
||||
import CacheManager from '../utils/cacheManager';
|
||||
|
||||
export default class Database extends Observable {
|
||||
constructor() {
|
||||
super();
|
||||
this.initTypeMap();
|
||||
this.connectionParams = {};
|
||||
this.cache = new CacheManager();
|
||||
}
|
||||
|
||||
connect() {
|
||||
this.knex = Knex(this.connectionParams);
|
||||
this.knex.on('query-error', (error) => {
|
||||
error.type = this.getError(error);
|
||||
});
|
||||
this.executePostDbConnect();
|
||||
}
|
||||
|
||||
close() {
|
||||
//
|
||||
}
|
||||
|
||||
async migrate() {
|
||||
for (let doctype in frappe.models) {
|
||||
// check if controller module
|
||||
let meta = frappe.getMeta(doctype);
|
||||
let baseDoctype = meta.getBaseDocType();
|
||||
if (!meta.isSingle) {
|
||||
if (await this.tableExists(baseDoctype)) {
|
||||
await this.alterTable(baseDoctype);
|
||||
} else {
|
||||
await this.createTable(baseDoctype);
|
||||
}
|
||||
}
|
||||
}
|
||||
await this.commit();
|
||||
await this.initializeSingles();
|
||||
}
|
||||
|
||||
async initializeSingles() {
|
||||
let singleDoctypes = frappe
|
||||
.getModels((model) => model.isSingle)
|
||||
.map((model) => model.name);
|
||||
|
||||
for (let doctype of singleDoctypes) {
|
||||
if (await this.singleExists(doctype)) {
|
||||
const singleValues = await this.getSingleFieldsToInsert(doctype);
|
||||
singleValues.forEach(({ fieldname, value }) => {
|
||||
let singleValue = frappe.newDoc({
|
||||
doctype: 'SingleValue',
|
||||
parent: doctype,
|
||||
fieldname,
|
||||
value,
|
||||
});
|
||||
singleValue.insert();
|
||||
});
|
||||
continue;
|
||||
}
|
||||
let meta = frappe.getMeta(doctype);
|
||||
if (meta.fields.every((df) => df.default == null)) {
|
||||
continue;
|
||||
}
|
||||
let defaultValues = meta.fields.reduce((doc, df) => {
|
||||
if (df.default != null) {
|
||||
doc[df.fieldname] = df.default;
|
||||
}
|
||||
return doc;
|
||||
}, {});
|
||||
await this.updateSingle(doctype, defaultValues);
|
||||
}
|
||||
}
|
||||
|
||||
async singleExists(doctype) {
|
||||
let res = await this.knex('SingleValue')
|
||||
.count('parent as count')
|
||||
.where('parent', doctype)
|
||||
.first();
|
||||
return res.count > 0;
|
||||
}
|
||||
|
||||
async getSingleFieldsToInsert(doctype) {
|
||||
const existingFields = (
|
||||
await frappe.db
|
||||
.knex('SingleValue')
|
||||
.where({ parent: doctype })
|
||||
.select('fieldname')
|
||||
).map(({ fieldname }) => fieldname);
|
||||
|
||||
return frappe
|
||||
.getMeta(doctype)
|
||||
.fields.map(({ fieldname, default: value }) => ({
|
||||
fieldname,
|
||||
value,
|
||||
}))
|
||||
.filter(
|
||||
({ fieldname, value }) =>
|
||||
!existingFields.includes(fieldname) && value !== undefined
|
||||
);
|
||||
}
|
||||
|
||||
tableExists(table) {
|
||||
return this.knex.schema.hasTable(table);
|
||||
}
|
||||
|
||||
async createTable(doctype, tableName = null) {
|
||||
let fields = this.getValidFields(doctype);
|
||||
return await this.runCreateTableQuery(tableName || doctype, fields);
|
||||
}
|
||||
|
||||
runCreateTableQuery(doctype, fields) {
|
||||
return this.knex.schema.createTable(doctype, (table) => {
|
||||
for (let field of fields) {
|
||||
this.buildColumnForTable(table, field);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async alterTable(doctype) {
|
||||
// get columns
|
||||
let diff = await this.getColumnDiff(doctype);
|
||||
let newForeignKeys = await this.getNewForeignKeys(doctype);
|
||||
|
||||
return this.knex.schema
|
||||
.table(doctype, (table) => {
|
||||
if (diff.added.length) {
|
||||
for (let field of diff.added) {
|
||||
this.buildColumnForTable(table, field);
|
||||
}
|
||||
}
|
||||
|
||||
if (diff.removed.length) {
|
||||
this.removeColumns(doctype, diff.removed);
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
if (newForeignKeys.length) {
|
||||
return this.addForeignKeys(doctype, newForeignKeys);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
buildColumnForTable(table, field) {
|
||||
let columnType = this.getColumnType(field);
|
||||
if (!columnType) {
|
||||
// In case columnType is "Table"
|
||||
// childTable links are handled using the childTable's "parent" field
|
||||
return;
|
||||
}
|
||||
|
||||
let column = table[columnType](field.fieldname);
|
||||
|
||||
// primary key
|
||||
if (field.fieldname === 'name') {
|
||||
column.primary();
|
||||
}
|
||||
|
||||
// default value
|
||||
if (!!field.default && !(field.default instanceof Function)) {
|
||||
column.defaultTo(field.default);
|
||||
}
|
||||
|
||||
// required
|
||||
if (
|
||||
(!!field.required && !(field.required instanceof Function)) ||
|
||||
field.fieldtype === 'Currency'
|
||||
) {
|
||||
column.notNullable();
|
||||
}
|
||||
|
||||
// link
|
||||
if (field.fieldtype === 'Link' && field.target) {
|
||||
let meta = frappe.getMeta(field.target);
|
||||
table
|
||||
.foreign(field.fieldname)
|
||||
.references('name')
|
||||
.inTable(meta.getBaseDocType())
|
||||
.onUpdate('CASCADE')
|
||||
.onDelete('RESTRICT');
|
||||
}
|
||||
}
|
||||
|
||||
async getColumnDiff(doctype) {
|
||||
const tableColumns = await this.getTableColumns(doctype);
|
||||
const validFields = this.getValidFields(doctype);
|
||||
const diff = { added: [], removed: [] };
|
||||
|
||||
for (let field of validFields) {
|
||||
if (
|
||||
!tableColumns.includes(field.fieldname) &&
|
||||
this.getColumnType(field)
|
||||
) {
|
||||
diff.added.push(field);
|
||||
}
|
||||
}
|
||||
|
||||
const validFieldNames = validFields.map((field) => field.fieldname);
|
||||
for (let column of tableColumns) {
|
||||
if (!validFieldNames.includes(column)) {
|
||||
diff.removed.push(column);
|
||||
}
|
||||
}
|
||||
|
||||
return diff;
|
||||
}
|
||||
|
||||
async removeColumns(doctype, removed) {
|
||||
for (let column of removed) {
|
||||
await this.runRemoveColumnQuery(doctype, column);
|
||||
}
|
||||
}
|
||||
|
||||
async getNewForeignKeys(doctype) {
|
||||
let foreignKeys = await this.getForeignKeys(doctype);
|
||||
let newForeignKeys = [];
|
||||
let meta = frappe.getMeta(doctype);
|
||||
for (let field of meta.getValidFields({ withChildren: false })) {
|
||||
if (
|
||||
field.fieldtype === 'Link' &&
|
||||
!foreignKeys.includes(field.fieldname)
|
||||
) {
|
||||
newForeignKeys.push(field);
|
||||
}
|
||||
}
|
||||
return newForeignKeys;
|
||||
}
|
||||
|
||||
async addForeignKeys(doctype, newForeignKeys) {
|
||||
for (let field of newForeignKeys) {
|
||||
this.addForeignKey(doctype, field);
|
||||
}
|
||||
}
|
||||
|
||||
async getForeignKeys(doctype, field) {
|
||||
return [];
|
||||
}
|
||||
|
||||
async getTableColumns(doctype) {
|
||||
return [];
|
||||
}
|
||||
|
||||
async get(doctype, name = null, fields = '*') {
|
||||
let meta = frappe.getMeta(doctype);
|
||||
let doc;
|
||||
if (meta.isSingle) {
|
||||
doc = await this.getSingle(doctype);
|
||||
doc.name = doctype;
|
||||
} else {
|
||||
if (!name) {
|
||||
throw new frappe.errors.ValueError('name is mandatory');
|
||||
}
|
||||
doc = await this.getOne(doctype, name, fields);
|
||||
}
|
||||
if (!doc) {
|
||||
return;
|
||||
}
|
||||
await this.loadChildren(doc, meta);
|
||||
return doc;
|
||||
}
|
||||
|
||||
async loadChildren(doc, meta) {
|
||||
// load children
|
||||
let tableFields = meta.getTableFields();
|
||||
for (let field of tableFields) {
|
||||
doc[field.fieldname] = await this.getAll({
|
||||
doctype: field.childtype,
|
||||
fields: ['*'],
|
||||
filters: { parent: doc.name },
|
||||
orderBy: 'idx',
|
||||
order: 'asc',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async getSingle(doctype) {
|
||||
let values = await this.getAll({
|
||||
doctype: 'SingleValue',
|
||||
fields: ['fieldname', 'value'],
|
||||
filters: { parent: doctype },
|
||||
orderBy: 'fieldname',
|
||||
order: 'asc',
|
||||
});
|
||||
let doc = {};
|
||||
for (let row of values) {
|
||||
doc[row.fieldname] = row.value;
|
||||
}
|
||||
return doc;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get list of values from the singles table.
|
||||
* @param {...string | Object} fieldnames list of fieldnames to get the values of
|
||||
* @returns {Array<Object>} array of {parent, value, fieldname}.
|
||||
* @example
|
||||
* Database.getSingleValues('internalPrecision');
|
||||
* // returns [{ fieldname: 'internalPrecision', parent: 'SystemSettings', value: '12' }]
|
||||
* @example
|
||||
* Database.getSingleValues({fieldname:'internalPrecision', parent: 'SystemSettings'});
|
||||
* // returns [{ fieldname: 'internalPrecision', parent: 'SystemSettings', value: '12' }]
|
||||
*/
|
||||
async getSingleValues(...fieldnames) {
|
||||
fieldnames = fieldnames.map((fieldname) => {
|
||||
if (typeof fieldname === 'string') {
|
||||
return { fieldname };
|
||||
}
|
||||
return fieldname;
|
||||
});
|
||||
|
||||
let builder = frappe.db.knex('SingleValue');
|
||||
builder = builder.where(fieldnames[0]);
|
||||
|
||||
fieldnames.slice(1).forEach(({ fieldname, parent }) => {
|
||||
if (typeof parent === 'undefined') {
|
||||
builder = builder.orWhere({ fieldname });
|
||||
} else {
|
||||
builder = builder.orWhere({ fieldname, parent });
|
||||
}
|
||||
});
|
||||
|
||||
let values = [];
|
||||
try {
|
||||
values = await builder.select('fieldname', 'value', 'parent');
|
||||
} catch (error) {
|
||||
if (error.message.includes('no such table')) {
|
||||
return [];
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
return values.map((value) => {
|
||||
const fields = frappe.getMeta(value.parent).fields;
|
||||
return this.getDocFormattedDoc(fields, values);
|
||||
});
|
||||
}
|
||||
|
||||
async getOne(doctype, name, fields = '*') {
|
||||
let meta = frappe.getMeta(doctype);
|
||||
let baseDoctype = meta.getBaseDocType();
|
||||
|
||||
const doc = await this.knex
|
||||
.select(fields)
|
||||
.from(baseDoctype)
|
||||
.where('name', name)
|
||||
.first();
|
||||
|
||||
if (!doc) {
|
||||
return doc;
|
||||
}
|
||||
|
||||
return this.getDocFormattedDoc(meta.fields, doc);
|
||||
}
|
||||
|
||||
getDocFormattedDoc(fields, doc) {
|
||||
// format for usage, not going into the db
|
||||
const docFields = Object.keys(doc);
|
||||
const filteredFields = fields.filter(({ fieldname }) =>
|
||||
docFields.includes(fieldname)
|
||||
);
|
||||
|
||||
const formattedValues = filteredFields.reduce((d, field) => {
|
||||
const { fieldname } = field;
|
||||
d[fieldname] = this.getDocFormattedValues(field, doc[fieldname]);
|
||||
return d;
|
||||
}, {});
|
||||
|
||||
return Object.assign(doc, formattedValues);
|
||||
}
|
||||
|
||||
getDocFormattedValues(field, value) {
|
||||
// format for usage, not going into the db
|
||||
try {
|
||||
if (field.fieldtype === 'Currency') {
|
||||
return frappe.pesa(value);
|
||||
}
|
||||
} catch (err) {
|
||||
err.message += ` value: '${value}' of type: ${typeof value}, fieldname: '${
|
||||
field.fieldname
|
||||
}', label: '${field.label}'`;
|
||||
throw err;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
triggerChange(doctype, name) {
|
||||
this.trigger(`change:${doctype}`, { name }, 500);
|
||||
this.trigger(`change`, { doctype, name }, 500);
|
||||
// also trigger change for basedOn doctype
|
||||
let meta = frappe.getMeta(doctype);
|
||||
if (meta.basedOn) {
|
||||
this.triggerChange(meta.basedOn, name);
|
||||
}
|
||||
}
|
||||
|
||||
async insert(doctype, doc) {
|
||||
let meta = frappe.getMeta(doctype);
|
||||
let baseDoctype = meta.getBaseDocType();
|
||||
doc = this.applyBaseDocTypeFilters(doctype, doc);
|
||||
|
||||
// insert parent
|
||||
if (meta.isSingle) {
|
||||
await this.updateSingle(doctype, doc);
|
||||
} else {
|
||||
await this.insertOne(baseDoctype, doc);
|
||||
}
|
||||
|
||||
// insert children
|
||||
await this.insertChildren(meta, doc, baseDoctype);
|
||||
|
||||
this.triggerChange(doctype, doc.name);
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
async insertChildren(meta, doc, doctype) {
|
||||
let tableFields = meta.getTableFields();
|
||||
for (let field of tableFields) {
|
||||
let idx = 0;
|
||||
for (let child of doc[field.fieldname] || []) {
|
||||
this.prepareChild(doctype, doc.name, child, field, idx);
|
||||
await this.insertOne(field.childtype, child);
|
||||
idx++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
insertOne(doctype, doc) {
|
||||
let fields = this.getValidFields(doctype);
|
||||
|
||||
if (!doc.name) {
|
||||
doc.name = frappe.getRandomString();
|
||||
}
|
||||
|
||||
let formattedDoc = this.getFormattedDoc(fields, doc);
|
||||
return this.knex(doctype).insert(formattedDoc);
|
||||
}
|
||||
|
||||
async update(doctype, doc) {
|
||||
let meta = frappe.getMeta(doctype);
|
||||
let baseDoctype = meta.getBaseDocType();
|
||||
doc = this.applyBaseDocTypeFilters(doctype, doc);
|
||||
|
||||
// update parent
|
||||
if (meta.isSingle) {
|
||||
await this.updateSingle(doctype, doc);
|
||||
} else {
|
||||
await this.updateOne(baseDoctype, doc);
|
||||
}
|
||||
|
||||
// insert or update children
|
||||
await this.updateChildren(meta, doc, baseDoctype);
|
||||
|
||||
this.triggerChange(doctype, doc.name);
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
async updateChildren(meta, doc, doctype) {
|
||||
let tableFields = meta.getTableFields();
|
||||
for (let field of tableFields) {
|
||||
let added = [];
|
||||
for (let child of doc[field.fieldname] || []) {
|
||||
this.prepareChild(doctype, doc.name, child, field, added.length);
|
||||
if (await this.exists(field.childtype, child.name)) {
|
||||
await this.updateOne(field.childtype, child);
|
||||
} else {
|
||||
await this.insertOne(field.childtype, child);
|
||||
}
|
||||
added.push(child.name);
|
||||
}
|
||||
await this.runDeleteOtherChildren(field, doc.name, added);
|
||||
}
|
||||
}
|
||||
|
||||
updateOne(doctype, doc) {
|
||||
let validFields = this.getValidFields(doctype);
|
||||
let fieldsToUpdate = Object.keys(doc).filter((f) => f !== 'name');
|
||||
let fields = validFields.filter((df) =>
|
||||
fieldsToUpdate.includes(df.fieldname)
|
||||
);
|
||||
let formattedDoc = this.getFormattedDoc(fields, doc);
|
||||
|
||||
return this.knex(doctype)
|
||||
.where('name', doc.name)
|
||||
.update(formattedDoc)
|
||||
.then(() => {
|
||||
let cacheKey = `${doctype}:${doc.name}`;
|
||||
if (this.cache.hexists(cacheKey)) {
|
||||
for (let fieldname in formattedDoc) {
|
||||
let value = formattedDoc[fieldname];
|
||||
this.cache.hset(cacheKey, fieldname, value);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
runDeleteOtherChildren(field, parent, added) {
|
||||
// delete other children
|
||||
return this.knex(field.childtype)
|
||||
.where('parent', parent)
|
||||
.andWhere('name', 'not in', added)
|
||||
.delete();
|
||||
}
|
||||
|
||||
async updateSingle(doctype, doc) {
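// singles have no table of their own: existing SingleValue rows for the
// doctype are cleared, then each non-null field is saved as a SingleValue
// row keyed by parent (the doctype) and fieldname.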
|
||||
let meta = frappe.getMeta(doctype);
|
||||
await this.deleteSingleValues(doctype);
|
||||
for (let field of meta.getValidFields({ withChildren: false })) {
|
||||
let value = doc[field.fieldname];
|
||||
if (value != null) {
|
||||
let singleValue = frappe.newDoc({
|
||||
doctype: 'SingleValue',
|
||||
parent: doctype,
|
||||
fieldname: field.fieldname,
|
||||
value: value,
|
||||
});
|
||||
await singleValue.insert();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
deleteSingleValues(name) {
|
||||
return this.knex('SingleValue').where('parent', name).delete();
|
||||
}
|
||||
|
||||
async rename(doctype, oldName, newName) {
|
||||
let meta = frappe.getMeta(doctype);
|
||||
let baseDoctype = meta.getBaseDocType();
|
||||
await this.knex(baseDoctype)
|
||||
.update({ name: newName })
|
||||
.where('name', oldName)
|
||||
.then(() => {
|
||||
this.clearValueCache(doctype, oldName);
|
||||
});
|
||||
await frappe.db.commit();
|
||||
|
||||
this.triggerChange(doctype, newName);
|
||||
}
|
||||
|
||||
prepareChild(parenttype, parent, child, field, idx) {
|
||||
if (!child.name) {
|
||||
child.name = frappe.getRandomString();
|
||||
}
|
||||
child.parent = parent;
|
||||
child.parenttype = parenttype;
|
||||
child.parentfield = field.fieldname;
|
||||
child.idx = idx;
|
||||
}
|
||||
|
||||
getValidFields(doctype) {
|
||||
return frappe.getMeta(doctype).getValidFields({ withChildren: false });
|
||||
}
|
||||
|
||||
getFormattedDoc(fields, doc) {
|
||||
// format for storage, going into the db
|
||||
let formattedDoc = {};
|
||||
fields.map((field) => {
|
||||
let value = doc[field.fieldname];
|
||||
formattedDoc[field.fieldname] = this.getFormattedValue(field, value);
|
||||
});
|
||||
return formattedDoc;
|
||||
}
|
||||
|
||||
getFormattedValue(field, value) {
|
||||
// format for storage, going into the db
|
||||
const type = typeof value;
|
||||
if (field.fieldtype === 'Currency') {
|
||||
let currency = value;
|
||||
|
||||
if (type === 'number' || type === 'string') {
|
||||
currency = frappe.pesa(value);
|
||||
}
|
||||
|
||||
const currencyValue = currency.store;
|
||||
if (typeof currencyValue !== 'string') {
|
||||
throw new Error(
|
||||
`invalid currencyValue '${currencyValue}' of type '${typeof currencyValue}' on converting from '${value}' of type '${type}'`
|
||||
);
|
||||
}
|
||||
|
||||
return currencyValue;
|
||||
}
|
||||
|
||||
if (value instanceof Date) {
|
||||
if (field.fieldtype === 'Date') {
|
||||
// date
|
||||
return value.toISOString().substr(0, 10);
|
||||
} else {
|
||||
// datetime
|
||||
return value.toISOString();
|
||||
}
|
||||
} else if (field.fieldtype === 'Link' && !value) {
|
||||
// empty value must be null to satisfy
|
||||
// foreign key constraint
|
||||
return null;
|
||||
} else {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
applyBaseDocTypeFilters(doctype, doc) {
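// a doctype that is `basedOn` another carries fixed filter values in its
// meta; copy the scalar ones onto the doc so they are stored along with it.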
|
||||
let meta = frappe.getMeta(doctype);
|
||||
if (meta.filters) {
|
||||
for (let fieldname in meta.filters) {
|
||||
let value = meta.filters[fieldname];
|
||||
if (typeof value !== 'object') {
|
||||
doc[fieldname] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return doc;
|
||||
}
|
||||
|
||||
async deleteMany(doctype, names) {
|
||||
for (const name of names) {
|
||||
await this.delete(doctype, name);
|
||||
}
|
||||
}
|
||||
|
||||
async delete(doctype, name) {
|
||||
let meta = frappe.getMeta(doctype);
|
||||
let baseDoctype = meta.getBaseDocType();
|
||||
await this.deleteOne(baseDoctype, name);
|
||||
|
||||
// delete children
|
||||
let tableFields = frappe.getMeta(doctype).getTableFields();
|
||||
for (let field of tableFields) {
|
||||
await this.deleteChildren(field.childtype, name);
|
||||
}
|
||||
|
||||
this.triggerChange(doctype, name);
|
||||
}
|
||||
|
||||
async deleteOne(doctype, name) {
|
||||
return this.knex(doctype)
|
||||
.where('name', name)
|
||||
.delete()
|
||||
.then(() => {
|
||||
this.clearValueCache(doctype, name);
|
||||
});
|
||||
}
|
||||
|
||||
deleteChildren(parenttype, parent) {
|
||||
return this.knex(parenttype).where('parent', parent).delete();
|
||||
}
|
||||
|
||||
async exists(doctype, name) {
|
||||
return (await this.getValue(doctype, name)) ? true : false;
|
||||
}
|
||||
|
||||
async getValue(doctype, filters, fieldname = 'name') {
|
||||
let meta = frappe.getMeta(doctype);
|
||||
let baseDoctype = meta.getBaseDocType();
|
||||
if (typeof filters === 'string') {
|
||||
filters = { name: filters };
|
||||
}
|
||||
if (meta.filters) {
|
||||
Object.assign(filters, meta.filters);
|
||||
}
|
||||
|
||||
let row = await this.getAll({
|
||||
doctype: baseDoctype,
|
||||
fields: [fieldname],
|
||||
filters: filters,
|
||||
start: 0,
|
||||
limit: 1,
|
||||
orderBy: 'name',
|
||||
order: 'asc',
|
||||
});
|
||||
return row.length ? row[0][fieldname] : null;
|
||||
}
|
||||
|
||||
async setValue(doctype, name, fieldname, value) {
|
||||
return await this.setValues(doctype, name, {
|
||||
[fieldname]: value,
|
||||
});
|
||||
}
|
||||
|
||||
async setValues(doctype, name, fieldValuePair) {
|
||||
let doc = Object.assign({}, fieldValuePair, { name });
|
||||
return this.updateOne(doctype, doc);
|
||||
}
|
||||
|
||||
async getCachedValue(doctype, name, fieldname) {
|
||||
let value = this.cache.hget(`${doctype}:${name}`, fieldname);
|
||||
if (value == null) {
|
||||
value = await this.getValue(doctype, name, fieldname);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
async getAll({
|
||||
doctype,
|
||||
fields,
|
||||
filters,
|
||||
start,
|
||||
limit,
|
||||
groupBy,
|
||||
orderBy = 'creation',
|
||||
order = 'desc',
|
||||
} = {}) {
|
||||
let meta = frappe.getMeta(doctype);
|
||||
let baseDoctype = meta.getBaseDocType();
|
||||
if (!fields) {
|
||||
fields = meta.getKeywordFields();
|
||||
fields.push('name');
|
||||
}
|
||||
if (typeof fields === 'string') {
|
||||
fields = [fields];
|
||||
}
|
||||
if (meta.filters) {
|
||||
filters = Object.assign({}, filters, meta.filters);
|
||||
}
|
||||
|
||||
let builder = this.knex.select(fields).from(baseDoctype);
|
||||
|
||||
this.applyFiltersToBuilder(builder, filters);
|
||||
|
||||
if (orderBy) {
|
||||
builder.orderBy(orderBy, order);
|
||||
}
|
||||
|
||||
if (groupBy) {
|
||||
builder.groupBy(groupBy);
|
||||
}
|
||||
|
||||
if (start) {
|
||||
builder.offset(start);
|
||||
}
|
||||
|
||||
if (limit) {
|
||||
builder.limit(limit);
|
||||
}
|
||||
|
||||
const docs = await builder;
|
||||
return docs.map((doc) => this.getDocFormattedDoc(meta.fields, doc));
|
||||
}
|
||||
|
||||
applyFiltersToBuilder(builder, filters) {
|
||||
// {"status": "Open"} => `status = "Open"`
|
||||
|
||||
// {"status": "Open", "name": ["like", "apple%"]}
|
||||
// => `status="Open" and name like "apple%"`
|
||||
|
||||
// {"date": [">=", "2017-09-09", "<=", "2017-11-01"]}
|
||||
// => `date >= 2017-09-09 and date <= 2017-11-01`
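// illustrative call through getAll (defined above); the doctype and values
// here are hypothetical:
//   await this.getAll({
//     doctype: 'ToDo',
//     filters: { status: 'Open', name: ['like', 'apple'] },
//   });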
|
||||
|
||||
let filtersArray = [];
|
||||
|
||||
for (let field in filters) {
|
||||
let value = filters[field];
|
||||
let operator = '=';
|
||||
let comparisonValue = value;
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
operator = value[0];
|
||||
comparisonValue = value[1];
|
||||
operator = operator.toLowerCase();
|
||||
|
||||
if (operator === 'includes') {
|
||||
operator = 'like';
|
||||
}
|
||||
|
||||
if (operator === 'like' && !comparisonValue.includes('%')) {
|
||||
comparisonValue = `%${comparisonValue}%`;
|
||||
}
|
||||
}
|
||||
|
||||
filtersArray.push([field, operator, comparisonValue]);
|
||||
|
||||
if (Array.isArray(value) && value.length > 2) {
|
||||
// multiple conditions
|
||||
let operator = value[2];
|
||||
let comparisonValue = value[3];
|
||||
filtersArray.push([field, operator, comparisonValue]);
|
||||
}
|
||||
}
|
||||
|
||||
filtersArray.map((filter) => {
|
||||
const [field, operator, comparisonValue] = filter;
|
||||
if (operator === '=') {
|
||||
builder.where(field, comparisonValue);
|
||||
} else {
|
||||
builder.where(field, operator, comparisonValue);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
run(query, params) {
|
||||
// run query
|
||||
return this.sql(query, params);
|
||||
}
|
||||
|
||||
sql(query, params) {
|
||||
// run sql
|
||||
return this.knex.raw(query, params);
|
||||
}
|
||||
|
||||
async commit() {
|
||||
try {
|
||||
await this.sql('commit');
|
||||
} catch (e) {
|
||||
if (e.type !== frappe.errors.CannotCommitError) {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
clearValueCache(doctype, name) {
|
||||
let cacheKey = `${doctype}:${name}`;
|
||||
this.cache.hclear(cacheKey);
|
||||
}
|
||||
|
||||
getColumnType(field) {
|
||||
return this.typeMap[field.fieldtype];
|
||||
}
|
||||
|
||||
getError(err) {
|
||||
return frappe.errors.DatabaseError;
|
||||
}
|
||||
|
||||
initTypeMap() {
|
||||
this.typeMap = {};
|
||||
}
|
||||
|
||||
executePostDbConnect() {
|
||||
frappe.initializeMoneyMaker();
|
||||
}
|
||||
}
|
@ -1,143 +0,0 @@
|
||||
import frappe from 'frappe';
|
||||
import Database from './database';
|
||||
|
||||
export default class SqliteDatabase extends Database {
|
||||
constructor({ dbPath }) {
|
||||
super();
|
||||
this.dbPath = dbPath;
|
||||
this.connectionParams = {
|
||||
client: 'sqlite3',
|
||||
connection: {
|
||||
filename: this.dbPath,
|
||||
},
|
||||
pool: {
|
||||
afterCreate(conn, done) {
|
||||
conn.run('PRAGMA foreign_keys=ON');
|
||||
done();
|
||||
},
|
||||
},
|
||||
useNullAsDefault: true,
|
||||
asyncStackTraces: process.env.NODE_ENV === 'development',
|
||||
};
|
||||
}
|
||||
|
||||
async addForeignKeys(doctype, newForeignKeys) {
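// SQLite cannot add foreign keys to an existing table, so the table is
// rebuilt: rows are copied into a temp table created from the updated
// schema, the old table is dropped and the temp table renamed in its place.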
|
||||
await this.sql('PRAGMA foreign_keys=OFF');
|
||||
await this.sql('BEGIN TRANSACTION');
|
||||
|
||||
const tempName = 'TEMP' + doctype;
|
||||
|
||||
// create temp table
|
||||
await this.createTable(doctype, tempName);
|
||||
|
||||
try {
|
||||
// copy from old to new table
|
||||
await this.knex(tempName).insert(this.knex.select().from(doctype));
|
||||
} catch (err) {
|
||||
await this.sql('ROLLBACK');
|
||||
await this.sql('PRAGMA foreign_keys=ON');
|
||||
|
||||
const rows = await this.knex.select().from(doctype);
|
||||
await this.prestigeTheTable(doctype, rows);
|
||||
return;
|
||||
}
|
||||
|
||||
// drop old table
|
||||
await this.knex.schema.dropTable(doctype);
|
||||
|
||||
// rename new table
|
||||
await this.knex.schema.renameTable(tempName, doctype);
|
||||
|
||||
await this.sql('COMMIT');
|
||||
await this.sql('PRAGMA foreign_keys=ON');
|
||||
}
|
||||
|
||||
removeColumns() {
|
||||
// pass
|
||||
}
|
||||
|
||||
async getTableColumns(doctype) {
|
||||
return (await this.sql(`PRAGMA table_info(${doctype})`)).map((d) => d.name);
|
||||
}
|
||||
|
||||
async getForeignKeys(doctype) {
|
||||
return (await this.sql(`PRAGMA foreign_key_list(${doctype})`)).map(
|
||||
(d) => d.from
|
||||
);
|
||||
}
|
||||
|
||||
initTypeMap() {
|
||||
// prettier-ignore
|
||||
this.typeMap = {
|
||||
'AutoComplete': 'text',
|
||||
'Currency': 'text',
|
||||
'Int': 'integer',
|
||||
'Float': 'float',
|
||||
'Percent': 'float',
|
||||
'Check': 'integer',
|
||||
'Small Text': 'text',
|
||||
'Long Text': 'text',
|
||||
'Code': 'text',
|
||||
'Text Editor': 'text',
|
||||
'Date': 'text',
|
||||
'Datetime': 'text',
|
||||
'Time': 'text',
|
||||
'Text': 'text',
|
||||
'Data': 'text',
|
||||
'Link': 'text',
|
||||
'DynamicLink': 'text',
|
||||
'Password': 'text',
|
||||
'Select': 'text',
|
||||
'Read Only': 'text',
|
||||
'File': 'text',
|
||||
'Attach': 'text',
|
||||
'AttachImage': 'text',
|
||||
'Signature': 'text',
|
||||
'Color': 'text',
|
||||
'Barcode': 'text',
|
||||
'Geolocation': 'text'
|
||||
};
|
||||
}
|
||||
|
||||
getError(err) {
|
||||
let errorType = frappe.errors.DatabaseError;
|
||||
if (err.message.includes('FOREIGN KEY')) {
|
||||
errorType = frappe.errors.LinkValidationError;
|
||||
}
|
||||
if (err.message.includes('SQLITE_ERROR: cannot commit')) {
|
||||
errorType = frappe.errors.CannotCommitError;
|
||||
}
|
||||
if (err.message.includes('SQLITE_CONSTRAINT: UNIQUE constraint failed:')) {
|
||||
errorType = frappe.errors.DuplicateEntryError;
|
||||
}
|
||||
return errorType;
|
||||
}
|
||||
|
||||
async prestigeTheTable(tableName, tableRows) {
|
||||
const max = 200;
|
||||
|
||||
// Alter-table hack for sqlite in case of schema change.
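// same rebuild approach as addForeignKeys: copy rows into a temp table
// built from the current schema, drop the original table, then rename the
// temp table back; rows are copied in batches of `max` when there are many.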
|
||||
const tempName = `__${tableName}`;
|
||||
await this.knex.schema.dropTableIfExists(tempName);
|
||||
|
||||
await this.knex.raw('PRAGMA foreign_keys=OFF');
|
||||
await this.createTable(tableName, tempName);
|
||||
|
||||
if (tableRows.length > max) {
|
||||
const fi = Math.floor(tableRows.length / max);
|
||||
for (let i = 0; i <= fi; i++) {
|
||||
const rowSlice = tableRows.slice(i * max, (i + 1) * max);
|
||||
if (rowSlice.length === 0) {
|
||||
break;
|
||||
}
|
||||
await this.knex.batchInsert(tempName, rowSlice);
|
||||
}
|
||||
} else {
|
||||
await this.knex.batchInsert(tempName, tableRows);
|
||||
}
|
||||
|
||||
await this.knex.schema.dropTable(tableName);
|
||||
await this.knex.schema.renameTable(tempName, tableName);
|
||||
await this.knex.raw('PRAGMA foreign_keys=ON');
|
||||
}
|
||||
}
|
@ -1,101 +0,0 @@
|
||||
const frappe = require('frappe');
|
||||
|
||||
class BaseError extends Error {
|
||||
constructor(statusCode, message) {
|
||||
super(message);
|
||||
this.name = 'BaseError';
|
||||
this.statusCode = statusCode;
|
||||
this.message = message;
|
||||
}
|
||||
}
|
||||
|
||||
class ValidationError extends BaseError {
|
||||
constructor(message) {
|
||||
super(417, message);
|
||||
this.name = 'ValidationError';
|
||||
}
|
||||
}
|
||||
|
||||
class NotFoundError extends BaseError {
|
||||
constructor(message) {
|
||||
super(404, message);
|
||||
this.name = 'NotFoundError';
|
||||
}
|
||||
}
|
||||
|
||||
class ForbiddenError extends BaseError {
|
||||
constructor(message) {
|
||||
super(403, message);
|
||||
this.name = 'ForbiddenError';
|
||||
}
|
||||
}
|
||||
|
||||
class DuplicateEntryError extends ValidationError {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = 'DuplicateEntryError';
|
||||
}
|
||||
}
|
||||
|
||||
class LinkValidationError extends ValidationError {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = 'LinkValidationError';
|
||||
}
|
||||
}
|
||||
|
||||
class MandatoryError extends ValidationError {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = 'MandatoryError';
|
||||
}
|
||||
}
|
||||
|
||||
class DatabaseError extends BaseError {
|
||||
constructor(message) {
|
||||
super(500, message);
|
||||
this.name = 'DatabaseError';
|
||||
}
|
||||
}
|
||||
|
||||
class CannotCommitError extends DatabaseError {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = 'CannotCommitError';
|
||||
}
|
||||
}
|
||||
|
||||
class ValueError extends ValidationError {}
|
||||
class Conflict extends ValidationError {}
|
||||
class InvalidFieldError extends ValidationError {}
|
||||
|
||||
function throwError(message, error = 'ValidationError') {
|
||||
const errorClass = {
|
||||
ValidationError: ValidationError,
|
||||
NotFoundError: NotFoundError,
|
||||
ForbiddenError: ForbiddenError,
|
||||
ValueError: ValueError,
|
||||
Conflict: Conflict,
|
||||
};
|
||||
const err = new errorClass[error](message);
|
||||
frappe.events.trigger('throw', { message, stackTrace: err.stack });
|
||||
throw err;
|
||||
}
|
||||
|
||||
frappe.throw = throwError;
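// illustrative usage (messages are hypothetical):
//   frappe.throw('Fieldname is required');            // throws ValidationError
//   frappe.throw('No such record', 'NotFoundError');  // throws NotFoundError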
|
||||
|
||||
module.exports = {
|
||||
BaseError,
|
||||
ValidationError,
|
||||
ValueError,
|
||||
Conflict,
|
||||
NotFoundError,
|
||||
ForbiddenError,
|
||||
DuplicateEntryError,
|
||||
LinkValidationError,
|
||||
DatabaseError,
|
||||
CannotCommitError,
|
||||
MandatoryError,
|
||||
InvalidFieldError,
|
||||
throw: throwError,
|
||||
};
|
@ -1,13 +0,0 @@
|
||||
export default async function initLibs(frappe) {
|
||||
const utils = await import('../utils');
|
||||
const format = await import('../utils/format');
|
||||
const errors = await import('./errors');
|
||||
const BaseMeta = await import('frappe/model/meta');
|
||||
const BaseDocument = await import('frappe/model/document');
|
||||
|
||||
Object.assign(frappe, utils.default);
|
||||
Object.assign(frappe, format.default);
|
||||
frappe.errors = errors.default;
|
||||
frappe.BaseDocument = BaseDocument.default;
|
||||
frappe.BaseMeta = BaseMeta.default;
|
||||
}
|
391
frappe/index.js
@ -1,391 +0,0 @@
|
||||
import initLibs from 'frappe/common';
|
||||
import { getMoneyMaker } from 'pesa';
|
||||
import { markRaw } from 'vue';
|
||||
import utils from './utils';
|
||||
import {
|
||||
DEFAULT_DISPLAY_PRECISION,
|
||||
DEFAULT_INTERNAL_PRECISION,
|
||||
} from './utils/consts';
|
||||
import Observable from './utils/observable';
|
||||
import { t, T } from './utils/translation';
|
||||
|
||||
class Frappe {
|
||||
isElectron = false;
|
||||
isServer = false;
|
||||
|
||||
async initializeAndRegister(customModels = {}, force = false) {
|
||||
this.init(force);
|
||||
await initLibs(this);
|
||||
const coreModels = await import('frappe/models');
|
||||
this.registerModels(coreModels.default);
|
||||
this.registerModels(customModels);
|
||||
}
|
||||
|
||||
async initializeMoneyMaker(currency) {
|
||||
currency ??= 'XXX';
|
||||
|
||||
// to be called after db initialization
|
||||
const values =
|
||||
(await frappe.db?.getSingleValues(
|
||||
{
|
||||
fieldname: 'internalPrecision',
|
||||
parent: 'SystemSettings',
|
||||
},
|
||||
{
|
||||
fieldname: 'displayPrecision',
|
||||
parent: 'SystemSettings',
|
||||
}
|
||||
)) ?? [];
|
||||
|
||||
let { internalPrecision: precision, displayPrecision: display } =
|
||||
values.reduce((acc, { fieldname, value }) => {
|
||||
acc[fieldname] = value;
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
if (typeof precision === 'undefined') {
|
||||
precision = DEFAULT_INTERNAL_PRECISION;
|
||||
}
|
||||
|
||||
if (typeof precision === 'string') {
|
||||
precision = parseInt(precision);
|
||||
}
|
||||
|
||||
if (typeof display === 'undefined') {
|
||||
display = DEFAULT_DISPLAY_PRECISION;
|
||||
}
|
||||
|
||||
if (typeof display === 'string') {
|
||||
display = parseInt(display);
|
||||
}
|
||||
|
||||
this.pesa = getMoneyMaker({
|
||||
currency,
|
||||
precision,
|
||||
display,
|
||||
wrapper: markRaw,
|
||||
});
|
||||
}
|
||||
|
||||
init(force) {
|
||||
if (this._initialized && !force) return;
|
||||
|
||||
// Initialize Config
|
||||
this.config = {
|
||||
serverURL: '',
|
||||
backend: 'sqlite',
|
||||
port: 8000,
|
||||
};
|
||||
|
||||
// Initialize Globals
|
||||
this.metaCache = {};
|
||||
this.models = {};
|
||||
this.forms = {};
|
||||
this.views = {};
|
||||
this.flags = {};
|
||||
this.methods = {};
|
||||
this.errorLog = [];
|
||||
|
||||
// temp params while calling routes
|
||||
this.temp = {};
|
||||
this.params = {};
|
||||
|
||||
this.docs = new Observable();
|
||||
this.events = new Observable();
|
||||
this._initialized = true;
|
||||
}
|
||||
|
||||
registerModels(models) {
|
||||
// register models from app/models/index.js
|
||||
for (let doctype in models) {
|
||||
let metaDefinition = models[doctype];
|
||||
if (!metaDefinition.name) {
|
||||
throw new Error(`Name is mandatory for ${doctype}`);
|
||||
}
|
||||
if (metaDefinition.name !== doctype) {
|
||||
throw new Error(
|
||||
`Model name mismatch for ${doctype}: ${metaDefinition.name}`
|
||||
);
|
||||
}
|
||||
let fieldnames = (metaDefinition.fields || [])
|
||||
.map((df) => df.fieldname)
|
||||
.sort();
|
||||
let duplicateFieldnames = utils.getDuplicates(fieldnames);
|
||||
if (duplicateFieldnames.length > 0) {
|
||||
throw new Error(
|
||||
`Duplicate fields in ${doctype}: ${duplicateFieldnames.join(', ')}`
|
||||
);
|
||||
}
|
||||
|
||||
this.models[doctype] = metaDefinition;
|
||||
}
|
||||
}
|
||||
|
||||
getModels(filterFunction) {
|
||||
let models = [];
|
||||
for (let doctype in this.models) {
|
||||
models.push(this.models[doctype]);
|
||||
}
|
||||
return filterFunction ? models.filter(filterFunction) : models;
|
||||
}
|
||||
|
||||
registerView(view, name, module) {
|
||||
if (!this.views[view]) this.views[view] = {};
|
||||
this.views[view][name] = module;
|
||||
}
|
||||
|
||||
registerMethod({ method, handler }) {
|
||||
this.methods[method] = handler;
|
||||
if (this.app) {
|
||||
// add to router if client-server
|
||||
this.app.post(
|
||||
`/api/method/${method}`,
|
||||
this.asyncHandler(async function (request, response) {
|
||||
let data = await handler(request.body);
|
||||
if (data === undefined) {
|
||||
data = {};
|
||||
}
|
||||
return response.json(data);
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async call({ method, args }) {
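// on the server this invokes the registered handler directly; on the client
// it POSTs to `/api/method/<method>`. Illustrative (method name is
// hypothetical): await frappe.call({ method: 'ping', args: {} });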
|
||||
if (this.isServer) {
|
||||
if (this.methods[method]) {
|
||||
return await this.methods[method](args);
|
||||
} else {
|
||||
throw new Error(`${method} not found`);
|
||||
}
|
||||
}
|
||||
|
||||
let url = `/api/method/${method}`;
|
||||
let response = await fetch(url, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(args || {}),
|
||||
});
|
||||
return await response.json();
|
||||
}
|
||||
|
||||
addToCache(doc) {
|
||||
if (!this.docs) return;
|
||||
|
||||
// add to `docs` cache
|
||||
if (doc.doctype && doc.name) {
|
||||
if (!this.docs[doc.doctype]) {
|
||||
this.docs[doc.doctype] = {};
|
||||
}
|
||||
this.docs[doc.doctype][doc.name] = doc;
|
||||
|
||||
// singles available as first level objects too
|
||||
if (doc.doctype === doc.name) {
|
||||
this[doc.name] = doc;
|
||||
}
|
||||
|
||||
// propagate change to `docs`
|
||||
doc.on('change', (params) => {
|
||||
this.docs.trigger('change', params);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
removeFromCache(doctype, name) {
|
||||
try {
|
||||
delete this.docs[doctype][name];
|
||||
} catch (e) {
|
||||
console.warn(`Document ${doctype} ${name} does not exist`);
|
||||
}
|
||||
}
|
||||
|
||||
isDirty(doctype, name) {
|
||||
return (
|
||||
(this.docs &&
|
||||
this.docs[doctype] &&
|
||||
this.docs[doctype][name] &&
|
||||
this.docs[doctype][name]._dirty) ||
|
||||
false
|
||||
);
|
||||
}
|
||||
|
||||
getDocFromCache(doctype, name) {
|
||||
if (this.docs && this.docs[doctype] && this.docs[doctype][name]) {
|
||||
return this.docs[doctype][name];
|
||||
}
|
||||
}
|
||||
|
||||
getMeta(doctype) {
|
||||
if (!this.metaCache[doctype]) {
|
||||
let model = this.models[doctype];
|
||||
if (!model) {
|
||||
throw new Error(`${doctype} is not a registered doctype`);
|
||||
}
|
||||
|
||||
let metaClass = model.metaClass || this.BaseMeta;
|
||||
this.metaCache[doctype] = new metaClass(model);
|
||||
}
|
||||
|
||||
return this.metaCache[doctype];
|
||||
}
|
||||
|
||||
async getDoc(doctype, name, options = { skipDocumentCache: false }) {
|
||||
let doc = options.skipDocumentCache
|
||||
? null
|
||||
: this.getDocFromCache(doctype, name);
|
||||
if (!doc) {
|
||||
doc = new (this.getDocumentClass(doctype))({
|
||||
doctype: doctype,
|
||||
name: name,
|
||||
});
|
||||
await doc.load();
|
||||
this.addToCache(doc);
|
||||
}
|
||||
return doc;
|
||||
}
|
||||
|
||||
getDocumentClass(doctype) {
|
||||
const meta = this.getMeta(doctype);
|
||||
return meta.documentClass || this.BaseDocument;
|
||||
}
|
||||
|
||||
async getSingle(doctype) {
|
||||
return await this.getDoc(doctype, doctype);
|
||||
}
|
||||
|
||||
async getDuplicate(doc) {
|
||||
const newDoc = await this.getNewDoc(doc.doctype);
|
||||
for (let field of this.getMeta(doc.doctype).getValidFields()) {
|
||||
if (['name', 'submitted'].includes(field.fieldname)) continue;
|
||||
if (field.fieldtype === 'Table') {
|
||||
newDoc[field.fieldname] = (doc[field.fieldname] || []).map((d) => {
|
||||
let newd = Object.assign({}, d);
|
||||
newd.name = '';
|
||||
return newd;
|
||||
});
|
||||
} else {
|
||||
newDoc[field.fieldname] = doc[field.fieldname];
|
||||
}
|
||||
}
|
||||
return newDoc;
|
||||
}
|
||||
|
||||
getNewDoc(doctype, cacheDoc = true) {
|
||||
let doc = this.newDoc({ doctype: doctype });
|
||||
doc._notInserted = true;
|
||||
doc.name = frappe.getRandomString();
|
||||
if (cacheDoc) {
|
||||
this.addToCache(doc);
|
||||
}
|
||||
return doc;
|
||||
}
|
||||
|
||||
async newCustomDoc(fields) {
|
||||
let doc = new this.BaseDocument({ isCustom: 1, fields });
|
||||
doc._notInserted = true;
|
||||
doc.name = this.getRandomString();
|
||||
this.addToCache(doc);
|
||||
return doc;
|
||||
}
|
||||
|
||||
createMeta(fields) {
|
||||
let meta = new this.BaseMeta({ isCustom: 1, fields });
|
||||
return meta;
|
||||
}
|
||||
|
||||
newDoc(data) {
|
||||
let doc = new (this.getDocumentClass(data.doctype))(data);
|
||||
doc.setDefaults();
|
||||
return doc;
|
||||
}
|
||||
|
||||
async insert(data) {
|
||||
return await this.newDoc(data).insert();
|
||||
}
|
||||
|
||||
async syncDoc(data) {
|
||||
let doc;
|
||||
if (await this.db.exists(data.doctype, data.name)) {
|
||||
doc = await this.getDoc(data.doctype, data.name);
|
||||
Object.assign(doc, data);
|
||||
await doc.update();
|
||||
} else {
|
||||
doc = this.newDoc(data);
|
||||
await doc.insert();
|
||||
}
|
||||
}
|
||||
|
||||
// only for client side
|
||||
async login(email, password) {
|
||||
if (email === 'Administrator') {
|
||||
this.session = {
|
||||
user: 'Administrator',
|
||||
};
|
||||
return;
|
||||
}
|
||||
|
||||
let response = await fetch(this.getServerURL() + '/api/login', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ email, password }),
|
||||
});
|
||||
|
||||
if (response.status === 200) {
|
||||
const res = await response.json();
|
||||
|
||||
this.session = {
|
||||
user: email,
|
||||
token: res.token,
|
||||
};
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
async signup(email, fullName, password) {
|
||||
let response = await fetch(this.getServerURL() + '/api/signup', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ email, fullName, password }),
|
||||
});
|
||||
|
||||
if (response.status === 200) {
|
||||
return await response.json();
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
getServerURL() {
|
||||
return this.config.serverURL || '';
|
||||
}
|
||||
|
||||
close() {
|
||||
this.db.close();
|
||||
|
||||
if (this.server) {
|
||||
this.server.close();
|
||||
}
|
||||
}
|
||||
|
||||
store = {
|
||||
isDevelopment: false,
|
||||
appVersion: '',
|
||||
};
|
||||
t = t;
|
||||
T = T;
|
||||
}
|
||||
|
||||
export { T, t };
|
||||
export default new Frappe();
|
@ -1,762 +0,0 @@
|
||||
import telemetry from '@/telemetry/telemetry';
|
||||
import { Verb } from '@/telemetry/types';
|
||||
import frappe from 'frappe';
|
||||
import Observable from 'frappe/utils/observable';
|
||||
import { DEFAULT_INTERNAL_PRECISION } from '../utils/consts';
|
||||
import { isPesa } from '../utils/index';
|
||||
import { setName } from './naming';
|
||||
|
||||
export default class Document extends Observable {
|
||||
constructor(data) {
|
||||
super();
|
||||
this.fetchValuesCache = {};
|
||||
this.flags = {};
|
||||
this.setup();
|
||||
this.setValues(data);
|
||||
}
|
||||
|
||||
setup() {
|
||||
// add listeners
|
||||
}
|
||||
|
||||
setValues(data) {
|
||||
for (let fieldname in data) {
|
||||
let value = data[fieldname];
|
||||
if (fieldname.startsWith('_')) {
|
||||
// private property
|
||||
this[fieldname] = value;
|
||||
} else if (Array.isArray(value)) {
|
||||
for (let row of value) {
|
||||
this.push(fieldname, row);
|
||||
}
|
||||
} else {
|
||||
this[fieldname] = value;
|
||||
}
|
||||
}
|
||||
// set unset fields as null
|
||||
for (let field of this.meta.getValidFields()) {
|
||||
// check for null or undefined
|
||||
if (this[field.fieldname] == null) {
|
||||
this[field.fieldname] = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
get meta() {
|
||||
if (this.isCustom) {
|
||||
this._meta = frappe.createMeta(this.fields);
|
||||
}
|
||||
if (!this._meta) {
|
||||
this._meta = frappe.getMeta(this.doctype);
|
||||
}
|
||||
return this._meta;
|
||||
}
|
||||
|
||||
async getSettings() {
|
||||
if (!this._settings) {
|
||||
this._settings = await frappe.getSingle(this.meta.settings);
|
||||
}
|
||||
return this._settings;
|
||||
}
|
||||
|
||||
// set value and trigger change
|
||||
async set(fieldname, value) {
|
||||
if (typeof fieldname === 'object') {
|
||||
const valueDict = fieldname;
|
||||
for (let fieldname in valueDict) {
|
||||
await this.set(fieldname, valueDict[fieldname]);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (fieldname === 'numberSeries' && !this._notInserted) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this[fieldname] !== value) {
|
||||
this._dirty = true;
|
||||
// if child is dirty, parent is dirty too
|
||||
if (this.meta.isChild && this.parentdoc) {
|
||||
this.parentdoc._dirty = true;
|
||||
}
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
this[fieldname] = [];
|
||||
value.forEach((row, i) => {
|
||||
this.append(fieldname, row);
|
||||
row.idx = i;
|
||||
});
|
||||
} else {
|
||||
await this.validateField(fieldname, value);
|
||||
this[fieldname] = value;
|
||||
}
|
||||
|
||||
// always run applyChange from the parentdoc
|
||||
if (this.meta.isChild && this.parentdoc) {
|
||||
await this.applyChange(fieldname);
|
||||
await this.parentdoc.applyChange(this.parentfield);
|
||||
} else {
|
||||
await this.applyChange(fieldname);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async applyChange(fieldname) {
|
||||
await this.applyFormula(fieldname);
|
||||
this.roundFloats();
|
||||
await this.trigger('change', {
|
||||
doc: this,
|
||||
changed: fieldname,
|
||||
});
|
||||
}
|
||||
|
||||
setDefaults() {
|
||||
for (let field of this.meta.fields) {
|
||||
if (this[field.fieldname] == null) {
|
||||
let defaultValue = getPreDefaultValues(field.fieldtype);
|
||||
|
||||
if (typeof field.default === 'function') {
|
||||
defaultValue = field.default(this);
|
||||
} else if (field.default !== undefined) {
|
||||
defaultValue = field.default;
|
||||
}
|
||||
|
||||
if (field.fieldtype === 'Currency' && !isPesa(defaultValue)) {
|
||||
defaultValue = frappe.pesa(defaultValue);
|
||||
}
|
||||
|
||||
this[field.fieldname] = defaultValue;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.meta.basedOn && this.meta.filters) {
|
||||
this.setValues(this.meta.filters);
|
||||
}
|
||||
}
|
||||
|
||||
castValues() {
|
||||
for (let field of this.meta.fields) {
|
||||
let value = this[field.fieldname];
|
||||
if (value == null) {
|
||||
continue;
|
||||
}
|
||||
if (['Int', 'Check'].includes(field.fieldtype)) {
|
||||
value = parseInt(value, 10);
|
||||
} else if (field.fieldtype === 'Float') {
|
||||
value = parseFloat(value);
|
||||
} else if (field.fieldtype === 'Currency' && !isPesa(value)) {
|
||||
value = frappe.pesa(value);
|
||||
}
|
||||
this[field.fieldname] = value;
|
||||
}
|
||||
}
|
||||
|
||||
setKeywords() {
|
||||
let keywords = [];
|
||||
for (let fieldname of this.meta.getKeywordFields()) {
|
||||
keywords.push(this[fieldname]);
|
||||
}
|
||||
this.keywords = keywords.join(', ');
|
||||
}
|
||||
|
||||
append(key, document = {}) {
|
||||
// push child row and trigger change
|
||||
this.push(key, document);
|
||||
this._dirty = true;
|
||||
this.applyChange(key);
|
||||
}
|
||||
|
||||
push(key, document = {}) {
|
||||
// push child row without triggering change
|
||||
if (!this[key]) {
|
||||
this[key] = [];
|
||||
}
|
||||
this[key].push(this._initChild(document, key));
|
||||
}
|
||||
|
||||
_initChild(data, key) {
|
||||
if (data instanceof Document) {
|
||||
return data;
|
||||
}
|
||||
|
||||
data.doctype = this.meta.getField(key).childtype;
|
||||
data.parent = this.name;
|
||||
data.parenttype = this.doctype;
|
||||
data.parentfield = key;
|
||||
data.parentdoc = this;
|
||||
|
||||
if (!data.idx) {
|
||||
data.idx = (this[key] || []).length;
|
||||
}
|
||||
|
||||
if (!data.name) {
|
||||
data.name = frappe.getRandomString();
|
||||
}
|
||||
|
||||
const childDoc = new Document(data);
|
||||
childDoc.setDefaults();
|
||||
return childDoc;
|
||||
}
|
||||
|
||||
async validateInsert() {
|
||||
this.validateMandatory();
|
||||
await this.validateFields();
|
||||
}
|
||||
|
||||
validateMandatory() {
|
||||
let checkForMandatory = [this];
|
||||
let tableFields = this.meta.fields.filter((df) => df.fieldtype === 'Table');
|
||||
tableFields.map((df) => {
|
||||
let rows = this[df.fieldname];
|
||||
checkForMandatory = [...checkForMandatory, ...rows];
|
||||
});
|
||||
|
||||
let missingMandatory = checkForMandatory
|
||||
.map((doc) => getMissingMandatory(doc))
|
||||
.filter(Boolean);
|
||||
|
||||
if (missingMandatory.length > 0) {
|
||||
let fields = missingMandatory.join('\n');
|
||||
let message = frappe.t`Value missing for ${fields}`;
|
||||
throw new frappe.errors.MandatoryError(message);
|
||||
}
|
||||
|
||||
function getMissingMandatory(doc) {
|
||||
let mandatoryFields = doc.meta.fields.filter((df) => {
|
||||
if (df.required instanceof Function) {
|
||||
return df.required(doc);
|
||||
}
|
||||
return df.required;
|
||||
});
|
||||
let message = mandatoryFields
|
||||
.filter((df) => {
|
||||
let value = doc[df.fieldname];
|
||||
if (df.fieldtype === 'Table') {
|
||||
return value == null || value.length === 0;
|
||||
}
|
||||
return value == null || value === '';
|
||||
})
|
||||
.map((df) => {
|
||||
return `"${df.label}"`;
|
||||
})
|
||||
.join(', ');
|
||||
|
||||
if (message && doc.meta.isChild) {
|
||||
let parentfield = doc.parentdoc.meta.getField(doc.parentfield);
|
||||
message = `${parentfield.label} Row ${doc.idx + 1}: ${message}`;
|
||||
}
|
||||
|
||||
return message;
|
||||
}
|
||||
}
|
||||
|
||||
async validateFields() {
|
||||
let fields = this.meta.fields;
|
||||
for (let field of fields) {
|
||||
await this.validateField(field.fieldname, this.get(field.fieldname));
|
||||
}
|
||||
}
|
||||
|
||||
async validateField(key, value) {
|
||||
let field = this.meta.getField(key);
|
||||
if (!field) {
|
||||
throw new frappe.errors.InvalidFieldError(`Invalid field ${key}`);
|
||||
}
|
||||
if (field.fieldtype == 'Select') {
|
||||
this.meta.validateSelect(field, value);
|
||||
}
|
||||
if (field.validate && value != null) {
|
||||
let validator = null;
|
||||
if (typeof field.validate === 'object') {
|
||||
validator = this.getValidateFunction(field.validate);
|
||||
}
|
||||
if (typeof field.validate === 'function') {
|
||||
validator = field.validate;
|
||||
}
|
||||
if (validator) {
|
||||
await validator(value, this);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
getValidateFunction(validator) {
|
||||
let functions = {
|
||||
email(value) {
|
||||
let isValid = /(.+)@(.+){2,}\.(.+){2,}/.test(value);
|
||||
if (!isValid) {
|
||||
throw new frappe.errors.ValidationError(`Invalid email: ${value}`);
|
||||
}
|
||||
},
|
||||
phone(value) {
|
||||
let isValid = /[+]{0,1}[\d ]+/.test(value);
|
||||
if (!isValid) {
|
||||
throw new frappe.errors.ValidationError(`Invalid phone: ${value}`);
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
return functions[validator.type];
|
||||
}
|
||||
|
||||
getValidDict() {
|
||||
let data = {};
|
||||
for (let field of this.meta.getValidFields()) {
|
||||
let value = this[field.fieldname];
|
||||
if (Array.isArray(value)) {
|
||||
value = value.map((doc) =>
|
||||
doc.getValidDict ? doc.getValidDict() : doc
|
||||
);
|
||||
}
|
||||
data[field.fieldname] = value;
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
setStandardValues() {
|
||||
// set standard values on server-side only
|
||||
if (frappe.isServer) {
|
||||
if (this.isSubmittable && this.submitted == null) {
|
||||
this.submitted = 0;
|
||||
}
|
||||
|
||||
let now = new Date().toISOString();
|
||||
if (!this.owner) {
|
||||
this.owner = frappe.session.user;
|
||||
}
|
||||
|
||||
if (!this.creation) {
|
||||
this.creation = now;
|
||||
}
|
||||
|
||||
this.updateModified();
|
||||
}
|
||||
}
|
||||
|
||||
updateModified() {
|
||||
if (frappe.isServer) {
|
||||
let now = new Date().toISOString();
|
||||
this.modifiedBy = frappe.session.user;
|
||||
this.modified = now;
|
||||
}
|
||||
}
|
||||
|
||||
async load() {
|
||||
let data = await frappe.db.get(this.doctype, this.name);
|
||||
if (data && data.name) {
|
||||
this.syncValues(data);
|
||||
if (this.meta.isSingle) {
|
||||
this.setDefaults();
|
||||
this.castValues();
|
||||
}
|
||||
await this.loadLinks();
|
||||
} else {
|
||||
throw new frappe.errors.NotFoundError(
|
||||
`Not Found: ${this.doctype} ${this.name}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async loadLinks() {
|
||||
this._links = {};
|
||||
let inlineLinks = this.meta.fields.filter((df) => df.inline);
|
||||
for (let df of inlineLinks) {
|
||||
await this.loadLink(df.fieldname);
|
||||
}
|
||||
}
|
||||
|
||||
async loadLink(fieldname) {
|
||||
this._links = this._links || {};
|
||||
let df = this.meta.getField(fieldname);
|
||||
if (this[df.fieldname]) {
|
||||
this._links[df.fieldname] = await frappe.getDoc(
|
||||
df.target,
|
||||
this[df.fieldname]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
getLink(fieldname) {
|
||||
return this._links ? this._links[fieldname] : null;
|
||||
}
|
||||
|
||||
syncValues(data) {
|
||||
this.clearValues();
|
||||
this.setValues(data);
|
||||
this._dirty = false;
|
||||
this.trigger('change', {
|
||||
doc: this,
|
||||
});
|
||||
}
|
||||
|
||||
clearValues() {
|
||||
let toClear = ['_dirty', '_notInserted'].concat(
|
||||
this.meta.getValidFields().map((df) => df.fieldname)
|
||||
);
|
||||
for (let key of toClear) {
|
||||
this[key] = null;
|
||||
}
|
||||
}
|
||||
|
||||
setChildIdx() {
|
||||
// renumber children
|
||||
for (let field of this.meta.getValidFields()) {
|
||||
if (field.fieldtype === 'Table') {
|
||||
for (let i = 0; i < (this[field.fieldname] || []).length; i++) {
|
||||
this[field.fieldname][i].idx = i;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async compareWithCurrentDoc() {
|
||||
if (frappe.isServer && !this.isNew()) {
|
||||
let currentDoc = await frappe.db.get(this.doctype, this.name);
|
||||
|
||||
// check for conflict
|
||||
if (currentDoc && this.modified != currentDoc.modified) {
|
||||
throw new frappe.errors.Conflict(
|
||||
frappe.t`Document ${this.doctype} ${this.name} has been modified after loading`
|
||||
);
|
||||
}
|
||||
|
||||
if (this.submitted && !this.meta.isSubmittable) {
|
||||
throw new frappe.errors.ValidationError(
|
||||
frappe.t`Document type ${this.doctype} is not submittable`
|
||||
);
|
||||
}
|
||||
|
||||
// set submit action flag
|
||||
this.flags = {};
|
||||
if (this.submitted && !currentDoc.submitted) {
|
||||
this.flags.submitAction = true;
|
||||
}
|
||||
|
||||
if (currentDoc.submitted && !this.submitted) {
|
||||
this.flags.revertAction = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async applyFormula(fieldname) {
|
||||
if (!this.meta.hasFormula()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let doc = this;
|
||||
let changed = false;
|
||||
|
||||
// children
|
||||
for (let tablefield of this.meta.getTableFields()) {
|
||||
let formulaFields = frappe
|
||||
.getMeta(tablefield.childtype)
|
||||
.getFormulaFields();
|
||||
if (formulaFields.length) {
|
||||
const value = this[tablefield.fieldname] || [];
|
||||
for (let row of value) {
|
||||
for (let field of formulaFields) {
|
||||
if (shouldApplyFormula(field, row)) {
|
||||
let val = await this.getValueFromFormula(field, row);
|
||||
let previousVal = row[field.fieldname];
|
||||
if (val !== undefined && previousVal !== val) {
|
||||
row[field.fieldname] = val;
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// parent or child row
|
||||
for (let field of this.meta.getFormulaFields()) {
|
||||
if (shouldApplyFormula(field, doc)) {
|
||||
let previousVal = doc[field.fieldname];
|
||||
let val = await this.getValueFromFormula(field, doc);
|
||||
if (val !== undefined && previousVal !== val) {
|
||||
doc[field.fieldname] = val;
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return changed;
|
||||
|
||||
function shouldApplyFormula(field, doc) {
|
||||
if (field.readOnly) {
|
||||
return true;
|
||||
}
|
||||
if (
|
||||
fieldname &&
|
||||
field.formulaDependsOn &&
|
||||
field.formulaDependsOn.includes(fieldname)
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!frappe.isServer || frappe.isElectron) {
|
||||
if (doc[field.fieldname] == null || doc[field.fieldname] == '') {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async getValueFromFormula(field, doc) {
|
||||
let value;
|
||||
|
||||
if (doc.meta.isChild) {
|
||||
value = await field.formula(doc, doc.parentdoc);
|
||||
} else {
|
||||
value = await field.formula(doc);
|
||||
}
|
||||
|
||||
if (value === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
if ('Float' === field.fieldtype) {
|
||||
value = this.round(value, field);
|
||||
}
|
||||
|
||||
if (field.fieldtype === 'Table' && Array.isArray(value)) {
|
||||
value = value.map((row) => {
|
||||
let doc = this._initChild(row, field.fieldname);
|
||||
doc.roundFloats();
|
||||
return doc;
|
||||
});
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
roundFloats() {
|
||||
let fields = this.meta
|
||||
.getValidFields()
|
||||
.filter((df) => ['Float', 'Table'].includes(df.fieldtype));
|
||||
|
||||
for (let df of fields) {
|
||||
let value = this[df.fieldname];
|
||||
if (value == null) {
|
||||
continue;
|
||||
}
|
||||
// child
|
||||
if (Array.isArray(value)) {
|
||||
value.map((row) => row.roundFloats());
|
||||
continue;
|
||||
}
|
||||
// field
|
||||
let roundedValue = this.round(value, df);
|
||||
if (roundedValue && value !== roundedValue) {
|
||||
this[df.fieldname] = roundedValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async commit() {
|
||||
// re-run triggers
|
||||
this.setKeywords();
|
||||
this.setChildIdx();
|
||||
await this.applyFormula();
|
||||
await this.trigger('validate');
|
||||
}
|
||||
|
||||
async insert() {
|
||||
await setName(this);
|
||||
this.setStandardValues();
|
||||
await this.commit();
|
||||
await this.validateInsert();
|
||||
await this.trigger('beforeInsert');
|
||||
|
||||
let oldName = this.name;
|
||||
const data = await frappe.db.insert(this.doctype, this.getValidDict());
|
||||
this.syncValues(data);
|
||||
|
||||
if (oldName !== this.name) {
|
||||
frappe.removeFromCache(this.doctype, oldName);
|
||||
}
|
||||
|
||||
await this.trigger('afterInsert');
|
||||
await this.trigger('afterSave');
|
||||
|
||||
telemetry.log(Verb.Created, this.doctype);
|
||||
return this;
|
||||
}
|
||||
|
||||
async update(...args) {
|
||||
if (args.length) {
|
||||
await this.set(...args);
|
||||
}
|
||||
await this.compareWithCurrentDoc();
|
||||
await this.commit();
|
||||
await this.trigger('beforeUpdate');
|
||||
|
||||
// before submit
|
||||
if (this.flags.submitAction) await this.trigger('beforeSubmit');
|
||||
if (this.flags.revertAction) await this.trigger('beforeRevert');
|
||||
|
||||
// update modifiedBy and modified
|
||||
this.updateModified();
|
||||
|
||||
const data = await frappe.db.update(this.doctype, this.getValidDict());
|
||||
this.syncValues(data);
|
||||
|
||||
await this.trigger('afterUpdate');
|
||||
await this.trigger('afterSave');
|
||||
|
||||
// after submit
|
||||
if (this.flags.submitAction) await this.trigger('afterSubmit');
|
||||
if (this.flags.revertAction) await this.trigger('afterRevert');
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
async insertOrUpdate() {
|
||||
if (this._notInserted) {
|
||||
return await this.insert();
|
||||
} else {
|
||||
return await this.update();
|
||||
}
|
||||
}
|
||||
|
||||
async delete() {
|
||||
await this.trigger('beforeDelete');
|
||||
await frappe.db.delete(this.doctype, this.name);
|
||||
await this.trigger('afterDelete');
|
||||
|
||||
telemetry.log(Verb.Deleted, this.doctype);
|
||||
}
|
||||
|
||||
async submitOrRevert(isSubmit) {
|
||||
const wasSubmitted = this.submitted;
|
||||
this.submitted = isSubmit;
|
||||
try {
|
||||
await this.update();
|
||||
} catch (e) {
|
||||
this.submitted = wasSubmitted;
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
async submit() {
|
||||
this.cancelled = 0;
|
||||
await this.submitOrRevert(1);
|
||||
}
|
||||
|
||||
async revert() {
|
||||
await this.submitOrRevert(0);
|
||||
}
|
||||
|
||||
async rename(newName) {
|
||||
await this.trigger('beforeRename');
|
||||
await frappe.db.rename(this.doctype, this.name, newName);
|
||||
this.name = newName;
|
||||
await this.trigger('afterRename');
|
||||
}
|
||||
|
||||
// trigger methods on the class if they match
|
||||
// with the trigger name
|
||||
async trigger(event, params) {
|
||||
if (this[event]) {
|
||||
await this[event](params);
|
||||
}
|
||||
await super.trigger(event, params);
|
||||
}
|
||||
|
||||
// helper functions
|
||||
getSum(tablefield, childfield, convertToFloat = true) {
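// sums a Currency column over child table rows; illustrative (fieldnames are
// hypothetical): this.getSum('items', 'amount') returns the float total of
// items.amount, or a pesa value when convertToFloat is false.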
|
||||
const sum = (this[tablefield] || [])
|
||||
.map((d) => {
|
||||
const value = d[childfield] ?? 0;
|
||||
if (!isPesa(value)) {
|
||||
try {
|
||||
return frappe.pesa(value);
|
||||
} catch (err) {
|
||||
err.message += ` value: '${value}' of type: ${typeof value}, fieldname: '${tablefield}', childfield: '${childfield}'`;
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
return value;
|
||||
})
|
||||
.reduce((a, b) => a.add(b), frappe.pesa(0));
|
||||
|
||||
if (convertToFloat) {
|
||||
return sum.float;
|
||||
}
|
||||
return sum;
|
||||
}
|
||||
|
||||
getFrom(doctype, name, fieldname) {
|
||||
if (!name) return '';
|
||||
return frappe.db.getCachedValue(doctype, name, fieldname);
|
||||
}
|
||||
|
||||
round(value, df = null) {
|
||||
if (typeof df === 'string') {
|
||||
df = this.meta.getField(df);
|
||||
}
|
||||
const precision =
|
||||
frappe.SystemSettings.internalPrecision ?? DEFAULT_INTERNAL_PRECISION;
|
||||
return frappe.pesa(value).clip(precision).float;
|
||||
}
|
||||
|
||||
isNew() {
|
||||
return this._notInserted;
|
||||
}
|
||||
|
||||
getFieldMetaMap() {
|
||||
return this.meta.fields.reduce((obj, meta) => {
|
||||
obj[meta.fieldname] = meta;
|
||||
return obj;
|
||||
}, {});
|
||||
}
|
||||
|
||||
async duplicate() {
|
||||
const updateMap = {};
|
||||
const fieldValueMap = this.getValidDict();
|
||||
const keys = this.meta.fields.map((f) => f.fieldname);
|
||||
for (const key of keys) {
|
||||
let value = fieldValueMap[key];
|
||||
if (!value) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (isPesa(value)) {
|
||||
value = value.copy();
|
||||
}
|
||||
|
||||
if (value instanceof Array) {
|
||||
value.forEach((row) => {
|
||||
delete row.name;
|
||||
delete row.parent;
|
||||
});
|
||||
}
|
||||
|
||||
updateMap[key] = value;
|
||||
}
|
||||
|
||||
if (this.numberSeries) {
|
||||
delete updateMap.name;
|
||||
} else {
|
||||
updateMap.name = updateMap.name + ' CPY';
|
||||
}
|
||||
|
||||
const doc = frappe.getNewDoc(this.doctype, false);
|
||||
await doc.set(updateMap);
|
||||
await doc.insert();
|
||||
}
|
||||
}
|
||||
|
||||
function getPreDefaultValues(fieldtype) {
|
||||
switch (fieldtype) {
|
||||
case 'Table':
|
||||
return [];
|
||||
case 'Currency':
|
||||
return frappe.pesa(0.0);
|
||||
case 'Int':
|
||||
case 'Float':
|
||||
return 0;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
@ -1,114 +0,0 @@
|
||||
const cloneDeep = require('lodash/cloneDeep');
|
||||
|
||||
module.exports = {
|
||||
extend: (base, target, options = {}) => {
|
||||
base = cloneDeep(base);
|
||||
const fieldsToMerge = (target.fields || []).map(df => df.fieldname);
|
||||
const fieldsToRemove = options.skipFields || [];
|
||||
const overrideProps = options.overrideProps || [];
|
||||
for (let prop of overrideProps) {
|
||||
if (base.hasOwnProperty(prop)) {
|
||||
delete base[prop];
|
||||
}
|
||||
}
|
||||
|
||||
let mergeFields = (baseFields, targetFields) => {
|
||||
let fields = cloneDeep(baseFields);
|
||||
fields = fields
|
||||
.filter(df => !fieldsToRemove.includes(df.fieldname))
|
||||
.map(df => {
|
||||
if (fieldsToMerge.includes(df.fieldname)) {
|
||||
let copy = cloneDeep(df);
|
||||
return Object.assign(
|
||||
copy,
|
||||
targetFields.find(tdf => tdf.fieldname === df.fieldname)
|
||||
);
|
||||
}
|
||||
return df;
|
||||
});
|
||||
let fieldsAdded = fields.map(df => df.fieldname);
|
||||
let fieldsToAdd = targetFields.filter(
|
||||
df => !fieldsAdded.includes(df.fieldname)
|
||||
);
|
||||
return fields.concat(fieldsToAdd);
|
||||
};
|
||||
|
||||
let fields = mergeFields(base.fields, target.fields || []);
|
||||
let out = Object.assign(base, target);
|
||||
out.fields = fields;
|
||||
|
||||
return out;
|
||||
},
|
||||
commonFields: [
|
||||
{
|
||||
fieldname: 'name',
|
||||
fieldtype: 'Data',
|
||||
required: 1
|
||||
}
|
||||
],
|
||||
submittableFields: [
|
||||
{
|
||||
fieldname: 'submitted',
|
||||
fieldtype: 'Check',
|
||||
required: 1
|
||||
}
|
||||
],
|
||||
parentFields: [
|
||||
{
|
||||
fieldname: 'owner',
|
||||
fieldtype: 'Data',
|
||||
required: 1
|
||||
},
|
||||
{
|
||||
fieldname: 'modifiedBy',
|
||||
fieldtype: 'Data',
|
||||
required: 1
|
||||
},
|
||||
{
|
||||
fieldname: 'creation',
|
||||
fieldtype: 'Datetime',
|
||||
required: 1
|
||||
},
|
||||
{
|
||||
fieldname: 'modified',
|
||||
fieldtype: 'Datetime',
|
||||
required: 1
|
||||
},
|
||||
{
|
||||
fieldname: 'keywords',
|
||||
fieldtype: 'Text'
|
||||
}
|
||||
],
|
||||
childFields: [
|
||||
{
|
||||
fieldname: 'idx',
|
||||
fieldtype: 'Int',
|
||||
required: 1
|
||||
},
|
||||
{
|
||||
fieldname: 'parent',
|
||||
fieldtype: 'Data',
|
||||
required: 1
|
||||
},
|
||||
{
|
||||
fieldname: 'parenttype',
|
||||
fieldtype: 'Data',
|
||||
required: 1
|
||||
},
|
||||
{
|
||||
fieldname: 'parentfield',
|
||||
fieldtype: 'Data',
|
||||
required: 1
|
||||
}
|
||||
],
|
||||
treeFields: [
|
||||
{
|
||||
fieldname: 'lft',
|
||||
fieldtype: 'Int'
|
||||
},
|
||||
{
|
||||
fieldname: 'rgt',
|
||||
fieldtype: 'Int'
|
||||
}
|
||||
]
|
||||
};
|
@ -1,328 +0,0 @@
|
||||
import frappe from 'frappe';
|
||||
import { indicators as indicatorColor } from '../../src/colors';
|
||||
import Document from './document';
|
||||
import model from './index';
|
||||
|
||||
export default class BaseMeta extends Document {
|
||||
constructor(data) {
|
||||
if (data.basedOn) {
|
||||
let config = frappe.models[data.basedOn];
|
||||
Object.assign(data, config, {
|
||||
name: data.name,
|
||||
label: data.label,
|
||||
filters: data.filters,
|
||||
});
|
||||
}
|
||||
super(data);
|
||||
this.setDefaultIndicators();
|
||||
if (this.setupMeta) {
|
||||
this.setupMeta();
|
||||
}
|
||||
if (!this.titleField) {
|
||||
this.titleField = 'name';
|
||||
}
|
||||
}
|
||||
|
||||
setValues(data) {
|
||||
Object.assign(this, data);
|
||||
this.processFields();
|
||||
}
|
||||
|
||||
processFields() {
|
||||
// add name field
|
||||
if (!this.fields.find((df) => df.fieldname === 'name') && !this.isSingle) {
|
||||
this.fields = [
|
||||
{
|
||||
label: frappe.t`ID`,
|
||||
fieldname: 'name',
|
||||
fieldtype: 'Data',
|
||||
required: 1,
|
||||
readOnly: 1,
|
||||
},
|
||||
].concat(this.fields);
|
||||
}
|
||||
|
||||
this.fields = this.fields.map((df) => {
|
||||
// name field is always required
|
||||
if (df.fieldname === 'name') {
|
||||
df.required = 1;
|
||||
}
|
||||
|
||||
return df;
|
||||
});
|
||||
}
|
||||
|
||||
hasField(fieldname) {
|
||||
return this.getField(fieldname) ? true : false;
|
||||
}
|
||||
|
||||
getField(fieldname) {
|
||||
if (!this._field_map) {
|
||||
this._field_map = {};
|
||||
for (let field of this.fields) {
|
||||
this._field_map[field.fieldname] = field;
|
||||
}
|
||||
}
|
||||
return this._field_map[fieldname];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get fields filtered by filters
|
||||
* @param {Object} filters
|
||||
*
|
||||
* Usage:
|
||||
* meta = frappe.getMeta('ToDo')
|
||||
* dataFields = meta.getFieldsWith({ fieldtype: 'Data' })
|
||||
*/
|
||||
getFieldsWith(filters) {
|
||||
return this.fields.filter((df) => {
|
||||
let match = true;
|
||||
for (const key in filters) {
|
||||
const value = filters[key];
|
||||
match = match && df[key] === value;
|
||||
}
|
||||
return match;
|
||||
});
|
||||
}
|
||||
|
||||
getLabel(fieldname) {
|
||||
let df = this.getField(fieldname);
|
||||
return df.getLabel || df.label;
|
||||
}
|
||||
|
||||
getTableFields() {
|
||||
if (this._tableFields === undefined) {
|
||||
this._tableFields = this.fields.filter(
|
||||
(field) => field.fieldtype === 'Table'
|
||||
);
|
||||
}
|
||||
return this._tableFields;
|
||||
}
|
||||
|
||||
getFormulaFields() {
|
||||
if (this._formulaFields === undefined) {
|
||||
this._formulaFields = this.fields.filter((field) => field.formula);
|
||||
}
|
||||
return this._formulaFields;
|
||||
}
|
||||
|
||||
hasFormula() {
|
||||
if (this._hasFormula === undefined) {
|
||||
this._hasFormula = false;
|
||||
if (this.getFormulaFields().length) {
|
||||
this._hasFormula = true;
|
||||
} else {
|
||||
for (let tablefield of this.getTableFields()) {
|
||||
if (frappe.getMeta(tablefield.childtype).getFormulaFields().length) {
|
||||
this._hasFormula = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return this._hasFormula;
|
||||
}
|
||||
|
||||
getBaseDocType() {
|
||||
return this.basedOn || this.name;
|
||||
}
|
||||
|
||||
async set(fieldname, value) {
|
||||
this[fieldname] = value;
|
||||
await this.trigger(fieldname);
|
||||
}
|
||||
|
||||
get(fieldname) {
|
||||
return this[fieldname];
|
||||
}
|
||||
|
||||
getValidFields({ withChildren = true } = {}) {
|
||||
if (!this._validFields) {
|
||||
this._validFields = [];
|
||||
this._validFieldsWithChildren = [];
|
||||
|
||||
const _add = (field) => {
|
||||
this._validFields.push(field);
|
||||
this._validFieldsWithChildren.push(field);
|
||||
};
|
||||
|
||||
// fields validation
|
||||
this.fields.forEach((df, i) => {
|
||||
if (!df.fieldname) {
|
||||
throw new frappe.errors.ValidationError(
|
||||
`DocType ${this.name}: "fieldname" is required for field at index ${i}`
|
||||
);
|
||||
}
|
||||
if (!df.fieldtype) {
|
||||
throw new frappe.errors.ValidationError(
|
||||
`DocType ${this.name}: "fieldtype" is required for field "${df.fieldname}"`
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
const doctypeFields = this.fields.map((field) => field.fieldname);
|
||||
|
||||
// standard fields
|
||||
for (let field of model.commonFields) {
|
||||
if (
|
||||
frappe.db.typeMap[field.fieldtype] &&
|
||||
!doctypeFields.includes(field.fieldname)
|
||||
) {
|
||||
_add(field);
|
||||
}
|
||||
}
|
||||
|
||||
if (this.isSubmittable) {
|
||||
_add({
|
||||
fieldtype: 'Check',
|
||||
fieldname: 'submitted',
|
||||
label: frappe.t`Submitted`,
|
||||
});
|
||||
}
|
||||
|
||||
if (this.isChild) {
|
||||
// child fields
|
||||
for (let field of model.childFields) {
|
||||
if (
|
||||
frappe.db.typeMap[field.fieldtype] &&
|
||||
!doctypeFields.includes(field.fieldname)
|
||||
) {
|
||||
_add(field);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// parent fields
|
||||
for (let field of model.parentFields) {
|
||||
if (
|
||||
frappe.db.typeMap[field.fieldtype] &&
|
||||
!doctypeFields.includes(field.fieldname)
|
||||
) {
|
||||
_add(field);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.isTree) {
|
||||
// tree fields
|
||||
for (let field of model.treeFields) {
|
||||
if (
|
||||
frappe.db.typeMap[field.fieldtype] &&
|
||||
!doctypeFields.includes(field.fieldname)
|
||||
) {
|
||||
_add(field);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// doctype fields
|
||||
for (let field of this.fields) {
|
||||
let include = frappe.db.typeMap[field.fieldtype];
|
||||
|
||||
if (include) {
|
||||
_add(field);
|
||||
}
|
||||
|
||||
// include tables if (withChildren = True)
|
||||
if (!include && field.fieldtype === 'Table') {
|
||||
this._validFieldsWithChildren.push(field);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (withChildren) {
|
||||
return this._validFieldsWithChildren;
|
||||
} else {
|
||||
return this._validFields;
|
||||
}
|
||||
}
|
||||
|
||||
getKeywordFields() {
|
||||
if (!this._keywordFields) {
|
||||
this._keywordFields = this.keywordFields;
|
||||
if (!(this._keywordFields && this._keywordFields.length && this.fields)) {
|
||||
this._keywordFields = this.fields
|
||||
.filter((field) => field.fieldtype !== 'Table' && field.required)
|
||||
.map((field) => field.fieldname);
|
||||
}
|
||||
if (!(this._keywordFields && this._keywordFields.length)) {
|
||||
this._keywordFields = ['name'];
|
||||
}
|
||||
}
|
||||
return this._keywordFields;
|
||||
}
|
||||
|
||||
getQuickEditFields() {
|
||||
if (this.quickEditFields) {
|
||||
return this.quickEditFields.map((fieldname) => this.getField(fieldname));
|
||||
}
|
||||
return this.getFieldsWith({ required: 1 });
|
||||
}
|
||||
|
||||
validateSelect(field, value) {
|
||||
let options = field.options;
|
||||
if (!options) return;
|
||||
if (!field.required && value == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
let validValues = options;
|
||||
|
||||
if (typeof options === 'string') {
|
||||
// values given as string
|
||||
validValues = options.split('\n');
|
||||
}
|
||||
|
||||
if (typeof options[0] === 'object') {
|
||||
// options as array of {label, value} pairs
|
||||
validValues = options.map((o) => o.value);
|
||||
}
|
||||
|
||||
if (!validValues.includes(value)) {
|
||||
throw new frappe.errors.ValueError(
|
||||
// prettier-ignore
|
||||
`DocType ${this.name}: Invalid value "${value}" for "${field.label}". Must be one of ${options.join(', ')}`
|
||||
);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
async trigger(event, params = {}) {
|
||||
Object.assign(params, {
|
||||
doc: this,
|
||||
name: event,
|
||||
});
|
||||
|
||||
await super.trigger(event, params);
|
||||
}
|
||||
|
||||
setDefaultIndicators() {
|
||||
if (!this.indicators) {
|
||||
if (this.isSubmittable) {
|
||||
this.indicators = {
|
||||
key: 'submitted',
|
||||
colors: {
|
||||
0: indicatorColor.GRAY,
|
||||
1: indicatorColor.BLUE,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
getIndicatorColor(doc) {
|
||||
if (frappe.isDirty(this.name, doc.name)) {
|
||||
return indicatorColor.ORANGE;
|
||||
} else {
|
||||
if (this.indicators) {
|
||||
let value = doc[this.indicators.key];
|
||||
if (value) {
|
||||
return this.indicators.colors[value] || indicatorColor.GRAY;
|
||||
} else {
|
||||
return indicatorColor.GRAY;
|
||||
}
|
||||
} else {
|
||||
return indicatorColor.GRAY;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -1,119 +0,0 @@
|
||||
import frappe from 'frappe';
|
||||
import { getRandomString } from 'frappe/utils';
|
||||
|
||||
export async function isNameAutoSet(doctype) {
|
||||
const doc = frappe.getNewDoc(doctype);
|
||||
if (doc.meta.naming === 'autoincrement') {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!doc.meta.settings) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const { numberSeries } = await doc.getSettings();
|
||||
if (numberSeries) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
export async function setName(doc) {
|
||||
if (frappe.isServer) {
|
||||
// if is server, always name again if autoincrement or other
|
||||
if (doc.meta.naming === 'autoincrement') {
|
||||
doc.name = await getNextId(doc.doctype);
|
||||
return;
|
||||
}
|
||||
|
||||
// Current, per doc number series
|
||||
if (doc.numberSeries) {
|
||||
doc.name = await getSeriesNext(doc.numberSeries, doc.doctype);
|
||||
return;
|
||||
}
|
||||
|
||||
// Legacy, using doc settings for number series
|
||||
if (doc.meta.settings) {
|
||||
const numberSeries = (await doc.getSettings()).numberSeries;
|
||||
if (!numberSeries) {
|
||||
return;
|
||||
}
|
||||
|
||||
doc.name = await getSeriesNext(numberSeries, doc.doctype);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (doc.name) {
|
||||
return;
|
||||
}
|
||||
|
||||
// name === doctype for Single
|
||||
if (doc.meta.isSingle) {
|
||||
doc.name = doc.meta.name;
|
||||
return;
|
||||
}
|
||||
|
||||
// assign a random name by default
|
||||
// override doc to set a name
|
||||
if (!doc.name) {
|
||||
doc.name = getRandomString();
|
||||
}
|
||||
}
|
||||
|
||||
export async function getNextId(doctype) {
|
||||
// get the last inserted row
|
||||
let lastInserted = await getLastInserted(doctype);
|
||||
let name = 1;
|
||||
if (lastInserted) {
|
||||
let lastNumber = parseInt(lastInserted.name);
|
||||
if (isNaN(lastNumber)) lastNumber = 0;
|
||||
name = lastNumber + 1;
|
||||
}
|
||||
return (name + '').padStart(9, '0');
|
||||
}
|
||||
|
||||
export async function getLastInserted(doctype) {
|
||||
const lastInserted = await frappe.db.getAll({
|
||||
doctype: doctype,
|
||||
fields: ['name'],
|
||||
limit: 1,
|
||||
order_by: 'creation',
|
||||
order: 'desc',
|
||||
});
|
||||
return lastInserted && lastInserted.length ? lastInserted[0] : null;
|
||||
}
|
||||
|
||||
export async function getSeriesNext(prefix, doctype) {
|
||||
let series;
|
||||
|
||||
try {
|
||||
series = await frappe.getDoc('NumberSeries', prefix);
|
||||
} catch (e) {
|
||||
if (!e.statusCode || e.statusCode !== 404) {
|
||||
throw e;
|
||||
}
|
||||
|
||||
await createNumberSeries(prefix, doctype);
|
||||
series = await frappe.getDoc('NumberSeries', prefix);
|
||||
}
|
||||
|
||||
return await series.next(doctype);
|
||||
}
|
||||
|
||||
export async function createNumberSeries(prefix, referenceType, start = 1001) {
|
||||
const exists = await frappe.db.exists('NumberSeries', prefix);
|
||||
if (exists) {
|
||||
return;
|
||||
}
|
||||
|
||||
const series = frappe.newDoc({
|
||||
doctype: 'NumberSeries',
|
||||
name: prefix,
|
||||
start,
|
||||
referenceType,
|
||||
});
|
||||
|
||||
await series.insert();
|
||||
}
|
@ -1,26 +0,0 @@
|
||||
import frappe from 'frappe';
|
||||
|
||||
export default async function runPatches(patchList) {
|
||||
const patchesAlreadyRun = (
|
||||
await frappe.db.knex('PatchRun').select('name')
|
||||
).map(({ name }) => name);
|
||||
|
||||
for (let patch of patchList) {
|
||||
if (patchesAlreadyRun.includes(patch.patchName)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
await runPatch(patch);
|
||||
}
|
||||
}
|
||||
|
||||
async function runPatch({ patchName, patchFunction }) {
|
||||
try {
|
||||
await patchFunction();
|
||||
const patchRun = frappe.getNewDoc('PatchRun');
|
||||
patchRun.name = patchName;
|
||||
await patchRun.insert();
|
||||
} catch (error) {
|
||||
console.error(`could not run ${patchName}`, error);
|
||||
}
|
||||
}
|
@ -1,61 +0,0 @@
|
||||
const { t } = require('frappe');
|
||||
|
||||
module.exports = {
|
||||
name: 'File',
|
||||
doctype: 'DocType',
|
||||
isSingle: 0,
|
||||
keywordFields: ['name', 'filename'],
|
||||
fields: [
|
||||
{
|
||||
fieldname: 'name',
|
||||
label: t`File Path`,
|
||||
fieldtype: 'Data',
|
||||
required: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'filename',
|
||||
label: t`File Name`,
|
||||
fieldtype: 'Data',
|
||||
required: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'mimetype',
|
||||
label: t`MIME Type`,
|
||||
fieldtype: 'Data',
|
||||
},
|
||||
{
|
||||
fieldname: 'size',
|
||||
label: t`File Size`,
|
||||
fieldtype: 'Int',
|
||||
},
|
||||
{
|
||||
fieldname: 'referenceDoctype',
|
||||
label: t`Reference DocType`,
|
||||
fieldtype: 'Data',
|
||||
},
|
||||
{
|
||||
fieldname: 'referenceName',
|
||||
label: t`Reference Name`,
|
||||
fieldtype: 'Data',
|
||||
},
|
||||
{
|
||||
fieldname: 'referenceField',
|
||||
label: t`Reference Field`,
|
||||
fieldtype: 'Data',
|
||||
},
|
||||
],
|
||||
layout: [
|
||||
{
|
||||
columns: [{ fields: ['filename'] }],
|
||||
},
|
||||
{
|
||||
columns: [{ fields: ['mimetype'] }, { fields: ['size'] }],
|
||||
},
|
||||
{
|
||||
columns: [
|
||||
{ fields: ['referenceDoctype'] },
|
||||
{ fields: ['referenceName'] },
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
@ -1,66 +0,0 @@
|
||||
import { t } from 'frappe';
|
||||
import NumberSeries from './NumberSeriesDocument.js';
|
||||
|
||||
const referenceTypeMap = {
|
||||
SalesInvoice: t`Invoice`,
|
||||
PurchaseInvoice: t`Bill`,
|
||||
Payment: t`Payment`,
|
||||
JournalEntry: t`Journal Entry`,
|
||||
Quotation: t`Quotation`,
|
||||
SalesOrder: t`SalesOrder`,
|
||||
Fulfillment: t`Fulfillment`,
|
||||
PurchaseOrder: t`PurchaseOrder`,
|
||||
PurchaseReceipt: t`PurchaseReceipt`,
|
||||
'-': t`None`,
|
||||
};
|
||||
|
||||
export default {
|
||||
name: 'NumberSeries',
|
||||
label: t`Number Series`,
|
||||
documentClass: NumberSeries,
|
||||
doctype: 'DocType',
|
||||
isSingle: 0,
|
||||
isChild: 0,
|
||||
keywordFields: [],
|
||||
fields: [
|
||||
{
|
||||
fieldname: 'name',
|
||||
label: t`Prefix`,
|
||||
fieldtype: 'Data',
|
||||
required: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'start',
|
||||
label: t`Start`,
|
||||
fieldtype: 'Int',
|
||||
default: 1001,
|
||||
required: 1,
|
||||
minvalue: 0,
|
||||
},
|
||||
{
|
||||
fieldname: 'padZeros',
|
||||
label: t`Pad Zeros`,
|
||||
fieldtype: 'Int',
|
||||
default: 4,
|
||||
required: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'referenceType',
|
||||
label: t`Reference Type`,
|
||||
fieldtype: 'Select',
|
||||
options: Object.keys(referenceTypeMap),
|
||||
map: referenceTypeMap,
|
||||
default: '-',
|
||||
required: 1,
|
||||
readOnly: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'current',
|
||||
label: t`Current`,
|
||||
fieldtype: 'Int',
|
||||
required: 1,
|
||||
readOnly: 1,
|
||||
},
|
||||
],
|
||||
quickEditFields: ['start', 'padZeros', 'referenceType'],
|
||||
};
|
@ -1,37 +0,0 @@
|
||||
import { getPaddedName } from '@/utils';
|
||||
import frappe from 'frappe';
|
||||
import BaseDocument from 'frappe/model/document';
|
||||
|
||||
export default class NumberSeries extends BaseDocument {
|
||||
validate() {
|
||||
if (!this.current) {
|
||||
this.current = this.start;
|
||||
}
|
||||
}
|
||||
|
||||
async next(doctype) {
|
||||
this.validate();
|
||||
|
||||
const exists = await this.checkIfCurrentExists(doctype);
|
||||
if (!exists) {
|
||||
return this.getPaddedName(this.current);
|
||||
}
|
||||
|
||||
this.current++;
|
||||
await this.update();
|
||||
return this.getPaddedName(this.current);
|
||||
}
|
||||
|
||||
async checkIfCurrentExists(doctype) {
|
||||
if (!doctype) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const name = this.getPaddedName(this.current);
|
||||
return await frappe.db.exists(doctype, name);
|
||||
}
|
||||
|
||||
getPaddedName(next) {
|
||||
return getPaddedName(this.name, next, this.padZeros);
|
||||
}
|
||||
}
|
@ -1,12 +0,0 @@
|
||||
const { t } = require('frappe');
|
||||
|
||||
module.exports = {
|
||||
name: 'PatchRun',
|
||||
fields: [
|
||||
{
|
||||
fieldname: 'name',
|
||||
fieldtype: 'Data',
|
||||
label: t`Name`,
|
||||
},
|
||||
],
|
||||
};
|
@ -1,33 +0,0 @@
|
||||
const { t } = require('frappe');
|
||||
|
||||
module.exports = {
|
||||
name: 'PrintFormat',
|
||||
label: t`Print Format`,
|
||||
doctype: 'DocType',
|
||||
isSingle: 0,
|
||||
isChild: 0,
|
||||
keywordFields: [],
|
||||
fields: [
|
||||
{
|
||||
fieldname: 'name',
|
||||
label: t`Name`,
|
||||
fieldtype: 'Data',
|
||||
required: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'for',
|
||||
label: t`For`,
|
||||
fieldtype: 'Data',
|
||||
required: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'template',
|
||||
label: t`Template`,
|
||||
fieldtype: 'Code',
|
||||
required: 1,
|
||||
options: {
|
||||
mode: 'text/html',
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
@ -1,17 +0,0 @@
|
||||
const { t } = require('frappe');
|
||||
|
||||
module.exports = {
|
||||
name: 'Role',
|
||||
doctype: 'DocType',
|
||||
isSingle: 0,
|
||||
isChild: 0,
|
||||
keywordFields: [],
|
||||
fields: [
|
||||
{
|
||||
fieldname: 'name',
|
||||
label: t`Name`,
|
||||
fieldtype: 'Data',
|
||||
required: 1,
|
||||
},
|
||||
],
|
||||
};
|
@ -1,23 +0,0 @@
|
||||
const { t } = require('frappe');
|
||||
|
||||
module.exports = {
|
||||
name: 'Session',
|
||||
doctype: 'DocType',
|
||||
isSingle: 0,
|
||||
isChild: 0,
|
||||
keywordFields: [],
|
||||
fields: [
|
||||
{
|
||||
fieldname: 'username',
|
||||
label: t`Username`,
|
||||
fieldtype: 'Data',
|
||||
required: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'password',
|
||||
label: t`Password`,
|
||||
fieldtype: 'Password',
|
||||
required: 1,
|
||||
},
|
||||
],
|
||||
};
|
@ -1,29 +0,0 @@
|
||||
const { t } = require('frappe');
|
||||
|
||||
module.exports = {
|
||||
name: 'SingleValue',
|
||||
doctype: 'DocType',
|
||||
isSingle: 0,
|
||||
isChild: 0,
|
||||
keywordFields: [],
|
||||
fields: [
|
||||
{
|
||||
fieldname: 'parent',
|
||||
label: t`Parent`,
|
||||
fieldtype: 'Data',
|
||||
required: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'fieldname',
|
||||
label: t`Fieldname`,
|
||||
fieldtype: 'Data',
|
||||
required: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'value',
|
||||
label: t`Value`,
|
||||
fieldtype: 'Data',
|
||||
required: 1,
|
||||
},
|
||||
],
|
||||
};
|
@ -1,94 +0,0 @@
|
||||
const { DateTime } = require('luxon');
|
||||
const { t } = require('frappe');
|
||||
const {
|
||||
DEFAULT_DISPLAY_PRECISION,
|
||||
DEFAULT_INTERNAL_PRECISION,
|
||||
DEFAULT_LOCALE,
|
||||
} = require('../../../utils/consts');
|
||||
|
||||
let dateFormatOptions = (() => {
|
||||
let formats = [
|
||||
'dd/MM/yyyy',
|
||||
'MM/dd/yyyy',
|
||||
'dd-MM-yyyy',
|
||||
'MM-dd-yyyy',
|
||||
'yyyy-MM-dd',
|
||||
'd MMM, y',
|
||||
'MMM d, y',
|
||||
];
|
||||
|
||||
let today = DateTime.local();
|
||||
|
||||
return formats.map((format) => {
|
||||
return {
|
||||
label: today.toFormat(format),
|
||||
value: format,
|
||||
};
|
||||
});
|
||||
})();
|
||||
|
||||
module.exports = {
|
||||
name: 'SystemSettings',
|
||||
label: t`System Settings`,
|
||||
doctype: 'DocType',
|
||||
isSingle: 1,
|
||||
isChild: 0,
|
||||
keywordFields: [],
|
||||
fields: [
|
||||
{
|
||||
fieldname: 'dateFormat',
|
||||
label: t`Date Format`,
|
||||
fieldtype: 'Select',
|
||||
options: dateFormatOptions,
|
||||
default: 'MMM d, y',
|
||||
required: 1,
|
||||
description: t`Sets the app-wide date display format.`,
|
||||
},
|
||||
{
|
||||
fieldname: 'locale',
|
||||
label: t`Locale`,
|
||||
fieldtype: 'Data',
|
||||
default: DEFAULT_LOCALE,
|
||||
description: t`Set the local code. This is used for number formatting.`,
|
||||
},
|
||||
{
|
||||
fieldname: 'displayPrecision',
|
||||
label: t`Display Precision`,
|
||||
fieldtype: 'Int',
|
||||
default: DEFAULT_DISPLAY_PRECISION,
|
||||
required: 1,
|
||||
minValue: 0,
|
||||
maxValue: 9,
|
||||
validate(value, doc) {
|
||||
if (value >= 0 && value <= 9) {
|
||||
return;
|
||||
}
|
||||
throw new frappe.errors.ValidationError(
|
||||
t`Display Precision should have a value between 0 and 9.`
|
||||
);
|
||||
},
|
||||
description: t`Sets how many digits are shown after the decimal point.`,
|
||||
},
|
||||
{
|
||||
fieldname: 'internalPrecision',
|
||||
label: t`Internal Precision`,
|
||||
fieldtype: 'Int',
|
||||
minValue: 0,
|
||||
default: DEFAULT_INTERNAL_PRECISION,
|
||||
description: t`Sets the internal precision used for monetary calculations. Above 6 should be sufficient for most currencies.`,
|
||||
},
|
||||
{
|
||||
fieldname: 'hideGetStarted',
|
||||
label: t`Hide Get Started`,
|
||||
fieldtype: 'Check',
|
||||
default: 0,
|
||||
description: t`Hides the Get Started section from the sidebar. Change will be visible on restart or refreshing the app.`,
|
||||
},
|
||||
],
|
||||
quickEditFields: [
|
||||
'locale',
|
||||
'dateFormat',
|
||||
'displayPrecision',
|
||||
'hideGetStarted',
|
||||
],
|
||||
};
|
@ -1,62 +0,0 @@
|
||||
const { indicators } = require('../../../../src/colors');
|
||||
const { BLUE, GREEN } = indicators;
|
||||
const { t } = require('frappe');
|
||||
|
||||
module.exports = {
|
||||
name: 'ToDo',
|
||||
label: t`To Do`,
|
||||
naming: 'autoincrement',
|
||||
isSingle: 0,
|
||||
keywordFields: ['subject', 'description'],
|
||||
titleField: 'subject',
|
||||
indicators: {
|
||||
key: 'status',
|
||||
colors: {
|
||||
Open: BLUE,
|
||||
Closed: GREEN,
|
||||
},
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
fieldname: 'subject',
|
||||
label: t`Subject`,
|
||||
placeholder: t`Subject`,
|
||||
fieldtype: 'Data',
|
||||
required: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'status',
|
||||
label: t`Status`,
|
||||
fieldtype: 'Select',
|
||||
options: ['Open', 'Closed'],
|
||||
default: 'Open',
|
||||
required: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'description',
|
||||
label: t`Description`,
|
||||
fieldtype: 'Text',
|
||||
},
|
||||
],
|
||||
|
||||
quickEditFields: ['status', 'description'],
|
||||
|
||||
actions: [
|
||||
{
|
||||
label: t`Close`,
|
||||
condition: (doc) => doc.status !== 'Closed',
|
||||
action: async (doc) => {
|
||||
await doc.set('status', 'Closed');
|
||||
await doc.update();
|
||||
},
|
||||
},
|
||||
{
|
||||
label: t`Re-Open`,
|
||||
condition: (doc) => doc.status !== 'Open',
|
||||
action: async (doc) => {
|
||||
await doc.set('status', 'Open');
|
||||
await doc.update();
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
@ -1,7 +0,0 @@
|
||||
const BaseList = require('frappe/client/view/list');
|
||||
|
||||
module.exports = class ToDoList extends BaseList {
|
||||
getFields(list) {
|
||||
return ['name', 'subject', 'status'];
|
||||
}
|
||||
};
|
@ -1,42 +0,0 @@
|
||||
const { t } = require('frappe');
|
||||
|
||||
module.exports = {
|
||||
name: 'User',
|
||||
doctype: 'DocType',
|
||||
isSingle: 0,
|
||||
isChild: 0,
|
||||
keywordFields: ['name', 'fullName'],
|
||||
fields: [
|
||||
{
|
||||
fieldname: 'name',
|
||||
label: t`Email`,
|
||||
fieldtype: 'Data',
|
||||
required: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'password',
|
||||
label: t`Password`,
|
||||
fieldtype: 'Password',
|
||||
required: 1,
|
||||
hidden: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'fullName',
|
||||
label: t`Full Name`,
|
||||
fieldtype: 'Data',
|
||||
required: 1,
|
||||
},
|
||||
{
|
||||
fieldname: 'roles',
|
||||
label: t`Roles`,
|
||||
fieldtype: 'Table',
|
||||
childtype: 'UserRole',
|
||||
},
|
||||
{
|
||||
fieldname: 'userId',
|
||||
label: t`User ID`,
|
||||
fieldtype: 'Data',
|
||||
hidden: 1,
|
||||
},
|
||||
],
|
||||
};
|
@ -1,17 +0,0 @@
|
||||
const { t } = require('frappe');
|
||||
|
||||
module.exports = {
|
||||
name: 'UserRole',
|
||||
doctype: 'DocType',
|
||||
isSingle: 0,
|
||||
isChild: 1,
|
||||
keywordFields: [],
|
||||
fields: [
|
||||
{
|
||||
fieldname: 'role',
|
||||
label: t`Role`,
|
||||
fieldtype: 'Link',
|
||||
target: 'Role',
|
||||
},
|
||||
],
|
||||
};
|
@ -1,25 +0,0 @@
|
||||
import File from './doctype/File/File.js';
|
||||
import NumberSeries from './doctype/NumberSeries/NumberSeries.js';
|
||||
import PatchRun from './doctype/PatchRun/PatchRun.js';
|
||||
import PrintFormat from './doctype/PrintFormat/PrintFormat.js';
|
||||
import Role from './doctype/Role/Role.js';
|
||||
import Session from './doctype/Session/Session.js';
|
||||
import SingleValue from './doctype/SingleValue/SingleValue.js';
|
||||
import SystemSettings from './doctype/SystemSettings/SystemSettings.js';
|
||||
import ToDo from './doctype/ToDo/ToDo.js';
|
||||
import User from './doctype/User/User.js';
|
||||
import UserRole from './doctype/UserRole/UserRole.js';
|
||||
|
||||
export default {
|
||||
NumberSeries,
|
||||
PrintFormat,
|
||||
Role,
|
||||
Session,
|
||||
SingleValue,
|
||||
SystemSettings,
|
||||
ToDo,
|
||||
User,
|
||||
UserRole,
|
||||
File,
|
||||
PatchRun,
|
||||
};
|
@ -1,15 +0,0 @@
|
||||
export const DEFAULT_INTERNAL_PRECISION = 11;
|
||||
export const DEFAULT_DISPLAY_PRECISION = 2;
|
||||
export const DEFAULT_LOCALE = 'en-IN';
|
||||
export const DEFAULT_LANGUAGE = 'English';
|
||||
export const DEFAULT_NUMBER_SERIES = {
|
||||
SalesInvoice: 'SINV-',
|
||||
PurchaseInvoice: 'PINV-',
|
||||
Payment: 'PAY-',
|
||||
JournalEntry: 'JV-',
|
||||
Quotation: 'QTN-',
|
||||
SalesOrder: 'SO-',
|
||||
Fulfillment: 'OF-',
|
||||
PurchaseOrder: 'PO-',
|
||||
PurchaseReceipt: 'PREC-',
|
||||
};
|
@ -1,117 +0,0 @@
|
||||
import frappe from 'frappe';
|
||||
import { DateTime } from 'luxon';
|
||||
import { DEFAULT_DISPLAY_PRECISION, DEFAULT_LOCALE } from './consts';
|
||||
|
||||
export default {
|
||||
format(value, df, doc) {
|
||||
if (!df) {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (typeof df === 'string') {
|
||||
df = { fieldtype: df };
|
||||
}
|
||||
|
||||
if (df.fieldtype === 'Currency') {
|
||||
const currency = getCurrency(df, doc);
|
||||
value = formatCurrency(value, currency);
|
||||
} else if (df.fieldtype === 'Date') {
|
||||
let dateFormat;
|
||||
if (!frappe.SystemSettings) {
|
||||
dateFormat = 'yyyy-MM-dd';
|
||||
} else {
|
||||
dateFormat = frappe.SystemSettings.dateFormat;
|
||||
}
|
||||
|
||||
if (typeof value === 'string') {
|
||||
// ISO String
|
||||
value = DateTime.fromISO(value);
|
||||
} else if (Object.prototype.toString.call(value) === '[object Date]') {
|
||||
// JS Date
|
||||
value = DateTime.fromJSDate(value);
|
||||
}
|
||||
|
||||
value = value.toFormat(dateFormat);
|
||||
if (value === 'Invalid DateTime') {
|
||||
value = '';
|
||||
}
|
||||
} else if (df.fieldtype === 'Check') {
|
||||
typeof parseInt(value) === 'number'
|
||||
? (value = parseInt(value))
|
||||
: (value = Boolean(value));
|
||||
} else {
|
||||
if (value === null || value === undefined) {
|
||||
value = '';
|
||||
} else {
|
||||
value = value + '';
|
||||
}
|
||||
}
|
||||
return value;
|
||||
},
|
||||
formatCurrency,
|
||||
formatNumber,
|
||||
};
|
||||
|
||||
function formatCurrency(value, currency) {
|
||||
let valueString;
|
||||
try {
|
||||
valueString = formatNumber(value);
|
||||
} catch (err) {
|
||||
err.message += ` value: '${value}', type: ${typeof value}`;
|
||||
throw err;
|
||||
}
|
||||
|
||||
const currencySymbol = frappe.currencySymbols[currency];
|
||||
if (currencySymbol) {
|
||||
return currencySymbol + ' ' + valueString;
|
||||
}
|
||||
|
||||
return valueString;
|
||||
}
|
||||
|
||||
function formatNumber(value) {
|
||||
const numberFormatter = getNumberFormatter();
|
||||
if (typeof value === 'number') {
|
||||
return numberFormatter.format(value);
|
||||
}
|
||||
|
||||
if (value.round) {
|
||||
return numberFormatter.format(value.round());
|
||||
}
|
||||
|
||||
const formattedNumber = numberFormatter.format(value);
|
||||
if (formattedNumber === 'NaN') {
|
||||
throw Error(
|
||||
`invalid value passed to formatNumber: '${value}' of type ${typeof value}`
|
||||
);
|
||||
}
|
||||
|
||||
return formattedNumber;
|
||||
}
|
||||
|
||||
function getNumberFormatter() {
|
||||
if (frappe.currencyFormatter) {
|
||||
return frappe.currencyFormatter;
|
||||
}
|
||||
|
||||
const locale = frappe.SystemSettings.locale ?? DEFAULT_LOCALE;
|
||||
const display =
|
||||
frappe.SystemSettings.displayPrecision ?? DEFAULT_DISPLAY_PRECISION;
|
||||
|
||||
return (frappe.currencyFormatter = Intl.NumberFormat(locale, {
|
||||
style: 'decimal',
|
||||
minimumFractionDigits: display,
|
||||
}));
|
||||
}
|
||||
|
||||
function getCurrency(df, doc) {
|
||||
if (!(doc && df.getCurrency)) {
|
||||
return df.currency || frappe.AccountingSettings.currency || '';
|
||||
}
|
||||
|
||||
if (doc.meta && doc.meta.isChild) {
|
||||
return df.getCurrency(doc, doc.parentdoc);
|
||||
}
|
||||
|
||||
return df.getCurrency(doc);
|
||||
}
|
@ -1,99 +0,0 @@
|
||||
const { pesa } = require('pesa');
|
||||
|
||||
Array.prototype.equals = function (array) {
|
||||
return (
|
||||
this.length == array.length &&
|
||||
this.every(function (item, i) {
|
||||
return item == array[i];
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
function slug(str) {
|
||||
return str
|
||||
.replace(/(?:^\w|[A-Z]|\b\w)/g, function (letter, index) {
|
||||
return index == 0 ? letter.toLowerCase() : letter.toUpperCase();
|
||||
})
|
||||
.replace(/\s+/g, '');
|
||||
}
|
||||
|
||||
function getRandomString() {
|
||||
return Math.random().toString(36).substr(3);
|
||||
}
|
||||
|
||||
async function sleep(seconds) {
|
||||
return new Promise((resolve) => {
|
||||
setTimeout(resolve, seconds * 1000);
|
||||
});
|
||||
}
|
||||
|
||||
function getQueryString(params) {
|
||||
if (!params) return '';
|
||||
let parts = [];
|
||||
for (let key in params) {
|
||||
if (key != null && params[key] != null) {
|
||||
parts.push(
|
||||
encodeURIComponent(key) + '=' + encodeURIComponent(params[key])
|
||||
);
|
||||
}
|
||||
}
|
||||
return parts.join('&');
|
||||
}
|
||||
|
||||
function asyncHandler(fn) {
|
||||
return (req, res, next) =>
|
||||
Promise.resolve(fn(req, res, next)).catch((err) => {
|
||||
console.log(err);
|
||||
// handle error
|
||||
res.status(err.statusCode || 500).send({ error: err.message });
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns array from 0 to n - 1
|
||||
* @param {Number} n
|
||||
*/
|
||||
function range(n) {
|
||||
return Array(n)
|
||||
.fill()
|
||||
.map((_, i) => i);
|
||||
}
|
||||
|
||||
function unique(list, key = (it) => it) {
|
||||
var seen = {};
|
||||
return list.filter((item) => {
|
||||
var k = key(item);
|
||||
return seen.hasOwnProperty(k) ? false : (seen[k] = true);
|
||||
});
|
||||
}
|
||||
|
||||
function getDuplicates(array) {
|
||||
let duplicates = [];
|
||||
for (let i in array) {
|
||||
let previous = array[i - 1];
|
||||
let current = array[i];
|
||||
|
||||
if (current === previous) {
|
||||
if (!duplicates.includes(current)) {
|
||||
duplicates.push(current);
|
||||
}
|
||||
}
|
||||
}
|
||||
return duplicates;
|
||||
}
|
||||
|
||||
function isPesa(value) {
|
||||
return value instanceof pesa().constructor;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
slug,
|
||||
getRandomString,
|
||||
sleep,
|
||||
getQueryString,
|
||||
asyncHandler,
|
||||
range,
|
||||
unique,
|
||||
getDuplicates,
|
||||
isPesa,
|
||||
};
|
@ -1 +0,0 @@
|
||||
module.exports = function () { return function () {}; };
|
@ -1,86 +0,0 @@
|
||||
import {
|
||||
getIndexFormat,
|
||||
getIndexList,
|
||||
getSnippets,
|
||||
getWhitespaceSanitized,
|
||||
} from '../../scripts/helpers';
|
||||
import { ValueError } from '../common/errors';
|
||||
|
||||
class TranslationString {
|
||||
constructor(...args) {
|
||||
this.args = args;
|
||||
}
|
||||
|
||||
get s() {
|
||||
return this.toString();
|
||||
}
|
||||
|
||||
ctx(context) {
|
||||
this.context = context;
|
||||
return this;
|
||||
}
|
||||
|
||||
#formatArg(arg) {
|
||||
return arg ?? '';
|
||||
}
|
||||
|
||||
#translate() {
|
||||
let indexFormat = getIndexFormat(this.args[0]);
|
||||
indexFormat = getWhitespaceSanitized(indexFormat);
|
||||
|
||||
const translatedIndexFormat =
|
||||
this.languageMap[indexFormat]?.translation ?? indexFormat;
|
||||
|
||||
this.argList = getIndexList(translatedIndexFormat).map(
|
||||
(i) => this.argList[i]
|
||||
);
|
||||
this.strList = getSnippets(translatedIndexFormat);
|
||||
}
|
||||
|
||||
#stitch() {
|
||||
if (!(this.args[0] instanceof Array)) {
|
||||
throw new ValueError(
|
||||
`invalid args passed to TranslationString ${
|
||||
this.args
|
||||
} of type ${typeof this.args[0]}`
|
||||
);
|
||||
}
|
||||
|
||||
this.strList = this.args[0];
|
||||
this.argList = this.args.slice(1);
|
||||
|
||||
if (this.languageMap) {
|
||||
this.#translate();
|
||||
}
|
||||
|
||||
return this.strList
|
||||
.map((s, i) => s + this.#formatArg(this.argList[i]))
|
||||
.join('')
|
||||
.replace(/\s+/g, ' ')
|
||||
.trim();
|
||||
}
|
||||
|
||||
toString() {
|
||||
return this.#stitch();
|
||||
}
|
||||
|
||||
toJSON() {
|
||||
return this.#stitch();
|
||||
}
|
||||
|
||||
valueOf() {
|
||||
return this.#stitch();
|
||||
}
|
||||
}
|
||||
|
||||
export function T(...args) {
|
||||
return new TranslationString(...args);
|
||||
}
|
||||
|
||||
export function t(...args) {
|
||||
return new TranslationString(...args).s;
|
||||
}
|
||||
|
||||
export function setLanguageMapOnTranslationString(languageMap) {
|
||||
TranslationString.prototype.languageMap = languageMap;
|
||||
}
|
112
fyo/README.md
Normal file
@ -0,0 +1,112 @@
|
||||
# Fyo
|
||||
|
||||
This is the underlying framework that runs **Books**; at some point it may be
|
||||
moved into a separate repo, but as of now it's in gestation.
|
||||
|
||||
The reason for maintaining a framework is to allow for varied backends.
|
||||
Currently Books runs on the electron renderer process and all database operations happen
|
||||
on the electron main process, which has access to node libraries. As the development
|
||||
of `Fyo` progresses it will allow for a browser frontend and a node server
|
||||
backend.
|
||||
|
||||
This platform variability will be handled by code in the `fyo/demux` subdirectory.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
**Singleton**: The `Fyo` class is used as a singleton throughout Books; this
|
||||
allows for a single source of truth and a common interface to access different
|
||||
modules such as `db`, `doc` and `auth`.
|
||||
|
||||
**Localization**: Since Books' functionality changes depending on region,
|
||||
regional information (`countryCode`) is required in the initialization process.
|
||||
|
||||
**`Doc`**: This is `fyo`'s abstraction for an ORM; the associated files are
|
||||
located in `model/doc.ts`, and all classes exported from `books/models` extend it.
|
||||
|
||||
### Terminology
|
||||
|
||||
- **Schema**: object that defines shape of the data in the database.
|
||||
- **Model**: the controller class that extends the `Doc` class, or the `Doc`
|
||||
class itself (if a specific controller doesn't exist).
|
||||
- **doc** (not `Doc`): instance of a Model, i.e. what has the data.
|
||||
|
||||
If you are confused, I understand.
|
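
To make the terms concrete, here is a minimal sketch; the `src/initFyo` import path is an assumption, while `fyo.schemaMap`, `fyo.doc.models` and `getNewDoc` are the handler members added later in this diff:

```ts
// Sketch only: `src/initFyo` as the singleton's import path and the ToDo
// subject value are assumptions for illustration.
import { fyo } from 'src/initFyo';

// Schema: the shape of the data, looked up by name.
const todoSchema = fyo.schemaMap['ToDo'];

// Model: the controller class registered for the schema (falls back to `Doc`).
const TodoModel = fyo.doc.models['ToDo'];

// doc: an instance of the Model, i.e. the thing that actually holds the data.
const todo = fyo.doc.getNewDoc('ToDo', { subject: 'File taxes' });
```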
||||
|
||||
## Initialization
|
||||
|
||||
There is a set of core models maintained in the `fyo/models`
|
||||
subdirectory; from these, the _SystemSettings_ field `countryCode` is used to
|
||||
configure regional information.
|
||||
|
||||
A few things have to be done on initialization:
|
||||
|
||||
#### 1. Connect To DB
|
||||
|
||||
If creating a new instance, call `fyo.db.createNewDatabase`; if loading an
|
||||
existing instance, call `fyo.db.connectToDatabase`.
|
||||
|
||||
Both of them take `countryCode` as an argument; `fyo.db.createNewDatabase`
|
||||
should be passed the `countryCode` since the schemas are built on the basis of
|
||||
it.
|
||||
|
||||
#### 2. Initialize and Register
|
||||
|
||||
This is done using `fyo.initializeAndRegister` after a database is connected; it should be
|
||||
passed the models and the regional models.
|
||||
|
||||
This sets the schemas and associated models on the `fyo` object along with a few
|
||||
other things.
|
||||
|
||||
### Sequence
|
||||
|
||||
**First Load**: i.e. registering or creating a new instance.
|
||||
|
||||
- Get `countryCode` from the setup wizard.
|
||||
- Create a new DB using `fyo.db.createNewDatabase` with the `countryCode`.
|
||||
- Get models and `regionalModels` using `countryCode` from `models/index.ts/getRegionalModels`.
|
||||
- Call `fyo.initializeAndRegister` with all the models.
|
||||
|
||||
**Next Load**: i.e. logging in or opening an existing instance.
|
||||
|
||||
- Connect to DB using `fyo.db.connectToDatabase` and get `countryCode` from the return.
|
||||
- Get models and `regionalModels` using `countryCode` from `models/index.ts/getRegionalModels`.
|
||||
- Call `fyo.initializeAndRegister` with all the models.
|
||||
|
||||
_Note: since **SystemSettings** is initialized during `fyo.initializeAndRegister`,
|
||||
the db needs to be set up first, else an error will be thrown._
|
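
A minimal sketch of both sequences above; the `src/initFyo` and `models` import paths and the exact `initializeAndRegister` signature are assumptions, while the `fyo.db` calls are the ones defined in `fyo/core/dbHandler.ts`:

```ts
// Sketch only, not the exact Books bootstrapping code.
import { fyo } from 'src/initFyo'; // assumed singleton import path
import { getRegionalModels, models } from 'models'; // assumed exports

export async function firstLoad(dbPath: string, countryCode: string) {
  // 1. Create the database; the schemas are built on the basis of the country code.
  countryCode = await fyo.db.createNewDatabase(dbPath, countryCode);

  // 2. Get the models and the regional model overrides for that region.
  const regionalModels = await getRegionalModels(countryCode);

  // 3. Set schemas and models on the fyo object (also initializes SystemSettings).
  await fyo.initializeAndRegister(models, regionalModels);
}

export async function nextLoad(dbPath: string) {
  // Connecting returns the country code stored in the instance.
  const countryCode = await fyo.db.connectToDatabase(dbPath);
  const regionalModels = await getRegionalModels(countryCode);
  await fyo.initializeAndRegister(models, regionalModels);
}
```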
||||
|
||||
## Testing
|
||||
|
||||
The `fyo` class is tested with `mocha` on the `node` side, so the
|
||||
demux classes are directly replaced by `node`-side managers such as
|
||||
`DatabaseManager`.
|
||||
|
||||
For this to work, the class signatures of the demux class and the manager have to
|
||||
be the same, which is maintained by the abstract demux classes.
|
||||
|
||||
`DatabaseManager` is used as the `DatabaseDemux` for testing without API or IPC
|
||||
calls. For `AuthDemux` the `DummyAuthDemux` class is used.
|
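
A rough sketch of such a test setup; the constructor option names and import paths below are assumptions, the point is only that `node`-side managers stand in for the demux classes:

```ts
// Sketch only: option names and import paths are assumptions.
import { Fyo } from 'fyo';
import { DatabaseManager } from 'backend/database/manager';
import { DummyAuthDemux } from 'fyo/tests/helpers';

const fyo = new Fyo({
  DatabaseDemux: DatabaseManager, // node-side DB manager instead of the IPC demux
  AuthDemux: DummyAuthDemux,      // dummy auth, no API calls
  isElectron: false,
  isTest: true,
});
```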
||||
|
||||
## Translations
|
||||
|
||||
All translations take place at runtime; for translations to work, a
|
||||
`LanguageMap` (for the definition, check `utils/types.ts`) has to be set.
|
||||
|
||||
This can be done using `fyo/utils/translation.ts/setLanguageMapOnTranslationString`.
|
||||
|
||||
Since translations happen at runtime, code that is evaluated before the language map
|
||||
is loaded won't be translated. To prevent this, don't maintain translation
|
||||
strings globally, since those are evaluated before the map is loaded.
|
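
A minimal sketch; the map entry shape and the `t` import path are assumptions inferred from the translation code elsewhere in this diff:

```ts
// Sketch only: the map entry shape and the `t` import are assumptions; real
// language maps are loaded from the translations CSV files.
import { t } from 'fyo';
import { setLanguageMapOnTranslationString } from 'fyo/utils/translation';

const languageMap = {
  Submitted: { translation: 'Eingereicht' },
};

// Must run before any translated string is evaluated.
setLanguageMapOnTranslationString(languageMap);

t`Submitted`; // -> 'Eingereicht'
```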
||||
|
||||
## Observers
|
||||
|
||||
The doc and db handlers have observers (instances of `Observable`) as
|
||||
properties; these can be accessed using:
|
||||
- `fyo.db.observer`
|
||||
- `fyo.doc.observer`
|
||||
|
||||
The purpose of the observer is to trigger registered callbacks when some `doc`
|
||||
operation or `db` operation takes place.
|
||||
|
||||
These are schema-level observers, i.e. they are registered with keys of the form
|
||||
`method:schemaName`. The callbacks receive the args passed to the corresponding methods.
|
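
For example (the `insert:`/`delete:` event keys and their payloads are taken from `fyo/core/dbHandler.ts`; the `src/initFyo` import path is an assumption):

```ts
// Sketch: schema-level observer callbacks on the db handler.
import { fyo } from 'src/initFyo'; // assumed singleton import path

// Fired by DatabaseHandler.insert with the inserted DocValueMap.
fyo.db.observer.on('insert:SalesInvoice', (docValueMap) => {
  console.log('SalesInvoice inserted', docValueMap);
});

// Fired by DatabaseHandler.delete with the deleted doc's name.
fyo.db.observer.on('delete:SalesInvoice', (name) => {
  console.log('SalesInvoice deleted', name);
});
```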
117
fyo/core/authHandler.ts
Normal file
@ -0,0 +1,117 @@
|
||||
import { Fyo } from 'fyo';
|
||||
import { AuthDemux } from 'fyo/demux/auth';
|
||||
import { AuthDemuxBase, TelemetryCreds } from 'utils/auth/types';
|
||||
import { AuthDemuxConstructor } from './types';
|
||||
|
||||
interface AuthConfig {
|
||||
serverURL: string;
|
||||
backend: string;
|
||||
port: number;
|
||||
}
|
||||
|
||||
interface Session {
|
||||
user: string;
|
||||
token: string;
|
||||
}
|
||||
|
||||
export class AuthHandler {
|
||||
#config: AuthConfig;
|
||||
#session: Session;
|
||||
fyo: Fyo;
|
||||
#demux: AuthDemuxBase;
|
||||
|
||||
constructor(fyo: Fyo, Demux?: AuthDemuxConstructor) {
|
||||
this.fyo = fyo;
|
||||
this.#config = {
|
||||
serverURL: '',
|
||||
backend: 'sqlite',
|
||||
port: 8000,
|
||||
};
|
||||
|
||||
this.#session = {
|
||||
user: '',
|
||||
token: '',
|
||||
};
|
||||
|
||||
if (Demux !== undefined) {
|
||||
this.#demux = new Demux(fyo.isElectron);
|
||||
} else {
|
||||
this.#demux = new AuthDemux(fyo.isElectron);
|
||||
}
|
||||
}
|
||||
|
||||
set user(value: string) {
|
||||
this.#session.user = value;
|
||||
}
|
||||
|
||||
get user(): string {
|
||||
return this.#session.user;
|
||||
}
|
||||
|
||||
get session(): Readonly<Session> {
|
||||
return { ...this.#session };
|
||||
}
|
||||
|
||||
get config(): Readonly<AuthConfig> {
|
||||
return { ...this.#config };
|
||||
}
|
||||
|
||||
init() {}
|
||||
async login(email: string, password: string) {
|
||||
if (email === 'Administrator') {
|
||||
this.#session.user = 'Administrator';
|
||||
return;
|
||||
}
|
||||
|
||||
const response = await fetch(this.#getServerURL() + '/api/login', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ email, password }),
|
||||
});
|
||||
|
||||
if (response.status === 200) {
|
||||
const res = await response.json();
|
||||
|
||||
this.#session.user = email;
|
||||
this.#session.token = res.token;
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
async signup(email: string, fullName: string, password: string) {
|
||||
const response = await fetch(this.#getServerURL() + '/api/signup', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ email, fullName, password }),
|
||||
});
|
||||
|
||||
if (response.status === 200) {
|
||||
return await response.json();
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
async logout() {
|
||||
// TODO: Implement this with auth flow
|
||||
}
|
||||
|
||||
purgeCache() {}
|
||||
|
||||
#getServerURL() {
|
||||
return this.#config.serverURL || '';
|
||||
}
|
||||
|
||||
async getTelemetryCreds(): Promise<TelemetryCreds> {
|
||||
return await this.#demux.getTelemetryCreds();
|
||||
}
|
||||
}
|
401
fyo/core/converter.ts
Normal file
@ -0,0 +1,401 @@
|
||||
import { Fyo } from 'fyo';
|
||||
import { Doc } from 'fyo/model/doc';
|
||||
import { isPesa } from 'fyo/utils';
|
||||
import { ValueError } from 'fyo/utils/errors';
|
||||
import { DateTime } from 'luxon';
|
||||
import { Money } from 'pesa';
|
||||
import { Field, FieldTypeEnum, RawValue, TargetField } from 'schemas/types';
|
||||
import { getIsNullOrUndef } from 'utils';
|
||||
import { DatabaseHandler } from './dbHandler';
|
||||
import { DocValue, DocValueMap, RawValueMap } from './types';
|
||||
|
||||
/**
|
||||
* # Converter
|
||||
*
|
||||
* Basically converts serializable RawValues from the db to DocValues used
|
||||
* by the frontend and vice versa.
|
||||
*
|
||||
* ## Value Conversion
|
||||
* It exposes two static methods: `toRawValue` and `toDocValue` that can be
|
||||
* used elsewhere given the fieldtype.
|
||||
*
|
||||
* ## Map Conversion
|
||||
* Two methods `toDocValueMap` and `toRawValueMap` are exposed but should be
|
||||
* used only from the `dbHandler`.
|
||||
*/
|
||||
|
||||
export class Converter {
|
||||
db: DatabaseHandler;
|
||||
fyo: Fyo;
|
||||
|
||||
constructor(db: DatabaseHandler, fyo: Fyo) {
|
||||
this.db = db;
|
||||
this.fyo = fyo;
|
||||
}
|
||||
|
||||
toDocValueMap(
|
||||
schemaName: string,
|
||||
rawValueMap: RawValueMap | RawValueMap[]
|
||||
): DocValueMap | DocValueMap[] {
|
||||
if (Array.isArray(rawValueMap)) {
|
||||
return rawValueMap.map((dv) => this.#toDocValueMap(schemaName, dv));
|
||||
} else {
|
||||
return this.#toDocValueMap(schemaName, rawValueMap);
|
||||
}
|
||||
}
|
||||
|
||||
toRawValueMap(
|
||||
schemaName: string,
|
||||
docValueMap: DocValueMap | DocValueMap[]
|
||||
): RawValueMap | RawValueMap[] {
|
||||
if (Array.isArray(docValueMap)) {
|
||||
return docValueMap.map((dv) => this.#toRawValueMap(schemaName, dv));
|
||||
} else {
|
||||
return this.#toRawValueMap(schemaName, docValueMap);
|
||||
}
|
||||
}
|
||||
|
||||
static toDocValue(value: RawValue, field: Field, fyo: Fyo): DocValue {
|
||||
switch (field.fieldtype) {
|
||||
case FieldTypeEnum.Currency:
|
||||
return toDocCurrency(value, field, fyo);
|
||||
case FieldTypeEnum.Date:
|
||||
return toDocDate(value, field);
|
||||
case FieldTypeEnum.Datetime:
|
||||
return toDocDate(value, field);
|
||||
case FieldTypeEnum.Int:
|
||||
return toDocInt(value, field);
|
||||
case FieldTypeEnum.Float:
|
||||
return toDocFloat(value, field);
|
||||
case FieldTypeEnum.Check:
|
||||
return toDocCheck(value, field);
|
||||
default:
|
||||
return toDocString(value, field);
|
||||
}
|
||||
}
|
||||
|
||||
static toRawValue(value: DocValue, field: Field, fyo: Fyo): RawValue {
|
||||
switch (field.fieldtype) {
|
||||
case FieldTypeEnum.Currency:
|
||||
return toRawCurrency(value, fyo, field);
|
||||
case FieldTypeEnum.Date:
|
||||
return toRawDate(value, field);
|
||||
case FieldTypeEnum.Datetime:
|
||||
return toRawDateTime(value, field);
|
||||
case FieldTypeEnum.Int:
|
||||
return toRawInt(value, field);
|
||||
case FieldTypeEnum.Float:
|
||||
return toRawFloat(value, field);
|
||||
case FieldTypeEnum.Check:
|
||||
return toRawCheck(value, field);
|
||||
case FieldTypeEnum.Link:
|
||||
return toRawLink(value, field);
|
||||
default:
|
||||
return toRawString(value, field);
|
||||
}
|
||||
}
|
||||
|
||||
#toDocValueMap(schemaName: string, rawValueMap: RawValueMap): DocValueMap {
|
||||
const fieldValueMap = this.db.fieldValueMap[schemaName];
|
||||
const docValueMap: DocValueMap = {};
|
||||
|
||||
for (const fieldname in rawValueMap) {
|
||||
const field = fieldValueMap[fieldname];
|
||||
const rawValue = rawValueMap[fieldname];
|
||||
if (!field) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (Array.isArray(rawValue)) {
|
||||
const parentSchemaName = (field as TargetField).target;
|
||||
docValueMap[fieldname] = rawValue.map((rv) =>
|
||||
this.#toDocValueMap(parentSchemaName, rv)
|
||||
);
|
||||
} else {
|
||||
docValueMap[fieldname] = Converter.toDocValue(
|
||||
rawValue,
|
||||
field,
|
||||
this.fyo
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return docValueMap;
|
||||
}
|
||||
|
||||
#toRawValueMap(schemaName: string, docValueMap: DocValueMap): RawValueMap {
|
||||
const fieldValueMap = this.db.fieldValueMap[schemaName];
|
||||
const rawValueMap: RawValueMap = {};
|
||||
|
||||
for (const fieldname in docValueMap) {
|
||||
const field = fieldValueMap[fieldname];
|
||||
const docValue = docValueMap[fieldname];
|
||||
|
||||
if (Array.isArray(docValue)) {
|
||||
const parentSchemaName = (field as TargetField).target;
|
||||
|
||||
rawValueMap[fieldname] = docValue.map((value) => {
|
||||
if (value instanceof Doc) {
|
||||
return this.#toRawValueMap(parentSchemaName, value.getValidDict());
|
||||
}
|
||||
|
||||
return this.#toRawValueMap(parentSchemaName, value as DocValueMap);
|
||||
});
|
||||
} else {
|
||||
rawValueMap[fieldname] = Converter.toRawValue(
|
||||
docValue,
|
||||
field,
|
||||
this.fyo
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return rawValueMap;
|
||||
}
|
||||
}
|
||||
|
||||
function toDocString(value: RawValue, field: Field) {
|
||||
if (value === null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (value === undefined) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (typeof value === 'string') {
|
||||
return value;
|
||||
}
|
||||
|
||||
throwError(value, field, 'doc');
|
||||
}
|
||||
|
||||
function toDocDate(value: RawValue, field: Field) {
|
||||
if ((value as any) instanceof Date) {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (value === null || value === '') {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (typeof value !== 'number' && typeof value !== 'string') {
|
||||
throwError(value, field, 'doc');
|
||||
}
|
||||
|
||||
const date = new Date(value);
|
||||
if (date.toString() === 'Invalid Date') {
|
||||
throwError(value, field, 'doc');
|
||||
}
|
||||
|
||||
return date;
|
||||
}
|
||||
|
||||
function toDocCurrency(value: RawValue, field: Field, fyo: Fyo) {
|
||||
if (isPesa(value)) {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (value === '') {
|
||||
return fyo.pesa(0);
|
||||
}
|
||||
|
||||
if (typeof value === 'string') {
|
||||
return fyo.pesa(value);
|
||||
}
|
||||
|
||||
if (typeof value === 'number') {
|
||||
return fyo.pesa(value);
|
||||
}
|
||||
|
||||
if (typeof value === 'boolean') {
|
||||
return fyo.pesa(Number(value));
|
||||
}
|
||||
|
||||
if (value === null) {
|
||||
return fyo.pesa(0);
|
||||
}
|
||||
|
||||
throwError(value, field, 'doc');
|
||||
}
|
||||
|
||||
function toDocInt(value: RawValue, field: Field): number {
|
||||
if (value === '') {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (typeof value === 'string') {
|
||||
value = parseInt(value);
|
||||
}
|
||||
|
||||
return toDocFloat(value, field);
|
||||
}
|
||||
|
||||
function toDocFloat(value: RawValue, field: Field): number {
|
||||
if (value === '') {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (typeof value === 'boolean') {
|
||||
return Number(value);
|
||||
}
|
||||
|
||||
if (typeof value === 'string') {
|
||||
value = parseFloat(value);
|
||||
}
|
||||
|
||||
if (value === null) {
|
||||
value = 0;
|
||||
}
|
||||
|
||||
if (typeof value === 'number' && !Number.isNaN(value)) {
|
||||
return value;
|
||||
}
|
||||
|
||||
throwError(value, field, 'doc');
|
||||
}
|
||||
|
||||
function toDocCheck(value: RawValue, field: Field): boolean {
|
||||
if (typeof value === 'boolean') {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (typeof value === 'string') {
|
||||
return !!parseFloat(value);
|
||||
}
|
||||
|
||||
if (typeof value === 'number') {
|
||||
return Boolean(value);
|
||||
}
|
||||
|
||||
throwError(value, field, 'doc');
|
||||
}
|
||||
|
||||
function toRawCurrency(value: DocValue, fyo: Fyo, field: Field): string {
|
||||
if (isPesa(value)) {
|
||||
return (value as Money).store;
|
||||
}
|
||||
|
||||
if (getIsNullOrUndef(value)) {
|
||||
return fyo.pesa(0).store;
|
||||
}
|
||||
|
||||
if (typeof value === 'number') {
|
||||
return fyo.pesa(value).store;
|
||||
}
|
||||
|
||||
if (typeof value === 'string') {
|
||||
return fyo.pesa(value).store;
|
||||
}
|
||||
|
||||
throwError(value, field, 'raw');
|
||||
}
|
||||
|
||||
function toRawInt(value: DocValue, field: Field): number {
|
||||
if (typeof value === 'string') {
|
||||
return parseInt(value);
|
||||
}
|
||||
|
||||
if (getIsNullOrUndef(value)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (typeof value === 'number') {
|
||||
return Math.floor(value as number);
|
||||
}
|
||||
|
||||
throwError(value, field, 'raw');
|
||||
}
|
||||
|
||||
function toRawFloat(value: DocValue, field: Field): number {
|
||||
if (typeof value === 'string') {
|
||||
return parseFloat(value);
|
||||
}
|
||||
|
||||
if (getIsNullOrUndef(value)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (typeof value === 'number') {
|
||||
return value;
|
||||
}
|
||||
|
||||
throwError(value, field, 'raw');
|
||||
}
|
||||
|
||||
function toRawDate(value: DocValue, field: Field): string | null {
|
||||
const dateTime = toRawDateTime(value, field);
|
||||
if (dateTime === null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return dateTime.split('T')[0];
|
||||
}
|
||||
|
||||
function toRawDateTime(value: DocValue, field: Field): string | null {
|
||||
if (value === null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (typeof value === 'string') {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (value instanceof Date) {
|
||||
return (value as Date).toISOString();
|
||||
}
|
||||
|
||||
if (value instanceof DateTime) {
|
||||
return (value as DateTime).toISO();
|
||||
}
|
||||
|
||||
throwError(value, field, 'raw');
|
||||
}
|
||||
|
||||
function toRawCheck(value: DocValue, field: Field): number {
|
||||
if (typeof value === 'number') {
|
||||
value = Boolean(value);
|
||||
}
|
||||
|
||||
if (typeof value === 'boolean') {
|
||||
return Number(value);
|
||||
}
|
||||
|
||||
throwError(value, field, 'raw');
|
||||
}
|
||||
|
||||
function toRawString(value: DocValue, field: Field): string | null {
|
||||
if (value === null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (value === undefined) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (typeof value === 'string') {
|
||||
return value;
|
||||
}
|
||||
|
||||
throwError(value, field, 'raw');
|
||||
}
|
||||
|
||||
function toRawLink(value: DocValue, field: Field): string | null {
|
||||
if (value === null || !(value as string)?.length) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (typeof value === 'string') {
|
||||
return value;
|
||||
}
|
||||
|
||||
throwError(value, field, 'raw');
|
||||
}
|
||||
|
||||
function throwError<T>(value: T, field: Field, type: 'raw' | 'doc'): never {
|
||||
throw new ValueError(
|
||||
`invalid ${type} conversion '${value}' of type ${typeof value} found, field: ${JSON.stringify(
|
||||
field
|
||||
)}`
|
||||
);
|
||||
}
|
296
fyo/core/dbHandler.ts
Normal file
@ -0,0 +1,296 @@
|
||||
import { SingleValue } from 'backend/database/types';
|
||||
import { Fyo } from 'fyo';
|
||||
import { DatabaseDemux } from 'fyo/demux/db';
|
||||
import { ValueError } from 'fyo/utils/errors';
|
||||
import Observable from 'fyo/utils/observable';
|
||||
import { translateSchema } from 'fyo/utils/translation';
|
||||
import { Field, RawValue, SchemaMap } from 'schemas/types';
|
||||
import { getMapFromList } from 'utils';
|
||||
import { DatabaseBase, DatabaseDemuxBase, GetAllOptions } from 'utils/db/types';
|
||||
import { schemaTranslateables } from 'utils/translationHelpers';
|
||||
import { LanguageMap } from 'utils/types';
|
||||
import { Converter } from './converter';
|
||||
import {
|
||||
DatabaseDemuxConstructor,
|
||||
DocValue,
|
||||
DocValueMap,
|
||||
RawValueMap,
|
||||
} from './types';
|
||||
|
||||
// Return types of Bespoke Queries
|
||||
type TopExpenses = { account: string; total: number }[];
|
||||
type TotalOutstanding = { total: number; outstanding: number };
|
||||
type Cashflow = { inflow: number; outflow: number; yearmonth: string }[];
|
||||
type Balance = { balance: number; yearmonth: string }[];
|
||||
type IncomeExpense = { income: Balance; expense: Balance };
|
||||
|
||||
export class DatabaseHandler extends DatabaseBase {
|
||||
#fyo: Fyo;
|
||||
converter: Converter;
|
||||
#demux: DatabaseDemuxBase;
|
||||
dbPath?: string;
|
||||
#schemaMap: SchemaMap = {};
|
||||
observer: Observable<never> = new Observable();
|
||||
fieldValueMap: Record<string, Record<string, Field>> = {};
|
||||
|
||||
constructor(fyo: Fyo, Demux?: DatabaseDemuxConstructor) {
|
||||
super();
|
||||
this.#fyo = fyo;
|
||||
this.converter = new Converter(this, this.#fyo);
|
||||
|
||||
if (Demux !== undefined) {
|
||||
this.#demux = new Demux(fyo.isElectron);
|
||||
} else {
|
||||
this.#demux = new DatabaseDemux(fyo.isElectron);
|
||||
}
|
||||
}
|
||||
|
||||
get schemaMap(): Readonly<SchemaMap> {
|
||||
return this.#schemaMap;
|
||||
}
|
||||
|
||||
get isConnected() {
|
||||
return !!this.dbPath;
|
||||
}
|
||||
|
||||
async createNewDatabase(dbPath: string, countryCode: string) {
|
||||
countryCode = await this.#demux.createNewDatabase(dbPath, countryCode);
|
||||
await this.init();
|
||||
this.dbPath = dbPath;
|
||||
return countryCode;
|
||||
}
|
||||
|
||||
async connectToDatabase(dbPath: string, countryCode?: string) {
|
||||
countryCode = await this.#demux.connectToDatabase(dbPath, countryCode);
|
||||
await this.init();
|
||||
this.dbPath = dbPath;
|
||||
return countryCode;
|
||||
}
|
||||
|
||||
async init() {
|
||||
this.#schemaMap = (await this.#demux.getSchemaMap()) as SchemaMap;
|
||||
|
||||
for (const schemaName in this.schemaMap) {
|
||||
const fields = this.schemaMap[schemaName]!.fields!;
|
||||
this.fieldValueMap[schemaName] = getMapFromList(fields, 'fieldname');
|
||||
}
|
||||
this.observer = new Observable();
|
||||
}
|
||||
|
||||
async translateSchemaMap(languageMap?: LanguageMap) {
|
||||
if (languageMap) {
|
||||
translateSchema(this.#schemaMap, languageMap, schemaTranslateables);
|
||||
} else {
|
||||
this.#schemaMap = (await this.#demux.getSchemaMap()) as SchemaMap;
|
||||
}
|
||||
}
|
||||
|
||||
purgeCache() {
|
||||
this.dbPath = undefined;
|
||||
this.#schemaMap = {};
|
||||
this.fieldValueMap = {};
|
||||
}
|
||||
|
||||
async insert(
|
||||
schemaName: string,
|
||||
docValueMap: DocValueMap
|
||||
): Promise<DocValueMap> {
|
||||
let rawValueMap = this.converter.toRawValueMap(
|
||||
schemaName,
|
||||
docValueMap
|
||||
) as RawValueMap;
|
||||
rawValueMap = (await this.#demux.call(
|
||||
'insert',
|
||||
schemaName,
|
||||
rawValueMap
|
||||
)) as RawValueMap;
|
||||
this.observer.trigger(`insert:${schemaName}`, docValueMap);
|
||||
return this.converter.toDocValueMap(schemaName, rawValueMap) as DocValueMap;
|
||||
}
|
||||
|
||||
// Read
|
||||
async get(
|
||||
schemaName: string,
|
||||
name: string,
|
||||
fields?: string | string[]
|
||||
): Promise<DocValueMap> {
|
||||
const rawValueMap = (await this.#demux.call(
|
||||
'get',
|
||||
schemaName,
|
||||
name,
|
||||
fields
|
||||
)) as RawValueMap;
|
||||
this.observer.trigger(`get:${schemaName}`, { name, fields });
|
||||
return this.converter.toDocValueMap(schemaName, rawValueMap) as DocValueMap;
|
||||
}
|
||||
|
||||
async getAll(
|
||||
schemaName: string,
|
||||
options: GetAllOptions = {}
|
||||
): Promise<DocValueMap[]> {
|
||||
const rawValueMap = await this.#getAll(schemaName, options);
|
||||
this.observer.trigger(`getAll:${schemaName}`, options);
|
||||
return this.converter.toDocValueMap(
|
||||
schemaName,
|
||||
rawValueMap
|
||||
) as DocValueMap[];
|
||||
}
|
||||
|
||||
async getAllRaw(
|
||||
schemaName: string,
|
||||
options: GetAllOptions = {}
|
||||
): Promise<RawValueMap[]> {
|
||||
const all = await this.#getAll(schemaName, options);
|
||||
this.observer.trigger(`getAllRaw:${schemaName}`, options);
|
||||
return all;
|
||||
}
|
||||
|
||||
async getSingleValues(
|
||||
...fieldnames: ({ fieldname: string; parent?: string } | string)[]
|
||||
): Promise<SingleValue<DocValue>> {
|
||||
const rawSingleValue = (await this.#demux.call(
|
||||
'getSingleValues',
|
||||
...fieldnames
|
||||
)) as SingleValue<RawValue>;
|
||||
|
||||
const docSingleValue: SingleValue<DocValue> = [];
|
||||
for (const sv of rawSingleValue) {
|
||||
const field = this.fieldValueMap[sv.parent][sv.fieldname];
|
||||
const value = Converter.toDocValue(sv.value, field, this.#fyo);
|
||||
|
||||
docSingleValue.push({
|
||||
value,
|
||||
parent: sv.parent,
|
||||
fieldname: sv.fieldname,
|
||||
});
|
||||
}
|
||||
|
||||
this.observer.trigger(`getSingleValues`, fieldnames);
|
||||
return docSingleValue;
|
||||
}
|
||||
|
||||
async count(
|
||||
schemaName: string,
|
||||
options: GetAllOptions = {}
|
||||
): Promise<number> {
|
||||
const rawValueMap = await this.#getAll(schemaName, options);
|
||||
const count = rawValueMap.length;
|
||||
this.observer.trigger(`count:${schemaName}`, options);
|
||||
return count;
|
||||
}
|
||||
|
||||
// Update
|
||||
async rename(
|
||||
schemaName: string,
|
||||
oldName: string,
|
||||
newName: string
|
||||
): Promise<void> {
|
||||
await this.#demux.call('rename', schemaName, oldName, newName);
|
||||
this.observer.trigger(`rename:${schemaName}`, { oldName, newName });
|
||||
}
|
||||
|
||||
async update(schemaName: string, docValueMap: DocValueMap): Promise<void> {
|
||||
const rawValueMap = this.converter.toRawValueMap(schemaName, docValueMap);
|
||||
await this.#demux.call('update', schemaName, rawValueMap);
|
||||
this.observer.trigger(`update:${schemaName}`, docValueMap);
|
||||
}
|
||||
|
||||
// Delete
|
||||
async delete(schemaName: string, name: string): Promise<void> {
|
||||
await this.#demux.call('delete', schemaName, name);
|
||||
this.observer.trigger(`delete:${schemaName}`, name);
|
||||
}
|
||||
|
||||
// Other
|
||||
async exists(schemaName: string, name?: string): Promise<boolean> {
|
||||
const doesExist = (await this.#demux.call(
|
||||
'exists',
|
||||
schemaName,
|
||||
name
|
||||
)) as boolean;
|
||||
this.observer.trigger(`exists:${schemaName}`, name);
|
||||
return doesExist;
|
||||
}
|
||||
|
||||
async close(): Promise<void> {
|
||||
await this.#demux.call('close');
|
||||
this.purgeCache();
|
||||
}
|
||||
|
||||
/**
|
||||
* Bespoke function
|
||||
*
|
||||
* These are functions to run custom queries that are too complex for
|
||||
* DatabaseCore and require use of knex or raw queries. The output
|
||||
* of these is not converted to DocValue and is used as is (RawValue).
|
||||
*
|
||||
* The query logic for these is in backend/database/bespoke.ts
|
||||
*/
|
||||
|
||||
async getLastInserted(schemaName: string): Promise<number> {
|
||||
if (this.schemaMap[schemaName]?.naming !== 'autoincrement') {
|
||||
throw new ValueError(
|
||||
`invalid schema, ${schemaName} does not have autoincrement naming`
|
||||
);
|
||||
}
|
||||
|
||||
return (await this.#demux.callBespoke(
|
||||
'getLastInserted',
|
||||
schemaName
|
||||
)) as number;
|
||||
}
|
||||
|
||||
async getTopExpenses(fromDate: string, toDate: string): Promise<TopExpenses> {
|
||||
return (await this.#demux.callBespoke(
|
||||
'getTopExpenses',
|
||||
fromDate,
|
||||
toDate
|
||||
)) as TopExpenses;
|
||||
}
|
||||
|
||||
async getTotalOutstanding(
|
||||
schemaName: string,
|
||||
fromDate: string,
|
||||
toDate: string
|
||||
): Promise<TotalOutstanding> {
|
||||
return (await this.#demux.callBespoke(
|
||||
'getTotalOutstanding',
|
||||
schemaName,
|
||||
fromDate,
|
||||
toDate
|
||||
)) as TotalOutstanding;
|
||||
}
|
||||
|
||||
async getCashflow(fromDate: string, toDate: string): Promise<Cashflow> {
|
||||
return (await this.#demux.callBespoke(
|
||||
'getCashflow',
|
||||
fromDate,
|
||||
toDate
|
||||
)) as Cashflow;
|
||||
}
|
||||
|
||||
async getIncomeAndExpenses(
|
||||
fromDate: string,
|
||||
toDate: string
|
||||
): Promise<IncomeExpense> {
|
||||
return (await this.#demux.callBespoke(
|
||||
'getIncomeAndExpenses',
|
||||
fromDate,
|
||||
toDate
|
||||
)) as IncomeExpense;
|
||||
}
|
||||
|
||||
/**
|
||||
* Internal methods
|
||||
*/
|
||||
async #getAll(
|
||||
schemaName: string,
|
||||
options: GetAllOptions = {}
|
||||
): Promise<RawValueMap[]> {
|
||||
return (await this.#demux.call(
|
||||
'getAll',
|
||||
schemaName,
|
||||
options
|
||||
)) as RawValueMap[];
|
||||
}
|
||||
}
|
174
fyo/core/docHandler.ts
Normal file
@ -0,0 +1,174 @@
|
||||
import { Doc } from 'fyo/model/doc';
|
||||
import { DocMap, ModelMap, SinglesMap } from 'fyo/model/types';
|
||||
import { coreModels } from 'fyo/models';
|
||||
import { NotFoundError, ValueError } from 'fyo/utils/errors';
|
||||
import Observable from 'fyo/utils/observable';
|
||||
import { Schema } from 'schemas/types';
|
||||
import { getRandomString } from 'utils';
|
||||
import { Fyo } from '..';
|
||||
import { DocValueMap } from './types';
|
||||
|
||||
export class DocHandler {
|
||||
fyo: Fyo;
|
||||
models: ModelMap = {};
|
||||
singles: SinglesMap = {};
|
||||
docs: Observable<DocMap | undefined> = new Observable();
|
||||
observer: Observable<never> = new Observable();
|
||||
|
||||
constructor(fyo: Fyo) {
|
||||
this.fyo = fyo;
|
||||
}
|
||||
|
||||
init() {
|
||||
this.models = {};
|
||||
this.singles = {};
|
||||
this.docs = new Observable();
|
||||
this.observer = new Observable();
|
||||
}
|
||||
|
||||
purgeCache() {
|
||||
this.init();
|
||||
}
|
||||
|
||||
registerModels(models: ModelMap, regionalModels: ModelMap = {}) {
|
||||
for (const schemaName in this.fyo.db.schemaMap) {
|
||||
if (coreModels[schemaName] !== undefined) {
|
||||
this.models[schemaName] = coreModels[schemaName];
|
||||
} else if (regionalModels[schemaName] !== undefined) {
|
||||
this.models[schemaName] = regionalModels[schemaName];
|
||||
} else if (models[schemaName] !== undefined) {
|
||||
this.models[schemaName] = models[schemaName];
|
||||
} else {
|
||||
this.models[schemaName] = Doc;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Doc Operations
|
||||
*/
|
||||
|
||||
async getDoc(
|
||||
schemaName: string,
|
||||
name?: string,
|
||||
options = { skipDocumentCache: false }
|
||||
) {
|
||||
if (name === undefined) {
|
||||
name = schemaName;
|
||||
}
|
||||
|
||||
if (name === schemaName && !this.fyo.schemaMap[schemaName]?.isSingle) {
|
||||
throw new ValueError(`${schemaName} is not a Single Schema`);
|
||||
}
|
||||
|
||||
let doc: Doc | undefined;
|
||||
if (!options?.skipDocumentCache) {
|
||||
doc = this.#getFromCache(schemaName, name);
|
||||
}
|
||||
|
||||
if (doc) {
|
||||
return doc;
|
||||
}
|
||||
|
||||
doc = this.getNewDoc(schemaName, { name });
|
||||
await doc.load();
|
||||
this.#addToCache(doc);
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
getNewDoc(
|
||||
schemaName: string,
|
||||
data: DocValueMap = {},
|
||||
cacheDoc: boolean = true,
|
||||
schema?: Schema,
|
||||
Model?: typeof Doc
|
||||
): Doc {
|
||||
if (!this.models[schemaName] && Model) {
|
||||
this.models[schemaName] = Model;
|
||||
}
|
||||
|
||||
Model ??= this.models[schemaName];
|
||||
schema ??= this.fyo.schemaMap[schemaName];
|
||||
|
||||
if (schema === undefined) {
|
||||
throw new NotFoundError(`Schema not found for ${schemaName}`);
|
||||
}
|
||||
|
||||
const doc = new Model!(schema, data, this.fyo);
|
||||
doc.name ??= getRandomString();
|
||||
if (cacheDoc) {
|
||||
this.#addToCache(doc);
|
||||
}
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cache operations
|
||||
*/
|
||||
|
||||
#addToCache(doc: Doc) {
|
||||
if (!doc.name) {
|
||||
return;
|
||||
}
|
||||
|
||||
const name = doc.name;
|
||||
const schemaName = doc.schemaName;
|
||||
|
||||
if (!this.docs[schemaName]) {
|
||||
this.docs.set(schemaName, {});
|
||||
this.#setCacheUpdationListeners(schemaName);
|
||||
}
|
||||
|
||||
this.docs.get(schemaName)![name] = doc;
|
||||
|
||||
// singles are also available as first-level objects
|
||||
if (schemaName === doc.name) {
|
||||
this.singles[name] = doc;
|
||||
}
|
||||
|
||||
// propagate change to `docs`
|
||||
doc.on('change', (params: unknown) => {
|
||||
this.docs!.trigger('change', params);
|
||||
});
|
||||
|
||||
doc.on('afterSync', () => {
|
||||
if (doc.name === name) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.#removeFromCache(doc.schemaName, name);
|
||||
this.#addToCache(doc);
|
||||
});
|
||||
}
|
||||
|
||||
#setCacheUpdationListeners(schemaName: string) {
|
||||
this.fyo.db.observer.on(`delete:${schemaName}`, (name: string) => {
|
||||
this.#removeFromCache(schemaName, name);
|
||||
});
|
||||
|
||||
this.fyo.db.observer.on(
|
||||
`rename:${schemaName}`,
|
||||
(names: { oldName: string; newName: string }) => {
|
||||
const doc = this.#getFromCache(schemaName, names.oldName);
|
||||
if (doc === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.#removeFromCache(schemaName, names.oldName);
|
||||
this.#addToCache(doc);
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#removeFromCache(schemaName: string, name: string) {
|
||||
const docMap = this.docs.get(schemaName);
|
||||
delete docMap?.[name];
|
||||
}
|
||||
|
||||
#getFromCache(schemaName: string, name: string): Doc | undefined {
|
||||
const docMap = this.docs.get(schemaName);
|
||||
return docMap?.[name];
|
||||
}
|
||||
}
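// Usage sketch (illustrative, schema and field names hypothetical): `getDoc`
// loads and caches an existing doc, `getNewDoc` creates an unsynced one with
// a random name.
//
//   const settings = await fyo.doc.getDoc('SystemSettings'); // single schema
//   const party = fyo.doc.getNewDoc('Party', { name: 'John Doe' });
//   await party.sync();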
|
51
fyo/core/types.ts
Normal file
@ -0,0 +1,51 @@
|
||||
import { Doc } from 'fyo/model/doc';
|
||||
import { Money } from 'pesa';
|
||||
import { RawValue } from 'schemas/types';
|
||||
import { AuthDemuxBase } from 'utils/auth/types';
|
||||
import { DatabaseDemuxBase } from 'utils/db/types';
|
||||
|
||||
export type DocValue =
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| Date
|
||||
| Money
|
||||
| null
|
||||
| undefined;
|
||||
export type DocValueMap = Record<string, DocValue | Doc[] | DocValueMap[]>;
|
||||
export type RawValueMap = Record<string, RawValue | RawValueMap[]>;
|
||||
|
||||
/**
 * DatabaseDemuxConstructor: type for a constructor that returns a DatabaseDemuxBase.
 * It's typed this way because `typeof AbstractClass` is invalid, as abstract
 * classes can't be initialized using `new`.
 *
 * AuthDemuxConstructor: same as the above, but for AuthDemuxBase.
 */
|
||||
|
||||
export type DatabaseDemuxConstructor = new (
|
||||
isElectron?: boolean
|
||||
) => DatabaseDemuxBase;
|
||||
|
||||
export type AuthDemuxConstructor = new (isElectron?: boolean) => AuthDemuxBase;
|
||||
|
||||
export enum ConfigKeys {
|
||||
Files = 'files',
|
||||
LastSelectedFilePath = 'lastSelectedFilePath',
|
||||
Language = 'language',
|
||||
DeviceId = 'deviceId',
|
||||
}
|
||||
|
||||
export interface ConfigFile {
|
||||
id: string;
|
||||
companyName: string;
|
||||
dbPath: string;
|
||||
openCount: number;
|
||||
}
|
||||
|
||||
export interface FyoConfig {
|
||||
DatabaseDemux?: DatabaseDemuxConstructor;
|
||||
AuthDemux?: AuthDemuxConstructor;
|
||||
isElectron?: boolean;
|
||||
isTest?: boolean;
|
||||
}
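// A minimal sketch of how this config is consumed (illustrative, not part of
// this commit): the Fyo constructor forwards the demux constructors to the
// auth and database handlers, which instantiate them for the given platform.
//
//   import { Fyo } from 'fyo';
//   import { DatabaseDemux } from 'fyo/demux/db';
//
//   const fyo = new Fyo({ DatabaseDemux, isElectron: true, isTest: false });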
|
22
fyo/demux/auth.ts
Normal file
@ -0,0 +1,22 @@
|
||||
import { ipcRenderer } from 'electron';
|
||||
import { AuthDemuxBase, TelemetryCreds } from 'utils/auth/types';
|
||||
import { IPC_ACTIONS } from 'utils/messages';
|
||||
|
||||
export class AuthDemux extends AuthDemuxBase {
|
||||
#isElectron: boolean = false;
|
||||
constructor(isElectron: boolean) {
|
||||
super();
|
||||
this.#isElectron = isElectron;
|
||||
}
|
||||
|
||||
async getTelemetryCreds(): Promise<TelemetryCreds> {
|
||||
if (this.#isElectron) {
|
||||
const creds = await ipcRenderer.invoke(IPC_ACTIONS.GET_CREDS);
|
||||
const url: string = creds?.telemetryUrl ?? '';
|
||||
const token: string = creds?.tokenString ?? '';
|
||||
return { url, token };
|
||||
} else {
|
||||
return { url: '', token: '' };
|
||||
}
|
||||
}
|
||||
}
|
55
fyo/demux/config.ts
Normal file
@ -0,0 +1,55 @@
|
||||
import config from 'utils/config';
|
||||
|
||||
export class Config {
|
||||
#useElectronConfig: boolean;
|
||||
fallback: Map<string, unknown> = new Map();
|
||||
constructor(isElectron: boolean) {
|
||||
this.#useElectronConfig = isElectron;
|
||||
}
|
||||
|
||||
get store(): Record<string, unknown> {
|
||||
if (this.#useElectronConfig) {
|
||||
return config.store;
|
||||
} else {
|
||||
const store: Record<string, unknown> = {};
|
||||
|
||||
for (const key of this.fallback.keys()) {
|
||||
store[key] = this.fallback.get(key);
|
||||
}
|
||||
|
||||
return store;
|
||||
}
|
||||
}
|
||||
|
||||
get(key: string, defaultValue?: unknown): unknown {
|
||||
if (this.#useElectronConfig) {
|
||||
return config.get(key, defaultValue);
|
||||
} else {
|
||||
return this.fallback.get(key) ?? defaultValue;
|
||||
}
|
||||
}
|
||||
|
||||
set(key: string, value: unknown) {
|
||||
if (this.#useElectronConfig) {
|
||||
config.set(key, value);
|
||||
} else {
|
||||
this.fallback.set(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
delete(key: string) {
|
||||
if (this.#useElectronConfig) {
|
||||
config.delete(key);
|
||||
} else {
|
||||
this.fallback.delete(key);
|
||||
}
|
||||
}
|
||||
|
||||
clear() {
|
||||
if (this.#useElectronConfig) {
|
||||
config.clear();
|
||||
} else {
|
||||
this.fallback.clear();
|
||||
}
|
||||
}
|
||||
}
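// Usage sketch (illustrative, key and value hypothetical): the same interface
// works on both platforms; on Electron calls are forwarded to the
// `utils/config` store, otherwise values live in the in-memory fallback Map.
//
//   const conf = new Config(isElectron);
//   conf.set('language', 'English');
//   const language = conf.get('language', 'English');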
|
96
fyo/demux/db.ts
Normal file
@ -0,0 +1,96 @@
|
||||
import { ipcRenderer } from 'electron';
|
||||
import { DatabaseError, NotImplemented } from 'fyo/utils/errors';
|
||||
import { SchemaMap } from 'schemas/types';
|
||||
import { DatabaseDemuxBase, DatabaseMethod } from 'utils/db/types';
|
||||
import { DatabaseResponse } from 'utils/ipc/types';
|
||||
import { IPC_ACTIONS } from 'utils/messages';
|
||||
|
||||
export class DatabaseDemux extends DatabaseDemuxBase {
|
||||
#isElectron: boolean = false;
|
||||
constructor(isElectron: boolean) {
|
||||
super();
|
||||
this.#isElectron = isElectron;
|
||||
}
|
||||
|
||||
async #handleDBCall(func: () => Promise<DatabaseResponse>): Promise<unknown> {
|
||||
const response = await func();
|
||||
|
||||
if (response.error?.name) {
|
||||
const { name, message, stack } = response.error;
|
||||
const dberror = new DatabaseError(`${name}\n${message}`);
|
||||
dberror.stack = stack;
|
||||
|
||||
throw dberror;
|
||||
}
|
||||
|
||||
return response.data;
|
||||
}
|
||||
|
||||
async getSchemaMap(): Promise<SchemaMap> {
|
||||
if (this.#isElectron) {
|
||||
return (await this.#handleDBCall(async function dbFunc() {
|
||||
return await ipcRenderer.invoke(IPC_ACTIONS.DB_SCHEMA);
|
||||
})) as SchemaMap;
|
||||
}
|
||||
|
||||
throw new NotImplemented();
|
||||
}
|
||||
|
||||
async createNewDatabase(
|
||||
dbPath: string,
|
||||
countryCode?: string
|
||||
): Promise<string> {
|
||||
if (this.#isElectron) {
|
||||
return (await this.#handleDBCall(async function dbFunc() {
|
||||
return await ipcRenderer.invoke(
|
||||
IPC_ACTIONS.DB_CREATE,
|
||||
dbPath,
|
||||
countryCode
|
||||
);
|
||||
})) as string;
|
||||
}
|
||||
|
||||
throw new NotImplemented();
|
||||
}
|
||||
|
||||
async connectToDatabase(
|
||||
dbPath: string,
|
||||
countryCode?: string
|
||||
): Promise<string> {
|
||||
if (this.#isElectron) {
|
||||
return (await this.#handleDBCall(async function dbFunc() {
|
||||
return await ipcRenderer.invoke(
|
||||
IPC_ACTIONS.DB_CONNECT,
|
||||
dbPath,
|
||||
countryCode
|
||||
);
|
||||
})) as string;
|
||||
}
|
||||
|
||||
throw new NotImplemented();
|
||||
}
|
||||
|
||||
async call(method: DatabaseMethod, ...args: unknown[]): Promise<unknown> {
|
||||
if (this.#isElectron) {
|
||||
return (await this.#handleDBCall(async function dbFunc() {
|
||||
return await ipcRenderer.invoke(IPC_ACTIONS.DB_CALL, method, ...args);
|
||||
})) as unknown;
|
||||
}
|
||||
|
||||
throw new NotImplemented();
|
||||
}
|
||||
|
||||
async callBespoke(method: string, ...args: unknown[]): Promise<unknown> {
|
||||
if (this.#isElectron) {
|
||||
return (await this.#handleDBCall(async function dbFunc() {
|
||||
return await ipcRenderer.invoke(
|
||||
IPC_ACTIONS.DB_BESPOKE,
|
||||
method,
|
||||
...args
|
||||
);
|
||||
})) as unknown;
|
||||
}
|
||||
|
||||
throw new NotImplemented();
|
||||
}
|
||||
}
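// Usage sketch (illustrative, Electron renderer only): every call goes
// through #handleDBCall, so a failed IPC response surfaces as a DatabaseError
// on the renderer side.
//
//   const demux = new DatabaseDemux(true);
//   const schemaMap = await demux.getSchemaMap();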
|
230
fyo/index.ts
Normal file
@ -0,0 +1,230 @@
|
||||
import { getMoneyMaker, MoneyMaker } from 'pesa';
|
||||
import { Field } from 'schemas/types';
|
||||
import { getIsNullOrUndef } from 'utils';
|
||||
import { markRaw } from 'vue';
|
||||
import { AuthHandler } from './core/authHandler';
|
||||
import { DatabaseHandler } from './core/dbHandler';
|
||||
import { DocHandler } from './core/docHandler';
|
||||
import { DocValue, FyoConfig } from './core/types';
|
||||
import { Config } from './demux/config';
|
||||
import { Doc } from './model/doc';
|
||||
import { ModelMap } from './model/types';
|
||||
import { TelemetryManager } from './telemetry/telemetry';
|
||||
import {
|
||||
DEFAULT_CURRENCY,
|
||||
DEFAULT_DISPLAY_PRECISION,
|
||||
DEFAULT_INTERNAL_PRECISION,
|
||||
} from './utils/consts';
|
||||
import * as errors from './utils/errors';
|
||||
import { format } from './utils/format';
|
||||
import { t, T } from './utils/translation';
|
||||
import { ErrorLog } from './utils/types';
|
||||
|
||||
export class Fyo {
|
||||
t = t;
|
||||
T = T;
|
||||
|
||||
errors = errors;
|
||||
isElectron: boolean;
|
||||
|
||||
pesa: MoneyMaker;
|
||||
|
||||
auth: AuthHandler;
|
||||
doc: DocHandler;
|
||||
db: DatabaseHandler;
|
||||
|
||||
_initialized: boolean = false;
|
||||
|
||||
errorLog: ErrorLog[] = [];
|
||||
temp?: Record<string, unknown>;
|
||||
|
||||
currencyFormatter?: Intl.NumberFormat;
|
||||
currencySymbols: Record<string, string | undefined> = {};
|
||||
|
||||
isTest: boolean;
|
||||
telemetry: TelemetryManager;
|
||||
config: Config;
|
||||
|
||||
constructor(conf: FyoConfig = {}) {
|
||||
this.isTest = conf.isTest ?? false;
|
||||
this.isElectron = conf.isElectron ?? true;
|
||||
|
||||
this.auth = new AuthHandler(this, conf.AuthDemux);
|
||||
this.db = new DatabaseHandler(this, conf.DatabaseDemux);
|
||||
this.doc = new DocHandler(this);
|
||||
|
||||
this.pesa = getMoneyMaker({
|
||||
currency: DEFAULT_CURRENCY,
|
||||
precision: DEFAULT_INTERNAL_PRECISION,
|
||||
display: DEFAULT_DISPLAY_PRECISION,
|
||||
wrapper: markRaw,
|
||||
});
|
||||
|
||||
this.telemetry = new TelemetryManager(this);
|
||||
this.config = new Config(this.isElectron && !this.isTest);
|
||||
}
|
||||
|
||||
get initialized() {
|
||||
return this._initialized;
|
||||
}
|
||||
|
||||
get docs() {
|
||||
return this.doc.docs;
|
||||
}
|
||||
|
||||
get models() {
|
||||
return this.doc.models;
|
||||
}
|
||||
|
||||
get singles() {
|
||||
return this.doc.singles;
|
||||
}
|
||||
|
||||
get schemaMap() {
|
||||
return this.db.schemaMap;
|
||||
}
|
||||
|
||||
format(value: DocValue, field: string | Field, doc?: Doc) {
|
||||
return format(value, field, doc ?? null, this);
|
||||
}
|
||||
|
||||
async setIsElectron() {
|
||||
try {
|
||||
const { ipcRenderer } = await import('electron');
|
||||
this.isElectron = Boolean(ipcRenderer);
|
||||
} catch {
|
||||
this.isElectron = false;
|
||||
}
|
||||
}
|
||||
|
||||
async initializeAndRegister(
|
||||
models: ModelMap = {},
|
||||
regionalModels: ModelMap = {},
|
||||
force: boolean = false
|
||||
) {
|
||||
if (this._initialized && !force) return;
|
||||
|
||||
await this.#initializeModules();
|
||||
await this.#initializeMoneyMaker();
|
||||
|
||||
this.doc.registerModels(models, regionalModels);
|
||||
await this.doc.getDoc('SystemSettings');
|
||||
this._initialized = true;
|
||||
}
|
||||
|
||||
async #initializeModules() {
|
||||
// temp params while calling routes
|
||||
this.temp = {};
|
||||
|
||||
await this.doc.init();
|
||||
await this.auth.init();
|
||||
await this.db.init();
|
||||
}
|
||||
|
||||
async #initializeMoneyMaker() {
|
||||
const values =
|
||||
(await this.db?.getSingleValues(
|
||||
{
|
||||
fieldname: 'internalPrecision',
|
||||
parent: 'SystemSettings',
|
||||
},
|
||||
{
|
||||
fieldname: 'displayPrecision',
|
||||
parent: 'SystemSettings',
|
||||
},
|
||||
{
|
||||
fieldname: 'currency',
|
||||
parent: 'SystemSettings',
|
||||
}
|
||||
)) ?? [];
|
||||
|
||||
const acc = values.reduce((acc, sv) => {
|
||||
acc[sv.fieldname] = sv.value as string | number | undefined;
|
||||
return acc;
|
||||
}, {} as Record<string, string | number | undefined>);
|
||||
|
||||
const precision: number =
|
||||
(acc.internalPrecision as number) ?? DEFAULT_INTERNAL_PRECISION;
|
||||
const display: number =
|
||||
(acc.displayPrecision as number) ?? DEFAULT_DISPLAY_PRECISION;
|
||||
const currency: string = (acc.currency as string) ?? DEFAULT_CURRENCY;
|
||||
|
||||
this.pesa = getMoneyMaker({
|
||||
currency,
|
||||
precision,
|
||||
display,
|
||||
wrapper: markRaw,
|
||||
});
|
||||
}
|
||||
|
||||
async close() {
|
||||
await this.db.close();
|
||||
await this.auth.logout();
|
||||
}
|
||||
|
||||
getField(schemaName: string, fieldname: string) {
|
||||
const schema = this.schemaMap[schemaName];
|
||||
return schema?.fields.find((f) => f.fieldname === fieldname);
|
||||
}
|
||||
|
||||
async getValue(
|
||||
schemaName: string,
|
||||
name: string,
|
||||
fieldname?: string
|
||||
): Promise<DocValue | Doc[]> {
|
||||
if (fieldname === undefined && this.schemaMap[schemaName]?.isSingle) {
|
||||
fieldname = name;
|
||||
name = schemaName;
|
||||
}
|
||||
|
||||
if (getIsNullOrUndef(name) || getIsNullOrUndef(fieldname)) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
let doc: Doc;
|
||||
let value: DocValue | Doc[];
|
||||
try {
|
||||
doc = await this.doc.getDoc(schemaName, name);
|
||||
value = doc.get(fieldname!);
|
||||
} catch (err) {
|
||||
value = undefined;
|
||||
}
|
||||
|
||||
if (value === undefined && schemaName === name) {
|
||||
const sv = await this.db.getSingleValues({
|
||||
fieldname: fieldname!,
|
||||
parent: schemaName,
|
||||
});
|
||||
|
||||
return sv?.[0]?.value;
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
purgeCache() {
|
||||
this.pesa = getMoneyMaker({
|
||||
currency: DEFAULT_CURRENCY,
|
||||
precision: DEFAULT_INTERNAL_PRECISION,
|
||||
display: DEFAULT_DISPLAY_PRECISION,
|
||||
wrapper: markRaw,
|
||||
});
|
||||
|
||||
this._initialized = false;
|
||||
this.temp = {};
|
||||
this.currencyFormatter = undefined;
|
||||
this.currencySymbols = {};
|
||||
this.errorLog = [];
|
||||
this.temp = {};
|
||||
this.db.purgeCache();
|
||||
this.auth.purgeCache();
|
||||
this.doc.purgeCache();
|
||||
}
|
||||
|
||||
store = {
|
||||
isDevelopment: false,
|
||||
appVersion: '',
|
||||
};
|
||||
}
|
||||
|
||||
export { T, t };
|
817
fyo/model/doc.ts
Normal file
@ -0,0 +1,817 @@
|
||||
import { Fyo } from 'fyo';
|
||||
import { Converter } from 'fyo/core/converter';
|
||||
import { DocValue, DocValueMap } from 'fyo/core/types';
|
||||
import { Verb } from 'fyo/telemetry/types';
|
||||
import { DEFAULT_USER } from 'fyo/utils/consts';
|
||||
import { ConflictError, MandatoryError, NotFoundError } from 'fyo/utils/errors';
|
||||
import Observable from 'fyo/utils/observable';
|
||||
import { Money } from 'pesa';
|
||||
import {
|
||||
Field,
|
||||
FieldTypeEnum,
|
||||
OptionField,
|
||||
RawValue,
|
||||
Schema,
|
||||
TargetField,
|
||||
} from 'schemas/types';
|
||||
import { getIsNullOrUndef, getMapFromList, getRandomString } from 'utils';
|
||||
import { markRaw } from 'vue';
|
||||
import { isPesa } from '../utils/index';
|
||||
import {
|
||||
areDocValuesEqual,
|
||||
getMissingMandatoryMessage,
|
||||
getPreDefaultValues,
|
||||
setChildDocIdx,
|
||||
shouldApplyFormula,
|
||||
} from './helpers';
|
||||
import { setName } from './naming';
|
||||
import {
|
||||
Action,
|
||||
ChangeArg,
|
||||
CurrenciesMap,
|
||||
DefaultMap,
|
||||
EmptyMessageMap,
|
||||
FiltersMap,
|
||||
FormulaMap,
|
||||
FormulaReturn,
|
||||
HiddenMap,
|
||||
ListsMap,
|
||||
ListViewSettings,
|
||||
ReadOnlyMap,
|
||||
RequiredMap,
|
||||
TreeViewSettings,
|
||||
ValidationMap,
|
||||
} from './types';
|
||||
import { validateOptions, validateRequired } from './validationFunction';
|
||||
|
||||
export class Doc extends Observable<DocValue | Doc[]> {
|
||||
name?: string;
|
||||
schema: Readonly<Schema>;
|
||||
fyo: Fyo;
|
||||
fieldMap: Record<string, Field>;
|
||||
|
||||
/**
|
||||
* Fields below are used by child docs to maintain
|
||||
* reference w.r.t their parent doc.
|
||||
*/
|
||||
idx?: number;
|
||||
parentdoc?: Doc;
|
||||
parentFieldname?: string;
|
||||
parentSchemaName?: string;
|
||||
|
||||
_links?: Record<string, Doc>;
|
||||
_dirty: boolean = true;
|
||||
_notInserted: boolean = true;
|
||||
|
||||
_syncing = false;
|
||||
constructor(schema: Schema, data: DocValueMap, fyo: Fyo) {
|
||||
super();
|
||||
this.fyo = markRaw(fyo);
|
||||
this.schema = schema;
|
||||
this.fieldMap = getMapFromList(schema.fields, 'fieldname');
|
||||
|
||||
if (this.schema.isSingle) {
|
||||
this.name = this.schemaName;
|
||||
}
|
||||
|
||||
this._setDefaults();
|
||||
this._setValuesWithoutChecks(data);
|
||||
}
|
||||
|
||||
get schemaName(): string {
|
||||
return this.schema.name;
|
||||
}
|
||||
|
||||
get notInserted(): boolean {
|
||||
return this._notInserted;
|
||||
}
|
||||
|
||||
get inserted(): boolean {
|
||||
return !this._notInserted;
|
||||
}
|
||||
|
||||
get tableFields(): TargetField[] {
|
||||
return this.schema.fields.filter(
|
||||
(f) => f.fieldtype === FieldTypeEnum.Table
|
||||
) as TargetField[];
|
||||
}
|
||||
|
||||
get dirty() {
|
||||
return this._dirty;
|
||||
}
|
||||
|
||||
get quickEditFields() {
|
||||
let fieldnames = this.schema.quickEditFields;
|
||||
|
||||
if (fieldnames === undefined) {
|
||||
fieldnames = [];
|
||||
}
|
||||
|
||||
if (fieldnames.length === 0 && this.fieldMap['name']) {
|
||||
fieldnames = ['name'];
|
||||
}
|
||||
|
||||
return fieldnames.map((f) => this.fieldMap[f]);
|
||||
}
|
||||
|
||||
get isSubmitted() {
|
||||
return !!this.submitted && !this.cancelled;
|
||||
}
|
||||
|
||||
get isCancelled() {
|
||||
return !!this.submitted && !!this.cancelled;
|
||||
}
|
||||
|
||||
get syncing() {
|
||||
return this._syncing;
|
||||
}
|
||||
|
||||
_setValuesWithoutChecks(data: DocValueMap) {
|
||||
for (const field of this.schema.fields) {
|
||||
const fieldname = field.fieldname;
|
||||
const value = data[field.fieldname];
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
for (const row of value) {
|
||||
this.push(fieldname, row);
|
||||
}
|
||||
} else if (value !== undefined) {
|
||||
this[fieldname] = Converter.toDocValue(
|
||||
value as RawValue,
|
||||
field,
|
||||
this.fyo
|
||||
);
|
||||
} else {
|
||||
this[fieldname] = this[fieldname] ?? null;
|
||||
}
|
||||
|
||||
if (field.fieldtype === FieldTypeEnum.Table && !this[fieldname]) {
|
||||
this[fieldname] = [];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_setDirty(value: boolean) {
|
||||
this._dirty = value;
|
||||
if (this.schema.isChild && this.parentdoc) {
|
||||
this.parentdoc._dirty = value;
|
||||
}
|
||||
}
|
||||
|
||||
// set value and trigger change
|
||||
async set(
|
||||
fieldname: string | DocValueMap,
|
||||
value?: DocValue | Doc[] | DocValueMap[]
|
||||
): Promise<boolean> {
|
||||
if (typeof fieldname === 'object') {
|
||||
return await this.setMultiple(fieldname as DocValueMap);
|
||||
}
|
||||
|
||||
if (!this._canSet(fieldname, value)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
this._setDirty(true);
|
||||
if (typeof value === 'string') {
|
||||
value = value.trim();
|
||||
}
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
for (const row of value) {
|
||||
this.push(fieldname, row);
|
||||
}
|
||||
} else {
|
||||
const field = this.fieldMap[fieldname];
|
||||
await this._validateField(field, value);
|
||||
this[fieldname] = value;
|
||||
}
|
||||
|
||||
// always run applyChange from the parentdoc
|
||||
if (this.schema.isChild && this.parentdoc) {
|
||||
await this._applyChange(fieldname);
|
||||
await this.parentdoc._applyChange(this.parentFieldname as string);
|
||||
} else {
|
||||
await this._applyChange(fieldname);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
async setMultiple(docValueMap: DocValueMap): Promise<boolean> {
|
||||
let hasSet = false;
|
||||
for (const fieldname in docValueMap) {
|
||||
const isSet = await this.set(
|
||||
fieldname,
|
||||
docValueMap[fieldname] as DocValue | Doc[]
|
||||
);
|
||||
hasSet ||= isSet;
|
||||
}
|
||||
|
||||
return hasSet;
|
||||
}
|
||||
|
||||
_canSet(
|
||||
fieldname: string,
|
||||
value?: DocValue | Doc[] | DocValueMap[]
|
||||
): boolean {
|
||||
if (fieldname === 'numberSeries' && !this.notInserted) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (value === undefined) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (this.fieldMap[fieldname] === undefined) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const currentValue = this.get(fieldname);
|
||||
if (currentValue === undefined) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return !areDocValuesEqual(currentValue as DocValue, value as DocValue);
|
||||
}
|
||||
|
||||
async _applyChange(fieldname: string): Promise<boolean> {
|
||||
await this._applyFormula(fieldname);
|
||||
await this.trigger('change', {
|
||||
doc: this,
|
||||
changed: fieldname,
|
||||
});
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
_setDefaults() {
|
||||
for (const field of this.schema.fields) {
|
||||
let defaultValue: DocValue | Doc[] = getPreDefaultValues(
|
||||
field.fieldtype,
|
||||
this.fyo
|
||||
);
|
||||
|
||||
const defaultFunction =
|
||||
this.fyo.models[this.schemaName]?.defaults?.[field.fieldname];
|
||||
if (defaultFunction !== undefined) {
|
||||
defaultValue = defaultFunction();
|
||||
} else if (field.default !== undefined) {
|
||||
defaultValue = field.default;
|
||||
}
|
||||
|
||||
if (field.fieldtype === FieldTypeEnum.Currency && !isPesa(defaultValue)) {
|
||||
defaultValue = this.fyo.pesa!(defaultValue as string | number);
|
||||
}
|
||||
|
||||
this[field.fieldname] = defaultValue;
|
||||
}
|
||||
}
|
||||
async remove(fieldname: string, idx: number) {
|
||||
const childDocs = ((this[fieldname] ?? []) as Doc[]).filter(
|
||||
(row, i) => row.idx !== idx || i !== idx
|
||||
);
|
||||
|
||||
setChildDocIdx(childDocs);
|
||||
this[fieldname] = childDocs;
|
||||
this._setDirty(true);
|
||||
return await this._applyChange(fieldname);
|
||||
}
|
||||
|
||||
async append(fieldname: string, docValueMap: DocValueMap = {}) {
|
||||
this.push(fieldname, docValueMap);
|
||||
this._setDirty(true);
|
||||
return await this._applyChange(fieldname);
|
||||
}
|
||||
|
||||
push(fieldname: string, docValueMap: Doc | DocValueMap = {}) {
|
||||
const childDocs = [
|
||||
(this[fieldname] ?? []) as Doc[],
|
||||
this._getChildDoc(docValueMap, fieldname),
|
||||
].flat();
|
||||
|
||||
setChildDocIdx(childDocs);
|
||||
this[fieldname] = childDocs;
|
||||
}
|
||||
|
||||
_getChildDoc(docValueMap: Doc | DocValueMap, fieldname: string): Doc {
|
||||
if (!this.name) {
|
||||
this.name = getRandomString();
|
||||
}
|
||||
|
||||
docValueMap.name ??= getRandomString();
|
||||
|
||||
// Child Meta Fields
|
||||
docValueMap.parent ??= this.name;
|
||||
docValueMap.parentSchemaName ??= this.schemaName;
|
||||
docValueMap.parentFieldname ??= fieldname;
|
||||
|
||||
if (docValueMap instanceof Doc) {
|
||||
docValueMap.parentdoc ??= this;
|
||||
return docValueMap;
|
||||
}
|
||||
|
||||
const childSchemaName = (this.fieldMap[fieldname] as TargetField).target;
|
||||
const childDoc = this.fyo.doc.getNewDoc(
|
||||
childSchemaName,
|
||||
docValueMap,
|
||||
false
|
||||
);
|
||||
childDoc.parentdoc = this;
|
||||
return childDoc;
|
||||
}
|
||||
|
||||
async _validateSync() {
|
||||
this._validateMandatory();
|
||||
await this._validateFields();
|
||||
}
|
||||
|
||||
_validateMandatory() {
|
||||
const checkForMandatory: Doc[] = [this];
|
||||
const tableFields = this.schema.fields.filter(
|
||||
(f) => f.fieldtype === FieldTypeEnum.Table
|
||||
) as TargetField[];
|
||||
|
||||
for (const field of tableFields) {
|
||||
const childDocs = this.get(field.fieldname) as Doc[];
|
||||
if (!childDocs) {
|
||||
continue;
|
||||
}
|
||||
|
||||
checkForMandatory.push(...childDocs);
|
||||
}
|
||||
|
||||
const missingMandatoryMessage = checkForMandatory
|
||||
.map((doc) => getMissingMandatoryMessage(doc))
|
||||
.filter(Boolean);
|
||||
|
||||
if (missingMandatoryMessage.length > 0) {
|
||||
const fields = missingMandatoryMessage.join('\n');
|
||||
const message = this.fyo.t`Value missing for ${fields}`;
|
||||
throw new MandatoryError(message);
|
||||
}
|
||||
}
|
||||
|
||||
async _validateFields() {
|
||||
const fields = this.schema.fields;
|
||||
for (const field of fields) {
|
||||
if (field.fieldtype === FieldTypeEnum.Table) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const value = this.get(field.fieldname) as DocValue;
|
||||
await this._validateField(field, value);
|
||||
}
|
||||
}
|
||||
|
||||
async _validateField(field: Field, value: DocValue) {
|
||||
if (
|
||||
field.fieldtype === FieldTypeEnum.Select ||
|
||||
field.fieldtype === FieldTypeEnum.AutoComplete
|
||||
) {
|
||||
validateOptions(field as OptionField, value as string, this);
|
||||
}
|
||||
|
||||
validateRequired(field, value, this);
|
||||
if (getIsNullOrUndef(value)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const validator = this.validations[field.fieldname];
|
||||
if (validator === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
await validator(value);
|
||||
}
|
||||
|
||||
getValidDict(filterMeta: boolean = false): DocValueMap {
|
||||
let fields = this.schema.fields;
|
||||
if (filterMeta) {
|
||||
fields = this.schema.fields.filter((f) => !f.meta);
|
||||
}
|
||||
|
||||
const data: DocValueMap = {};
|
||||
for (const field of fields) {
|
||||
let value = this[field.fieldname] as DocValue | DocValueMap[];
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
value = value.map((doc) => (doc as Doc).getValidDict(filterMeta));
|
||||
}
|
||||
|
||||
if (isPesa(value)) {
|
||||
value = (value as Money).copy();
|
||||
}
|
||||
|
||||
if (value === null && this.schema.isSingle) {
|
||||
continue;
|
||||
}
|
||||
|
||||
data[field.fieldname] = value;
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
_setBaseMetaValues() {
|
||||
if (this.schema.isSubmittable) {
|
||||
this.submitted = false;
|
||||
this.cancelled = false;
|
||||
}
|
||||
|
||||
if (!this.createdBy) {
|
||||
this.createdBy = this.fyo.auth.session.user || DEFAULT_USER;
|
||||
}
|
||||
|
||||
if (!this.created) {
|
||||
this.created = new Date();
|
||||
}
|
||||
|
||||
this._updateModifiedMetaValues();
|
||||
}
|
||||
|
||||
_updateModifiedMetaValues() {
|
||||
this.modifiedBy = this.fyo.auth.session.user || DEFAULT_USER;
|
||||
this.modified = new Date();
|
||||
}
|
||||
|
||||
async load() {
|
||||
if (this.name === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
const data = await this.fyo.db.get(this.schemaName, this.name);
|
||||
if (this.schema.isSingle && !data?.name) {
|
||||
data.name = this.name!;
|
||||
}
|
||||
|
||||
if (data && data.name) {
|
||||
this._syncValues(data);
|
||||
await this.loadLinks();
|
||||
} else {
|
||||
throw new NotFoundError(`Not Found: ${this.schemaName} ${this.name}`);
|
||||
}
|
||||
|
||||
this._setDirty(false);
|
||||
this._notInserted = false;
|
||||
this.fyo.doc.observer.trigger(`load:${this.schemaName}`, this.name);
|
||||
}
|
||||
|
||||
async loadLinks() {
|
||||
this._links = {};
|
||||
const inlineLinks = this.schema.fields.filter((f) => f.inline);
|
||||
for (const f of inlineLinks) {
|
||||
await this.loadLink(f.fieldname);
|
||||
}
|
||||
}
|
||||
|
||||
async loadLink(fieldname: string) {
|
||||
this._links ??= {};
|
||||
const field = this.fieldMap[fieldname] as TargetField;
|
||||
if (field === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
const value = this.get(fieldname);
|
||||
if (getIsNullOrUndef(value) || field.target === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
this._links[fieldname] = await this.fyo.doc.getDoc(
|
||||
field.target,
|
||||
value as string
|
||||
);
|
||||
}
|
||||
|
||||
getLink(fieldname: string): Doc | null {
|
||||
const link = this._links?.[fieldname];
|
||||
if (link === undefined) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return link;
|
||||
}
|
||||
|
||||
_syncValues(data: DocValueMap) {
|
||||
this._clearValues();
|
||||
this._setValuesWithoutChecks(data);
|
||||
this._dirty = false;
|
||||
this.trigger('change', {
|
||||
doc: this,
|
||||
});
|
||||
}
|
||||
|
||||
_clearValues() {
|
||||
for (const { fieldname } of this.schema.fields) {
|
||||
this[fieldname] = null;
|
||||
}
|
||||
|
||||
this._dirty = true;
|
||||
this._notInserted = true;
|
||||
}
|
||||
|
||||
_setChildDocsIdx() {
|
||||
const childFields = this.schema.fields.filter(
|
||||
(f) => f.fieldtype === FieldTypeEnum.Table
|
||||
) as TargetField[];
|
||||
|
||||
for (const field of childFields) {
|
||||
const childDocs = (this.get(field.fieldname) as Doc[]) ?? [];
|
||||
setChildDocIdx(childDocs);
|
||||
}
|
||||
}
|
||||
|
||||
async _validateDbNotModified() {
|
||||
if (this.notInserted || !this.name || this.schema.isSingle) {
|
||||
return;
|
||||
}
|
||||
|
||||
const dbValues = await this.fyo.db.get(this.schemaName, this.name);
|
||||
const docModified = (this.modified as Date)?.toISOString();
|
||||
const dbModified = (dbValues.modified as Date)?.toISOString();
|
||||
|
||||
if (dbValues && docModified !== dbModified) {
|
||||
throw new ConflictError(
|
||||
this.fyo
|
||||
.t`${this.schema.label} ${this.name} has been modified after loading` +
|
||||
` ${dbModified}, ${docModified}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async _applyFormula(fieldname?: string): Promise<boolean> {
|
||||
const doc = this;
|
||||
let changed = false;
|
||||
|
||||
const childDocs = this.tableFields
|
||||
.map((f) => (this.get(f.fieldname) as Doc[]) ?? [])
|
||||
.flat();
|
||||
|
||||
// children
|
||||
for (const row of childDocs) {
|
||||
changed ||= (await row?._applyFormula()) ?? false;
|
||||
}
|
||||
|
||||
// parent or child row
|
||||
const formulaFields = Object.keys(this.formulas).map(
|
||||
(fn) => this.fieldMap[fn]
|
||||
);
|
||||
changed ||= await this._applyFormulaForFields(
|
||||
formulaFields,
|
||||
doc,
|
||||
fieldname
|
||||
);
|
||||
return changed;
|
||||
}
|
||||
|
||||
async _applyFormulaForFields(
|
||||
formulaFields: Field[],
|
||||
doc: Doc,
|
||||
fieldname?: string
|
||||
) {
|
||||
let changed = false;
|
||||
for (const field of formulaFields) {
|
||||
const shouldApply = shouldApplyFormula(field, doc, fieldname);
|
||||
if (!shouldApply) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const newVal = await this._getValueFromFormula(field, doc);
|
||||
const previousVal = doc.get(field.fieldname);
|
||||
const isSame = areDocValuesEqual(newVal as DocValue, previousVal);
|
||||
if (newVal === undefined || isSame) {
|
||||
continue;
|
||||
}
|
||||
|
||||
doc[field.fieldname] = newVal;
|
||||
changed = true;
|
||||
}
|
||||
|
||||
return changed;
|
||||
}
|
||||
|
||||
async _getValueFromFormula(field: Field, doc: Doc) {
|
||||
const { formula } = doc.formulas[field.fieldname] ?? {};
|
||||
if (formula === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
let value: FormulaReturn;
|
||||
try {
|
||||
value = await formula();
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
|
||||
if (Array.isArray(value) && field.fieldtype === FieldTypeEnum.Table) {
|
||||
value = value.map((row) => this._getChildDoc(row, field.fieldname));
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
async _preSync() {
|
||||
this._setChildDocsIdx();
|
||||
await this._applyFormula();
|
||||
await this._validateSync();
|
||||
await this.trigger('validate');
|
||||
}
|
||||
|
||||
async _insert() {
|
||||
await setName(this, this.fyo);
|
||||
this._setBaseMetaValues();
|
||||
await this._preSync();
|
||||
|
||||
const validDict = this.getValidDict();
|
||||
const data = await this.fyo.db.insert(this.schemaName, validDict);
|
||||
this._syncValues(data);
|
||||
|
||||
this.fyo.telemetry.log(Verb.Created, this.schemaName);
|
||||
return this;
|
||||
}
|
||||
|
||||
async _update() {
|
||||
await this._validateDbNotModified();
|
||||
this._updateModifiedMetaValues();
|
||||
await this._preSync();
|
||||
|
||||
const data = this.getValidDict();
|
||||
await this.fyo.db.update(this.schemaName, data);
|
||||
this._syncValues(data);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
async sync(): Promise<Doc> {
|
||||
this._syncing = true;
|
||||
await this.trigger('beforeSync');
|
||||
let doc;
|
||||
if (this.notInserted) {
|
||||
doc = await this._insert();
|
||||
} else {
|
||||
doc = await this._update();
|
||||
}
|
||||
this._notInserted = false;
|
||||
await this.trigger('afterSync');
|
||||
this.fyo.doc.observer.trigger(`sync:${this.schemaName}`, this.name);
|
||||
this._syncing = false;
|
||||
return doc;
|
||||
}
|
||||
|
||||
async delete() {
|
||||
if (this.schema.isSubmittable && !this.isCancelled) {
|
||||
return;
|
||||
}
|
||||
|
||||
await this.trigger('beforeDelete');
|
||||
await this.fyo.db.delete(this.schemaName, this.name!);
|
||||
await this.trigger('afterDelete');
|
||||
|
||||
this.fyo.telemetry.log(Verb.Deleted, this.schemaName);
|
||||
this.fyo.doc.observer.trigger(`delete:${this.schemaName}`, this.name);
|
||||
}
|
||||
|
||||
async submit() {
|
||||
if (!this.schema.isSubmittable || this.submitted || this.cancelled) {
|
||||
return;
|
||||
}
|
||||
|
||||
await this.trigger('beforeSubmit');
|
||||
await this.setAndSync('submitted', true);
|
||||
await this.trigger('afterSubmit');
|
||||
|
||||
this.fyo.telemetry.log(Verb.Submitted, this.schemaName);
|
||||
this.fyo.doc.observer.trigger(`submit:${this.schemaName}`, this.name);
|
||||
}
|
||||
|
||||
async cancel() {
|
||||
if (!this.schema.isSubmittable || !this.submitted || this.cancelled) {
|
||||
return;
|
||||
}
|
||||
|
||||
await this.trigger('beforeCancel');
|
||||
await this.setAndSync('cancelled', true);
|
||||
await this.trigger('afterCancel');
|
||||
|
||||
this.fyo.telemetry.log(Verb.Cancelled, this.schemaName);
|
||||
this.fyo.doc.observer.trigger(`cancel:${this.schemaName}`, this.name);
|
||||
}
|
||||
|
||||
async rename(newName: string) {
|
||||
if (this.submitted) {
|
||||
return;
|
||||
}
|
||||
|
||||
const oldName = this.name;
|
||||
await this.trigger('beforeRename', { oldName, newName });
|
||||
await this.fyo.db.rename(this.schemaName, this.name!, newName);
|
||||
this.name = newName;
|
||||
await this.trigger('afterRename', { oldName, newName });
|
||||
this.fyo.doc.observer.trigger(`rename:${this.schemaName}`, this.name);
|
||||
}
|
||||
|
||||
async trigger(event: string, params?: unknown) {
|
||||
if (this[event]) {
|
||||
await (this[event] as Function)(params);
|
||||
}
|
||||
|
||||
await super.trigger(event, params);
|
||||
}
|
||||
|
||||
getSum(tablefield: string, childfield: string, convertToFloat = true) {
|
||||
const childDocs = (this.get(tablefield) as Doc[]) ?? [];
|
||||
const sum = childDocs
|
||||
.map((d) => {
|
||||
const value = d.get(childfield) ?? 0;
|
||||
if (!isPesa(value)) {
|
||||
try {
|
||||
return this.fyo.pesa(value as string | number);
|
||||
} catch (err) {
|
||||
(
|
||||
err as Error
|
||||
).message += ` value: '${value}' of type: ${typeof value}, fieldname: '${tablefield}', childfield: '${childfield}'`;
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
return value as Money;
|
||||
})
|
||||
.reduce((a, b) => a.add(b), this.fyo.pesa(0));
|
||||
|
||||
if (convertToFloat) {
|
||||
return sum.float;
|
||||
}
|
||||
return sum;
|
||||
}
|
||||
|
||||
async setAndSync(fieldname: string | DocValueMap, value?: DocValue | Doc[]) {
|
||||
await this.set(fieldname, value);
|
||||
return await this.sync();
|
||||
}
|
||||
|
||||
duplicate(): Doc {
|
||||
const updateMap = this.getValidDict(true);
|
||||
for (const field in updateMap) {
|
||||
const value = updateMap[field];
|
||||
if (!Array.isArray(value)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const row of value) {
|
||||
delete row.name;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.numberSeries) {
|
||||
delete updateMap.name;
|
||||
} else {
|
||||
updateMap.name = updateMap.name + ' CPY';
|
||||
}
|
||||
|
||||
return this.fyo.doc.getNewDoc(this.schemaName, updateMap);
|
||||
}
|
||||
|
||||
/**
 * Lifecycle Methods
 *
 * Abstract-ish methods that are called using `this.trigger`.
 * These are to be overridden when subclassing, if required.
 *
 * Refrain from calling methods that run `this.sync` inside the
 * `before*` lifecycle methods; this may cause the lifecycle
 * function to execute incorrectly.
 */
|
||||
async change(ch: ChangeArg) {}
|
||||
async validate() {}
|
||||
async beforeSync() {}
|
||||
async afterSync() {}
|
||||
async beforeSubmit() {}
|
||||
async afterSubmit() {}
|
||||
async beforeRename() {}
|
||||
async afterRename() {}
|
||||
async beforeCancel() {}
|
||||
async afterCancel() {}
|
||||
async beforeDelete() {}
|
||||
async afterDelete() {}
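// A sketch of overriding a lifecycle hook when subclassing (illustrative,
// the model name is hypothetical):
//
//   class Invoice extends Doc {
//     async beforeSync() {
//       // runs before the insert or update performed by `sync()` above
//     }
//   }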
|
||||
|
||||
formulas: FormulaMap = {};
|
||||
validations: ValidationMap = {};
|
||||
required: RequiredMap = {};
|
||||
hidden: HiddenMap = {};
|
||||
readOnly: ReadOnlyMap = {};
|
||||
getCurrencies: CurrenciesMap = {};
|
||||
|
||||
static lists: ListsMap = {};
|
||||
static filters: FiltersMap = {};
|
||||
static createFilters: FiltersMap = {}; // Used by the *Create* dropdown option
|
||||
static defaults: DefaultMap = {};
|
||||
static emptyMessages: EmptyMessageMap = {};
|
||||
|
||||
static getListViewSettings(fyo: Fyo): ListViewSettings {
|
||||
return {};
|
||||
}
|
||||
|
||||
static getTreeSettings(fyo: Fyo): TreeViewSettings | void {}
|
||||
|
||||
static getActions(fyo: Fyo): Action[] {
|
||||
return [];
|
||||
}
|
||||
}
|
116
fyo/model/helpers.ts
Normal file
@ -0,0 +1,116 @@
|
||||
import { Fyo } from 'fyo';
|
||||
import { DocValue } from 'fyo/core/types';
|
||||
import { isPesa } from 'fyo/utils';
|
||||
import { isEqual } from 'lodash';
|
||||
import { Money } from 'pesa';
|
||||
import { Field, FieldType, FieldTypeEnum } from 'schemas/types';
|
||||
import { getIsNullOrUndef } from 'utils';
|
||||
import { Doc } from './doc';
|
||||
|
||||
export function areDocValuesEqual(
|
||||
dvOne: DocValue | Doc[],
|
||||
dvTwo: DocValue | Doc[]
|
||||
): boolean {
|
||||
if (['string', 'number'].includes(typeof dvOne) || dvOne instanceof Date) {
|
||||
return dvOne === dvTwo;
|
||||
}
|
||||
|
||||
if (isPesa(dvOne)) {
|
||||
try {
|
||||
return (dvOne as Money).eq(dvTwo as string | number);
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return isEqual(dvOne, dvTwo);
|
||||
}
|
||||
|
||||
export function getPreDefaultValues(
|
||||
fieldtype: FieldType,
|
||||
fyo: Fyo
|
||||
): DocValue | Doc[] {
|
||||
switch (fieldtype) {
|
||||
case FieldTypeEnum.Table:
|
||||
return [] as Doc[];
|
||||
case FieldTypeEnum.Currency:
|
||||
return fyo.pesa!(0.0);
|
||||
case FieldTypeEnum.Int:
|
||||
case FieldTypeEnum.Float:
|
||||
return 0;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function getMissingMandatoryMessage(doc: Doc) {
|
||||
const mandatoryFields = getMandatory(doc);
|
||||
const message = mandatoryFields
|
||||
.filter((f) => {
|
||||
const value = doc.get(f.fieldname);
|
||||
const isNullOrUndef = getIsNullOrUndef(value);
|
||||
|
||||
if (f.fieldtype === FieldTypeEnum.Table) {
|
||||
return isNullOrUndef || (value as Doc[])?.length === 0;
|
||||
}
|
||||
|
||||
return isNullOrUndef || value === '';
|
||||
})
|
||||
.map((f) => f.label ?? f.fieldname)
|
||||
.join(', ');
|
||||
|
||||
if (message && doc.schema.isChild && doc.parentdoc && doc.parentFieldname) {
|
||||
const parentfield = doc.parentdoc.fieldMap[doc.parentFieldname];
|
||||
return `${parentfield.label} Row ${(doc.idx ?? 0) + 1}: ${message}`;
|
||||
}
|
||||
|
||||
return message;
|
||||
}
|
||||
|
||||
function getMandatory(doc: Doc): Field[] {
|
||||
const mandatoryFields: Field[] = [];
|
||||
for (const field of doc.schema.fields) {
|
||||
if (field.required) {
|
||||
mandatoryFields.push(field);
|
||||
}
|
||||
|
||||
const requiredFunction = doc.required[field.fieldname];
|
||||
if (requiredFunction?.()) {
|
||||
mandatoryFields.push(field);
|
||||
}
|
||||
}
|
||||
|
||||
return mandatoryFields;
|
||||
}
|
||||
|
||||
export function shouldApplyFormula(field: Field, doc: Doc, fieldname?: string) {
|
||||
if (!doc.formulas[field.fieldname]) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (field.readOnly) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const { dependsOn } = doc.formulas[field.fieldname] ?? {};
|
||||
if (dependsOn === undefined) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (dependsOn.length === 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (fieldname && dependsOn.includes(fieldname)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const value = doc.get(field.fieldname);
|
||||
return getIsNullOrUndef(value);
|
||||
}
|
||||
|
||||
export function setChildDocIdx(childDocs: Doc[]) {
|
||||
for (const idx in childDocs) {
|
||||
childDocs[idx].idx = +idx;
|
||||
}
|
||||
}
|
105
fyo/model/naming.ts
Normal file
@ -0,0 +1,105 @@
|
||||
import { Fyo } from 'fyo';
|
||||
import NumberSeries from 'fyo/models/NumberSeries';
|
||||
import { DEFAULT_SERIES_START } from 'fyo/utils/consts';
|
||||
import { BaseError } from 'fyo/utils/errors';
|
||||
import { getRandomString } from 'utils';
|
||||
import { Doc } from './doc';
|
||||
|
||||
export function isNameAutoSet(schemaName: string, fyo: Fyo): boolean {
|
||||
const schema = fyo.schemaMap[schemaName]!;
|
||||
if (schema.naming === 'manual') {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (schema.naming === 'autoincrement') {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (schema.naming === 'random') {
|
||||
return true;
|
||||
}
|
||||
|
||||
const numberSeries = fyo.getField(schema.name, 'numberSeries');
|
||||
if (numberSeries) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
export async function setName(doc: Doc, fyo: Fyo) {
|
||||
if (doc.schema.naming === 'manual') {
|
||||
return;
|
||||
}
|
||||
|
||||
if (doc.schema.naming === 'autoincrement') {
|
||||
return (doc.name = await getNextId(doc.schemaName, fyo));
|
||||
}
|
||||
|
||||
if (doc.numberSeries !== undefined) {
|
||||
return (doc.name = await getSeriesNext(
|
||||
doc.numberSeries as string,
|
||||
doc.schemaName,
|
||||
fyo
|
||||
));
|
||||
}
|
||||
|
||||
// name === schemaName for Single
|
||||
if (doc.schema.isSingle) {
|
||||
return (doc.name = doc.schemaName);
|
||||
}
|
||||
|
||||
// Assign a random name by default
|
||||
if (!doc.name) {
|
||||
doc.name = getRandomString();
|
||||
}
|
||||
|
||||
return doc.name;
|
||||
}
|
||||
|
||||
export async function getNextId(schemaName: string, fyo: Fyo): Promise<string> {
|
||||
const lastInserted = await fyo.db.getLastInserted(schemaName);
|
||||
return String(lastInserted + 1).padStart(9, '0');
|
||||
}
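// E.g. if the last inserted autoincrement value is 41, the next name is
// '000000042' (illustrative).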
|
||||
|
||||
export async function getSeriesNext(
|
||||
prefix: string,
|
||||
schemaName: string,
|
||||
fyo: Fyo
|
||||
) {
|
||||
let series: NumberSeries;
|
||||
|
||||
try {
|
||||
series = (await fyo.doc.getDoc('NumberSeries', prefix)) as NumberSeries;
|
||||
} catch (e) {
|
||||
const { statusCode } = e as BaseError;
|
||||
if (!statusCode || statusCode !== 404) {
|
||||
throw e;
|
||||
}
|
||||
|
||||
await createNumberSeries(prefix, schemaName, DEFAULT_SERIES_START, fyo);
|
||||
series = (await fyo.doc.getDoc('NumberSeries', prefix)) as NumberSeries;
|
||||
}
|
||||
|
||||
return await series.next(schemaName);
|
||||
}
|
||||
|
||||
export async function createNumberSeries(
|
||||
prefix: string,
|
||||
referenceType: string,
|
||||
start: number,
|
||||
fyo: Fyo
|
||||
) {
|
||||
const exists = await fyo.db.exists('NumberSeries', prefix);
|
||||
if (exists) {
|
||||
return;
|
||||
}
|
||||
|
||||
const series = fyo.doc.getNewDoc('NumberSeries', {
|
||||
name: prefix,
|
||||
start,
|
||||
referenceType,
|
||||
});
|
||||
|
||||
await series.sync();
|
||||
}
|
90
fyo/model/types.ts
Normal file
@ -0,0 +1,90 @@
|
||||
import { DocValue, DocValueMap } from 'fyo/core/types';
|
||||
import SystemSettings from 'fyo/models/SystemSettings';
|
||||
import { FieldType, SelectOption } from 'schemas/types';
|
||||
import { QueryFilter } from 'utils/db/types';
|
||||
import { Router } from 'vue-router';
|
||||
import { Doc } from './doc';
|
||||
|
||||
/**
 * The functions below are used for dynamic evaluation
 * and setting of field values.
 *
 * Since they are set directly on the doc, they can
 * access the doc by using `this`.
 *
 * - `Formula`: Async function used to obtain a computed value, such as amount (rate * qty).
 * - `Default`: Regular function used to dynamically set the default value, e.g. `new Date()`.
 * - `Validation`: Async function that throws an error if the value is invalid.
 * - `Required`: Regular function used to decide if a value is mandatory (there are no NOT NULL constraints in the db).
 */
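// A sketch of how these maps are set on a model (illustrative, field names
// hypothetical):
//
//   formulas: FormulaMap = {
//     amount: {
//       formula: async () => (this.rate as Money).mul(this.quantity as number),
//       dependsOn: ['rate', 'quantity'],
//     },
//   };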
|
||||
export type FormulaReturn = DocValue | DocValueMap[] | undefined | Doc[];
|
||||
export type Formula = () => Promise<FormulaReturn> | FormulaReturn;
|
||||
export type FormulaConfig = { dependsOn?: string[]; formula: Formula };
|
||||
export type Default = () => DocValue;
|
||||
export type Validation = (value: DocValue) => Promise<void> | void;
|
||||
export type Required = () => boolean;
|
||||
export type Hidden = () => boolean;
|
||||
export type ReadOnly = () => boolean;
|
||||
export type GetCurrency = () => string;
|
||||
|
||||
export type FormulaMap = Record<string, FormulaConfig | undefined>;
|
||||
export type DefaultMap = Record<string, Default | undefined>;
|
||||
export type ValidationMap = Record<string, Validation | undefined>;
|
||||
export type RequiredMap = Record<string, Required | undefined>;
|
||||
export type CurrenciesMap = Record<string, GetCurrency | undefined>;
|
||||
export type HiddenMap = Record<string, Hidden | undefined>;
|
||||
export type ReadOnlyMap = Record<string, ReadOnly | undefined>;
|
||||
|
||||
export type ChangeArg = { doc: Doc; changed: string };
|
||||
|
||||
/**
|
||||
* Should add this for hidden too
|
||||
*/
|
||||
|
||||
export type ModelMap = Record<string, typeof Doc | undefined>;
|
||||
export type DocMap = Record<string, Doc | undefined>;
|
||||
|
||||
export interface SinglesMap {
|
||||
SystemSettings?: SystemSettings;
|
||||
[key: string]: Doc | undefined;
|
||||
}
|
||||
|
||||
// Static Config properties
|
||||
|
||||
export type FilterFunction = (doc: Doc) => QueryFilter | Promise<QueryFilter>;
|
||||
export type FiltersMap = Record<string, FilterFunction>;
|
||||
|
||||
export type EmptyMessageFunction = (doc: Doc) => string;
|
||||
export type EmptyMessageMap = Record<string, EmptyMessageFunction>;
|
||||
|
||||
export type ListFunction = (doc?: Doc) => string[] | SelectOption[];
|
||||
export type ListsMap = Record<string, ListFunction | undefined>;
|
||||
|
||||
export interface Action {
|
||||
label: string;
|
||||
action: (doc: Doc, router: Router) => Promise<void> | void;
|
||||
condition?: (doc: Doc) => boolean;
|
||||
component?: {
|
||||
template?: string;
|
||||
};
|
||||
}
|
||||
|
||||
export interface ColumnConfig {
|
||||
label: string;
|
||||
fieldtype: FieldType;
|
||||
fieldname?: string;
|
||||
size?: string;
|
||||
render?: (doc: Doc) => { template: string };
|
||||
getValue?: (doc: Doc) => string;
|
||||
}
|
||||
|
||||
export type ListViewColumn = string | ColumnConfig;
|
||||
export interface ListViewSettings {
|
||||
formRoute?: (name: string) => string;
|
||||
columns?: ListViewColumn[];
|
||||
}
|
||||
|
||||
export interface TreeViewSettings {
|
||||
parentField: string;
|
||||
getRootLabel: () => Promise<string>;
|
||||
}
|
56
fyo/model/validationFunction.ts
Normal file
@ -0,0 +1,56 @@
|
||||
import { DocValue } from 'fyo/core/types';
|
||||
import { getOptionList } from 'fyo/utils';
|
||||
import { ValidationError, ValueError } from 'fyo/utils/errors';
|
||||
import { t } from 'fyo/utils/translation';
|
||||
import { Field, OptionField } from 'schemas/types';
|
||||
import { getIsNullOrUndef } from 'utils';
|
||||
import { Doc } from './doc';
|
||||
|
||||
export function validateEmail(value: DocValue) {
|
||||
const isValid = /(.+)@(.+){2,}\.(.+){2,}/.test(value as string);
|
||||
if (!isValid) {
|
||||
throw new ValidationError(`Invalid email: ${value}`);
|
||||
}
|
||||
}
|
||||
|
||||
export function validatePhoneNumber(value: DocValue) {
|
||||
const isValid = /[+]{0,1}[\d ]+/.test(value as string);
|
||||
if (!isValid) {
|
||||
throw new ValidationError(`Invalid phone: ${value}`);
|
||||
}
|
||||
}
|
||||
|
||||
export function validateOptions(field: OptionField, value: string, doc: Doc) {
|
||||
const options = getOptionList(field, doc);
|
||||
if (!options.length) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!field.required && !value) {
|
||||
return;
|
||||
}
|
||||
|
||||
const validValues = options.map((o) => o.value);
|
||||
|
||||
if (validValues.includes(value) || field.allowCustom) {
|
||||
return;
|
||||
}
|
||||
|
||||
const labels = options.map((o) => o.label).join(', ');
|
||||
throw new ValueError(t`Invalid value ${value} for ${field.label}`);
|
||||
}
|
||||
|
||||
export function validateRequired(field: Field, value: DocValue, doc: Doc) {
|
||||
if (!getIsNullOrUndef(value)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (field.required) {
|
||||
throw new ValidationError(`${field.label} is required`);
|
||||
}
|
||||
|
||||
const requiredFunction = doc.required[field.fieldname];
|
||||
if (requiredFunction && requiredFunction()) {
|
||||
throw new ValidationError(`${field.label} is required`);
|
||||
}
|
||||
}
|
42
fyo/models/NumberSeries.ts
Normal file
@ -0,0 +1,42 @@
|
||||
import { Doc } from 'fyo/model/doc';
|
||||
|
||||
function getPaddedName(prefix: string, next: number, padZeros: number): string {
|
||||
return prefix + next.toString().padStart(padZeros ?? 4, '0');
|
||||
}
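// For example (illustrative): getPaddedName('SINV-', 25, 4) === 'SINV-0025'.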
|
||||
|
||||
export default class NumberSeries extends Doc {
|
||||
setCurrent() {
|
||||
let current = this.get('current') as number | null;
|
||||
if (!current) {
|
||||
current = this.get('start') as number;
|
||||
}
|
||||
|
||||
this.current = current;
|
||||
}
|
||||
|
||||
async next(schemaName: string) {
|
||||
this.setCurrent();
|
||||
|
||||
const exists = await this.checkIfCurrentExists(schemaName);
|
||||
if (!exists) {
|
||||
return this.getPaddedName(this.current as number);
|
||||
}
|
||||
|
||||
this.current = (this.current as number) + 1;
|
||||
await this.sync();
|
||||
return this.getPaddedName(this.current as number);
|
||||
}
|
||||
|
||||
async checkIfCurrentExists(schemaName: string) {
|
||||
if (!schemaName) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const name = this.getPaddedName(this.current as number);
|
||||
return await this.fyo.db.exists(schemaName, name);
|
||||
}
|
||||
|
||||
getPaddedName(next: number): string {
|
||||
return getPaddedName(this.name as string, next, this.padZeros as number);
|
||||
}
|
||||
}
|
43
fyo/models/SystemSettings.ts
Normal file
@ -0,0 +1,43 @@
|
||||
import { DocValue } from 'fyo/core/types';
|
||||
import { Doc } from 'fyo/model/doc';
|
||||
import { ListsMap, ValidationMap } from 'fyo/model/types';
|
||||
import { ValidationError } from 'fyo/utils/errors';
|
||||
import { t } from 'fyo/utils/translation';
|
||||
import { SelectOption } from 'schemas/types';
|
||||
import { getCountryInfo } from 'utils/misc';
|
||||
|
||||
export default class SystemSettings extends Doc {
|
||||
validations: ValidationMap = {
|
||||
async displayPrecision(value: DocValue) {
|
||||
if ((value as number) >= 0 && (value as number) <= 9) {
|
||||
return;
|
||||
}
|
||||
|
||||
throw new ValidationError(
|
||||
t`Display Precision should have a value between 0 and 9.`
|
||||
);
|
||||
},
|
||||
};
|
||||
|
||||
static lists: ListsMap = {
|
||||
locale() {
|
||||
const countryInfo = getCountryInfo();
|
||||
return Object.keys(countryInfo)
|
||||
.filter((c) => !!countryInfo[c]?.locale)
|
||||
.map(
|
||||
(c) =>
|
||||
({
|
||||
value: countryInfo[c]?.locale,
|
||||
label: `${c} (${countryInfo[c]?.locale})`,
|
||||
} as SelectOption)
|
||||
);
|
||||
},
|
||||
currency() {
|
||||
const countryInfo = getCountryInfo();
|
||||
const currencies = Object.values(countryInfo)
|
||||
.map((ci) => ci?.currency as string)
|
||||
.filter(Boolean);
|
||||
return [...new Set(currencies)];
|
||||
},
|
||||
};
|
||||
}
|
8
fyo/models/index.ts
Normal file
@ -0,0 +1,8 @@
|
||||
import { ModelMap } from 'fyo/model/types';
|
||||
import NumberSeries from './NumberSeries';
|
||||
import SystemSettings from './SystemSettings';
|
||||
|
||||
export const coreModels = {
|
||||
NumberSeries,
|
||||
SystemSettings,
|
||||
} as ModelMap;
|
101
fyo/telemetry/helpers.ts
Normal file
@ -0,0 +1,101 @@
import { Fyo } from 'fyo';
import { ConfigFile, ConfigKeys } from 'fyo/core/types';
import { DEFAULT_COUNTRY_CODE } from 'fyo/utils/consts';
import { ModelNameEnum } from 'models/types';
import { getRandomString } from 'utils';
import { UniqueId } from './types';

export function getCountry(fyo: Fyo): string {
  return (
    (fyo.singles.SystemSettings?.countryCode as string) ?? DEFAULT_COUNTRY_CODE
  );
}

export function getLanguage(fyo: Fyo): string {
  return fyo.config.get('language') as string;
}

export function getDeviceId(fyo: Fyo): UniqueId {
  let deviceId = fyo.config.get(ConfigKeys.DeviceId) as string | undefined;
  if (deviceId === undefined) {
    deviceId = getRandomString();
    fyo.config.set(ConfigKeys.DeviceId, deviceId);
  }

  return deviceId;
}

export async function getInstanceId(fyo: Fyo): Promise<UniqueId> {
  const instanceId = (await fyo.getValue(
    ModelNameEnum.SystemSettings,
    'instanceId'
  )) as string;
  const companyName = (await fyo.getValue(
    ModelNameEnum.AccountingSettings,
    'companyName'
  )) as string;
  const dbPath = fyo.db.dbPath!;
  const files = (fyo.config.get(ConfigKeys.Files) ?? []) as ConfigFile[];

  let file = files.find((f) => f.id === instanceId);

  if (file === undefined) {
    file = addNewConfigFile(companyName, dbPath, instanceId, files, fyo);
  }

  if (!file.id) {
    setIdOnConfigFile(instanceId, companyName, dbPath, files, fyo);
  }

  return instanceId;
}

export function addNewConfigFile(
  companyName: string,
  dbPath: string,
  instanceId: string,
  files: ConfigFile[],
  fyo: Fyo
): ConfigFile {
  const newFile: ConfigFile = {
    companyName,
    dbPath,
    id: instanceId,
    openCount: 0,
  };

  files.push(newFile);
  fyo.config.set(ConfigKeys.Files, files);
  return newFile;
}

export async function getVersion(fyo: Fyo) {
  const version = (await fyo.getValue(
    ModelNameEnum.SystemSettings,
    'version'
  )) as string | undefined;

  if (version) {
    return version;
  }

  return fyo.store.appVersion;
}

function setIdOnConfigFile(
  instanceId: string,
  companyName: string,
  dbPath: string,
  files: ConfigFile[],
  fyo: Fyo
) {
  for (const file of files) {
    if (file.companyName !== companyName || file.dbPath !== dbPath) {
      continue;
    }

    file.id = instanceId;
  }

  fyo.config.set(ConfigKeys.Files, files);
}
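A minimal usage sketch of the device-id helper above; it only assumes a constructed Fyo instance whose config store is available:

import { Fyo } from 'fyo';
import { getDeviceId } from 'fyo/telemetry/helpers';

// Sketch only: the first call on a fresh config generates a random id and
// stores it under ConfigKeys.DeviceId; repeat calls return the stored value.
function ensureDeviceId(fyo: Fyo): string {
  const first = getDeviceId(fyo);
  const second = getDeviceId(fyo);
  // first === second because the id is persisted in fyo.config
  return second;
}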
153
fyo/telemetry/telemetry.ts
Normal file
@ -0,0 +1,153 @@
import { Fyo } from 'fyo';
import { cloneDeep } from 'lodash';
import { DateTime } from 'luxon';
import {
  getCountry,
  getDeviceId,
  getInstanceId,
  getLanguage,
  getVersion,
} from './helpers';
import { Noun, Platform, Telemetry, Verb } from './types';

/**
 * # Telemetry
 * Used to check if people are using Books or not. All logging
 * happens using navigator.sendBeacon.
 *
 * ## `start`
 * Used to initialize state. It should be called before any logging and after an
 * instance has loaded.
 * It is called on three events:
 * 1. When Desk is opened, i.e. when the usage starts; this also sends a started
 *    log.
 * 2. On visibility change if not started, e.g. when the user minimizes Books and
 *    then comes back later.
 * 3. When `log` is called, but telemetry wasn't initialized.
 *
 * ## `log`
 * Used to log activity.
 *
 * ## `stop`
 * This is to be called when a session is being stopped. It's called on two events:
 * 1. When the db is being changed.
 * 2. When the visibility has changed, which happens when either the app is being
 *    shut or the app is hidden.
 */

export class TelemetryManager {
  #url: string = '';
  #token: string = '';
  #started = false;
  #telemetryObject: Partial<Telemetry> = {};
  fyo: Fyo;

  constructor(fyo: Fyo) {
    this.fyo = fyo;
  }

  set platform(value: Platform) {
    this.#telemetryObject.platform ||= value;
  }

  get hasCreds() {
    return !!this.#url && !!this.#token;
  }

  get started() {
    return this.#started;
  }

  get telemetryObject(): Readonly<Partial<Telemetry>> {
    return cloneDeep(this.#telemetryObject);
  }

  async start(openCount?: number) {
    this.#telemetryObject.country ||= getCountry(this.fyo);
    this.#telemetryObject.language ??= getLanguage(this.fyo);
    this.#telemetryObject.deviceId ||= getDeviceId(this.fyo);
    this.#telemetryObject.instanceId ||= await getInstanceId(this.fyo);
    this.#telemetryObject.version ||= await getVersion(this.fyo);

    this.#started = true;
    await this.#setCreds();

    if (typeof openCount === 'number') {
      this.#telemetryObject.openCount = openCount;
      this.log(Verb.Started, 'telemetry');
    } else {
      this.log(Verb.Resumed, 'telemetry');
    }
  }

  stop() {
    if (!this.started) {
      return;
    }

    this.log(Verb.Stopped, 'telemetry');
    this.#started = false;
    this.#clear();
  }

  log(verb: Verb, noun: Noun, more?: Record<string, unknown>) {
    if (!this.#started && this.fyo.db.isConnected) {
      this.start().then(() => this.#sendBeacon(verb, noun, more));
      return;
    }

    this.#sendBeacon(verb, noun, more);
  }

  #sendBeacon(verb: Verb, noun: Noun, more?: Record<string, unknown>) {
    if (!this.hasCreds) {
      return;
    }

    const telemetryData: Telemetry = this.#getTelemtryData(verb, noun, more);
    const data = JSON.stringify({
      token: this.#token,
      telemetryData,
    });

    navigator.sendBeacon(this.#url, data);
  }

  async #setCreds() {
    if (this.hasCreds) {
      return;
    }

    const { url, token } = await this.fyo.auth.getTelemetryCreds();
    this.#url = url;
    this.#token = token;
  }

  #getTelemtryData(
    verb: Verb,
    noun: Noun,
    more?: Record<string, unknown>
  ): Telemetry {
    return {
      country: this.#telemetryObject.country!,
      language: this.#telemetryObject.language!,
      deviceId: this.#telemetryObject.deviceId!,
      instanceId: this.#telemetryObject.instanceId!,
      version: this.#telemetryObject.version!,
      openCount: this.#telemetryObject.openCount!,
      timestamp: DateTime.now().toMillis().toString(),
      verb,
      noun,
      more,
    };
  }

  #clear() {
    delete this.#telemetryObject.country;
    delete this.#telemetryObject.language;
    delete this.#telemetryObject.deviceId;
    delete this.#telemetryObject.instanceId;
    delete this.#telemetryObject.version;
    delete this.#telemetryObject.openCount;
  }
}
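A minimal lifecycle sketch of the manager described in the comment above; it assumes the Fyo instance exposes its TelemetryManager as `fyo.telemetry` (not shown in this diff), and the open count and noun are illustrative:

import { Fyo } from 'fyo';
import { Verb } from 'fyo/telemetry/types';

// Sketch only: start() fills country/language/device/instance/version and
// sends a 'started' beacon when an open count is passed; stop() sends
// 'stopped' and clears the collected state.
async function trackSession(fyo: Fyo) {
  await fyo.telemetry.start(1);
  fyo.telemetry.log(Verb.Created, 'SalesInvoice');
  fyo.telemetry.stop();
}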
33
fyo/telemetry/types.ts
Normal file
@ -0,0 +1,33 @@
export type AppVersion = string;
export type UniqueId = string;
export type Timestamp = string;

export type Platform = 'Windows' | 'Mac' | 'Linux';

export enum Verb {
  Created = 'created',
  Deleted = 'deleted',
  Submitted = 'submitted',
  Cancelled = 'cancelled',
  Imported = 'imported',
  Exported = 'exported',
  Stopped = 'stopped',
  Started = 'started',
  Resumed = 'resumed',
}

export type Noun = string;

export interface Telemetry {
  deviceId: UniqueId;
  instanceId: UniqueId;
  platform?: Platform;
  country: string;
  language: string;
  version: AppVersion;
  timestamp: Timestamp;
  openCount: number;
  verb: Verb;
  noun: Noun;
  more?: Record<string, unknown>;
}
7
fyo/tests/helpers.ts
Normal file
@ -0,0 +1,7 @@
import { AuthDemuxBase, TelemetryCreds } from 'utils/auth/types';

export class DummyAuthDemux extends AuthDemuxBase {
  async getTelemetryCreds(): Promise<TelemetryCreds> {
    return { url: '', token: '' };
  }
}
71
fyo/tests/testFyo.spec.ts
Normal file
@ -0,0 +1,71 @@
import * as assert from 'assert';
import 'mocha';
import { getRegionalModels, models } from 'models';
import { getSchemas } from 'schemas';
import { Fyo } from '..';
import { DatabaseManager } from '../../backend/database/manager';
import { DummyAuthDemux } from './helpers';

describe('Fyo Init', function () {
  const fyo = new Fyo({
    DatabaseDemux: DatabaseManager,
    AuthDemux: DummyAuthDemux,
    isTest: true,
    isElectron: false,
  });

  specify('Init', async function () {
    assert.strictEqual(
      Object.keys(fyo.schemaMap).length,
      0,
      'zero schemas one'
    );

    assert.strictEqual(
      Object.keys(fyo.schemaMap).length,
      0,
      'zero schemas two'
    );

    await fyo.db.createNewDatabase(':memory:', 'in');
    await fyo.initializeAndRegister({}, {});
    assert.strictEqual(
      Object.keys(fyo.schemaMap).length > 0,
      true,
      'non zero schemas'
    );
    await fyo.db.close();
  });
});

describe('Fyo Docs', function () {
  const countryCode = 'in';
  let fyo: Fyo;
  const schemaMap = getSchemas(countryCode);
  this.beforeEach(async function () {
    fyo = new Fyo({
      DatabaseDemux: DatabaseManager,
      isTest: true,
      isElectron: false,
    });

    const regionalModels = await getRegionalModels(countryCode);
    await fyo.db.createNewDatabase(':memory:', countryCode);
    await fyo.initializeAndRegister(models, regionalModels);
  });

  this.afterEach(async function () {
    await fyo.close();
  });

  specify('getNewDoc', async function () {
    for (const schemaName in schemaMap) {
      const schema = schemaMap[schemaName];
      if (schema?.isSingle) {
        continue;
      }

      const doc = fyo.doc.getNewDoc(schemaName);
    }
  });
});
131
fyo/tests/testObservable.spec.ts
Normal file
@ -0,0 +1,131 @@
import * as assert from 'assert';
import Observable from 'fyo/utils/observable';
import 'mocha';

enum ObsEvent {
  A = 'event-a',
  B = 'event-b',
}

describe('Observable', function () {
  const obs = new Observable();
  let counter = 0;
  const params = { aOne: 18, aTwo: 21, b: 42 };

  const listenerAOnce = (value: number) => {
    assert.strictEqual(params.aOne, value, 'listenerAOnce');
  };

  const listenerAEvery = (value: number) => {
    if (counter === 0) {
      assert.strictEqual(params.aOne, value, 'listenerAEvery 0');
    } else if (counter === 1) {
      assert.strictEqual(params.aTwo, value, 'listenerAEvery 1');
    } else {
      throw new Error("this shouldn't run");
    }
    counter += 1;
  };

  const listenerBOnce = (value: number) => {
    assert.strictEqual(params.b, value, 'listenerBOnce');
  };

  specify('set A One', function () {
    assert.strictEqual(obs.hasListener(ObsEvent.A), false, 'pre');

    obs.once(ObsEvent.A, listenerAOnce);
    assert.strictEqual(obs.hasListener(ObsEvent.A), true, 'non specific');
    assert.strictEqual(
      obs.hasListener(ObsEvent.A, listenerAOnce),
      true,
      'specific once'
    );
    assert.strictEqual(
      obs.hasListener(ObsEvent.A, listenerAEvery),
      false,
      'specific every'
    );
  });

  specify('set A Two', function () {
    obs.on(ObsEvent.A, listenerAEvery);
    assert.strictEqual(obs.hasListener(ObsEvent.A), true, 'non specific');
    assert.strictEqual(
      obs.hasListener(ObsEvent.A, listenerAOnce),
      true,
      'specific once'
    );
    assert.strictEqual(
      obs.hasListener(ObsEvent.A, listenerAEvery),
      true,
      'specific every'
    );
  });

  specify('set B', function () {
    assert.strictEqual(obs.hasListener(ObsEvent.B), false, 'pre');

    obs.once(ObsEvent.B, listenerBOnce);
    assert.strictEqual(
      obs.hasListener(ObsEvent.A, listenerBOnce),
      false,
      'specific false'
    );
    assert.strictEqual(
      obs.hasListener(ObsEvent.B, listenerBOnce),
      true,
      'specific true'
    );
  });

  specify('trigger A 0', async function () {
    await obs.trigger(ObsEvent.A, params.aOne);
    assert.strictEqual(obs.hasListener(ObsEvent.A), true, 'non specific');
    assert.strictEqual(
      obs.hasListener(ObsEvent.A, listenerAOnce),
      false,
      'specific'
    );
  });

  specify('trigger A 1', async function () {
    assert.strictEqual(
      obs.hasListener(ObsEvent.A, listenerAEvery),
      true,
      'specific pre'
    );
    await obs.trigger(ObsEvent.A, params.aTwo);
    assert.strictEqual(
      obs.hasListener(ObsEvent.A, listenerAEvery),
      true,
      'specific post'
    );
  });

  specify('trigger B', async function () {
    assert.strictEqual(
      obs.hasListener(ObsEvent.B, listenerBOnce),
      true,
      'specific pre'
    );
    await obs.trigger(ObsEvent.B, params.b);
    assert.strictEqual(
      obs.hasListener(ObsEvent.B, listenerBOnce),
      false,
      'specific post'
    );
  });

  specify('remove A', async function () {
    obs.off(ObsEvent.A, listenerAEvery);
    assert.strictEqual(
      obs.hasListener(ObsEvent.A, listenerAEvery),
      false,
      'specific pre'
    );

    assert.strictEqual(counter, 2, 'incorrect counter');
    await obs.trigger(ObsEvent.A, 777);
  });
});
@ -38,7 +38,7 @@ export default class CacheManager {
     this._hashCache.get(hashName)!.set(key, value);
   }
 
-  hclear(hashName: string, key: string) {
+  hclear(hashName: string, key?: string) {
     if (key) {
       this._hashCache.get(hashName)?.delete(key);
     } else {
9
fyo/utils/consts.ts
Normal file
@ -0,0 +1,9 @@
export const DEFAULT_INTERNAL_PRECISION = 11;
export const DEFAULT_DISPLAY_PRECISION = 2;
export const DEFAULT_DATE_FORMAT = 'MMM d, y';
export const DEFAULT_LOCALE = 'en-IN';
export const DEFAULT_COUNTRY_CODE = 'in';
export const DEFAULT_CURRENCY = 'INR';
export const DEFAULT_LANGUAGE = 'English';
export const DEFAULT_SERIES_START = 1001;
export const DEFAULT_USER = 'Admin';
78
fyo/utils/errors.ts
Normal file
@ -0,0 +1,78 @@
export class BaseError extends Error {
  message: string;
  statusCode: number;

  constructor(statusCode: number, message: string) {
    super(message);
    this.name = 'BaseError';
    this.statusCode = statusCode;
    this.message = message;
  }
}

export class ValidationError extends BaseError {
  constructor(message: string) {
    super(417, message);
    this.name = 'ValidationError';
  }
}

export class NotFoundError extends BaseError {
  constructor(message: string) {
    super(404, message);
    this.name = 'NotFoundError';
  }
}

export class ForbiddenError extends BaseError {
  constructor(message: string) {
    super(403, message);
    this.name = 'ForbiddenError';
  }
}

export class DuplicateEntryError extends ValidationError {
  constructor(message: string) {
    super(message);
    this.name = 'DuplicateEntryError';
  }
}

export class LinkValidationError extends ValidationError {
  constructor(message: string) {
    super(message);
    this.name = 'LinkValidationError';
  }
}

export class MandatoryError extends ValidationError {
  constructor(message: string) {
    super(message);
    this.name = 'MandatoryError';
  }
}

export class DatabaseError extends BaseError {
  constructor(message: string) {
    super(500, message);
    this.name = 'DatabaseError';
  }
}

export class CannotCommitError extends DatabaseError {
  constructor(message: string) {
    super(message);
    this.name = 'CannotCommitError';
  }
}

export class NotImplemented extends BaseError {
  constructor() {
    super(501, '');
    this.name = 'NotImplemented';
  }
}

export class ValueError extends ValidationError {}
export class ConflictError extends ValidationError {}
export class InvalidFieldError extends ValidationError {}
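A minimal sketch of how the hierarchy above can be consumed; the subclasses differ mainly in `name` and the HTTP-style `statusCode` set on BaseError:

import { MandatoryError, ValidationError } from 'fyo/utils/errors';

// Sketch only: handlers can branch on instanceof (MandatoryError is itself a
// ValidationError) or on the statusCode carried by BaseError.
function describeError(err: unknown): string {
  if (err instanceof MandatoryError) {
    return `missing mandatory value: ${err.message}`;
  }

  if (err instanceof ValidationError) {
    return `validation failed (${err.statusCode}): ${err.message}`;
  }

  return 'unknown error';
}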
155
fyo/utils/format.ts
Normal file
@ -0,0 +1,155 @@
import { Fyo } from 'fyo';
import { DocValue } from 'fyo/core/types';
import { Doc } from 'fyo/model/doc';
import { DateTime } from 'luxon';
import { Money } from 'pesa';
import { Field, FieldType, FieldTypeEnum } from 'schemas/types';
import { getIsNullOrUndef } from 'utils';
import {
  DEFAULT_CURRENCY,
  DEFAULT_DATE_FORMAT,
  DEFAULT_DISPLAY_PRECISION,
  DEFAULT_LOCALE,
} from './consts';

export function format(
  value: DocValue,
  df: string | Field | null,
  doc: Doc | null,
  fyo: Fyo
): string {
  if (!df) {
    return String(value);
  }

  const field: Field = getField(df);

  if (field.fieldtype === FieldTypeEnum.Currency) {
    return formatCurrency(value, field, doc, fyo);
  }

  if (field.fieldtype === FieldTypeEnum.Date) {
    return formatDate(value, fyo);
  }

  if (field.fieldtype === FieldTypeEnum.Check) {
    return Boolean(value).toString();
  }

  if (getIsNullOrUndef(value)) {
    return '';
  }

  return String(value);
}

function formatDate(value: DocValue, fyo: Fyo): string {
  const dateFormat =
    (fyo.singles.SystemSettings?.dateFormat as string) ?? DEFAULT_DATE_FORMAT;

  let dateValue: DateTime;
  if (typeof value === 'string') {
    dateValue = DateTime.fromISO(value);
  } else if (value instanceof Date) {
    dateValue = DateTime.fromJSDate(value);
  } else {
    dateValue = DateTime.fromSeconds(value as number);
  }

  const formattedDate = dateValue.toFormat(dateFormat);
  if (value === 'Invalid DateTime') {
    return '';
  }

  return formattedDate;
}

function formatCurrency(
  value: DocValue,
  field: Field,
  doc: Doc | null,
  fyo: Fyo
): string {
  const currency = getCurrency(field, doc, fyo);

  let valueString;
  try {
    valueString = formatNumber(value, fyo);
  } catch (err) {
    (err as Error).message += ` value: '${value}', type: ${typeof value}`;
    throw err;
  }

  const currencySymbol = fyo.currencySymbols[currency];
  if (currencySymbol !== undefined) {
    return currencySymbol + ' ' + valueString;
  }

  return valueString;
}

function formatNumber(value: DocValue, fyo: Fyo): string {
  const numberFormatter = getNumberFormatter(fyo);
  if (typeof value === 'number') {
    value = fyo.pesa(value.toFixed(20));
  }

  if ((value as Money).round) {
    const floatValue = parseFloat((value as Money).round());
    return numberFormatter.format(floatValue);
  }

  const floatValue = parseFloat(value as string);
  const formattedNumber = numberFormatter.format(floatValue);

  if (formattedNumber === 'NaN') {
    throw Error(
      `invalid value passed to formatNumber: '${value}' of type ${typeof value}`
    );
  }

  return formattedNumber;
}

function getNumberFormatter(fyo: Fyo) {
  if (fyo.currencyFormatter) {
    return fyo.currencyFormatter;
  }

  const locale =
    (fyo.singles.SystemSettings?.locale as string) ?? DEFAULT_LOCALE;
  const display =
    (fyo.singles.SystemSettings?.displayPrecision as number) ??
    DEFAULT_DISPLAY_PRECISION;

  return (fyo.currencyFormatter = Intl.NumberFormat(locale, {
    style: 'decimal',
    minimumFractionDigits: display,
  }));
}

function getCurrency(field: Field, doc: Doc | null, fyo: Fyo): string {
  let getCurrency = doc?.getCurrencies?.[field.fieldname];
  if (getCurrency !== undefined) {
    return getCurrency();
  }

  getCurrency = doc?.parentdoc?.getCurrencies[field.fieldname];
  if (getCurrency !== undefined) {
    return getCurrency();
  }

  return (fyo.singles.SystemSettings?.currency as string) ?? DEFAULT_CURRENCY;
}

function getField(df: string | Field): Field {
  if (typeof df === 'string') {
    return {
      label: '',
      fieldname: '',
      fieldtype: df as FieldType,
    };
  }

  return df;
}
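A minimal usage sketch of `format`; passing the field type as a plain string goes through `getField`, and the rendered output depends on the SystemSettings locale, currency and display precision, so the values shown in the comment are illustrative only:

import { Fyo } from 'fyo';
import { format } from 'fyo/utils/format';

// Sketch only: a Currency value is routed through formatCurrency/formatNumber,
// a Date value through formatDate.
function preview(fyo: Fyo) {
  const amount = format(fyo.pesa(1200.5), 'Currency', null, fyo);
  const when = format(new Date('2022-04-01'), 'Date', null, fyo);
  return { amount, when }; // e.g. { amount: '₹ 1,200.50', when: 'Apr 1, 2022' }
}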
111
fyo/utils/index.ts
Normal file
@ -0,0 +1,111 @@
import { Fyo } from 'fyo';
import { Doc } from 'fyo/model/doc';
import { Action } from 'fyo/model/types';
import { Money } from 'pesa';
import { Field, OptionField, SelectOption } from 'schemas/types';
import { getIsNullOrUndef } from 'utils';

export function slug(str: string) {
  return str
    .replace(/(?:^\w|[A-Z]|\b\w)/g, function (letter, index) {
      return index == 0 ? letter.toLowerCase() : letter.toUpperCase();
    })
    .replace(/\s+/g, '');
}

export function unique<T>(list: T[], key = (it: T) => String(it)) {
  const seen: Record<string, boolean> = {};
  return list.filter((item) => {
    const k = key(item);
    return seen.hasOwnProperty(k) ? false : (seen[k] = true);
  });
}

export function getDuplicates(array: unknown[]) {
  const duplicates: unknown[] = [];
  for (const i in array) {
    const previous = array[parseInt(i) - 1];
    const current = array[i];

    if (current === previous) {
      if (!duplicates.includes(current)) {
        duplicates.push(current);
      }
    }
  }
  return duplicates;
}

export function isPesa(value: unknown): boolean {
  return value instanceof Money;
}

export function getActions(doc: Doc): Action[] {
  const Model = doc.fyo.models[doc.schemaName];
  if (Model === undefined) {
    return [];
  }

  return Model.getActions(doc.fyo);
}

export async function getSingleValue(
  fieldname: string,
  parent: string,
  fyo: Fyo
) {
  if (!fyo.db.isConnected) {
    return undefined;
  }

  const res = await fyo.db.getSingleValues({ fieldname, parent });
  const singleValue = res.find(
    (f) => f.fieldname === fieldname && f.parent === parent
  );

  if (singleValue === undefined) {
    return undefined;
  }

  return singleValue.value;
}

export function getOptionList(
  field: Field,
  doc: Doc | undefined | null
): SelectOption[] {
  const list = getRawOptionList(field, doc);
  return list.map((option) => {
    if (typeof option === 'string') {
      return {
        label: option,
        value: option,
      };
    }

    return option;
  });
}

function getRawOptionList(field: Field, doc: Doc | undefined | null) {
  const options = (field as OptionField).options;
  if (options && options.length > 0) {
    return (field as OptionField).options;
  }

  if (getIsNullOrUndef(doc)) {
    return [];
  }

  const Model = doc!.fyo.models[doc!.schemaName];
  if (Model === undefined) {
    return [];
  }

  const getList = Model.lists[field.fieldname];
  if (getList === undefined) {
    return [];
  }

  return getList(doc!);
}
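A minimal sketch of the two list helpers above; the `fyo/utils` import path is assumed to resolve to this index file:

import { getDuplicates, unique } from 'fyo/utils';

// Sketch only: unique() keeps the first item per key, while getDuplicates()
// only compares neighbouring elements, so it expects grouped or sorted input.
const names = ['Widget', 'widget', 'Widget', 'Gadget'];
const distinct = unique(names, (n) => n.toLowerCase()); // ['Widget', 'Gadget']
const repeated = getDuplicates(['a', 'a', 'b', 'c', 'c']); // ['a', 'c']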
@ -3,8 +3,8 @@ enum EventType {
   OnceListeners = '_onceListeners',
 }
 
-export default class Observable {
-  [key: string]: unknown;
+export default class Observable<T> {
+  [key: string]: unknown | T;
   _isHot: Map<string, boolean>;
   _eventQueue: Map<string, unknown[]>;
   _map: Map<string, unknown>;
@ -25,8 +25,8 @@ export default class Observable {
    * @param key
    * @returns
    */
-  get(key: string): unknown {
-    return this[key];
+  get(key: string): T {
+    return this[key] as T;
   }
 
   /**
@ -35,14 +35,34 @@ export default class Observable {
    * @param key
    * @param value
    */
-  set(key: string, value: unknown) {
+  set(key: string, value: T) {
     this[key] = value;
     this.trigger('change', {
       doc: this,
       fieldname: key,
       changed: key,
     });
   }
 
+  /**
+   * Checks if any `listener` or the given `listener` has been registered
+   * for the passed `event`.
+   *
+   * @param event : name of the event for which the listener is checked
+   * @param listener : specific listener that is checked for
+   */
+  hasListener(event: string, listener?: Function) {
+    const listeners = this[EventType.Listeners].get(event) ?? [];
+    const onceListeners = this[EventType.OnceListeners].get(event) ?? [];
+
+    if (listener === undefined) {
+      return [...listeners, ...onceListeners].length > 0;
+    }
+
+    let has = listeners.includes(listener);
+    has ||= onceListeners.includes(listener);
+    return has;
+  }
+
   /**
    * Sets a `listener` that executes every time `event` is triggered
    *
@ -91,7 +111,7 @@ export default class Observable {
    * @param throttle : wait time before triggering the event.
    */
 
-  async trigger(event: string, params: unknown, throttle: number = 0) {
+  async trigger(event: string, params?: unknown, throttle: number = 0) {
     let isHot = false;
     if (throttle > 0) {
       isHot = this._throttled(event, params, throttle);
@ -150,7 +170,12 @@ export default class Observable {
 
   _addListener(type: EventType, event: string, listener: Function) {
     this._initLiseners(type, event);
-    this[type].get(event)!.push(listener);
+    const list = this[type].get(event)!;
+    if (list.includes(listener)) {
+      return;
+    }
+
+    list.push(listener);
   }
 
   _initLiseners(type: EventType, event: string) {
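A minimal usage sketch of the typed Observable, mirroring the behaviour exercised by testObservable.spec.ts above; the event name and values are illustrative:

import Observable from 'fyo/utils/observable';

// Sketch only: `once` listeners are dropped after the first matching trigger,
// `on` listeners persist, and hasListener() reports both kinds.
async function demo() {
  const obs = new Observable<number>();

  obs.once('count-changed', (value: number) => console.log('first only', value));
  obs.on('count-changed', (value: number) => console.log('every time', value));

  await obs.trigger('count-changed', 1); // both listeners run
  await obs.trigger('count-changed', 2); // only the `on` listener runs
  return obs.hasListener('count-changed'); // true: the `on` listener remains
}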
135
fyo/utils/translation.ts
Normal file
@ -0,0 +1,135 @@
import { LanguageMap, UnknownMap } from 'utils/types';
import {
  getIndexFormat,
  getIndexList,
  getSnippets,
  getWhitespaceSanitized,
} from '../../utils/translationHelpers';
import { ValueError } from './errors';

type TranslationArgs = boolean | number | string;
type TranslationLiteral = TemplateStringsArray | TranslationArgs;

class TranslationString {
  args: TranslationLiteral[];
  argList?: TranslationArgs[];
  strList?: string[];
  context?: string;
  languageMap?: LanguageMap;

  constructor(...args: TranslationLiteral[]) {
    this.args = args;
  }

  get s() {
    return this.toString();
  }

  ctx(context?: string) {
    this.context = context;
    return this;
  }

  #formatArg(arg: string | number | boolean) {
    return arg ?? '';
  }

  #translate() {
    let indexFormat = getIndexFormat(this.args[0] as string);
    indexFormat = getWhitespaceSanitized(indexFormat);

    const translatedIndexFormat =
      this.languageMap![indexFormat]?.translation ?? indexFormat;

    this.argList = getIndexList(translatedIndexFormat).map(
      (i) => this.argList![i]
    );
    this.strList = getSnippets(translatedIndexFormat);
  }

  #stitch() {
    if (!((this.args[0] as any) instanceof Array)) {
      throw new ValueError(
        `invalid args passed to TranslationString ${
          this.args
        } of type ${typeof this.args[0]}`
      );
    }

    this.strList = this.args[0] as any as string[];
    this.argList = this.args.slice(1) as TranslationArgs[];

    if (this.languageMap) {
      this.#translate();
    }

    return this.strList!.map((s, i) => s + this.#formatArg(this.argList![i]))
      .join('')
      .replace(/\s+/g, ' ')
      .trim();
  }

  toString() {
    return this.#stitch();
  }

  toJSON() {
    return this.#stitch();
  }

  valueOf() {
    return this.#stitch();
  }
}

export function T(...args: string[]): TranslationString {
  return new TranslationString(...args);
}

export function t(...args: TranslationLiteral[]): string {
  return new TranslationString(...args).s;
}

export function setLanguageMapOnTranslationString(
  languageMap: LanguageMap | undefined
) {
  TranslationString.prototype.languageMap = languageMap;
}

export function translateSchema(
  map: UnknownMap | UnknownMap[],
  languageMap: LanguageMap,
  translateables: string[]
) {
  if (Array.isArray(map)) {
    for (const item of map) {
      translateSchema(item, languageMap, translateables);
    }
    return;
  }

  if (typeof map !== 'object') {
    return;
  }

  for (const key of Object.keys(map)) {
    const value = map[key];
    if (
      typeof value === 'string' &&
      translateables.includes(key) &&
      languageMap[value]?.translation
    ) {
      map[key] = languageMap[value].translation;
    }

    if (typeof value !== 'object') {
      continue;
    }

    translateSchema(
      value as UnknownMap | UnknownMap[],
      languageMap,
      translateables
    );
  }
}
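A minimal usage sketch of the tagged template above; without a language map the string is stitched back together with whitespace collapsed, and once a map has been set via `setLanguageMapOnTranslationString` the sanitized index format of the string is looked up first. The item name is illustrative:

import { t } from 'fyo/utils/translation';

// Sketch only: the interpolated arg is re-inserted by #stitch().
const itemName = 'Baseball Bat';
const message = t`Item ${itemName} already exists.`;
// message === 'Item Baseball Bat already exists.' when no language map is set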
Some files were not shown because too many files have changed in this diff.