Mirror of https://github.com/frappe/books.git, synced 2024-12-22 02:49:03 +00:00
fix: updateSchemas works now, also proper cleanup
This commit is contained in:
parent a8904a0854
commit 46dbb6348f
@@ -416,6 +416,20 @@ export default class DatabaseCore extends DatabaseBase {
    return info.map((d) => d.name as string);
  }

  async truncate(tableNames?: string[]) {
    if (tableNames === undefined) {
      const q = (await this.knex!.raw(`
        select name from sqlite_schema
        where type='table'
        and name not like 'sqlite_%'`)) as { name: string }[];
      tableNames = q.map((i) => i.name);
    }

    for (const name of tableNames) {
      await this.knex!(name).del();
    }
  }

  async #getForeignKeys(schemaName: string): Promise<string[]> {
    const foreignKeyList: FieldValueMap[] = await this.knex!.raw(
      `PRAGMA foreign_key_list(${schemaName})`
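A minimal usage sketch (assuming a connected DatabaseCore instance, here called `db`) of how a truncate helper like the one added above is typically driven, with foreign keys disabled for the duration of the deletes:

    import { Knex } from 'knex';

    // Hypothetical caller; `db.knex` is the Knex handle that DatabaseCore wraps.
    async function clearAllTables(db: { truncate: (tables?: string[]) => Promise<void>; knex: Knex }) {
      await db.knex.raw('PRAGMA foreign_keys=OFF'); // avoid FK violations while rows are deleted
      await db.truncate(); // no argument: every user table found in sqlite_schema is cleared
      await db.knex.raw('PRAGMA foreign_keys=ON');
    }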
@@ -1,6 +1,5 @@
import { constants } from 'fs';
import fs from 'fs/promises';
import os from 'os';
import path from 'path';
import { DatabaseDemuxBase, DatabaseMethod } from 'utils/db/types';
import { getSchemas } from '../../schemas';
@@ -53,26 +52,28 @@ export class DatabaseManager extends DatabaseDemuxBase {
    }

    /**
     * This needs to be replaced with transactions
     * This needs to be supplimented with transactions
     * TODO: Add transactions in core.ts
     */
    const dbPath = this.db!.dbPath;
    const copyPath = await this.#makeTempCopy();

    try {
      await this.#runPatchesAndMigrate();
    } catch (err) {
      console.error(err);
      await this.db!.close();
      await fs.copyFile(copyPath, dbPath);
      await this._connect(dbPath);

      throw err;
    } finally {
      await fs.unlink(copyPath);
    }

    await fs.unlink(copyPath);
  }

  async #runPatchesAndMigrate() {
    const patchesToExecute = await this.#getPatchesToExecute();

    patchesToExecute.sort((a, b) => (b.priority ?? 0) - (a.priority ?? 0));
    const preMigrationPatches = patchesToExecute.filter(
      (p) => p.patch.beforeMigrate
    );
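The surrounding change follows a snapshot-and-rollback pattern: copy the database aside, run patches and migration, restore the copy if anything throws, and always delete the copy. A condensed sketch of that flow (function and variable names are illustrative):

    import fs from 'fs/promises';

    async function migrateWithRollback(dbPath: string, copyPath: string, run: () => Promise<void>) {
      await fs.copyFile(dbPath, copyPath); // snapshot before any patch touches the db
      try {
        await run(); // patches + schema migration
      } catch (err) {
        await fs.copyFile(copyPath, dbPath); // roll the file back to the snapshot
        throw err; // surface the failure to the caller
      } finally {
        await fs.unlink(copyPath); // the temp copy is removed on every path
      }
    }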
@@ -153,7 +154,8 @@ export class DatabaseManager extends DatabaseDemuxBase {

  async #makeTempCopy() {
    const src = this.db!.dbPath;
    const dest = path.join(os.tmpdir(), 'temp.db');
    const dir = path.parse(src).dir;
    const dest = path.join(dir, '__premigratory_temp.db');
    await fs.copyFile(src, dest);
    return dest;
  }
@@ -23,6 +23,7 @@ export interface Patch {
    execute: (dm: DatabaseManager) => Promise<void>;
    beforeMigrate?: boolean;
  };
  priority?: number;
}

export type KnexColumnType =
@@ -45,9 +46,12 @@ export interface SqliteTableInfo {
  dflt_value: string | null;
}

export type BespokeFunction = (db:DatabaseCore, ...args: unknown[]) => Promise<unknown>
export type BespokeFunction = (
  db: DatabaseCore,
  ...args: unknown[]
) => Promise<unknown>;
export type SingleValue<T> = {
  fieldname: string;
  parent: string;
  value: T;
}[];
}[];
@@ -4,5 +4,10 @@ import updateSchemas from './updateSchemas';

export default [
  { name: 'testPatch', version: '0.5.0-beta.0', patch: testPatch },
  { name: 'updateSchemas', version: '0.5.0-beta.0', patch: updateSchemas },
  {
    name: 'updateSchemas',
    version: '0.5.0-beta.0',
    patch: updateSchemas,
    priority: 100,
  },
] as Patch[];
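A small illustrative example of how the `priority` field registered here interacts with the descending sort in `#runPatchesAndMigrate` (the patch names below are hypothetical):

    type PatchEntry = { name: string; priority?: number };

    const toExecute: PatchEntry[] = [
      { name: 'somePatch' },                    // no priority: treated as 0
      { name: 'updateSchemas', priority: 100 }, // highest priority runs first
    ];

    toExecute.sort((a, b) => (b.priority ?? 0) - (a.priority ?? 0));
    console.log(toExecute.map((p) => p.name)); // ['updateSchemas', 'somePatch']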
@@ -1,13 +1,12 @@
import fs from 'fs/promises';
import { RawValueMap } from 'fyo/core/types';
import { Knex } from 'knex';
import { ModelNameEnum } from 'models/types';
import os from 'os';
import path from 'path';
import { SchemaMap } from 'schemas/types';
import { changeKeys, deleteKeys, invertMap } from 'utils';
import { changeKeys, deleteKeys, getIsNullOrUndef, invertMap } from 'utils';
import { getCountryCodeFromCountry } from 'utils/misc';
import { Version } from 'utils/version';
import { ModelNameEnum } from '../../models/types';
import { FieldTypeEnum, Schema, SchemaMap } from '../../schemas/types';
import { DatabaseManager } from '../database/manager';

const ignoreColumns = ['keywords'];
@@ -45,13 +44,20 @@ async function execute(dm: DatabaseManager) {
   * data into.
   */
  const countryCode = await getCountryCode(sourceKnex);
  const destDm = await getDestinationDM(sourceKnex, countryCode);
  const destDm = await getDestinationDM(dm.db!.dbPath, countryCode);

  /**
   * Copy data from all the relevant tables
   * the other tables will be empty cause unused.
   */
  await copyData(sourceKnex, destDm);
  try {
    await copyData(sourceKnex, destDm);
  } catch (err) {
    const destPath = destDm.db!.dbPath;
    destDm.db!.close();
    await fs.unlink(destPath);
    throw err;
  }

  /**
   * Version will update when migration completes, this
@@ -71,14 +77,14 @@ async function replaceDatabaseCore(
  dm: DatabaseManager,
  destDm: DatabaseManager
) {
  const sourceDbPath = destDm.db!.dbPath; // new db with new schema
  const destDbPath = dm.db!.dbPath; // old db to be replaced
  const newDbPath = destDm.db!.dbPath; // new db with new schema
  const oldDbPath = dm.db!.dbPath; // old db to be replaced

  await dm.db!.close();
  await destDm.db!.close();

  await fs.copyFile(sourceDbPath, destDbPath);
  await dm._connect(destDbPath);
  await fs.unlink(oldDbPath);
  await fs.rename(newDbPath, oldDbPath);
  await dm._connect(oldDbPath);
}

async function copyData(sourceKnex: Knex, destDm: DatabaseManager) {
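The copy-then-connect step becomes unlink-then-rename. A sketch of that swap (paths illustrative); `fs.rename` fails with EXDEV when source and target sit on different filesystems, which is one reason the temporary database is now created next to the old one instead of in `os.tmpdir()`:

    import fs from 'fs/promises';

    async function swapInNewDb(oldDbPath: string, newDbPath: string) {
      await fs.unlink(oldDbPath);            // drop the pre-migration database file
      await fs.rename(newDbPath, oldDbPath); // same-directory rename, so no EXDEV risk
    }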
@@ -86,17 +92,29 @@ async function copyData(sourceKnex: Knex, destDm: DatabaseManager) {
  const schemaMap = destDm.getSchemaMap();
  await destKnex!.raw('PRAGMA foreign_keys=OFF');
  await copySingleValues(sourceKnex, destKnex, schemaMap);
  await copyParty(sourceKnex, destKnex);
  await copyItem(sourceKnex, destKnex);
  await copyParty(sourceKnex, destKnex, schemaMap[ModelNameEnum.Party]!);
  await copyItem(sourceKnex, destKnex, schemaMap[ModelNameEnum.Item]!);
  await copyChildTables(sourceKnex, destKnex, schemaMap);
  await copyOtherTables(sourceKnex, destKnex);
  await copyTransactionalTables(sourceKnex, destKnex);
  await copyLedgerEntries(sourceKnex, destKnex);
  await copyNumberSeries(sourceKnex, destKnex);
  await copyOtherTables(sourceKnex, destKnex, schemaMap);
  await copyTransactionalTables(sourceKnex, destKnex, schemaMap);
  await copyLedgerEntries(
    sourceKnex,
    destKnex,
    schemaMap[ModelNameEnum.AccountingLedgerEntry]!
  );
  await copyNumberSeries(
    sourceKnex,
    destKnex,
    schemaMap[ModelNameEnum.NumberSeries]!
  );
  await destKnex!.raw('PRAGMA foreign_keys=ON');
}

async function copyNumberSeries(sourceKnex: Knex, destKnex: Knex) {
async function copyNumberSeries(
  sourceKnex: Knex,
  destKnex: Knex,
  schema: Schema
) {
  const values = (await sourceKnex(
    ModelNameEnum.NumberSeries
  )) as RawValueMap[];
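All of the copy calls above run between `PRAGMA foreign_keys=OFF` and `PRAGMA foreign_keys=ON`, so rows can be inserted regardless of table order. A minimal sketch of that wrapper around Knex's `batchInsert` (table name and rows are hypothetical; the chunk size of 100 matches `copyValues`):

    import { Knex } from 'knex';

    async function bulkCopy(destKnex: Knex, table: string, rows: Record<string, unknown>[]) {
      await destKnex.raw('PRAGMA foreign_keys=OFF'); // tolerate out-of-order inserts
      try {
        await destKnex.batchInsert(table, rows, 100); // insert in chunks of 100
      } finally {
        await destKnex.raw('PRAGMA foreign_keys=ON');
      }
    }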
@@ -110,6 +128,11 @@ async function copyNumberSeries(sourceKnex: Knex, destKnex: Knex) {

    const name = value.name as string;
    const referenceType = refMap[name];
    if (!referenceType) {
      delete value.name;
      continue;
    }

    const indices = await sourceKnex.raw(
      `
      select cast(substr(name, ??) as int) as idx
@@ -124,36 +147,59 @@ async function copyNumberSeries(sourceKnex: Knex, destKnex: Knex) {
    value.referenceType = referenceType;
  }

  await copyValues(destKnex, ModelNameEnum.NumberSeries, values);
  await copyValues(
    destKnex,
    ModelNameEnum.NumberSeries,
    values.filter((v) => v.name),
    [],
    {},
    schema
  );
}

async function copyLedgerEntries(sourceKnex: Knex, destKnex: Knex) {
async function copyLedgerEntries(
  sourceKnex: Knex,
  destKnex: Knex,
  schema: Schema
) {
  const values = (await sourceKnex(
    ModelNameEnum.AccountingLedgerEntry
  )) as RawValueMap[];
  await copyValues(destKnex, ModelNameEnum.AccountingLedgerEntry, values, [
    'description',
    'againstAccount',
    'balance',
  ]);
  await copyValues(
    destKnex,
    ModelNameEnum.AccountingLedgerEntry,
    values,
    ['description', 'againstAccount', 'balance'],
    {},
    schema
  );
}

async function copyOtherTables(sourceKnex: Knex, destKnex: Knex) {
async function copyOtherTables(
  sourceKnex: Knex,
  destKnex: Knex,
  schemaMap: SchemaMap
) {
  const schemaNames = [
    ModelNameEnum.Account,
    ModelNameEnum.Currency,
    ModelNameEnum.Address,
    ModelNameEnum.Color,
    ModelNameEnum.Tax,
    ModelNameEnum.PatchRun,
  ];

  for (const sn of schemaNames) {
    const values = (await sourceKnex(sn)) as RawValueMap[];
    await copyValues(destKnex, sn, values);
    await copyValues(destKnex, sn, values, [], {}, schemaMap[sn]);
  }
}

async function copyTransactionalTables(sourceKnex: Knex, destKnex: Knex) {
async function copyTransactionalTables(
  sourceKnex: Knex,
  destKnex: Knex,
  schemaMap: SchemaMap
) {
  const schemaNames = [
    ModelNameEnum.JournalEntry,
    ModelNameEnum.Payment,
@@ -175,8 +221,23 @@ async function copyTransactionalTables(sourceKnex: Knex, destKnex: Knex) {
      if (!v.numberSeries) {
        v.numberSeries = defaultNumberSeriesMap[sn];
      }

      if (v.customer) {
        v.party = v.customer;
      }

      if (v.supplier) {
        v.party = v.supplier;
      }
    });
    await copyValues(destKnex, sn, values, [], childTableColumnMap);
    await copyValues(
      destKnex,
      sn,
      values,
      [],
      childTableColumnMap,
      schemaMap[sn]
    );
  }
}
@@ -191,20 +252,27 @@ async function copyChildTables(

  for (const sn of childSchemaNames) {
    const values = (await sourceKnex(sn)) as RawValueMap[];
    await copyValues(destKnex, sn, values, [], childTableColumnMap);
    await copyValues(
      destKnex,
      sn,
      values,
      [],
      childTableColumnMap,
      schemaMap[sn]
    );
  }
}

async function copyItem(sourceKnex: Knex, destKnex: Knex) {
async function copyItem(sourceKnex: Knex, destKnex: Knex, schema: Schema) {
  const values = (await sourceKnex(ModelNameEnum.Item)) as RawValueMap[];
  values.forEach((value) => {
    value.for = 'Both';
  });

  await copyValues(destKnex, ModelNameEnum.Item, values);
  await copyValues(destKnex, ModelNameEnum.Item, values, [], {}, schema);
}

async function copyParty(sourceKnex: Knex, destKnex: Knex) {
async function copyParty(sourceKnex: Knex, destKnex: Knex, schema: Schema) {
  const values = (await sourceKnex(ModelNameEnum.Party)) as RawValueMap[];
  values.forEach((value) => {
    // customer will be mapped onto role
@@ -220,7 +288,8 @@ async function copyParty(sourceKnex: Knex, destKnex: Knex) {
    ModelNameEnum.Party,
    values,
    ['supplier', 'addressDisplay'],
    { customer: 'role' }
    { customer: 'role' },
    schema
  );
}
@@ -236,7 +305,6 @@ async function copySingleValues(
    'parent',
    singleSchemaNames
  )) as RawValueMap[];

  await copyValues(destKnex, ModelNameEnum.SingleValue, singleValues);
}
@@ -245,7 +313,8 @@ async function copyValues(
  destTableName: string,
  values: RawValueMap[],
  keysToDelete: string[] = [],
  keyMap: Record<string, string> = {}
  keyMap: Record<string, string> = {},
  schema?: Schema
) {
  keysToDelete = [...keysToDelete, ...ignoreColumns];
  keyMap = { ...keyMap, ...columnMap };
@@ -253,17 +322,30 @@ async function copyValues(
  values = values.map((sv) => deleteKeys(sv, keysToDelete));
  values = values.map((sv) => changeKeys(sv, keyMap));

  if (schema) {
    values.forEach((v) => notNullify(v, schema));
  }

  if (schema) {
    const newKeys = schema?.fields.map((f) => f.fieldname);
    values.forEach((v) => deleteOldKeys(v, newKeys));
  }

  await destKnex.batchInsert(destTableName, values, 100);
}

async function getDestinationDM(knex: Knex, countryCode: string) {
async function getDestinationDM(sourceDbPath: string, countryCode: string) {
  /**
   * This is where all the stuff from the old db will be copied.
   * That won't be altered cause schema update will cause data loss.
   */
  const dbPath = path.join(os.tmpdir(), '__patch_db.db');

  const dir = path.parse(sourceDbPath).dir;
  const dbPath = path.join(dir, '__update_schemas_temp.db');
  const dm = new DatabaseManager();
  await dm.createNewDatabase(dbPath, countryCode);
  await dm._connect(dbPath, countryCode);
  await dm.db!.migrate();
  await dm.db!.truncate();
  return dm;
}
@@ -282,4 +364,37 @@ async function getCountryCode(knex: Knex) {
  return getCountryCodeFromCountry(country);
}

function notNullify(map: RawValueMap, schema: Schema) {
  for (const field of schema.fields) {
    if (!field.required || !getIsNullOrUndef(map[field.fieldname])) {
      continue;
    }

    switch (field.fieldtype) {
      case FieldTypeEnum.Float:
      case FieldTypeEnum.Int:
      case FieldTypeEnum.Check:
        map[field.fieldname] = 0;
        break;
      case FieldTypeEnum.Currency:
        map[field.fieldname] = '0.00000000000';
        break;
      case FieldTypeEnum.Table:
        continue;
      default:
        map[field.fieldname] = '';
    }
  }
}

function deleteOldKeys(map: RawValueMap, newKeys: string[]) {
  for (const key of Object.keys(map)) {
    if (newKeys.includes(key)) {
      continue;
    }

    delete map[key];
  }
}

export default { execute, beforeMigrate: true };
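A hypothetical row passed through the two helpers defined above shows what `copyValues` now does when a `Schema` is supplied (the schema and column names are made up for illustration):

    const schema = {
      fields: [
        { fieldname: 'name', fieldtype: FieldTypeEnum.Data, required: true },
        { fieldname: 'rate', fieldtype: FieldTypeEnum.Currency, required: true },
      ],
    } as unknown as Schema;

    const row = { name: 'Widget', rate: null, legacyColumn: 7 } as RawValueMap;
    notNullify(row, schema); // required Currency that is null becomes '0.00000000000'
    deleteOldKeys(row, schema.fields.map((f) => f.fieldname)); // 'legacyColumn' is dropped
    // row is now { name: 'Widget', rate: '0.00000000000' } and can be batch-inserted safely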
@@ -97,10 +97,14 @@ async function getJournalEntries(fyo: Fyo, salesInvoices: SalesInvoice[]) {
  const date = DateTime.fromJSDate(lastInv).minus({ months: 6 }).toJSDate();

  // Bank Entry
  let doc = fyo.doc.getNewDoc(ModelNameEnum.JournalEntry, {
    date,
    entryType: 'Bank Entry',
  });
  let doc = fyo.doc.getNewDoc(
    ModelNameEnum.JournalEntry,
    {
      date,
      entryType: 'Bank Entry',
    },
    false
  );
  await doc.append('accounts', {
    account: 'Supreme Bank',
    debit: amount,
@@ -115,10 +119,14 @@ async function getJournalEntries(fyo: Fyo, salesInvoices: SalesInvoice[]) {
  entries.push(doc);

  // Cash Entry
  doc = fyo.doc.getNewDoc(ModelNameEnum.JournalEntry, {
    date,
    entryType: 'Cash Entry',
  });
  doc = fyo.doc.getNewDoc(
    ModelNameEnum.JournalEntry,
    {
      date,
      entryType: 'Cash Entry',
    },
    false
  );
  await doc.append('accounts', {
    account: 'Cash',
    debit: amount.percent(30),
@@ -143,7 +151,7 @@ async function getPayments(fyo: Fyo, invoices: Invoice[]) {
      continue;
    }

    const doc = fyo.doc.getNewDoc(ModelNameEnum.Payment) as Payment;
    const doc = fyo.doc.getNewDoc(ModelNameEnum.Payment, {}, false) as Payment;
    doc.party = invoice.party as string;
    doc.paymentType = invoice.isSales ? 'Receive' : 'Pay';
    doc.paymentMethod = 'Cash';
@@ -220,9 +228,13 @@ async function getSalesInvoices(
    );
    const customer = sample(customers);

    const doc = fyo.doc.getNewDoc(ModelNameEnum.SalesInvoice, {
      date,
    }) as SalesInvoice;
    const doc = fyo.doc.getNewDoc(
      ModelNameEnum.SalesInvoice,
      {
        date,
      },
      false
    ) as SalesInvoice;

    await doc.set('party', customer!.name);
    if (!doc.account) {
@@ -361,9 +373,13 @@ async function getSalesPurchaseInvoices(
   * For each supplier create a Purchase Invoice
   */
  for (const supplier in supplierGrouped) {
    const doc = fyo.doc.getNewDoc(ModelNameEnum.PurchaseInvoice, {
      date,
    }) as PurchaseInvoice;
    const doc = fyo.doc.getNewDoc(
      ModelNameEnum.PurchaseInvoice,
      {
        date,
      },
      false
    ) as PurchaseInvoice;

    await doc.set('party', supplier);
    if (!doc.account) {
@@ -413,9 +429,13 @@ async function getNonSalesPurchaseInvoices(
      continue;
    }

    const doc = fyo.doc.getNewDoc(ModelNameEnum.PurchaseInvoice, {
      date,
    }) as PurchaseInvoice;
    const doc = fyo.doc.getNewDoc(
      ModelNameEnum.PurchaseInvoice,
      {
        date,
      },
      false
    ) as PurchaseInvoice;

    const party = purchaseItemPartyMap[name];
    await doc.set('party', party);
fyo/index.ts (15 changed lines)
@@ -182,13 +182,24 @@ export class Fyo {
    }

    let doc: Doc;
    let value: DocValue | Doc[];
    try {
      doc = await this.doc.getDoc(schemaName, name);
      value = doc.get(fieldname!);
    } catch (err) {
      return undefined;
      value = undefined;
    }

    return doc.get(fieldname!);
    if (value === undefined && schemaName === name) {
      const sv = await this.db.getSingleValues({
        fieldname: fieldname!,
        parent: schemaName,
      });

      return sv?.[0]?.value;
    }

    return value;
  }

  purgeCache() {
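The fallback added here reads the raw SingleValue row when the cached doc has no value for the requested field. Roughly, for a singles schema (sketch assuming a connected `fyo` instance; the field and schema names are illustrative):

    async function readSingleFallback() {
      // Only relevant when doc.get(fieldname) was undefined and the schema is a
      // singles schema (schemaName === name in getValue above).
      const sv = await fyo.db.getSingleValues({
        fieldname: 'companyName',     // illustrative field
        parent: 'AccountingSettings', // singles rows use the schema name as parent
      });
      return sv?.[0]?.value;          // undefined if the row was never written
    }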
@@ -18,6 +18,10 @@ export async function setAndGetCleanedConfigFiles() {
      .then(() => true)
      .catch(() => false);

    if (!file.companyName) {
      continue;
    }

    const key = `${file.companyName}-${file.dbPath}`;
    if (!exists || cleanedFileMap.has(key)) {
      continue;
@@ -180,13 +180,8 @@ function getIssueUrlQuery(errorLogObj?: ErrorLog): string {
    body.push('**Stack**:', '```', errorLogObj.stack, '```', '');
  }

  const { fullPath } = (errorLogObj?.more as { fullPath?: string }) ?? {};
  if (fullPath) {
    body.push(`**Path**: \`${fullPath}\``);
  }

  body.push(`**Version**: ${fyo.store.appVersion}`);
  body.push(`**Route**: ${router.currentRoute.value.fullPath}`);
  body.push(`**Version**: \`${fyo.store.appVersion}\``);
  body.push(`**Path**: \`${router.currentRoute.value.fullPath}\``);

  const url = [baseUrl, `body=${body.join('\n')}`].join('&');
  return encodeURI(url);
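For reference, a rough sketch of the kind of URL this builds (the base URL below is an assumption, not taken from the diff); `encodeURI` escapes the joined body so the issue form opens pre-filled:

    const baseUrl = 'https://github.com/frappe/books/issues/new?labels=bug'; // assumed
    const body = ['**Version**: `0.5.0-beta.0`', '**Path**: `/some/route`']; // illustrative
    const url = encodeURI([baseUrl, `body=${body.join('\n')}`].join('&'));
    // opening `url` in a browser shows the issue form with the body already filled in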
@@ -1,8 +1,8 @@
import { ipcRenderer } from 'electron';
import { ConfigKeys } from 'fyo/core/types';
import { groupBy } from 'lodash';
import { DateTime } from 'luxon';
import { IPC_ACTIONS } from 'utils/messages';
import { Version } from 'utils/version';
import { App as VueApp, createApp } from 'vue';
import App from './App.vue';
import Badge from './components/Badge.vue';
@@ -107,6 +107,6 @@ function setOnWindow() {
  // @ts-ignore
  window.DateTime = DateTime;
  // @ts-ignore
  window.groupBy = groupBy;
  window.Version = Version;
}
}
@@ -1,5 +1,4 @@
import { ConfigFile, ConfigKeys } from 'fyo/core/types';
import { getSingleValue } from 'fyo/utils';
import { DateTime } from 'luxon';
import { SetupWizard } from 'models/baseModels/SetupWizard/SetupWizard';
import { ModelNameEnum } from 'models/types';
@@ -58,20 +57,12 @@ export async function getSetupWizardDoc() {
  );
}

export async function getSetupComplete(): Promise<boolean> {
  return !!(await getSingleValue(
    'setupComplete',
    ModelNameEnum.AccountingSettings,
    fyo
  ));
}

export async function incrementOpenCount(dbPath: string) {
  const companyName = (await fyo.getValue(
    ModelNameEnum.AccountingSettings,
    'companyName'
  )) as string;

  let openCount = 0;
  const files = fyo.config.get(ConfigKeys.Files) as ConfigFile[];
  for (const file of files) {
@@ -1,4 +1,4 @@
import countryInfo from 'fixtures/countryInfo.json';
import countryInfo from '../fixtures/countryInfo.json';
import { DateTime } from 'luxon';
import { CountryInfoMap } from './types';
@@ -7,11 +7,19 @@ export class Version {
   */

  static gte(a: string, b: string) {
    return compare(a, b, (c) => c < 0);
    let valid = false;
    return compare(a, b, (c) => {
      if (c === 0) {
        return false;
      }

      valid ||= c > 0;
      return !valid;
    });
  }

  static lte(a: string, b: string) {
    return compare(a, b, (c) => c > 0);
    return !Version.gt(a, b);
  }

  static eq(a: string, b: string) {
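The rewrite is meant to make `gte` inclusive of equality and to define `lte` as the plain negation of `gt` (as shown in the hunk above). A few illustrative checks of the intended behaviour:

    import { Version } from 'utils/version';

    Version.gte('0.5.1', '0.5.0'); // true: a later segment is greater
    Version.gte('0.5.0', '0.5.0'); // true: equal versions satisfy gte
    Version.gte('0.5.0', '0.5.1'); // false
    Version.lte('0.5.0', '0.5.1'); // true: defined as !Version.gt(a, b)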