2
0
mirror of https://github.com/frappe/books.git synced 2025-01-24 07:38:25 +00:00

fix: migration flow (account for plugins)

- make connect private
- remove old problematic patch updateSchemas
This commit is contained in:
18alantom 2023-07-21 14:40:27 +05:30
parent a4e4ffc3c6
commit 2a3419f72c
7 changed files with 179 additions and 533 deletions

139
backend/database/helpers.ts Normal file
View File

@ -0,0 +1,139 @@
import AdmZip from 'adm-zip';
import { getPluginFolderNameFromInfo } from 'backend/helpers';
import fs from 'fs-extra';
import { DatabaseError } from 'fyo/utils/errors';
import type { Knex } from 'knex';
import { getAppPath } from 'main/helpers';
import path from 'path';
import { getSchemas } from 'schemas/index';
import { SchemaStub } from 'schemas/types';
import { PluginInfo } from 'utils/types';
import type DatabaseCore from './core';
export async function executeFirstMigration(
db: DatabaseCore,
countryCode: string
) {
if (!db.knex) {
throw new DatabaseError('Database not initialized');
}
const isFirstRun = await getIsFirstRun(db.knex);
if (!isFirstRun) {
return;
}
const schemas = getSchemas(countryCode);
db.setSchemaMap(schemas);
await db.migrate();
}
/**
 * Reports whether the database has never been migrated.
 *
 * Uses the presence of the `PatchRun` table in `sqlite_master` as the
 * marker: it is created by the first migration, so its absence means
 * this is a first run.
 */
export async function getIsFirstRun(knex: Knex): Promise<boolean> {
  const matches = await knex('sqlite_master').where({
    type: 'table',
    name: 'PatchRun',
  });

  return matches.length === 0;
}
/**
 * Reads every plugin's `info` JSON blob from the `Plugin` table and
 * parses it into a {@link PluginInfo} list.
 */
export async function getPluginInfoList(knex: Knex): Promise<PluginInfo[]> {
  const rows = (await knex('Plugin').select(['info'])) as {
    info: string;
  }[];

  const infoList: PluginInfo[] = [];
  for (const row of rows) {
    infoList.push(JSON.parse(row.info) as PluginInfo);
  }

  return infoList;
}
/**
 * Ensures every plugin in `infoList` is extracted under the plugins
 * folder, pulling the base64-encoded zip out of the `Plugin` table when
 * the plugin's versioned folder does not exist yet.
 *
 * Stale folders of other versions of the same plugin are removed before
 * extraction.
 *
 * @param knex connection used to fetch each plugin's zip data
 * @param infoList plugins that should be present on disk
 */
export async function unzipPluginsIfDoesNotExist(
  knex: Knex,
  infoList: PluginInfo[]
): Promise<void> {
  // Root folder is the same for every plugin; resolve it once.
  const pluginsRootPath = getAppPath('plugins');

  for (const info of infoList) {
    const folderName = getPluginFolderNameFromInfo(info);
    const pluginPath = path.join(pluginsRootPath, folderName);

    // Already extracted at this exact version; nothing to do.
    if (fs.existsSync(pluginPath)) {
      continue;
    }

    // Remove folders belonging to other versions of this plugin.
    deletePluginFolder(info);

    const data = (await knex('Plugin')
      .select('data')
      .where({ name: info.name })) as {
      data: string;
    }[];

    // Guard: the Plugin row may be absent (e.g. deleted between listing
    // and extraction). Previously `data[0].data` threw a TypeError here.
    const pluginZipBase64 = data[0]?.data;
    if (!pluginZipBase64) {
      continue;
    }

    // Create the folder only once we know there is data to extract,
    // so a missing row doesn't leave behind an empty plugin folder.
    fs.ensureDirSync(pluginPath);

    const zipBuffer = Buffer.from(pluginZipBase64, 'base64');
    const pluginFilePath = path.join(pluginPath, `${folderName}.books_plugin`);
    fs.writeFileSync(pluginFilePath, zipBuffer);

    const zip = new AdmZip(pluginFilePath);
    zip.extractAllTo(pluginPath);
  }
}
/**
 * Deletes every on-disk folder belonging to the given plugin, regardless
 * of version (folders are named `<NameNoSpaces>-<version>`, so matching
 * is done on the `<NameNoSpaces>-` prefix).
 */
function deletePluginFolder(info: PluginInfo) {
  const pluginsRootPath = getAppPath('plugins');

  // readdirSync throws ENOENT if the plugins root was never created
  // (first run, or no plugin ever extracted); nothing to delete then.
  if (!fs.existsSync(pluginsRootPath)) {
    return;
  }

  const folderNamePrefix = getPluginFolderNameFromInfo(info, true) + '-';
  for (const folderName of fs.readdirSync(pluginsRootPath)) {
    if (folderName.startsWith(folderNamePrefix)) {
      fs.removeSync(path.join(pluginsRootPath, folderName));
    }
  }
}
export async function getRawPluginSchemaList(
infoList: PluginInfo[]
): Promise<SchemaStub[]> {
const pluginsRoot = getAppPath('plugins');
const schemaStubs: SchemaStub[][] = [];
const folderSet = new Set(
infoList.map((info) => getPluginFolderNameFromInfo(info))
);
if (!fs.existsSync(pluginsRoot)) {
return [];
}
for (const pluginFolderName of fs.readdirSync(pluginsRoot)) {
if (!folderSet.has(pluginFolderName)) {
continue;
}
const pluginPath = path.join(pluginsRoot, pluginFolderName);
const schemasJs = path.resolve(path.join(pluginPath, 'schemas.js'));
if (!fs.existsSync(schemasJs)) {
continue;
}
const {
default: { default: schemas },
} = (await import(schemasJs)) as {
default: { default: unknown };
};
if (!isSchemaStubList(schemas)) {
continue;
}
schemaStubs.push(schemas);
}
return schemaStubs.flat();
}
/**
 * Type guard: true when `schemas` is an array whose every element is an
 * object carrying a string `name` (the minimal SchemaStub contract).
 */
function isSchemaStubList(schemas: unknown): schemas is SchemaStub[] {
  if (!Array.isArray(schemas)) {
    return false;
  }

  for (const sch of schemas) {
    if (typeof sch !== 'object') {
      return false;
    }

    // Optional chaining keeps `null` elements (typeof null === 'object')
    // from throwing; they fail the string check below.
    if (typeof (sch as SchemaStub)?.name !== 'string') {
      return false;
    }
  }

  return true;
}

View File

@ -1,10 +1,6 @@
import BetterSQLite3 from 'better-sqlite3';
import fs from 'fs-extra';
import { DatabaseError } from 'fyo/utils/errors';
import {
getRawPluginSchemaList,
unzipPluginsIfDoesNotExist,
} from 'main/helpers';
import path from 'path';
import { SchemaStub } from 'schemas/types';
import { DatabaseDemuxBase, DatabaseMethod } from 'utils/db/types';
@ -15,6 +11,12 @@ import { databaseMethodSet, unlinkIfExists } from '../helpers';
import patches from '../patches';
import { BespokeQueries } from './bespoke';
import DatabaseCore from './core';
import {
executeFirstMigration,
getPluginInfoList,
getRawPluginSchemaList,
unzipPluginsIfDoesNotExist,
} from './helpers';
import { runPatches } from './runPatch';
import { BespokeFunction, Patch } from './types';
@ -40,12 +42,10 @@ export class DatabaseManager extends DatabaseDemuxBase {
}
async connectToDatabase(dbPath: string, countryCode?: string) {
countryCode = await this._connect(dbPath, countryCode);
await this.#migrate();
return countryCode;
return await this.#connect(dbPath, countryCode);
}
async _connect(dbPath: string, countryCode?: string) {
async #connect(dbPath: string, countryCode?: string) {
countryCode ??= await DatabaseCore.getCountryCode(dbPath);
this.db = new DatabaseCore(dbPath);
await this.db.connect();
@ -53,25 +53,16 @@ export class DatabaseManager extends DatabaseDemuxBase {
throw new DatabaseError('Database not connected');
}
await unzipPluginsIfDoesNotExist(this.db.knex);
this.rawPluginSchemaList = await getRawPluginSchemaList();
await executeFirstMigration(this.db, countryCode);
const infoList = await getPluginInfoList(this.db.knex);
await unzipPluginsIfDoesNotExist(this.db.knex, infoList);
this.rawPluginSchemaList = await getRawPluginSchemaList(infoList);
const schemaMap = getSchemas(countryCode, this.rawPluginSchemaList);
this.db.setSchemaMap(schemaMap);
return countryCode;
}
async #migrate(): Promise<void> {
if (!this.#isInitialized) {
return;
}
const isFirstRun = await this.#getIsFirstRun();
if (isFirstRun) {
await this.db!.migrate();
}
await this.#executeMigration();
return countryCode;
}
async #executeMigration() {
@ -169,19 +160,6 @@ export class DatabaseManager extends DatabaseDemuxBase {
return await queryFunction(this.db!, ...args);
}
async #getIsFirstRun(): Promise<boolean> {
const knex = this.db?.knex;
if (!knex) {
return true;
}
const query = await knex('sqlite_master').where({
type: 'table',
name: 'PatchRun',
});
return !query.length;
}
async #createBackup() {
const { dbPath } = this.db ?? {};
if (!dbPath || process.env.IS_TEST) {
@ -194,7 +172,12 @@ export class DatabaseManager extends DatabaseDemuxBase {
}
const db = this.getDriver();
await db?.backup(backupPath).then(() => db.close());
if (!db) {
return;
}
await db.backup(backupPath);
db.close();
}
async #getBackupFilePath() {

View File

@ -2,6 +2,7 @@ import { constants } from 'fs';
import fs from 'fs/promises';
import { DatabaseMethod } from 'utils/db/types';
import { CUSTOM_EVENTS } from 'utils/messages';
import { PluginInfo } from 'utils/types';
import { KnexColumnType } from './database/types';
export const sqliteTypeMap: Record<string, KnexColumnType> = {
@ -86,3 +87,15 @@ export async function unlinkIfExists(filePath: unknown) {
return false;
}
/**
 * Builds a plugin's on-disk folder name from its info: the plugin name
 * with all spaces stripped, suffixed with `-<version>` unless
 * `noVersion` is set.
 */
export function getPluginFolderNameFromInfo(
  { name, version }: PluginInfo,
  noVersion = false
) {
  const folderPrefix = name.split(' ').join('');
  return noVersion ? folderPrefix : `${folderPrefix}-${version}`;
}

View File

@ -3,16 +3,9 @@ import addUOMs from './addUOMs';
import createInventoryNumberSeries from './createInventoryNumberSeries';
import fixRoundOffAccount from './fixRoundOffAccount';
import testPatch from './testPatch';
import updateSchemas from './updateSchemas';
export default [
{ name: 'testPatch', version: '0.5.0-beta.0', patch: testPatch },
{
name: 'updateSchemas',
version: '0.5.0-beta.0',
patch: updateSchemas,
priority: 100,
},
{
name: 'addUOMs',
version: '0.6.0-beta.0',

View File

@ -1,406 +0,0 @@
import fs from 'fs/promises';
import { RawValueMap } from 'fyo/core/types';
import { Knex } from 'knex';
import path from 'path';
import { changeKeys, deleteKeys, getIsNullOrUndef, invertMap } from 'utils';
import { getCountryCodeFromCountry } from 'utils/misc';
import { Version } from 'utils/version';
import { ModelNameEnum } from '../../models/types';
import { FieldTypeEnum, Schema, SchemaMap } from '../../schemas/types';
import { DatabaseManager } from '../database/manager';
const ignoreColumns = ['keywords'];
const columnMap = { creation: 'created', owner: 'createdBy' };
const childTableColumnMap = {
parenttype: 'parentSchemaName',
parentfield: 'parentFieldname',
};
const defaultNumberSeriesMap = {
[ModelNameEnum.Payment]: 'PAY-',
[ModelNameEnum.JournalEntry]: 'JV-',
[ModelNameEnum.SalesInvoice]: 'SINV-',
[ModelNameEnum.PurchaseInvoice]: 'PINV-',
} as Record<ModelNameEnum, string>;
/**
 * Patch entry point: rebuilds the database under the post-0.5.0 schemas
 * by copying all data into a freshly migrated temp database, then
 * swapping it in place of the old one.
 *
 * Skipped for in-memory databases and for databases already past
 * version 0.4.3-beta.0.
 */
async function execute(dm: DatabaseManager) {
  // In-memory databases (tests) have nothing on disk to migrate.
  if (dm.db?.dbPath === ':memory:') {
    return;
  }

  const sourceKnex = dm.db!.knex!;
  // Stored app version; may be absent on very old databases.
  const version = (
    (await sourceKnex('SingleValue')
      .select('value')
      .where({ fieldname: 'version' })) as { value: string }[]
  )?.[0]?.value;

  /**
   * Versions after this should have the new schemas
   */
  if (version && Version.gt(version, '0.4.3-beta.0')) {
    return;
  }

  /**
   * Initialize a different db to copy all the updated
   * data into.
   */
  const countryCode = await getCountryCode(sourceKnex);
  const destDm = await getDestinationDM(dm.db!.dbPath, countryCode);

  /**
   * Copy data from all the relevant tables
   * the other tables will be empty cause unused.
   */
  try {
    await copyData(sourceKnex, destDm);
  } catch (err) {
    // On failure, discard the half-written destination db and rethrow
    // so the patch runner sees the error; the source db is untouched.
    const destPath = destDm.db!.dbPath;
    await destDm.db!.close();
    await fs.unlink(destPath);
    throw err;
  }

  /**
   * Version will update when migration completes, this
   * is set to prevent this patch from running again.
   */
  await destDm.db!.update(ModelNameEnum.SystemSettings, {
    version: '0.5.0-beta.0',
  });

  /**
   * Replace the database with the new one.
   */
  await replaceDatabaseCore(dm, destDm);
}
/**
 * Swaps the migrated destination database file into the source
 * database's path, then reconnects `dm` to it.
 *
 * Both connections are closed before the file operations; the old db
 * file is deleted, not backed up here.
 */
async function replaceDatabaseCore(
  dm: DatabaseManager,
  destDm: DatabaseManager
) {
  const newDbPath = destDm.db!.dbPath; // new db with new schema
  const oldDbPath = dm.db!.dbPath; // old db to be replaced

  await dm.db!.close();
  await destDm.db!.close();
  await fs.unlink(oldDbPath);
  await fs.rename(newDbPath, oldDbPath);

  // Reconnect the original manager to the (now replaced) path.
  await dm._connect(oldDbPath);
}
/**
 * Copies all relevant tables from the source db into the destination
 * db, table-group by table-group.
 *
 * Foreign keys are disabled on the destination for the duration of the
 * copy so insert order across tables doesn't matter.
 */
async function copyData(sourceKnex: Knex, destDm: DatabaseManager) {
  const destKnex = destDm.db!.knex!;
  const schemaMap = destDm.getSchemaMap();

  // Allow out-of-order inserts across related tables.
  await destKnex.raw('PRAGMA foreign_keys=OFF');

  await copySingleValues(sourceKnex, destKnex, schemaMap);
  await copyParty(sourceKnex, destKnex, schemaMap[ModelNameEnum.Party]!);
  await copyItem(sourceKnex, destKnex, schemaMap[ModelNameEnum.Item]!);
  await copyChildTables(sourceKnex, destKnex, schemaMap);
  await copyOtherTables(sourceKnex, destKnex, schemaMap);
  await copyTransactionalTables(sourceKnex, destKnex, schemaMap);
  await copyLedgerEntries(
    sourceKnex,
    destKnex,
    schemaMap[ModelNameEnum.AccountingLedgerEntry]!
  );
  await copyNumberSeries(
    sourceKnex,
    destKnex,
    schemaMap[ModelNameEnum.NumberSeries]!
  );

  await destKnex.raw('PRAGMA foreign_keys=ON');
}
/**
 * Copies NumberSeries rows, backfilling the new `referenceType`,
 * `start` and `current` fields for the default series (PAY-, JV-,
 * SINV-, PINV-).
 *
 * Series that already have a referenceType are copied as-is; series
 * whose name is not a known default prefix are dropped (name deleted,
 * then filtered out below).
 */
async function copyNumberSeries(
  sourceKnex: Knex,
  destKnex: Knex,
  schema: Schema
) {
  const values = (await sourceKnex(
    ModelNameEnum.NumberSeries
  )) as RawValueMap[];

  // prefix -> referenced model, e.g. 'SINV-' -> SalesInvoice
  const refMap = invertMap(defaultNumberSeriesMap);

  for (const value of values) {
    // Already migrated / already has a reference; leave untouched.
    if (value.referenceType) {
      continue;
    }

    const name = value.name as string;
    const referenceType = refMap[name];
    if (!referenceType) {
      // Unknown series: mark for removal (filtered out by v.name below).
      delete value.name;
      continue;
    }

    // Highest numeric suffix already used in the referenced table,
    // e.g. max N of 'SINV-N'; becomes the series' current counter.
    const indices = (await sourceKnex.raw(
      `
select cast(substr(name, ??) as int) as idx
from ??
order by idx desc
limit 1`,
      [name.length + 1, referenceType]
    )) as { idx: number }[];

    value.start = 1001;
    value.current = indices[0]?.idx ?? value.current ?? value.start;
    value.referenceType = referenceType;
  }

  await copyValues(
    destKnex,
    ModelNameEnum.NumberSeries,
    values.filter((v) => v.name),
    [],
    {},
    schema
  );
}
/**
 * Copies AccountingLedgerEntry rows, dropping columns removed in the
 * new schema (`description`, `againstAccount`, `balance`).
 */
async function copyLedgerEntries(
  sourceKnex: Knex,
  destKnex: Knex,
  schema: Schema
) {
  const values = (await sourceKnex(
    ModelNameEnum.AccountingLedgerEntry
  )) as RawValueMap[];

  await copyValues(
    destKnex,
    ModelNameEnum.AccountingLedgerEntry,
    values,
    ['description', 'againstAccount', 'balance'],
    {},
    schema
  );
}
/**
 * Copies the remaining simple tables (no per-row fixups needed) from
 * source to destination verbatim, modulo the global column cleanup done
 * by `copyValues`.
 */
async function copyOtherTables(
  sourceKnex: Knex,
  destKnex: Knex,
  schemaMap: SchemaMap
) {
  const schemaNames = [
    ModelNameEnum.Account,
    ModelNameEnum.Currency,
    ModelNameEnum.Address,
    ModelNameEnum.Color,
    ModelNameEnum.Tax,
    ModelNameEnum.PatchRun,
  ];

  for (const sn of schemaNames) {
    const values = (await sourceKnex(sn)) as RawValueMap[];
    await copyValues(destKnex, sn, values, [], {}, schemaMap[sn]);
  }
}
/**
 * Copies the transactional document tables (JournalEntry, Payment,
 * Sales/PurchaseInvoice), backfilling fields introduced by the new
 * schemas:
 *  - `submitted` / `cancelled` default to 0 when falsy,
 *  - `numberSeries` falls back to the model's default prefix,
 *  - the old `customer` / `supplier` columns collapse into `party`.
 */
async function copyTransactionalTables(
  sourceKnex: Knex,
  destKnex: Knex,
  schemaMap: SchemaMap
) {
  const schemaNames = [
    ModelNameEnum.JournalEntry,
    ModelNameEnum.Payment,
    ModelNameEnum.SalesInvoice,
    ModelNameEnum.PurchaseInvoice,
  ];

  for (const sn of schemaNames) {
    const values = (await sourceKnex(sn)) as RawValueMap[];
    values.forEach((v) => {
      if (!v.submitted) {
        v.submitted = 0;
      }

      if (!v.cancelled) {
        v.cancelled = 0;
      }

      if (!v.numberSeries) {
        v.numberSeries = defaultNumberSeriesMap[sn];
      }

      // Invoices stored the counterparty in customer/supplier; the new
      // schema uses a single `party` field.
      if (v.customer) {
        v.party = v.customer;
      }

      if (v.supplier) {
        v.party = v.supplier;
      }
    });

    await copyValues(
      destKnex,
      sn,
      values,
      [],
      childTableColumnMap,
      schemaMap[sn]
    );
  }
}
/**
 * Copies every child-table schema (isChild), renaming the Frappe-style
 * parent columns (`parenttype`/`parentfield`) to the new names via
 * `childTableColumnMap`.
 */
async function copyChildTables(
  sourceKnex: Knex,
  destKnex: Knex,
  schemaMap: SchemaMap
) {
  const childSchemaNames = Object.keys(schemaMap).filter(
    (sn) => schemaMap[sn]?.isChild
  );

  for (const sn of childSchemaNames) {
    const values = (await sourceKnex(sn)) as RawValueMap[];
    await copyValues(
      destKnex,
      sn,
      values,
      [],
      childTableColumnMap,
      schemaMap[sn]
    );
  }
}
/**
 * Copies Item rows, defaulting the new `for` field to 'Both'
 * (item usable for both sales and purchases).
 */
async function copyItem(sourceKnex: Knex, destKnex: Knex, schema: Schema) {
  const values = (await sourceKnex(ModelNameEnum.Item)) as RawValueMap[];
  values.forEach((value) => {
    value.for = 'Both';
  });

  await copyValues(destKnex, ModelNameEnum.Item, values, [], {}, schema);
}
/**
 * Copies Party rows, converting the old boolean `supplier` flag into
 * the new `role` field ('Supplier' or 'Customer'); the value is staged
 * in `customer` then renamed to `role` by the keyMap below. The old
 * `supplier` and `addressDisplay` columns are dropped.
 */
async function copyParty(sourceKnex: Knex, destKnex: Knex, schema: Schema) {
  const values = (await sourceKnex(ModelNameEnum.Party)) as RawValueMap[];
  values.forEach((value) => {
    // customer will be mapped onto role
    if (Number(value.supplier) === 1) {
      value.customer = 'Supplier';
    } else {
      value.customer = 'Customer';
    }
  });

  await copyValues(
    destKnex,
    ModelNameEnum.Party,
    values,
    ['supplier', 'addressDisplay'],
    { customer: 'role' },
    schema
  );
}
/**
 * Copies SingleValue rows, but only those whose parent is a schema that
 * is still marked `isSingle` in the new schema map.
 */
async function copySingleValues(
  sourceKnex: Knex,
  destKnex: Knex,
  schemaMap: SchemaMap
) {
  const singleSchemaNames = Object.keys(schemaMap).filter(
    (k) => schemaMap[k]?.isSingle
  );

  const singleValues = (await sourceKnex(ModelNameEnum.SingleValue).whereIn(
    'parent',
    singleSchemaNames
  )) as RawValueMap[];

  await copyValues(destKnex, ModelNameEnum.SingleValue, singleValues);
}
/**
 * Generic row copier: cleans up `values` in a pipeline and batch-inserts
 * them into `destTableName`.
 *
 * @param destKnex destination connection
 * @param destTableName destination table
 * @param values raw rows from the source table (mutated in place)
 * @param keysToDelete per-table columns to drop (globally ignored
 *   columns are always added)
 * @param keyMap per-table column renames (global renames
 *   creation→created, owner→createdBy are always added)
 * @param schema when given, required-but-null fields are zero-filled
 *   and columns absent from the new schema are dropped
 */
async function copyValues(
  destKnex: Knex,
  destTableName: string,
  values: RawValueMap[],
  keysToDelete: string[] = [],
  keyMap: Record<string, string> = {},
  schema?: Schema
) {
  keysToDelete = [...keysToDelete, ...ignoreColumns];
  keyMap = { ...keyMap, ...columnMap };

  values = values.map((sv) => deleteKeys(sv, keysToDelete));
  values = values.map((sv) => changeKeys(sv, keyMap));

  if (schema) {
    // Required fields can't be NULL under the new schema; zero-fill.
    values.forEach((v) => notNullify(v, schema));
  }

  if (schema) {
    // Drop columns that no longer exist in the new schema.
    const newKeys = schema?.fields.map((f) => f.fieldname);
    values.forEach((v) => deleteOldKeys(v, newKeys));
  }

  // Chunked insert (100 rows per statement) to stay under SQLite limits.
  await destKnex.batchInsert(destTableName, values, 100);
}
/**
 * Creates and migrates the temporary destination database
 * (`__update_schemas_temp.db`, in the same directory as the source db)
 * that old data is copied into, and returns its connected manager.
 * Migrating seeds fixture rows, so the db is truncated afterwards.
 */
async function getDestinationDM(sourceDbPath: string, countryCode: string) {
  /**
   * This is where all the stuff from the old db will be copied.
   * That won't be altered cause schema update will cause data loss.
   */
  const dir = path.parse(sourceDbPath).dir;
  const dbPath = path.join(dir, '__update_schemas_temp.db');
  const dm = new DatabaseManager();
  await dm._connect(dbPath, countryCode);
  await dm.db!.migrate();
  await dm.db!.truncate();
  return dm;
}
/**
 * Resolves the country code from the old db's stored country name
 * (SingleValue fieldname 'country'); empty string when unset.
 */
async function getCountryCode(knex: Knex) {
  /**
   * Need to account for schema changes, in 0.4.3-beta.0
   */
  const country = (
    (await knex('SingleValue')
      .select('value')
      .where({ fieldname: 'country' })) as { value: string }[]
  )?.[0]?.value;

  if (!country) {
    return '';
  }

  return getCountryCodeFromCountry(country);
}
/**
 * Fills, in place, every required-but-null/undefined field of `map`
 * with a type-appropriate zero value so NOT NULL constraints hold:
 * numeric/check → 0, currency → '0.00000000000' (11-decimal fixed
 * string), table fields left alone, everything else → ''.
 */
function notNullify(map: RawValueMap, schema: Schema) {
  for (const field of schema.fields) {
    // Only required fields that are currently null/undefined.
    if (!field.required || !getIsNullOrUndef(map[field.fieldname])) {
      continue;
    }

    switch (field.fieldtype) {
      case FieldTypeEnum.Float:
      case FieldTypeEnum.Int:
      case FieldTypeEnum.Check:
        map[field.fieldname] = 0;
        break;
      case FieldTypeEnum.Currency:
        map[field.fieldname] = '0.00000000000';
        break;
      case FieldTypeEnum.Table:
        // Table fields hold child rows, not column values; skip.
        continue;
      default:
        map[field.fieldname] = '';
    }
  }
}
/**
 * Removes, in place, every key of `map` that is not listed in
 * `newKeys` (i.e. columns that don't exist in the new schema).
 */
function deleteOldKeys(map: RawValueMap, newKeys: string[]) {
  const keep = new Set(newKeys);
  for (const key of Object.keys(map)) {
    if (!keep.has(key)) {
      delete map[key];
    }
  }
}
export default { execute, beforeMigrate: true };

View File

@ -3,10 +3,8 @@ import { app } from 'electron';
import { constants } from 'fs';
import fs from 'fs-extra';
import { ConfigFile } from 'fyo/core/types';
import type { Knex } from 'knex';
import { Main } from 'main';
import path from 'path';
import type { SchemaStub } from 'schemas/types';
import config from 'utils/config';
import { BackendResponse } from 'utils/ipc/types';
import { IPC_CHANNELS } from 'utils/messages';
@ -110,22 +108,12 @@ export function getInfoJsonFromZip(filePath: string) {
return JSON.parse(data.toString('utf-8')) as PluginInfo;
}
/**
 * Builds a plugin's on-disk folder name: the plugin name with spaces
 * stripped, suffixed with `-<version>` unless `noVersion` is set.
 */
export function getPluginFolderNameFromInfo(
  { name, version }: PluginInfo,
  noVersion = false
) {
  const folderPrefix = name.replaceAll(' ', '');
  if (noVersion) {
    return folderPrefix;
  }

  return `${folderPrefix}-${version}`;
}
export function getAppPath(type: 'root' | 'backups' | 'plugins' = 'root') {
let root = app.getPath('documents');
let root: string;
if (process.env.NODE_ENV === 'development') {
root = 'dbs';
} else {
root = app.getPath('documents');
}
if (type === 'root') {
@ -134,69 +122,3 @@ export function getAppPath(type: 'root' | 'backups' | 'plugins' = 'root') {
return path.join(root, 'Frappe Books', type);
}
/**
 * Extracts every plugin stored in the `Plugin` table into the plugins
 * folder, skipping plugins whose versioned folder already exists and
 * removing stale folders of other versions first.
 */
export async function unzipPluginsIfDoesNotExist(knex: Knex): Promise<void> {
  const plugins = (await knex('Plugin').select(['name', 'info'])) as {
    name: string;
    info: string;
  }[];

  for (const { name, info: infoString } of plugins) {
    const pluginsRootPath = getAppPath('plugins');
    const info = JSON.parse(infoString) as PluginInfo;
    const folderName = getPluginFolderNameFromInfo(info);
    const pluginPath = path.join(pluginsRootPath, folderName);

    // Already extracted at this version.
    if (fs.existsSync(pluginPath)) {
      continue;
    }

    // Remove folders belonging to other versions of this plugin.
    deletePluginFolder(info);
    fs.ensureDirSync(pluginPath);

    const data = (await knex('Plugin').select('data').where({ name })) as {
      data: string;
    }[];

    // NOTE(review): data[0] is unguarded — assumes the Plugin row still
    // exists; a missing row would throw a TypeError here.
    const pluginZipBase64 = data[0].data;
    const zipBuffer = Buffer.from(pluginZipBase64, 'base64');
    const pluginFilePath = path.join(pluginPath, `${folderName}.books_plugin`);
    fs.writeFileSync(pluginFilePath, zipBuffer);

    const zip = new AdmZip(pluginFilePath);
    zip.extractAllTo(pluginPath);
  }
}
/**
 * Deletes every on-disk folder of the given plugin regardless of
 * version, matching on the versionless `<NameNoSpaces>-` prefix.
 * NOTE(review): readdirSync throws if the plugins root doesn't exist.
 */
function deletePluginFolder(info: PluginInfo) {
  const pluginsRootPath = getAppPath('plugins');
  const folderNamePrefix = getPluginFolderNameFromInfo(info, true) + '-';
  for (const folderName of fs.readdirSync(pluginsRootPath)) {
    if (!folderName.startsWith(folderNamePrefix)) {
      continue;
    }

    fs.removeSync(path.join(pluginsRootPath, folderName));
  }
}
/**
 * Loads the raw schema stubs from every folder in the plugins root by
 * importing each folder's `schemas.js` (double default due to CJS/ESM
 * wrapping) and flattening the results.
 *
 * NOTE(review): readdirSync throws if the plugins root doesn't exist,
 * and the imported value is cast to SchemaStub[] without validation.
 */
export async function getRawPluginSchemaList(): Promise<SchemaStub[]> {
  const pluginsRoot = getAppPath('plugins');
  const schemaStubs: SchemaStub[][] = [];
  for (const pluginFolderName of fs.readdirSync(pluginsRoot)) {
    const pluginPath = path.join(pluginsRoot, pluginFolderName);
    const schemasJs = path.resolve(path.join(pluginPath, 'schemas.js'));
    // Plugins without schemas are fine; skip them.
    if (!fs.existsSync(schemasJs)) {
      continue;
    }

    const {
      default: { default: schema },
    } = (await import(schemasJs)) as {
      default: { default: SchemaStub[] };
    };

    schemaStubs.push(schema);
  }

  return schemaStubs.flat();
}

View File

@ -9,10 +9,14 @@ import {
import { autoUpdater } from 'electron-updater';
import { constants } from 'fs';
import fs from 'fs-extra';
import { ValueError } from 'fyo/utils/errors';
import path from 'path';
import { SelectFileOptions, SelectFileReturn } from 'utils/types';
import databaseManager from '../backend/database/manager';
import { emitMainProcessError } from '../backend/helpers';
import {
emitMainProcessError,
getPluginFolderNameFromInfo,
} from '../backend/helpers';
import { Main } from '../main';
import { DatabaseMethod } from '../utils/db/types';
import { IPC_ACTIONS } from '../utils/messages';
@ -24,13 +28,11 @@ import {
getConfigFilesWithModified,
getErrorHandledReponse,
getInfoJsonFromZip,
getPluginFolderNameFromInfo,
isNetworkError,
setAndGetCleanedConfigFiles,
unzipFile,
} from './helpers';
import { saveHtmlAsPdf } from './saveHtmlAsPdf';
import { ValueError } from 'fyo/utils/errors';
export default function registerIpcMainActionListeners(main: Main) {
ipcMain.handle(IPC_ACTIONS.CHECK_DB_ACCESS, async (_, filePath: string) => {