mirror of https://github.com/iconify/iconify.git

Create script for building ES and CommonJS libraries, move Utils and Core to new build process, move Redundancy package to monorepo and refactor it

Vjacheslav Trushkin 2021-09-20 12:59:16 +03:00
parent 1c8273b977
commit 027a317e4e
72 changed files with 21299 additions and 1173 deletions

View File

@ -0,0 +1,13 @@
root = true
[*]
end_of_line = lf
insert_final_newline = true
indent_style = tab
indent_size = 4
charset = utf-8
trim_trailing_whitespace = true
[{*.json,*.yml}]
indent_style = space
indent_size = 2

View File

@ -0,0 +1 @@
lib

View File

@ -0,0 +1,23 @@
module.exports = {
env: {
browser: true,
es6: true,
node: true,
mocha: true,
},
extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended'],
globals: {
Atomics: 'readonly',
SharedArrayBuffer: 'readonly',
},
parser: '@typescript-eslint/parser',
parserOptions: {
ecmaVersion: 2018,
sourceType: 'module',
project: __dirname + '/tsconfig.json',
},
plugins: ['@typescript-eslint'],
rules: {
'no-mixed-spaces-and-tabs': ['off'],
},
};

8
packages/api-redundancy/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
.idea
.vscode
.DS_Store
*.map
node_modules
lib
dist
tests-compiled

View File

@ -0,0 +1,7 @@
.idea
.vscode
.DS_Store
node_modules
src
tests
tests-compiled

View File

@ -0,0 +1,7 @@
{
"trailingComma": "es5",
"singleQuote": true,
"useTabs": true,
"semi": true,
"quoteProps": "consistent"
}

View File

@ -0,0 +1,21 @@
# Redundancy
This package provides redundancy management for scripts that query an API. It detects API connection timeouts and re-sends queries to backup API host(s).
It was designed to be used with Iconify version 2.
## Usage
TODO
Due to time constraints, documentation is not available.
See `@iconify/iconify` version 2 and `@iconify/icon-finder` source code for usage examples.
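Until then, here is a minimal usage sketch based on the `initRedundancy()` API exported by this package; the host names and the inline query callback are placeholders, not part of the package:
```ts
import { initRedundancy } from '@iconify/api-redundancy';

// Two API hosts to rotate between (placeholder values)
const redundancy = initRedundancy({
	resources: ['https://api1.example.com', 'https://api2.example.com'],
	rotate: 750, // wait 750ms before trying the next host
	timeout: 2000, // fail 2000ms after the last host was queried
});

// Send a query. The second argument performs the actual request for one host
// and reports the result via queryItem.done(data) or queryItem.done(undefined, error).
redundancy.query(
	'/collections',
	(resource, payload, queryItem) => {
		// ...send the request to `resource` here (placeholder response below)...
		queryItem.done({ host: resource, requested: payload });
	},
	(data, error) => {
		if (error === undefined) {
			console.log('Success:', data);
		} else {
			console.error('All hosts failed:', error);
		}
	}
);
```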
## License
This package is dual-licensed under Apache 2.0 and GPL 2.0 license. You may select, at your option, one of the above-listed licenses.
`SPDX-License-Identifier: Apache-2.0 OR GPL-2.0`
© 2020 Vjacheslav Trushkin

View File

@ -0,0 +1,7 @@
/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */
module.exports = {
verbose: true,
preset: 'ts-jest',
testEnvironment: 'node',
testMatch: ['**/tests/*-test.ts'],
};

View File

@ -0,0 +1,280 @@
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1989, 1991 Free Software Foundation, Inc.
675 Mass Ave, Cambridge, MA 02139, USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users. This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it. (Some other Free Software Foundation software is covered by
the GNU Library General Public License instead.) You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.
To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have. You must make sure that they, too, receive or can get the
source code. And you must show them these terms so they know their
rights.
We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.
Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software. If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.
Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary. To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.
The precise terms and conditions for copying, distribution and
modification follow.
GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License. The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language. (Hereinafter, translation is included without limitation in
the term "modification".) Each licensee is addressed as "you".
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.
1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.
You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) You must cause the modified files to carry prominent notices
stating that you changed the files and the date of any change.
b) You must cause any work that you distribute or publish, that in
whole or in part contains or is derived from the Program or any
part thereof, to be licensed as a whole at no charge to all third
parties under the terms of this License.
c) If the modified program normally reads commands interactively
when run, you must cause it, when started running for such
interactive use in the most ordinary way, to print or display an
announcement including an appropriate copyright notice and a
notice that there is no warranty (or else, saying that you provide
a warranty) and that users may redistribute the program under
these conditions, and telling the user how to view a copy of this
License. (Exception: if the Program itself is interactive but
does not normally print such an announcement, your work based on
the Program is not required to print an announcement.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.
In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:
a) Accompany it with the complete corresponding machine-readable
source code, which must be distributed under the terms of Sections
1 and 2 above on a medium customarily used for software interchange; or,
b) Accompany it with a written offer, valid for at least three
years, to give any third party, for a charge no more than your
cost of physically performing source distribution, a complete
machine-readable copy of the corresponding source code, to be
distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange; or,
c) Accompany it with the information you received as to the offer
to distribute corresponding source code. (This alternative is
allowed only for noncommercial distribution and only if you
received the program in object code or executable form with such
an offer, in accord with Subsection b above.)
The source code for a work means the preferred form of the work for
making modifications to it. For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable. However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.
If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.
4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License. Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.
5. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Program or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.
6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.
7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all. For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.
If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.
9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation. If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.
10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission. For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.
NO WARRANTY
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
END OF TERMS AND CONDITIONS

View File

@ -0,0 +1,191 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
Copyright 2019, 2020 Vjacheslav Trushkin.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

9663
packages/api-redundancy/package-lock.json generated Normal file

File diff suppressed because it is too large

View File

@ -0,0 +1,34 @@
{
"name": "@iconify/api-redundancy",
"description": "Reusable redundancy library for API queries",
"version": "1.0.0",
"author": "Vjacheslav Trushkin",
"license": "(Apache-2.0 OR GPL-2.0)",
"main": "dist/index.js",
"module": "dist/index.mjs",
"types": "dist/index.d.ts",
"scripts": {
"lint": "npx eslint src/*.ts",
"prebuild": "npm run lint",
"build": "tsup src/index.ts --dts --format esm,cjs",
"test": "jest --runInBand"
},
"bugs": "https://github.com/iconify/iconify/issues",
"homepage": "https://iconify.design/",
"repository": {
"type": "git",
"url": "https://github.com/iconify/iconify.git",
"directory": "packages/api-redundancy"
},
"devDependencies": {
"@types/jest": "^27.0.1",
"@types/node": "^16.9.4",
"@typescript-eslint/eslint-plugin": "^4.31.1",
"@typescript-eslint/parser": "^4.31.1",
"cross-env": "^7.0.3",
"eslint": "^7.32.0",
"jest": "^27.2.0",
"ts-jest": "^27.0.5",
"tsup": "^5.1.0"
}
}

View File

@ -0,0 +1,49 @@
/**
* Callback for "timeout" configuration property.
* Returns number of milliseconds to wait before failing query, while there are pending resources.
*/
export interface TimeoutCallback {
(
startTime: number // Start time
): number;
}
/**
* Callback for "rotate" configuration property.
* Returns number of milliseconds to wait before trying next resource.
*/
export interface RotationTimeoutCallback {
(
queriesSent: number, // Number of queries sent, starts with 1 for timeout after first resource
startTime: number // Query start time
): number;
}
/**
* Resource to rotate (usually hostname or partial URL)
*/
export type RedundancyResource = string;
/**
* Configuration object
*/
export interface RedundancyConfig {
resources: RedundancyResource[]; // Resources to rotate
index: number; // Start index
timeout: number | TimeoutCallback; // Timeout for error (full timeout = timeout + resources.length * rotate)
rotate: number | RotationTimeoutCallback; // Timeout for one query
random: boolean; // True if order should be randomised
dataAfterTimeout: boolean; // True if data can be sent after timeout
}
/**
* Default RedundancyConfig for API calls
*/
export const defaultConfig: RedundancyConfig = {
resources: [],
index: 0,
timeout: 2000,
rotate: 750,
random: false,
dataAfterTimeout: false,
};
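The `timeout` and `rotate` options above also accept the callback forms defined in this file. A hypothetical configuration using a rotation callback (values chosen purely for illustration):
```ts
import type { RedundancyConfig } from './config';

const customConfig: RedundancyConfig = {
	resources: ['https://api1.example.com', 'https://api2.example.com'],
	index: 0,
	// Fail 1000ms after the last resource has been queried
	timeout: 1000,
	// Wait 500ms before the second resource, 1000ms before the third, and so on
	rotate: (queriesSent) => queriesSent * 500,
	random: false,
	dataAfterTimeout: true,
};
```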

View File

@ -0,0 +1,152 @@
import type { RedundancyConfig } from './config';
import { defaultConfig } from './config';
import type {
GetQueryStatus,
QueryModuleCallback,
QueryDoneCallback,
} from './query';
import { sendQuery } from './query';
/**
* Export types from query.ts
*/
export { GetQueryStatus, QueryModuleCallback, QueryDoneCallback };
export type {
QueryAbortCallback,
QueryUpdateIndexCallback,
QueryStatus,
PendingQueryItem,
} from './query';
/**
* Export types from config.ts
*/
export type { RedundancyConfig, RedundancyResource } from './config';
/**
* Function to filter item
*/
export interface FilterCallback {
(item: GetQueryStatus): boolean;
}
/**
* Redundancy instance
*/
export interface Redundancy {
// Send query
query: (
payload: unknown,
queryCallback: QueryModuleCallback,
doneCallback?: QueryDoneCallback
) => GetQueryStatus;
// Find Query instance
find: (callback: FilterCallback) => GetQueryStatus | null;
// Set resource start index. Overrides configuration
setIndex: (index: number) => void;
// Get resource start index. Store it in configuration
getIndex: () => number;
// Remove aborted and completed queries
cleanup: () => void;
}
/**
* Set configuration
*/
function setConfig(config: Partial<RedundancyConfig>): RedundancyConfig {
if (
typeof config !== 'object' ||
typeof (config as RedundancyConfig).resources !== 'object' ||
!((config as RedundancyConfig).resources instanceof Array) ||
!(config as RedundancyConfig).resources.length
) {
throw new Error('Invalid Redundancy configuration');
}
const newConfig = Object.create(null);
let key: keyof RedundancyConfig;
for (key in defaultConfig) {
if (config[key] !== void 0) {
newConfig[key] = config[key];
} else {
newConfig[key] = defaultConfig[key];
}
}
return newConfig;
}
/**
* Redundancy instance
*/
export function initRedundancy(cfg: Partial<RedundancyConfig>): Redundancy {
// Configuration
const config: RedundancyConfig = setConfig(cfg);
// List of queries
let queries: GetQueryStatus[] = [];
/**
* Remove aborted and completed queries
*/
function cleanup(): void {
queries = queries.filter((item) => item().status === 'pending');
}
/**
* Send query
*/
function query(
payload: unknown,
queryCallback: QueryModuleCallback,
doneCallback?: QueryDoneCallback
): GetQueryStatus {
const query = sendQuery(
config,
payload,
queryCallback,
(data, error) => {
// Remove query from list
cleanup();
// Call callback
if (doneCallback) {
doneCallback(data, error);
}
},
(newIndex) => {
// Update start index
config.index = newIndex;
}
);
queries.push(query);
return query;
}
/**
* Find instance
*/
function find(callback: FilterCallback): GetQueryStatus | null {
const result = queries.find((value) => {
return callback(value);
});
return result !== void 0 ? result : null;
}
// Create and return functions
const instance: Redundancy = {
query,
find,
setIndex: (index: number) => {
config.index = index;
},
getIndex: () => config.index,
cleanup,
};
return instance;
}
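A short sketch of the other instance methods defined above (`find()`, `setIndex()`, `getIndex()`, `cleanup()`); the payload and the filter condition are illustrative only:
```ts
import { initRedundancy } from './index';

const redundancy = initRedundancy({
	resources: ['api1', 'api2'],
});

// Prefer the second resource for future queries
redundancy.setIndex(1);
console.log(redundancy.getIndex()); // 1

const status = redundancy.query('whatever', (resource, payload, queryItem) => {
	queryItem.done(resource);
});

// Find a pending query by its payload
const pending = redundancy.find((item) => item().payload === 'whatever');
console.log(pending === status); // true: same GetQueryStatus callback

// Drop aborted and completed queries from the internal list
redundancy.cleanup();
```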

View File

@ -0,0 +1,362 @@
import type { RedundancyConfig, RedundancyResource } from './config';
// import type { Redundancy } from './redundancy';
/**
* Execution status
*/
type QueryItemStatus = 'pending' | 'completed' | 'aborted' | 'failed';
/**
* Custom payload
*/
type QueryPayload = unknown;
/**
* Callback
*
* If error is present, something went wrong and data is undefined. If error is undefined, data is set.
*/
export type QueryDoneCallback = (data?: unknown, error?: unknown) => void;
/**
* Callback for "abort" pending item.
*/
export type QueryAbortCallback = () => void;
/**
* Callback to call to update last successful resource index. Used by Redundancy class to automatically update config.
*/
export type QueryUpdateIndexCallback = (index: number) => void;
/**
* Status for query
*/
export interface QueryStatus {
readonly abort: () => void; // Function to call to abort everything
readonly subscribe: (
callback?: QueryDoneCallback,
overwrite?: boolean
) => void; // Add function to call when query is complete
readonly payload: QueryPayload; // Payload
status: QueryItemStatus; // Query status (global, not specific to one query)
startTime: number; // Time when function was called
queriesSent: number; // Number of queries sent
queriesPending: number; // Number of pending queries
}
/**
* Callback to track status
*/
export type GetQueryStatus = () => QueryStatus;
/**
* Item in pending items list
*/
export interface PendingQueryItem {
readonly getQueryStatus: GetQueryStatus;
status: QueryItemStatus; // Current query status
readonly resource: RedundancyResource; // Resource
readonly done: QueryDoneCallback; // Function to call with data
abort?: QueryAbortCallback; // Function to call to abort query, set by query handler
}
/**
* Function to send to item to send query
*/
export type QueryModuleCallback = (
resource: RedundancyResource,
payload: QueryPayload,
queryItem: PendingQueryItem
) => void;
/**
* Send query
*/
export function sendQuery(
config: RedundancyConfig,
payload: unknown,
query: QueryModuleCallback,
done?: QueryDoneCallback,
success?: QueryUpdateIndexCallback
): GetQueryStatus {
// Get number of resources
const resourcesCount = config.resources.length;
// Save start index
const startIndex = config.random
? Math.floor(Math.random() * resourcesCount)
: config.index;
// Get resources
let resources: RedundancyResource[];
if (config.random) {
// Randomise array
let list = config.resources.slice(0);
resources = [];
while (list.length > 1) {
const nextIndex = Math.floor(Math.random() * list.length);
resources.push(list[nextIndex]);
list = list.slice(0, nextIndex).concat(list.slice(nextIndex + 1));
}
resources = resources.concat(list);
} else {
// Rearrange resources to start with startIndex
resources = config.resources
.slice(startIndex)
.concat(config.resources.slice(0, startIndex));
}
// Counters, status
const startTime = Date.now();
let status: QueryItemStatus = 'pending';
let queriesSent = 0;
let lastError: unknown = void 0;
// Timer
let timer: ReturnType<typeof setTimeout> | null = null;
// Execution queue
let queue: PendingQueryItem[] = [];
// Callbacks to call when query is complete
let doneCallbacks: QueryDoneCallback[] = [];
if (typeof done === 'function') {
doneCallbacks.push(done);
}
/**
* Reset timer
*/
function resetTimer(): void {
if (timer) {
clearTimeout(timer);
timer = null;
}
}
/**
* Abort everything
*/
function abort(): void {
// Change status
if (status === 'pending') {
status = 'aborted';
}
// Reset timer
resetTimer();
// Abort all queued items
queue.forEach((item) => {
if (item.abort) {
item.abort();
}
if (item.status === 'pending') {
item.status = 'aborted';
}
});
queue = [];
}
/**
* Add / replace callback to call when execution is complete.
* This can be used to abort pending query implementations when query is complete or aborted.
*/
function subscribe(
callback?: QueryDoneCallback,
overwrite?: boolean
): void {
if (overwrite) {
doneCallbacks = [];
}
if (typeof callback === 'function') {
doneCallbacks.push(callback);
}
}
/**
* Get query status
*/
function getQueryStatus(): QueryStatus {
return {
startTime,
payload,
status,
queriesSent,
queriesPending: queue.length,
subscribe,
abort,
};
}
/**
* Fail query
*/
function failQuery(): void {
status = 'failed';
// Send notice to all callbacks
doneCallbacks.forEach((callback) => {
callback(void 0, lastError);
});
}
/**
* Clear queue
*/
function clearQueue(): void {
queue = queue.filter((item) => {
if (item.status === 'pending') {
item.status = 'aborted';
}
if (item.abort) {
item.abort();
}
return false;
});
}
/**
* Got response from module
*/
function moduleResponse(
item: PendingQueryItem,
data?: unknown,
error?: unknown
): void {
const isError = data === void 0;
// Remove item from queue
queue = queue.filter((queued) => queued !== item);
// Check status
switch (status) {
case 'pending':
// Pending
break;
case 'failed':
if (isError || !config.dataAfterTimeout) {
// Query has already timed out or dataAfterTimeout is disabled
return;
}
// Success after failure
break;
default:
// Aborted or completed
return;
}
// Error
if (isError) {
if (error !== void 0) {
lastError = error;
}
if (!queue.length) {
if (!resources.length) {
// Nothing else queued, nothing can be queued
failQuery();
} else {
// Queue is empty: run next item immediately
// eslint-disable-next-line @typescript-eslint/no-use-before-define
execNext();
}
}
return;
}
// Reset timers, abort pending queries
resetTimer();
clearQueue();
// Update index in Redundancy
if (success && !config.random) {
const index = config.resources.indexOf(item.resource);
if (index !== -1 && index !== config.index) {
success(index);
}
}
// Mark as completed and call callbacks
status = 'completed';
doneCallbacks.forEach((callback) => {
callback(data);
});
}
/**
* Execute next query
*/
function execNext(): void {
// Check status
if (status !== 'pending') {
return;
}
// Reset timer
resetTimer();
// Get resource
const resource = resources.shift();
if (resource === void 0) {
// Nothing to execute: wait for final timeout before failing
if (queue.length) {
const timeout: number =
typeof config.timeout === 'function'
? config.timeout(startTime)
: config.timeout;
if (timeout) {
// Last timeout before failing to allow late response
timer = setTimeout(() => {
resetTimer();
if (status === 'pending') {
// Clear queue
clearQueue();
failQuery();
}
}, timeout);
return;
}
}
// Fail
failQuery();
return;
}
// Create new item
const item: PendingQueryItem = {
getQueryStatus,
status: 'pending',
resource,
done: (data?: unknown, error?: unknown) => {
moduleResponse(item, data, error);
},
};
// Add to queue
queue.push(item);
// Bump next index
queriesSent++;
// Get timeout for next item
const timeout: number =
typeof config.rotate === 'function'
? config.rotate(queriesSent, startTime)
: config.rotate;
// Create timer
timer = setTimeout(execNext, timeout);
// Execute it
query(resource, payload, item);
}
// Execute first query on next tick
setTimeout(execNext);
// Return getQueryStatus()
return getQueryStatus;
}
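A sketch of what a query module callback is expected to do with the `PendingQueryItem` it receives: perform the request, report the outcome through `done()`, and optionally expose `abort()` so `sendQuery()` can cancel it once another resource has answered. The `fetch`-based implementation below is a hypothetical example assuming a fetch-capable environment, not part of this package:
```ts
import type { QueryModuleCallback } from './query';

const fetchModule: QueryModuleCallback = (resource, payload, queryItem) => {
	const controller = new AbortController();

	// Let sendQuery() cancel this request when it is no longer needed
	queryItem.abort = () => controller.abort();

	fetch(resource + String(payload), { signal: controller.signal })
		.then((response) =>
			response.ok
				? response.json()
				: Promise.reject(new Error('HTTP ' + String(response.status)))
		)
		.then((data) => queryItem.done(data)) // success: data, no error
		.catch((err) => queryItem.done(void 0, err)); // failure: no data, error
};
```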

View File

@ -0,0 +1,512 @@
import type { RedundancyConfig } from '../src/config';
import type { PendingQueryItem } from '../src/query';
import { sendQuery } from '../src/query';
describe('Advanced queries with multiple resources', () => {
it('Simple query, time out on first, success on second after third is called, ignore third (~70ms)', (done) => {
const payload = {};
const resources = ['api1', 'api2', 'api3'];
const result = {};
const config: RedundancyConfig = {
resources,
index: 0,
timeout: 200,
rotate: 50,
random: false,
dataAfterTimeout: false,
};
// Tracking
let isSync = true;
const startTime = Date.now();
let sentQuery = 0;
let itemAborted = false;
let secondItem: PendingQueryItem;
// Send query
const getStatus = sendQuery(
config,
payload,
(resource, queryPayload, queryItem) => {
expect(isSync).toEqual(false);
expect(queryPayload).toEqual(payload);
// Query should be executed 3 times
expect(sentQuery).toBeLessThan(3);
expect(resource).toEqual(resources[sentQuery]);
// Check status
expect(queryItem.getQueryStatus).toEqual(getStatus);
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.payload).toEqual(payload);
// Bump counter
sentQuery++;
// Tests specific to each query
switch (sentQuery) {
case 1:
// First query
expect(status.queriesSent).toEqual(1);
expect(status.queriesPending).toEqual(1);
// Add abort
queryItem.abort = (): void => {
done(
'Abort should have not been called for first item'
);
};
// Fail in 20ms
setTimeout(() => {
// Status should not have changed
const status = getStatus();
expect(status.queriesSent).toEqual(1);
expect(status.queriesPending).toEqual(1);
// Fail
queryItem.done(void 0, true);
}, 20);
return;
case 2:
// Only second query should be pending
expect(status.queriesSent).toEqual(2);
expect(status.queriesPending).toEqual(1);
// Add abort
queryItem.abort = (): void => {
done(
'Abort should have not been called for second item'
);
};
// Save item
secondItem = queryItem;
return;
case 3:
// 2nd and 3rd queries should be pending
expect(status.queriesSent).toEqual(3);
expect(status.queriesPending).toEqual(2);
// Add abort
queryItem.abort = (): void => {
// This item should be aborted, but only once
expect(itemAborted).toEqual(false);
expect(sentQuery).toEqual(3);
itemAborted = true;
};
// Complete second item
secondItem.done(result);
return;
default:
done('This code should not have been reached');
}
},
(data, error) => {
// Make sure queries were sent
expect(sentQuery).toEqual(3);
// Third query should have been aborted
expect(itemAborted).toEqual(true);
// Validate data
expect(data).toEqual(result);
expect(error).toBeUndefined();
// Check status
const status = getStatus();
expect(status.status).toEqual('completed');
expect(status.queriesSent).toEqual(3);
expect(status.queriesPending).toEqual(0);
// 20ms from first query failing, 50ms from delay between second and third
const diff = Date.now() - startTime;
expect(diff > 50 && diff < 90).toEqual(true);
done();
}
);
// Check status
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.queriesSent).toEqual(0);
expect(status.queriesPending).toEqual(0);
isSync = false;
});
it('Multiple delayed responses (~100ms)', (done) => {
const payload = {};
const resources = ['api1', 'api2'];
const result1 = {};
const result2 = {};
const config: RedundancyConfig = {
resources,
index: 0,
timeout: 200,
rotate: 50,
random: false,
dataAfterTimeout: false,
};
// Tracking
let isSync = true;
const startTime = Date.now();
let sentQuery = 0;
let itemAborted = false;
let firstItem: PendingQueryItem;
// Send query
const getStatus = sendQuery(
config,
payload,
(resource, queryPayload, queryItem) => {
expect(isSync).toEqual(false);
expect(queryPayload).toEqual(payload);
// Query should be executed 2 times
expect(sentQuery).toBeLessThan(2);
expect(resource).toEqual(resources[sentQuery]);
// Check status
expect(queryItem.getQueryStatus).toEqual(getStatus);
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.payload).toEqual(payload);
// Bump counter
sentQuery++;
// Tests specific to each query
switch (sentQuery) {
case 1:
// First query
expect(status.queriesSent).toEqual(1);
expect(status.queriesPending).toEqual(1);
// Add abort
queryItem.abort = (): void => {
done(
'Abort should have not been called for first item'
);
};
// Store item
firstItem = queryItem;
return;
case 2:
// Both queries should be pending
expect(status.queriesSent).toEqual(2);
expect(status.queriesPending).toEqual(2);
// Add abort
queryItem.abort = (): void => {
expect(itemAborted).toEqual(false);
itemAborted = true;
};
// Complete first item in 20ms (70ms from start), then second item
setTimeout(() => {
// Check status
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.queriesSent).toEqual(2);
expect(status.queriesPending).toEqual(2);
firstItem.done(result1);
// Complete second item in 30 ms
setTimeout(() => {
expect(queryItem.status).toEqual('aborted');
// Should not change anything because query is already complete
queryItem.done(result2);
// Finish test
done();
}, 30);
}, 20);
return;
default:
done('This code should not have been reached');
}
},
(data, error) => {
// Make sure queries were sent
expect(sentQuery).toEqual(2);
// Second query should have been aborted
expect(itemAborted).toEqual(true);
// Validate data
expect(data).toEqual(result1);
expect(error).toBeUndefined();
// Check status
const status = getStatus();
expect(status.status).toEqual('completed');
expect(status.queriesSent).toEqual(2);
expect(status.queriesPending).toEqual(0);
// 50ms delay between queries, 20ms delay by test timer
const diff = Date.now() - startTime;
expect(diff > 50 && diff < 90).toEqual(true);
// Do not finish: second item is still pending
}
);
// Check status
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.queriesSent).toEqual(0);
expect(status.queriesPending).toEqual(0);
isSync = false;
});
it('Ignored response after time out (~150ms)', (done) => {
const payload = {};
const resources = ['api1', 'api2'];
const result = {};
const config: RedundancyConfig = {
resources,
index: 0,
timeout: 100,
rotate: 25,
random: false,
dataAfterTimeout: false,
};
// Tracking
let isSync = true;
const startTime = Date.now();
let sentQuery = 0;
let firstItem: PendingQueryItem;
let completeCount = 0;
// Send query
const getStatus = sendQuery(
config,
payload,
(resource, queryPayload, queryItem) => {
expect(isSync).toEqual(false);
expect(queryPayload).toEqual(payload);
// Query should be executed 2 times
expect(sentQuery).toBeLessThan(2);
expect(resource).toEqual(resources[sentQuery]);
// Check status
expect(queryItem.getQueryStatus).toEqual(getStatus);
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.payload).toEqual(payload);
// Bump counter
sentQuery++;
// Tests specific to each query
switch (sentQuery) {
case 1:
// First query
expect(status.queriesSent).toEqual(1);
expect(status.queriesPending).toEqual(1);
// Store item
firstItem = queryItem;
return;
case 2:
// Both queries should be pending
expect(status.queriesSent).toEqual(2);
expect(status.queriesPending).toEqual(2);
return;
default:
done('This code should not have been reached');
}
},
(data, error) => {
// Make sure queries were sent
expect(sentQuery).toEqual(2);
// Bump counter
completeCount++;
switch (completeCount) {
case 1:
// First call: time out
((): void => {
// Validate data
expect(data).toBeUndefined();
expect(error).toBeUndefined();
// Check status
const status = getStatus();
expect(status.status).toEqual('failed');
expect(status.queriesSent).toEqual(2);
expect(status.queriesPending).toEqual(0);
// 25ms delay between queries * 2 + 100ms timeout
const diff = Date.now() - startTime;
expect(diff > 130 && diff < 170).toEqual(true);
// Send data from first query, which should be ignored because dataAfterTimeout is false
firstItem.done(result);
// Complete test
done();
})();
return;
default:
done('Callback should have been called only once');
}
}
);
// Check status
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.queriesSent).toEqual(0);
expect(status.queriesPending).toEqual(0);
isSync = false;
});
it('Response after time out (~150ms)', (done) => {
const payload = {};
const resources = ['api1', 'api2'];
const result = {};
const config: RedundancyConfig = {
resources,
index: 0,
timeout: 100,
rotate: 25,
random: false,
dataAfterTimeout: true,
};
// Tracking
let isSync = true;
const startTime = Date.now();
let sentQuery = 0;
let firstItem: PendingQueryItem;
let completeCount = 0;
// Send query
const getStatus = sendQuery(
config,
payload,
(resource, queryPayload, queryItem) => {
expect(isSync).toEqual(false);
expect(queryPayload).toEqual(payload);
// Query should be executed 2 times
expect(sentQuery).toBeLessThan(2);
expect(resource).toEqual(resources[sentQuery]);
// Check status
expect(queryItem.getQueryStatus).toEqual(getStatus);
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.payload).toEqual(payload);
// Bump counter
sentQuery++;
// Tests specific to each query
switch (sentQuery) {
case 1:
// First query
expect(status.queriesSent).toEqual(1);
expect(status.queriesPending).toEqual(1);
// Store item
firstItem = queryItem;
return;
case 2:
// Both queries should be pending
expect(status.queriesSent).toEqual(2);
expect(status.queriesPending).toEqual(2);
return;
default:
done('This code should not have been reached');
}
},
(data, error) => {
// Make sure queries were sent
expect(sentQuery).toEqual(2);
// Bump counter
completeCount++;
switch (completeCount) {
case 1:
// First call: time out
((): void => {
// Validate data
expect(data).toBeUndefined();
expect(error).toBeUndefined();
// Check status
const status = getStatus();
expect(status.status).toEqual('failed');
expect(status.queriesSent).toEqual(2);
expect(status.queriesPending).toEqual(0);
// 25ms delay between queries * 2 + 100ms timeout
const diff = Date.now() - startTime;
expect(diff > 130 && diff < 170).toEqual(true);
// Send data from first query
firstItem.done(result);
})();
return;
case 2:
// Second call: data
((): void => {
// Validate data
expect(data).toEqual(result);
expect(error).toBeUndefined();
// Check status
const status = getStatus();
expect(status.status).toEqual('completed');
expect(status.queriesSent).toEqual(2);
expect(status.queriesPending).toEqual(0);
// Same as few lines above
const diff = Date.now() - startTime;
expect(diff > 130 && diff < 170).toEqual(true);
// Done
done();
})();
return;
default:
done('Callback should have been called only twice');
}
}
);
// Check status
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.queriesSent).toEqual(0);
expect(status.queriesPending).toEqual(0);
isSync = false;
});
});

View File

@ -0,0 +1,275 @@
import type { RedundancyConfig } from '../src/config';
import { sendQuery } from '../src/query';
describe('Basic queries', () => {
test('Empty query', (done) => {
const payload = {};
const config: RedundancyConfig = {
resources: [],
index: 0,
timeout: 200,
rotate: 100,
random: false,
dataAfterTimeout: false,
};
// Tracking
let isSync = true;
const startTime = Date.now();
// Send query
const getStatus = sendQuery(
config,
payload,
() => {
done('Query should not be called when resources list is empty');
},
(data, error) => {
expect(isSync).toEqual(false);
expect(data).toBeUndefined();
expect(error).toBeUndefined();
// Check status
const status = getStatus();
expect(status.status).toEqual('failed');
expect(status.queriesSent).toEqual(0);
expect(status.queriesPending).toEqual(0);
// Should be almost instant: no items in queue
const diff = Date.now() - startTime;
expect(diff).toBeLessThan(50);
done();
}
);
// Check status
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.queriesSent).toEqual(0);
expect(status.queriesPending).toEqual(0);
isSync = false;
});
it('Simple query', (done) => {
const payload = {};
const resources = ['test1'];
const result = {};
const config: RedundancyConfig = {
resources,
index: 0,
timeout: 200,
rotate: 100,
random: false,
dataAfterTimeout: false,
};
// Tracking
let isSync = true;
const startTime = Date.now();
let sentQuery = false;
// Send query
const getStatus = sendQuery(
config,
payload,
(resource, queryPayload, queryItem) => {
expect(isSync).toEqual(false);
expect(resource).toEqual(resources[0]);
expect(queryPayload).toEqual(payload);
// Make sure query was executed only once
expect(sentQuery).toEqual(false);
sentQuery = true;
// Check status
expect(queryItem.getQueryStatus).toEqual(getStatus);
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.payload).toEqual(payload);
expect(status.queriesSent).toEqual(1);
expect(status.queriesPending).toEqual(1);
// Add abort function
queryItem.abort = (): void => {
done('Abort should have not been called');
};
// Complete
queryItem.done(result);
},
(data, error) => {
// Make sure query was sent
expect(sentQuery).toEqual(true);
// Validate data
expect(data).toEqual(result);
expect(error).toBeUndefined();
// Check status
const status = getStatus();
expect(status.status).toEqual('completed');
expect(status.queriesSent).toEqual(1);
expect(status.queriesPending).toEqual(0);
// Should be almost instant
const diff = Date.now() - startTime;
expect(diff).toBeLessThan(50);
done();
}
);
// Check status
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.queriesSent).toEqual(0);
expect(status.queriesPending).toEqual(0);
isSync = false;
});
it('Failing query', (done) => {
const payload = {};
const resources = ['api1'];
const result = {};
const config: RedundancyConfig = {
resources,
index: 0,
timeout: 200,
rotate: 100,
random: false,
dataAfterTimeout: false,
};
// Tracking
let isSync = true;
const startTime = Date.now();
let sentQuery = false;
// Send query
const getStatus = sendQuery(
config,
payload,
(resource, queryPayload, queryItem) => {
expect(isSync).toEqual(false);
expect(resource).toEqual(resources[0]);
expect(queryPayload).toEqual(payload);
// Make sure query was executed only once
expect(sentQuery).toEqual(false);
sentQuery = true;
// Add abort function
queryItem.abort = (): void => {
done('Abort should have not been called');
};
// Fail
queryItem.done(void 0, result);
},
(data, error) => {
// Make sure query was sent
expect(sentQuery).toEqual(true);
// Validate data
expect(data).toBeUndefined();
expect(error).toEqual(result);
// Check status
const status = getStatus();
expect(status.status).toEqual('failed');
expect(status.queriesSent).toEqual(1);
expect(status.queriesPending).toEqual(0);
// Should be almost instant
const diff = Date.now() - startTime;
expect(diff).toBeLessThan(40);
done();
}
);
// Check status
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.queriesSent).toEqual(0);
expect(status.queriesPending).toEqual(0);
isSync = false;
});
it('Timed out query (~300ms)', (done) => {
const payload = {};
const resources = ['api1'];
const config: RedundancyConfig = {
resources,
index: 0,
timeout: 200,
rotate: 100,
random: false,
dataAfterTimeout: false,
};
// Tracking
let isSync = true;
const startTime = Date.now();
let sentQuery = false;
let itemAborted = false;
// Send query
const getStatus = sendQuery(
config,
payload,
(resource, queryPayload, queryItem) => {
expect(isSync).toEqual(false);
expect(resource).toEqual(resources[0]);
expect(queryPayload).toEqual(payload);
// Make sure query was executed only once
expect(sentQuery).toEqual(false);
sentQuery = true;
// Add abort function
queryItem.abort = (): void => {
expect(itemAborted).toEqual(false);
itemAborted = true;
};
// Do not do anything
},
(data, error) => {
// Make sure query was sent
expect(sentQuery).toEqual(true);
// Validate data
expect(data).toBeUndefined();
expect(error).toBeUndefined();
// Check status
const status = getStatus();
expect(status.status).toEqual('failed');
expect(status.queriesSent).toEqual(1);
expect(status.queriesPending).toEqual(0);
// Item should have been aborted
expect(itemAborted).toEqual(true);
// Should have been config.rotate + config.timeout
const diff = Date.now() - startTime;
expect(diff).toBeGreaterThan(250);
done();
}
);
// Check status
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.queriesSent).toEqual(0);
expect(status.queriesPending).toEqual(0);
isSync = false;
});
});

View File

@ -0,0 +1,465 @@
import type { RedundancyConfig } from '../src/config';
import { sendQuery } from '../src/query';
describe('Multiple resources', () => {
it('Simple query, success on first attempt', (done) => {
const payload = {};
const resources = ['api1', 'api2'];
const result = {};
const config: RedundancyConfig = {
resources,
index: 0,
timeout: 200,
rotate: 100,
random: false,
dataAfterTimeout: false,
};
// Tracking
let isSync = true;
const startTime = Date.now();
let sentQuery = 0;
// Send query
const getStatus = sendQuery(
config,
payload,
(resource, queryPayload, queryItem) => {
expect(isSync).toEqual(false);
expect(resource).toEqual('api1');
expect(queryPayload).toEqual(payload);
// Query should be executed only once because it should finish before second attempt
expect(sentQuery).toEqual(0);
sentQuery++;
// Check status
expect(queryItem.getQueryStatus).toEqual(getStatus);
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.payload).toEqual(payload);
expect(status.queriesSent).toEqual(1);
expect(status.queriesPending).toEqual(1);
// Add abort function
queryItem.abort = (): void => {
done('Abort should not have been called');
};
// Complete
queryItem.done(result);
},
(data, error) => {
// Make sure query was sent
expect(sentQuery).toEqual(1);
// Validate data
expect(data).toEqual(result);
expect(error).toBeUndefined();
// Check status
const status = getStatus();
expect(status.status).toEqual('completed');
expect(status.queriesSent).toEqual(1);
expect(status.queriesPending).toEqual(0);
// Should be almost instant
const diff = Date.now() - startTime;
expect(diff).toBeLessThan(50);
done();
},
() => {
done('This should not have been called');
}
);
// Check status
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.queriesSent).toEqual(0);
expect(status.queriesPending).toEqual(0);
isSync = false;
});
it('Simple query, time out on first, success on second (~100ms)', (done) => {
const payload = {};
const resources = ['api1', 'api2'];
const result = {};
const config: RedundancyConfig = {
resources,
index: 0,
timeout: 200,
rotate: 100,
random: false,
dataAfterTimeout: false,
};
// Tracking
let isSync = true;
const startTime = Date.now();
let sentQuery = 0;
let itemAborted = false;
let parentUpdated = false;
// Send query
const getStatus = sendQuery(
config,
payload,
(resource, queryPayload, queryItem) => {
expect(isSync).toEqual(false);
expect(queryPayload).toEqual(payload);
// Query should be executed twice
expect(sentQuery).toBeLessThan(2);
expect(resource).toEqual(resources[sentQuery]);
// Check status
expect(queryItem.getQueryStatus).toEqual(getStatus);
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.payload).toEqual(payload);
// Bump counter
sentQuery++;
// All queries should be pending
expect(status.queriesSent).toEqual(sentQuery);
expect(status.queriesPending).toEqual(sentQuery);
// Add abort function
// Time out first, complete second
switch (sentQuery) {
case 1:
queryItem.abort = (): void => {
// First item should be aborted, but only once
expect(itemAborted).toEqual(false);
// When this is executed, counter should have been increased
expect(sentQuery).toEqual(2);
itemAborted = true;
// Do nothing, let it time out
};
return;
case 2:
queryItem.abort = (): void => {
done('Abort should not have been called');
};
// Send result
queryItem.done(result);
return;
default:
done('This code should not have been reached');
}
},
(data, error) => {
// Make sure queries were sent
expect(sentQuery).toEqual(2);
// First query should have been aborted
expect(itemAborted).toEqual(true);
// Validate data
expect(data).toEqual(result);
expect(error).toBeUndefined();
// Check status
const status = getStatus();
expect(status.status).toEqual('completed');
expect(status.queriesSent).toEqual(2);
expect(status.queriesPending).toEqual(0);
// Parent should have been updated
expect(parentUpdated).toEqual(true);
// Delay between first and second queries
const diff = Date.now() - startTime;
expect(diff).toBeGreaterThan(50);
expect(diff).toBeLessThan(150);
done();
},
(newIndex) => {
// Start index should be updated to 1
expect(newIndex).toEqual(1);
parentUpdated = true;
}
);
// Check status
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.queriesSent).toEqual(0);
expect(status.queriesPending).toEqual(0);
isSync = false;
});
it('Time out all queries (~100ms)', (done) => {
const payload = {};
const resources = ['api1', 'api2'];
const config: RedundancyConfig = {
resources,
index: 0,
timeout: 50,
rotate: 25,
random: false,
dataAfterTimeout: false,
};
// Tracking
let isSync = true;
const startTime = Date.now();
let sentQuery = 0;
let item1Aborted = false;
let item2Aborted = false;
// Send query
const getStatus = sendQuery(
config,
payload,
(resource, queryPayload, queryItem) => {
expect(isSync).toEqual(false);
expect(queryPayload).toEqual(payload);
// Query should be executed twice
expect(sentQuery).toBeLessThan(2);
expect(resource).toEqual(resources[sentQuery]);
// Check status
expect(queryItem.getQueryStatus).toEqual(getStatus);
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.payload).toEqual(payload);
// Bump counter
sentQuery++;
// All queries should be pending
expect(status.queriesSent).toEqual(sentQuery);
expect(status.queriesPending).toEqual(sentQuery);
// Add abort functions
switch (sentQuery) {
case 1:
queryItem.abort = (): void => {
expect(item1Aborted).toEqual(false);
expect(item2Aborted).toEqual(false);
// This should have been executed at the end
expect(sentQuery).toEqual(2);
item1Aborted = true;
// Do not send anything
};
return;
case 2:
queryItem.abort = (): void => {
expect(item1Aborted).toEqual(true);
expect(item2Aborted).toEqual(false);
// This should have been executed at the end
expect(sentQuery).toEqual(2);
item2Aborted = true;
// Do not send anything
};
return;
default:
done('This code should not have been reached');
}
},
(data, error) => {
// Make sure queries were sent
expect(sentQuery).toEqual(2);
// Queries should have been aborted
expect(item1Aborted).toEqual(true);
expect(item2Aborted).toEqual(true);
// Validate data
expect(data).toBeUndefined();
expect(error).toBeUndefined();
// Check status
const status = getStatus();
expect(status.status).toEqual('failed');
expect(status.queriesSent).toEqual(2);
expect(status.queriesPending).toEqual(0);
// rotate * 2 + timeout
const diff = Date.now() - startTime;
expect(diff).toBeGreaterThan(90);
expect(diff).toBeLessThan(120);
done();
},
() => {
done('This should have never been called');
}
);
// Check status
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.queriesSent).toEqual(0);
expect(status.queriesPending).toEqual(0);
isSync = false;
});
it('Start with second resource (~100ms)', (done) => {
const payload = {};
const resources = ['api1', 'api2'];
const config: RedundancyConfig = {
resources,
index: 1,
timeout: 50,
rotate: 25,
random: false,
dataAfterTimeout: false,
};
// Tracking
let isSync = true;
const startTime = Date.now();
let sentQuery = 0;
let item1Aborted = false;
let item2Aborted = false;
// Send query
const getStatus = sendQuery(
config,
payload,
(resource, queryPayload, queryItem) => {
expect(isSync).toEqual(false);
expect(queryPayload).toEqual(payload);
// Resource order should be: 1, 0
expect(resource).not.toEqual(resources[sentQuery]);
expect(resource).toEqual(resources[1 - sentQuery]);
// Bump counter
sentQuery++;
// Add abort functions
switch (sentQuery) {
case 1:
queryItem.abort = (): void => {
item1Aborted = true;
};
return;
case 2:
queryItem.abort = (): void => {
item2Aborted = true;
};
return;
default:
done('This code should not have been reached');
}
},
(data, error) => {
// Make sure queries were sent
expect(sentQuery).toEqual(2);
// Queries should have been aborted
expect(item1Aborted).toEqual(true);
expect(item2Aborted).toEqual(true);
// Validate data
expect(data).toBeUndefined();
expect(error).toBeUndefined();
// rotate * 2 + timeout
const diff = Date.now() - startTime;
expect(diff).toBeGreaterThan(90);
expect(diff).toBeLessThan(120);
done();
},
() => {
done('This should have never been called');
}
);
// Check status
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.queriesSent).toEqual(0);
expect(status.queriesPending).toEqual(0);
isSync = false;
});
it('Start with last resource (~150ms)', (done) => {
const payload = {};
const resources = ['api1', 'api2', 'api3', 'api4'];
const config: RedundancyConfig = {
resources,
index: 3,
timeout: 50,
rotate: 25,
random: false,
dataAfterTimeout: false,
};
// Tracking
let isSync = true;
let sentQuery = 0;
const startTime = Date.now();
// Send query
const getStatus = sendQuery(
config,
payload,
(resource, queryPayload) => {
expect(isSync).toEqual(false);
expect(queryPayload).toEqual(payload);
// Resource order should be: 3, 0, 1, 2
expect(resource).not.toEqual(resources[sentQuery]);
const expectedIndex = sentQuery === 0 ? 3 : sentQuery - 1;
expect(resource).toEqual(resources[expectedIndex]);
// Bump counter
sentQuery++;
},
(data, error) => {
// Make sure queries were sent
expect(sentQuery).toEqual(4);
// Validate data
expect(data).toBeUndefined();
expect(error).toBeUndefined();
// rotate * 4 + timeout
const diff = Date.now() - startTime;
expect(diff).toBeGreaterThan(140);
expect(diff).toBeLessThan(170);
done();
},
() => {
done('This should have never been called');
}
);
// Check status
const status = getStatus();
expect(status.status).toEqual('pending');
expect(status.queriesSent).toEqual(0);
expect(status.queriesPending).toEqual(0);
isSync = false;
});
});

View File

@ -0,0 +1,115 @@
import { initRedundancy } from '../src/index';
type DummyResponses = Record<string, string>;
describe('Redundancy class', () => {
it('Simple query', (done) => {
const redundancy = initRedundancy({
resources: [
'https://api.local', // Will fail
'https://api-backup1.local', // Success
'https://api-backup2.local',
],
rotate: 20,
timeout: 100,
});
// Premade responses
const responses: DummyResponses = {
'https://api-backup1.local/foo': 'foo',
};
let counter = 0;
let doneCallbackCalled = false;
const query = redundancy.query(
'/foo',
(resource, payload, status) => {
counter++;
expect(counter).toBeLessThan(3); // No more than 2 queries should be executed
// Make URI from resource + payload
const uri = (resource as string) + (payload as string);
// Get fake data if it exists
if (responses[uri] === void 0) {
return;
}
// Do something with "data", simulate instant callback
status.done(responses[uri]);
// Complete test
setTimeout(() => {
expect(counter).toEqual(2);
expect(doneCallbackCalled).toEqual(true);
expect(query().status).toEqual('completed');
expect(redundancy.getIndex()).toEqual(1); // Should have changed to 1 after query
done();
});
},
(data) => {
expect(data).toEqual('foo');
doneCallbackCalled = true;
}
);
// Test find()
expect(
redundancy.find((item) => (item().payload as string) === '/foo')
).toEqual(query);
expect(redundancy.find((item) => item().status === 'pending')).toEqual(
query
);
});
it('Different start index', (done) => {
const redundancy = initRedundancy({
resources: [
'https://api.local',
'https://api-backup1.local',
'https://api-backup2.local',
],
rotate: 20,
timeout: 3000,
index: 1,
});
// Premade responses
const responses: DummyResponses = {
'https://api-backup1.local/foo': 'foo',
};
let counter = 0;
const query = redundancy.query('/foo', (resource, payload, status) => {
counter++;
expect(counter).toBeLessThan(2); // Should be success on first call because start index = 1
// Make URI from resource + payload
const uri = (resource as string) + (payload as string);
// Get fake data if it exists
if (responses[uri] === void 0) {
return;
}
// Do something with "data", simulate instant callback
status.done(responses[uri]);
// Complete test
setTimeout(() => {
expect(counter).toEqual(1);
expect(query().status).toEqual('completed');
expect(redundancy.getIndex()).toEqual(1);
done();
});
});
// Test find()
expect(redundancy.find((item) => item().payload === '/foo')).toEqual(
query
);
expect(redundancy.find((item) => item().status === 'pending')).toEqual(
query
);
});
});

View File

@ -0,0 +1,15 @@
{
"include": ["./src/*.ts", "./tests/*.ts"],
"compilerOptions": {
"target": "ESNext",
"module": "CommonJS",
"declaration": true,
"declarationMap": false,
"sourceMap": false,
"strict": true,
"moduleResolution": "node",
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"importsNotUsedAsValues": "error"
}
}

View File

@ -1,4 +1,4 @@
import { PendingQueryItem } from '@cyberalien/redundancy';
import { PendingQueryItem } from '@iconify/api-redundancy';
import {
APIQueryParams,
IconifyAPIPrepareQuery,

View File

@ -1,4 +1,6 @@
.DS_Store
node_modules
imports-test.mjs
tsconfig.tsbuildinfo
tests-compiled
lib

View File

@ -1,6 +1,8 @@
.DS_Store
tsconfig.json
node_modules
imports-test.mjs
tsconfig.tsbuildinfo
tests
tests-compiled
src

View File

@ -1,247 +1,17 @@
/* eslint-disable */
const fs = require('fs');
const path = require('path');
const child_process = require('child_process');
const { build } = require('esbuild');
const { buildFiles } = require('@iconify/library-builder');
// Config
const sourceDir = './src';
const targetDir = './lib';
// True if CommonJS files should be built with `esbuild`
// If false, CommonJS files will be built with `tsc` and import paths will not be rewritten
const rebuildCommonJS = false;
/**
* Find all TypeScript files
*/
const files = [];
findFiles('');
files.sort((a, b) => a.localeCompare(b));
function findFiles(dir) {
fs.readdirSync(sourceDir + dir).forEach((file) => {
if (file.slice(0, 1) === '.') {
return;
}
const filename = dir + '/' + file;
const stat = fs.lstatSync(sourceDir + filename);
if (stat.isDirectory()) {
findFiles(filename);
return;
}
const parts = filename.split('.');
const ext = parts.pop();
if (ext === 'ts') {
files.push(parts.join('.'));
}
buildFiles({
root: __dirname,
source: './src',
target: './lib',
cleanup: true,
updateExports: true,
})
.then(() => {
console.log('Done');
})
.catch((err) => {
console.error(err);
process.exit(1);
});
}
/**
* Build stuff
*/
(async () => {
// Clean up target directory
console.log(`Cleaning up ${targetDir}`);
cleanDir(targetDir);
// Build files with TypeScript compiler first to make sure there are no errors and to generate .d.ts files
const result = child_process.spawnSync('npm', ['run', 'build:dts'], {
cwd: __dirname,
stdio: 'inherit',
});
if (result.status !== 0) {
process.exit(result.status);
}
// Transpile all files to .js and .mjs
const maxMode = rebuildCommonJS ? 2 : 1;
for (let i = 0; i < files.length; i++) {
const file = files[i];
for (let j = 0; j < maxMode; j++) {
const esm = j === 0;
const ext = esm ? '.mjs' : '.js';
function testFile(dir, file) {
try {
const stat = fs.lstatSync(path.resolve(dir, file));
if (stat.isFile()) {
return 'file';
}
if (stat.isDirectory()) {
return 'dir';
}
} catch (err) {
//
}
return null;
}
function returnFile(filename) {
const parts = filename.split('.');
parts.pop();
const file = parts.join('.') + ext;
return { path: file, external: true };
}
console.log('Building', file.slice(1) + ext);
await build({
entryPoints: [sourceDir + file + '.ts'],
outfile: targetDir + file + ext,
format: esm ? 'esm' : 'cjs',
bundle: true,
plugins: [
{
name: 'resolve-path',
setup(build) {
build.onResolve({ filter: /.*/ }, (args) => {
if (
args.importer &&
args.kind === 'import-statement' &&
args.namespace === 'file'
) {
const importPath = args.path;
if (importPath.slice(0, 1) !== '.') {
return;
}
const dir = args.resolveDir;
// Check if file exists as is
const mainResult = testFile(
dir,
importPath
);
if (mainResult === 'file') {
return returnFile(importPath);
}
// Attempt to add extension
const fileWithExt = importPath + '.ts';
if (testFile(dir, fileWithExt) === 'file') {
return returnFile(fileWithExt);
}
// Check if its a directory
if (mainResult === 'dir') {
// Test '/index.js'
const testing =
importPath +
(importPath.slice(-1) === '/'
? ''
: '/') +
'index.ts';
if (testFile(dir, testing) === 'file') {
return returnFile(testing);
}
}
console.log(args);
throw new Error(
`Cannot resolve ${importPath}`
);
}
});
},
},
],
});
}
}
// Update exports in package.json
updatePackage();
})();
/**
* Update exports in package.json
*/
function updatePackage() {
const packageData = JSON.parse(fs.readFileSync('package.json', 'utf8'));
// Add './' to export
function formatExport(path) {
return path.slice(0, 2) === './' ? path : './' + path;
}
// Get all exports
const data = {};
if (packageData.main && packageData.module) {
data['./'] = {
require: formatExport(packageData.main),
import: formatExport(packageData.module),
};
}
files.forEach((file) => {
const key = formatExport(targetDir + file);
// Check for '/index'
const parts = key.split('/');
if (parts.pop() === 'index') {
data[parts.join('/') + '/'] = {
require: key + '.js',
import: key + '.mjs',
};
}
// Add file
data[key] = {
require: key + '.js',
import: key + '.mjs',
};
});
// Update package.json
if (
packageData['exports'] &&
JSON.stringify(packageData['exports']) === JSON.stringify(data)
) {
// Nothing to update
return;
}
packageData.exports = data;
const content = JSON.stringify(packageData, null, '\t') + '\n';
fs.writeFileSync('package.json', content, 'utf8');
console.log('Updated exports in package.json');
}
/**
* Remove all files from directory
*/
function cleanDir(dir) {
let files;
try {
files = fs.readdirSync(dir);
} catch (err) {
return;
}
files.forEach((file) => {
const filename = dir + '/' + file;
let stat;
try {
stat = fs.lstatSync(filename);
} catch (err) {
return;
}
if (stat.isDirectory()) {
cleanDir(filename);
try {
fs.rmdirSync(filename);
} catch (err) {
//
}
return;
}
try {
fs.unlinkSync(filename);
} catch (err) {
//
}
});
}

View File

@ -9,18 +9,18 @@
"version": "1.1.3",
"license": "(Apache-2.0 OR GPL-2.0)",
"dependencies": {
"@cyberalien/redundancy": "^1.1.0",
"@iconify/api-redundancy": "^1.0.0",
"@iconify/types": "^1.0.6",
"@iconify/utils": "^1.0.6"
},
"devDependencies": {
"@iconify/library-builder": "^1.0.0",
"@types/chai": "^4.2.18",
"@types/mocha": "^8.2.2",
"@types/node": "^15.3.0",
"@types/request": "^2.48.5",
"@typescript-eslint/eslint-plugin": "^4.31.1",
"chai": "^4.3.4",
"esbuild": "^0.12.28",
"eslint": "^7.32.0",
"mocha": "^8.3.2",
"rimraf": "^3.0.2",
@ -130,11 +130,6 @@
"node": ">=4"
}
},
"node_modules/@cyberalien/redundancy": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@cyberalien/redundancy/-/redundancy-1.1.0.tgz",
"integrity": "sha512-+ZvuwtVXzyxX1CWnP+X5XgDelseU9KSYgmPu3/DSraR7Qyi/vLZwuRRX0sYO24M/gZaChNXRBEK6RHRe3uBndw=="
},
"node_modules/@eslint/eslintrc": {
"version": "0.4.3",
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.3.tgz",
@ -175,6 +170,22 @@
"integrity": "sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==",
"dev": true
},
"node_modules/@iconify/api-redundancy": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@iconify/api-redundancy/-/api-redundancy-1.0.0.tgz",
"integrity": "sha512-cdjhaq0ALKjVMTgPZR3T4C9GzvsFa7awltd7uGf8nmw96uIEpgRJpmkOksZV7oyTtN0kfMmLMs/ZGTA0t4rEEQ=="
},
"node_modules/@iconify/library-builder": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@iconify/library-builder/-/library-builder-1.0.0.tgz",
"integrity": "sha512-xdzmA5JrD+LBPfR2b2e9lb+pQXgK+nkJhL7qTaGpNCHwCsa8HeQZyuEtvTdMUKqONeEEvZkvdUdv/lK1fn3Jtw==",
"dev": true,
"dependencies": {
"@types/node": "^15.3.0",
"esbuild": "^0.12.28",
"typescript": "^4.2.4"
}
},
"node_modules/@iconify/types": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/@iconify/types/-/types-1.0.7.tgz",
@ -2736,11 +2747,6 @@
}
}
},
"@cyberalien/redundancy": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@cyberalien/redundancy/-/redundancy-1.1.0.tgz",
"integrity": "sha512-+ZvuwtVXzyxX1CWnP+X5XgDelseU9KSYgmPu3/DSraR7Qyi/vLZwuRRX0sYO24M/gZaChNXRBEK6RHRe3uBndw=="
},
"@eslint/eslintrc": {
"version": "0.4.3",
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.3.tgz",
@ -2775,6 +2781,22 @@
"integrity": "sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==",
"dev": true
},
"@iconify/api-redundancy": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@iconify/api-redundancy/-/api-redundancy-1.0.0.tgz",
"integrity": "sha512-cdjhaq0ALKjVMTgPZR3T4C9GzvsFa7awltd7uGf8nmw96uIEpgRJpmkOksZV7oyTtN0kfMmLMs/ZGTA0t4rEEQ=="
},
"@iconify/library-builder": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@iconify/library-builder/-/library-builder-1.0.0.tgz",
"integrity": "sha512-xdzmA5JrD+LBPfR2b2e9lb+pQXgK+nkJhL7qTaGpNCHwCsa8HeQZyuEtvTdMUKqONeEEvZkvdUdv/lK1fn3Jtw==",
"dev": true,
"requires": {
"@types/node": "^15.3.0",
"esbuild": "^0.12.28",
"typescript": "^4.2.4"
}
},
"@iconify/types": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/@iconify/types/-/types-1.0.7.tgz",

View File

@ -16,7 +16,7 @@
"lint": "eslint {src,tests}/**/*.ts",
"prebuild": "npm run lint && npm run clean",
"build": "node build",
"build:dts": "tsc --project src/tsconfig.json",
"build:source": "tsc --project src/tsconfig.json",
"build:tests": "tsc --project tests/tsconfig.json",
"test": "mocha tests-compiled/*/*-test.js",
"pretest": "npm run build && npm run build:tests"
@ -38,6 +38,10 @@
"require": "./lib/api/index.js",
"import": "./lib/api/index.mjs"
},
"./lib/api": {
"require": "./lib/api/index.js",
"import": "./lib/api/index.mjs"
},
"./lib/api/index": {
"require": "./lib/api/index.js",
"import": "./lib/api/index.mjs"
@ -66,6 +70,10 @@
"require": "./lib/browser-storage/index.js",
"import": "./lib/browser-storage/index.mjs"
},
"./lib/browser-storage": {
"require": "./lib/browser-storage/index.js",
"import": "./lib/browser-storage/index.mjs"
},
"./lib/browser-storage/index": {
"require": "./lib/browser-storage/index.js",
"import": "./lib/browser-storage/index.mjs"
@ -108,18 +116,18 @@
}
},
"dependencies": {
"@cyberalien/redundancy": "^1.1.0",
"@iconify/api-redundancy": "^1.0.0",
"@iconify/types": "^1.0.6",
"@iconify/utils": "^1.0.6"
},
"devDependencies": {
"@iconify/library-builder": "^1.0.0",
"@types/chai": "^4.2.18",
"@types/mocha": "^8.2.2",
"@types/node": "^15.3.0",
"@types/request": "^2.48.5",
"@typescript-eslint/eslint-plugin": "^4.31.1",
"chai": "^4.3.4",
"esbuild": "^0.12.28",
"eslint": "^7.32.0",
"mocha": "^8.3.2",
"rimraf": "^3.0.2",

View File

@ -1,4 +1,4 @@
import type { RedundancyConfig } from '@cyberalien/redundancy';
import type { RedundancyConfig } from '@iconify/api-redundancy';
/**
* API config

View File

@ -1,6 +1,6 @@
import type { Redundancy, QueryModuleCallback } from '@cyberalien/redundancy';
import type { Redundancy, QueryModuleCallback } from '@iconify/api-redundancy';
import type { IconifyJSON } from '@iconify/types';
import { initRedundancy } from '@cyberalien/redundancy';
import { initRedundancy } from '@iconify/api-redundancy';
import type { SortedIcons } from '../icon/sort';
import { sortIcons } from '../icon/sort';
import type {
@ -38,8 +38,10 @@ function emptyCallback(): void {
* [provider][prefix][icon] = time when icon was added to queue
*/
type PendingIcons = Record<string, number>;
const pendingIcons: Record<string, Record<string, PendingIcons>> =
Object.create(null);
const pendingIcons: Record<
string,
Record<string, PendingIcons>
> = Object.create(null);
/**
* List of icons that are waiting to be loaded.
@ -51,12 +53,14 @@ const pendingIcons: Record<string, Record<string, PendingIcons>> =
*
* [provider][prefix] = array of icon names
*/
const iconsToLoad: Record<string, Record<string, string[]>> =
Object.create(null);
const iconsToLoad: Record<string, Record<string, string[]>> = Object.create(
null
);
// Flags to merge multiple synchronous icon requests in one asynchronous request
const loaderFlags: Record<string, Record<string, boolean>> =
Object.create(null);
const loaderFlags: Record<string, Record<string, boolean>> = Object.create(
null
);
const queueFlags: Record<string, Record<string, boolean>> = Object.create(null);
// Redundancy instances cache, sorted by provider
@ -293,8 +297,9 @@ const loadIcons: IconifyLoadIcons = (
}
// Get all sources for pending icons
const newIcons: Record<string, Record<string, string[]>> =
Object.create(null);
const newIcons: Record<string, Record<string, string[]>> = Object.create(
null
);
const sources: IconifyIconSource[] = [];
let lastProvider: string, lastPrefix: string;

View File

@ -1,4 +1,4 @@
import type { PendingQueryItem } from '@cyberalien/redundancy';
import type { PendingQueryItem } from '@iconify/api-redundancy';
import type { GetAPIConfig } from '../api/config';
/**

View File

@ -1,4 +1,4 @@
import type { PendingQueryItem } from '@cyberalien/redundancy';
import type { PendingQueryItem } from '@iconify/api-redundancy';
import type {
APIQueryParams,
IconifyAPIPrepareQuery,

View File

@ -1,4 +1,4 @@
import type { PendingQueryItem } from '@cyberalien/redundancy';
import type { PendingQueryItem } from '@iconify/api-redundancy';
import type {
APIQueryParams,
IconifyAPIPrepareQuery,
@ -50,7 +50,7 @@ function getGlobal(): JSONPRoot {
// Create root
if (rootVar === null) {
// window
const globalRoot = (self as unknown) as Record<string, unknown>;
const globalRoot = self as unknown as Record<string, unknown>;
// Test for window.Iconify. If missing, create 'IconifyJSONP'
let prefix = 'Iconify';

View File

@ -1,6 +1,6 @@
/* eslint-disable @typescript-eslint/no-unused-vars-experimental */
/* eslint-disable @typescript-eslint/no-unused-vars */
import type { PendingQueryItem } from '@cyberalien/redundancy';
import type { PendingQueryItem } from '@iconify/api-redundancy';
import type { APIQueryParams, IconifyAPIModule } from '../modules';
import type { IconifyJSON } from '@iconify/types';

View File

@ -8,7 +8,6 @@ import {
mockAPIData,
storage,
} from '../../lib/api/modules/mock';
import type { GetAPIConfig } from '../../lib/api/config';
describe('Testing mock API module prepare function', () => {
let prefixCounter = 0;

View File

@ -2,7 +2,7 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
import 'mocha';
import { expect } from 'chai';
import type { PendingQueryItem } from '@cyberalien/redundancy';
import type { PendingQueryItem } from '@iconify/api-redundancy';
import type { IconifyAPIConfig } from '../../lib/api/config';
import { setAPIConfig, getAPIConfig } from '../../lib/api/config';
import type { APIQueryParams, IconifyAPIModule } from '../../lib/api/modules';

View File

@ -2,7 +2,7 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
import 'mocha';
import { expect } from 'chai';
import type { PendingQueryItem } from '@cyberalien/redundancy';
import type { PendingQueryItem } from '@iconify/api-redundancy';
import { setAPIConfig } from '../../lib/api/config';
import type { APIQueryParams } from '../../lib/api/modules';
import { setAPIModule } from '../../lib/api/modules';

View File

@ -0,0 +1,13 @@
root = true
[*]
end_of_line = lf
insert_final_newline = true
indent_style = tab
indent_size = 4
charset = utf-8
trim_trailing_whitespace = true
[{*.json,*.yml}]
indent_style = space
indent_size = 2

View File

@ -0,0 +1,2 @@
lib
tests-compiled

View File

@ -0,0 +1,30 @@
module.exports = {
env: {
browser: true,
es6: true,
node: true,
mocha: true,
},
extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended'],
globals: {
Atomics: 'readonly',
SharedArrayBuffer: 'readonly',
},
parser: '@typescript-eslint/parser',
parserOptions: {
ecmaVersion: 2018,
sourceType: 'module',
project: __dirname + '/tsconfig-base.json',
},
plugins: ['@typescript-eslint'],
rules: {
'no-mixed-spaces-and-tabs': ['off'],
'no-unused-vars': ['off'],
'@typescript-eslint/no-unused-vars-experimental': ['error'],
},
overrides: [
{
files: ['src/**/*.ts', 'tests/**/*.ts'],
},
],
};

10
packages/library-builder/.gitignore vendored Normal file
View File

@ -0,0 +1,10 @@
.idea
.DS_Store
*.map
node_modules
imports-test.mjs
npm-debug.log
yarn.lock
tsconfig.tsbuildinfo
lib
tests-compiled

View File

@ -0,0 +1,11 @@
.idea
.DS_Store
*.map
node_modules
imports-test.mjs
npm-debug.log
yarn.lock
tsconfig.tsbuildinfo
src
tests
tests-compiled

View File

@ -0,0 +1,8 @@
{
"trailingComma": "es5",
"singleQuote": true,
"useTabs": true,
"semi": true,
"quoteProps": "consistent",
"endOfLine": "lf"
}

View File

@ -0,0 +1,7 @@
/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */
module.exports = {
verbose: true,
preset: 'ts-jest',
testEnvironment: 'node',
testMatch: ['**/tests/*-test.ts'],
};

View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021 Vjacheslav Trushkin
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

7789
packages/library-builder/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,31 @@
{
"name": "@iconify/library-builder",
"description": "Build script to compile TypeScript files to both ES and CommonJS modules.",
"author": "Vjacheslav Trushkin",
"version": "1.0.2",
"license": "MIT",
"bugs": "https://github.com/iconify/iconify/issues",
"homepage": "https://iconify.design/",
"main": "lib/index.js",
"types": "lib/index.d.ts",
"repository": {
"type": "git",
"url": "https://github.com/iconify/iconify.git",
"directory": "packages/library-builder"
},
"scripts": {
"build": "tsc -b",
"test": "jest"
},
"dependencies": {
"@types/node": "^15.3.0",
"esbuild": "^0.12.28",
"typescript": "^4.2.4"
},
"devDependencies": {
"@types/jest": "^27.0.1",
"cross-env": "^7.0.3",
"jest": "^27.2.0",
"ts-jest": "^27.0.5"
}
}

View File

@ -0,0 +1,55 @@
# ES Builder
This is a library for transpiling TypeScript files.
It is not a bundler! It builds libraries that export many individual files rather than a single bundle.
What does it do?
- Creates both ES and CommonJS modules in the target directory. CommonJS files use the '.js' extension, ES files use the '.mjs' extension.
- Creates TypeScript definition files for each file.
- Rewrites import paths in ES modules.
- Updates the `exports` field in `package.json`.
- Tests ES imports.
Why is it needed?
- ES modules should use full import paths, including the file extension, but the TypeScript compiler cannot rewrite import paths, so it cannot change the target extension and `tsc` alone cannot reliably be used to create ES modules (see the example below).
- Using other tools, such as `esbuild`, requires a custom plugin. This package is used by multiple packages, so it makes sense to split that code into a separate package and make it easily reusable.
- Currently `tsup` is the only viable alternative, but it is meant to be used as a bundler; without the bundle option it currently fails to generate TypeScript definition files.
- It also provides reusable functions for updating `package.json` and for testing ES imports.
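For illustration, this is the kind of rewrite the builder performs. The file and export names below are only illustrative, not part of any package:
```js
// src/index.ts imports a sibling module without an extension:
//     import { sendQuery } from './query';
//
// The emitted ES module lib/index.mjs gets the full path with the '.mjs' extension:
import { sendQuery } from './query.mjs';
export { sendQuery };
```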
## Documentation
Requirements for using the build process:
- Create a `tsconfig.json` that emits CommonJS modules, generates declaration files and has `importsNotUsedAsValues` set to `error`.
- Add a script to `package.json` for building the source code, such as `"build:source": "tsc -b"`. If you have multiple build scripts, point the builder at the correct one with the `buildScript` option (see the options sketch below).
To build packages, create `build.js` in your package:
```js
/* eslint-disable */
const { buildFiles } = require('@iconify/library-builder');
buildFiles({
root: __dirname,
source: './src',
target: './lib',
})
.then(() => {
console.log('Done');
})
.catch((err) => {
console.error(err);
process.exit(1);
});
```
Source and target paths must be relative to the root directory and start with `./`.
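The remaining options are optional. The sketch below shows them with the defaults defined in `src/params.ts`; the `build:source` script name is only an example and must match a script in your `package.json`:
```js
/* eslint-disable */
const { buildFiles } = require('@iconify/library-builder');
buildFiles({
	root: __dirname,
	source: './src',
	target: './lib',
	cleanup: true, // empty the target directory before building (default: true)
	rebuildCommonJS: false, // also rebuild CommonJS files with esbuild (default: false)
	updateExports: true, // update the "exports" field in package.json (default: true)
	buildScript: 'build:source', // npm script to run instead of plain `tsc -b` (default: null)
})
	.then(() => {
		console.log('Done');
	})
	.catch((err) => {
		console.error(err);
		process.exit(1);
	});
```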
## License
The library is released under the MIT license.
© 2021 Vjacheslav Trushkin / Iconify OÜ

View File

@ -0,0 +1,40 @@
import { promises as fs } from 'fs';
/**
* Clean up directory
*/
export async function cleanDir(dir: string) {
let files: string[];
try {
files = await fs.readdir(dir);
} catch (err) {
return;
}
for (let i = 0; i < files.length; i++) {
const file = files[i];
const filename = dir + '/' + file;
let stat;
try {
stat = await fs.lstat(filename);
} catch (err) {
continue;
}
if (stat.isDirectory()) {
await cleanDir(filename);
try {
await fs.rmdir(filename);
} catch (err) {
//
}
continue;
}
try {
await fs.unlink(filename);
} catch (err) {
//
}
}
}

View File

@ -0,0 +1,67 @@
import { build } from 'esbuild';
import { locateImport } from './locate';
import type { RequiredBuildParams } from './params';
/**
* Build files
*/
export async function runESBuild(params: RequiredBuildParams, files: string[]) {
const { root, source, target, rebuildCommonJS } = params;
const maxMode = rebuildCommonJS ? 2 : 1;
for (let i = 0; i < files.length; i++) {
const file = files[i];
for (let j = 0; j < maxMode; j++) {
const esm = j === 0;
const ext = esm ? '.mjs' : '.js';
console.log('Building', file.slice(1) + ext);
await build({
entryPoints: [root + source + file + '.ts'],
outfile: root + target + file + ext,
format: esm ? 'esm' : 'cjs',
bundle: true,
plugins: [
{
name: 'rewrite-imports',
setup(build) {
build.onResolve({ filter: /.*/ }, (args) => {
if (
args.importer &&
args.kind === 'import-statement' &&
args.namespace === 'file'
) {
const result = locateImport(
args.resolveDir,
args.path
);
if (result) {
return {
external: true,
path: result.file + ext,
};
}
// External package
if (args.path.slice(0, 1) !== '.') {
return {
external: true,
path: args.path,
};
}
// Debug
console.log(args);
throw new Error(
`Cannot resolve ${args.path}`
);
}
});
},
},
],
});
}
}
}

View File

@ -0,0 +1,25 @@
import { spawn } from 'child_process';
/**
* Execute command
*/
export function exec(
dir: string,
cmd: string,
params: string[]
): Promise<number> {
return new Promise((fulfill, reject) => {
const result = spawn(cmd, params, {
cwd: dir,
stdio: 'inherit',
});
result.on('close', (code) => {
if (code !== 0) {
reject(code);
} else {
fulfill(0);
}
});
});
}

View File

@ -0,0 +1,63 @@
import { promises as fs } from 'fs';
import { cleanDir } from './clean';
import { runESBuild } from './esbuild';
import { exec } from './exec';
import { BuildParams, cleanupParams } from './params';
import { scanFolder } from './scan';
import { updatePackageJSON } from './update-package';
/**
* Build package
*/
export async function buildFiles(params: BuildParams) {
const fullParams = cleanupParams(params);
const { root, source, target, buildScript } = fullParams;
// Read package.json
let packageJSON: Record<string, unknown>;
try {
const content = await fs.readFile(root + 'package.json', 'utf8');
packageJSON = JSON.parse(content);
} catch (err) {
throw new Error('Cannot find package.json in root directory.');
}
// Check for scripts
const scripts = packageJSON.scripts as Record<string, string>;
if (typeof scripts !== 'object') {
throw new Error('package.json is missing scripts');
}
if (
typeof buildScript === 'string' &&
typeof scripts[buildScript] !== 'string'
) {
throw new Error(`Missing scripts["${buildScript}"] in package.json`);
}
// Clean up
if (fullParams.cleanup) {
await cleanDir(root + target);
}
// Find all files
const files = await scanFolder(root + source);
if (!files.length) {
throw new Error(`Cannot find any files to parse.`);
}
console.log(`Found ${files.length} files to parse.`);
// Build files with TypeScript compiler first to make sure there are no errors and to generate .d.ts files
if (typeof buildScript === 'string') {
await exec(root, 'npm', ['run', buildScript]);
} else {
await exec(root, 'tsc', ['-b']);
}
// Build ES modules
await runESBuild(fullParams, files);
// Update package.json
if (fullParams.updateExports) {
await updatePackageJSON(root, target, files);
}
}

View File

@ -0,0 +1,91 @@
import fs from 'fs';
import path from 'path';
type TestFileResult = 'file' | 'dir' | null;
export interface LocateImportResult {
fileWithExt: string;
file: string;
ext: string;
}
/**
* Check if target is a file or directory
*/
function testFile(resolveDir: string, file: string): TestFileResult {
try {
const stat = fs.lstatSync(path.resolve(resolveDir, file));
if (stat.isFile()) {
return 'file';
}
if (stat.isDirectory()) {
return 'dir';
}
} catch (err) {
//
}
return null;
}
/**
* Split file
*/
export function splitFile(fileWithExt: string): LocateImportResult {
const parts = fileWithExt.split('.');
const ext = parts.pop()!;
const file = parts.join('.');
return {
fileWithExt,
file,
ext,
};
}
/**
* Locate imported file
*/
export function locateImport(
resolveDir: string,
importPath: string,
testExtensions: string[] = ['ts'],
indexFile: string = 'index'
): LocateImportResult | null {
if (importPath.slice(0, 1) !== '.') {
return null;
}
// Check if file exists as is
const mainResult = testFile(resolveDir, importPath);
if (mainResult === 'file') {
return splitFile(importPath);
}
// Attempt to add extension
for (let i = 0; i < testExtensions.length; i++) {
const ext = testExtensions[i];
const fileWithExt = importPath + '.' + ext;
if (testFile(resolveDir, fileWithExt) === 'file') {
return splitFile(fileWithExt);
}
}
// Check directory
if (mainResult === 'dir') {
// Test '/index.*'
for (let i = 0; i < testExtensions.length; i++) {
const ext = testExtensions[i];
const testing =
importPath +
(importPath.slice(-1) === '/' ? '' : '/') +
indexFile +
'.' +
ext;
if (testFile(resolveDir, testing) === 'file') {
return splitFile(testing);
}
}
}
// Cannot locate file
return null;
}

View File

@ -0,0 +1,62 @@
export interface BuildParams {
// Root directory
root: string;
// Source directory, relative to root
source: string;
// Target directory, relative to root
target: string;
// Clean up target directory, default = true
cleanup?: boolean;
// Rebuild CommonJS files with 'esbuild', default = false
rebuildCommonJS?: boolean;
// Update exports in package.json, default = true
updateExports?: boolean;
// Build script for 'tsc -b', must be present in package.json, such as 'build:source'
// If null, builder will run 'tsc -b'
buildScript?: string | null;
}
export type RequiredBuildParams = Required<BuildParams>;
const removeTrailing: (keyof Pick<BuildParams, 'source' | 'target'>)[] = [
'source',
'target',
];
/**
* Clean up params
*/
export function cleanupParams(params: BuildParams): RequiredBuildParams {
const result: RequiredBuildParams = {
...params,
// Default boolean values
cleanup: params.cleanup !== false,
rebuildCommonJS: !!params.rebuildCommonJS,
updateExports: params.updateExports !== false,
// Default build script
buildScript: params.buildScript || null,
};
// Add trailing '/' to root
if (result.root.slice(-1) !== '/') {
result.root += '/';
}
// Remove trailing '/' from source and target
removeTrailing.forEach((attr) => {
const value = result[attr];
if (value.slice(-1) === '/') {
result[attr] = value.slice(0, value.length - 1);
}
});
return result;
}

View File

@ -0,0 +1,39 @@
import { promises as fs } from 'fs';
/**
* Scan folder
*/
export async function scanFolder(
rootDir: string,
fileExtensions: string[] = ['ts'],
includeExtension: boolean = false
): Promise<string[]> {
const results: string[] = [];
async function scan(dir: string) {
const files = await fs.readdir(rootDir + dir);
for (let i = 0; i < files.length; i++) {
const file = files[i];
if (file.slice(0, 1) === '.') {
continue;
}
const filename = dir + '/' + file;
const stat = await fs.lstat(rootDir + filename);
if (stat.isDirectory()) {
await scan(filename);
continue;
}
const parts = filename.split('.');
const ext = parts.pop()!;
if (fileExtensions.indexOf(ext) !== -1) {
results.push(includeExtension ? filename : parts.join('.'));
}
}
}
await scan('');
results.sort((a, b) => a.localeCompare(b));
return results;
}

View File

@ -0,0 +1,84 @@
import { promises as fs } from 'fs';
interface ExportRecord {
require: string;
import: string;
}
/**
* Add './' to export
*/
function formatExport(path: string): string {
return path.slice(0, 2) === './' ? path : './' + path;
}
/**
* Update package.json
*/
export async function updatePackageJSON(
root: string,
target: string,
files: string[]
) {
const packageData = JSON.parse(
await fs.readFile(root + 'package.json', 'utf8')
);
// Get all exports
const data: Record<string, ExportRecord> = {};
if (packageData.main && packageData.module) {
data['./'] = {
require: formatExport(packageData.main),
import: formatExport(packageData.module),
};
}
const dirKeys: Set<string> = new Set();
files.forEach((file) => {
const key = formatExport(target + file);
// Check for '/index'
const parts = key.split('/');
if (parts.pop() === 'index') {
const dirKey = parts.join('/');
dirKeys.add(dirKey);
// Add entries for './foo' and './foo/' in addition to './foo/index' added below
data[dirKey + '/'] = {
require: key + '.js',
import: key + '.mjs',
};
if (!data[dirKey]) {
// Do not overwrite entry added as file
data[dirKey] = {
require: key + '.js',
import: key + '.mjs',
};
}
}
// Add file
if (data[key] && !dirKeys.has(key)) {
throw new Error(`Duplicate entries for ${key} in exports`);
}
data[key] = {
require: key + '.js',
import: key + '.mjs',
};
});
// Update package.json
if (
packageData['exports'] &&
JSON.stringify(packageData['exports']) === JSON.stringify(data)
) {
// Nothing to update
return;
}
packageData.exports = data;
const content = JSON.stringify(packageData, null, '\t') + '\n';
await fs.writeFile(root + 'package.json', content, 'utf8');
console.log('Updated exports in package.json');
}

View File

@ -0,0 +1 @@
export const dummyTest = 'dummyTest';

View File

@ -0,0 +1 @@
export const parentIndex = 'parentIndex';

View File

@ -0,0 +1 @@
export const parentTest = 'parentTest';

View File

@ -0,0 +1 @@
export const barFile = 'barFile';

View File

@ -0,0 +1,3 @@
module.exports = {
foo: true,
};

View File

@ -0,0 +1,4 @@
export const fooDirectory = 'fooDirectory';
export const fooDirectorySlash = 'fooDirectorySlash';
export const testDirectoryIndex = 'testDirectoryIndex';
export const testDirectoryIndex2 = 'testDirectoryIndex2';

View File

@ -0,0 +1,17 @@
// ../index.js
import { parentIndex } from '../';
// ../test.js
import { parentTest } from '../test.ts';
// ../test2/index.js
import { siblingChildIndex } from '../test2/';
// ./foo/index.js
import { fooDirectory } from './foo';
import { fooDirectorySlash } from './foo/';
import { testDirectoryIndex } from './foo/index';
import { testDirectoryIndex2 } from './foo/index.ts';
// ./bar.js
import { barFile } from './bar';

View File

@ -0,0 +1 @@
export const siblingChildIndex = 'siblingChildIndex';

View File

@ -0,0 +1,68 @@
import { locateImport } from '../lib/locate';
const fixturesDir = __dirname + '/fixtures';
describe('Testing locateImport', () => {
test('with extension', () => {
// Relative to fixture directory
expect(locateImport(fixturesDir, './imports/index.ts')).toEqual({
ext: 'ts',
file: './imports/index',
fileWithExt: './imports/index.ts',
});
// Relative to imports directory
expect(locateImport(fixturesDir + '/imports', './index.ts')).toEqual({
ext: 'ts',
file: './index',
fileWithExt: './index.ts',
});
// Parent directory
expect(
locateImport(fixturesDir + '/imports/test', '../test/bar.ts')
).toEqual({
ext: 'ts',
file: '../test/bar',
fileWithExt: '../test/bar.ts',
});
});
test('without extension', () => {
// Relative to fixture directory
expect(locateImport(fixturesDir, './imports/test/bar')).toEqual({
ext: 'ts',
file: './imports/test/bar',
fileWithExt: './imports/test/bar.ts',
});
// Matching directory and file: file should be selected, like in require()
expect(locateImport(fixturesDir, './imports/test')).toEqual({
ext: 'ts',
file: './imports/test',
fileWithExt: './imports/test.ts',
});
// Invalid file: only .js file exists
expect(
locateImport(fixturesDir + '/imports/test', './foo/compiled')
).toBeNull();
expect(
locateImport(fixturesDir + '/imports/test', './foo/compiled', [
'ts',
])
).toBeNull();
// Custom extension
expect(
locateImport(fixturesDir + '/imports/test', './foo/compiled', [
'js',
])
).toEqual({
ext: 'js',
file: './foo/compiled',
fileWithExt: './foo/compiled.js',
});
});
});

View File

@ -0,0 +1,34 @@
import { scanFolder } from '../lib/scan';
const fixturesDir = __dirname + '/fixtures';
describe('Testing scanFolder', () => {
test('scanning fixtures', async () => {
// Default params
expect(await scanFolder(fixturesDir)).toEqual([
'/imports/dummy',
'/imports/index',
'/imports/test',
'/imports/test/bar',
'/imports/test/foo/index',
'/imports/test/test',
'/imports/test2/index',
]);
// Include extensions
expect(await scanFolder(fixturesDir, ['ts'], true)).toEqual([
'/imports/dummy.ts',
'/imports/index.ts',
'/imports/test.ts',
'/imports/test/bar.ts',
'/imports/test/foo/index.ts',
'/imports/test/test.ts',
'/imports/test2/index.ts',
]);
// Find .js files
expect(await scanFolder(fixturesDir, ['js'])).toEqual([
'/imports/test/foo/compiled',
]);
});
});

View File

@ -0,0 +1,7 @@
{
"extends": "../tsconfig-base.json",
"compilerOptions": {
"types": ["node", "jest"]
},
"include": ["./*.ts", "../src/*.ts"]
}

View File

@ -0,0 +1,14 @@
{
"compilerOptions": {
"target": "ESNext",
"module": "CommonJS",
"declaration": true,
"declarationMap": false,
"sourceMap": false,
"strict": true,
"moduleResolution": "node",
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"importsNotUsedAsValues": "error"
}
}

View File

@ -0,0 +1,8 @@
{
"extends": "./tsconfig-base.json",
"compilerOptions": {
"rootDir": "./src",
"outDir": "./lib"
},
"exclude": ["./tests/**", "./lib/**"]
}

View File

@ -2,7 +2,9 @@
.DS_Store
*.map
node_modules
imports-test.mjs
npm-debug.log
yarn.lock
tsconfig.tsbuildinfo
lib
tests-compiled

View File

@ -2,8 +2,10 @@
.DS_Store
*.map
node_modules
imports-test.mjs
npm-debug.log
yarn.lock
tsconfig.tsbuildinfo
src
tests
tests-compiled

View File

@ -1,247 +1,17 @@
/* eslint-disable */
const fs = require('fs');
const path = require('path');
const child_process = require('child_process');
const { build } = require('esbuild');
const { buildFiles } = require('@iconify/library-builder');
// Config
const sourceDir = './src';
const targetDir = './lib';
// True if CommonJS files should be built with `esbuild`
// If false, CommonJS files will be built with `tsc` and import paths will not be rewritten
const rebuildCommonJS = false;
/**
* Find all TypeScript files
*/
const files = [];
findFiles('');
files.sort((a, b) => a.localeCompare(b));
function findFiles(dir) {
fs.readdirSync(sourceDir + dir).forEach((file) => {
if (file.slice(0, 1) === '.') {
return;
}
const filename = dir + '/' + file;
const stat = fs.lstatSync(sourceDir + filename);
if (stat.isDirectory()) {
findFiles(filename);
return;
}
const parts = filename.split('.');
const ext = parts.pop();
if (ext === 'ts') {
files.push(parts.join('.'));
}
buildFiles({
root: __dirname,
source: './src',
target: './lib',
cleanup: true,
updateExports: true,
})
.then(() => {
console.log('Done');
})
.catch((err) => {
console.error(err);
process.exit(1);
});
}
/**
* Build stuff
*/
(async () => {
// Clean up target directory
console.log(`Cleaning up ${targetDir}`);
cleanDir(targetDir);
// Build files with TypeScript compiler first to make sure there are no errors and to generate .d.ts files
const result = child_process.spawnSync('npm', ['run', 'build:dts'], {
cwd: __dirname,
stdio: 'inherit',
});
if (result.status !== 0) {
process.exit(result.status);
}
// Transpile all files to .js and .mjs
const maxMode = rebuildCommonJS ? 2 : 1;
for (let i = 0; i < files.length; i++) {
const file = files[i];
for (let j = 0; j < maxMode; j++) {
const esm = j === 0;
const ext = esm ? '.mjs' : '.js';
function testFile(dir, file) {
try {
const stat = fs.lstatSync(path.resolve(dir, file));
if (stat.isFile()) {
return 'file';
}
if (stat.isDirectory()) {
return 'dir';
}
} catch (err) {
//
}
return null;
}
function returnFile(filename) {
const parts = filename.split('.');
parts.pop();
const file = parts.join('.') + ext;
return { path: file, external: true };
}
console.log('Building', file.slice(1) + ext);
await build({
entryPoints: [sourceDir + file + '.ts'],
outfile: targetDir + file + ext,
format: esm ? 'esm' : 'cjs',
bundle: true,
plugins: [
{
name: 'resolve-path',
setup(build) {
build.onResolve({ filter: /.*/ }, (args) => {
if (
args.importer &&
args.kind === 'import-statement' &&
args.namespace === 'file'
) {
const importPath = args.path;
if (importPath.slice(0, 1) !== '.') {
return;
}
const dir = args.resolveDir;
// Check if file exists as is
const mainResult = testFile(
dir,
importPath
);
if (mainResult === 'file') {
return returnFile(importPath);
}
// Attempt to add extension
const fileWithExt = importPath + '.ts';
if (testFile(dir, fileWithExt) === 'file') {
return returnFile(fileWithExt);
}
// Check if its a directory
if (mainResult === 'dir') {
// Test '/index.js'
const testing =
importPath +
(importPath.slice(-1) === '/'
? ''
: '/') +
'index.ts';
if (testFile(dir, testing) === 'file') {
return returnFile(testing);
}
}
console.log(args);
throw new Error(
`Cannot resolve ${importPath}`
);
}
});
},
},
],
});
}
}
// Update exports in package.json
updatePackage();
})();
/**
* Update exports in package.json
*/
function updatePackage() {
const packageData = JSON.parse(fs.readFileSync('package.json', 'utf8'));
// Add './' to export
function formatExport(path) {
return path.slice(0, 2) === './' ? path : './' + path;
}
// Get all exports
const data = {};
if (packageData.main && packageData.module) {
data['./'] = {
require: formatExport(packageData.main),
import: formatExport(packageData.module),
};
}
files.forEach((file) => {
const key = formatExport(targetDir + file);
// Check for '/index'
const parts = key.split('/');
if (parts.pop() === 'index') {
data[parts.join('/') + '/'] = {
require: key + '.js',
import: key + '.mjs',
};
}
// Add file
data[key] = {
require: key + '.js',
import: key + '.mjs',
};
});
// Update package.json
if (
packageData['exports'] &&
JSON.stringify(packageData['exports']) === JSON.stringify(data)
) {
// Nothing to update
return;
}
packageData.exports = data;
const content = JSON.stringify(packageData, null, '\t') + '\n';
fs.writeFileSync('package.json', content, 'utf8');
console.log('Updated exports in package.json');
}
/**
* Remove all files from directory
*/
function cleanDir(dir) {
let files;
try {
files = fs.readdirSync(dir);
} catch (err) {
return;
}
files.forEach((file) => {
const filename = dir + '/' + file;
let stat;
try {
stat = fs.lstatSync(filename);
} catch (err) {
return;
}
if (stat.isDirectory()) {
cleanDir(filename);
try {
fs.rmdirSync(filename);
} catch (err) {
//
}
return;
}
try {
fs.unlinkSync(filename);
} catch (err) {
//
}
});
}

File diff suppressed because it is too large Load Diff

View File

@ -16,7 +16,7 @@
"lint": "eslint {src,tests}/**/*.ts",
"prebuild": "npm run lint && npm run clean",
"build": "node build",
"build:dts": "tsc --project src/tsconfig.json",
"build:source": "tsc --project src/tsconfig.json",
"build:tests": "tsc --project tests/tsconfig.json",
"test": "mocha tests-compiled/*-test.js",
"pretest": "npm run build && npm run build:tests"
@ -34,6 +34,10 @@
"require": "./lib/customisations/index.js",
"import": "./lib/customisations/index.mjs"
},
"./lib/customisations": {
"require": "./lib/customisations/index.js",
"import": "./lib/customisations/index.mjs"
},
"./lib/customisations/index": {
"require": "./lib/customisations/index.js",
"import": "./lib/customisations/index.mjs"
@ -74,6 +78,10 @@
"require": "./lib/icon/index.js",
"import": "./lib/icon/index.mjs"
},
"./lib/icon": {
"require": "./lib/icon/index.js",
"import": "./lib/icon/index.mjs"
},
"./lib/icon/index": {
"require": "./lib/icon/index.js",
"import": "./lib/icon/index.mjs"
@ -103,15 +111,14 @@
"@iconify/types": "^1.0.6"
},
"devDependencies": {
"@iconify/library-builder": "^1.0.0",
"@types/chai": "^4.2.18",
"@types/mocha": "^8.2.2",
"@types/node": "^15.3.0",
"@typescript-eslint/eslint-plugin": "^4.31.1",
"chai": "^4.3.4",
"esbuild": "^0.12.28",
"eslint": "^7.32.0",
"mocha": "^8.4.0",
"rimraf": "^3.0.2",
"typescript": "^4.2.4"
"typescript": "^4.4.3"
}
}