diff --git a/internals/common/src/models.ts b/internals/common/src/models.ts index f1b11f24a..f2694b808 100644 --- a/internals/common/src/models.ts +++ b/internals/common/src/models.ts @@ -42,19 +42,37 @@ const getAllModelPackages = async (includeExperimental = false) => { return packageDirectoryNames.sort(); } -const getAllAvailableModels = async (packageName: string): Promise => { +export const getUMDNames = async (packageName: string): Promise> => { const modelPackageDir = path.resolve(MODELS_DIR, packageName); const umdNamesPath = path.resolve(modelPackageDir, 'umd-names.json'); if (!await exists(umdNamesPath)) { throw new Error(`No umd-names.json file found at ${umdNamesPath}`); } - const umdNames = JSON.parse(await readFile(umdNamesPath)) + try { + const umdNames = JSON.parse(await readFile(umdNamesPath)) + return umdNames; + } catch (e) { + throw new Error(`Error parsing umd-names.json file at ${umdNamesPath}: ${e}`); + } +} + +const getAllAvailableModels = async (packageName: string): Promise => { + const modelPackageDir = path.resolve(MODELS_DIR, packageName); + const umdNames = await getUMDNames(modelPackageDir); const packageJSONExports = await getPackageJSONExports(modelPackageDir); return packageJSONExports.filter(k => k[0] !== '.').map(([key, value]) => { const umdName = umdNames[key]; if (umdName === undefined) { throw new Error(`No UMD name defined for ${packageName}/umd-names.json for ${key}`); } + if (typeof umdName === 'object') { + return { + key, + umdName: umdName.direct, + umdNameFromIndex: umdName.index, + value, + }; + } return { key, umdName, @@ -87,7 +105,7 @@ const getAllModels = async (packageDirectoryNames: Promise) => { modelPackagesAndModels.push({ modelName: model.key, packageName, - modelUMDName: model.umdName, + modelUMDName: model.umdName, packageDirectoryName, modelExport: model.value, }); @@ -110,6 +128,7 @@ export const ALL_MODELS: Promise = getAllModels(ALL_MODEL_PA interface AvailableModel { key: string; umdName: string; + umdNameFromIndex?: string; value: string | PackageJSONExport; } @@ -134,7 +153,7 @@ const getPackagesAndModelsMatchingEnvironment = async (environment: Environment, }; -export const getPackagesAndModelsForEnvironment = async (environment: Environment) => { +export const getPackagesAndModelsForEnvironment = async (environment: Environment): Promise => { const packagesAndModels = await ALL_MODELS; return getPackagesAndModelsMatchingEnvironment(environment, packagesAndModels); }; diff --git a/models/esrgan-legacy/DOC.mdx b/models/esrgan-legacy/DOC.mdx index 2caf6d3a8..d82b89720 100755 --- a/models/esrgan-legacy/DOC.mdx +++ b/models/esrgan-legacy/DOC.mdx @@ -143,9 +143,9 @@ import models from '@upscalerjs/esrgan-legacy'; const upscaler = new Upscaler({ model: models.GANS, // model: models.PSNRSmall, - // model: models.div2K2x, - // model: models.div2K3x, - // model: models.div2K4x, + // model: models.div2K2X, + // model: models.div2K3X, + // model: models.div2K4X, }) ``` diff --git a/models/esrgan-legacy/src/umd.ts b/models/esrgan-legacy/src/umd.ts index 085f0032e..925be40c9 100644 --- a/models/esrgan-legacy/src/umd.ts +++ b/models/esrgan-legacy/src/umd.ts @@ -1,6 +1,6 @@ -export { default as ESRGANLegacyDiv2kX2, } from './div2k/x2'; -export { default as ESRGANLegacyDiv2kX3, } from './div2k/x3'; -export { default as ESRGANLegacyDiv2kX4, } from './div2k/x4'; -export { default as ESRGANLegacyPSNRSmall, } from './psnr-small'; -export { default as ESRGANLegacyGANS, } from './gans'; +export { default as Div2kX2, } from './div2k/x2'; 
+export { default as Div2kX3, } from './div2k/x3'; +export { default as Div2kX4, } from './div2k/x4'; +export { default as PSNRSmall, } from './psnr-small'; +export { default as GANS, } from './gans'; diff --git a/models/esrgan-legacy/umd-names.json b/models/esrgan-legacy/umd-names.json index bbb7f9756..a09670ba9 100755 --- a/models/esrgan-legacy/umd-names.json +++ b/models/esrgan-legacy/umd-names.json @@ -1,8 +1,23 @@ { ".": "ESRGANLegacy", - "./div2k/2x": "ESRGANLegacyDiv2kX2", - "./div2k/3x": "ESRGANLegacyDiv2kX3", - "./div2k/4x": "ESRGANLegacyDiv2kX4", - "./psnr-small": "ESRGANLegacyPSNRSmall", - "./gans": "ESRGANLegacyGANS" + "./div2k/2x": { + "index": "Div2kX2", + "direct": "ESRGANLegacyDiv2kX2" + }, + "./div2k/3x": { + "index": "Div2kX3", + "direct": "ESRGANLegacyDiv2kX3" + }, + "./div2k/4x": { + "index": "Div2kX4", + "direct": "ESRGANLegacyDiv2kX4" + }, + "./psnr-small": { + "index": "PSNRSmall", + "direct": "ESRGANLegacyPSNRSmall" + }, + "./gans": { + "index": "GANS", + "direct": "ESRGANLegacyGANS" + } } diff --git a/models/esrgan-medium/src/umd.ts b/models/esrgan-medium/src/umd.ts index 2cc88f8d3..7cbf90372 100644 --- a/models/esrgan-medium/src/umd.ts +++ b/models/esrgan-medium/src/umd.ts @@ -1,4 +1,4 @@ -export { default as ESRGANMedium2x, } from './x2'; -export { default as ESRGANMedium3x, } from './x3'; -export { default as ESRGANMedium4x, } from './x4'; -export { default as ESRGANMedium8x, } from './x8'; +export { default as x2, } from './x2'; +export { default as x3, } from './x3'; +export { default as x4, } from './x4'; +export { default as x8, } from './x8'; diff --git a/models/esrgan-medium/umd-names.json b/models/esrgan-medium/umd-names.json index a30f8adef..43062c6c4 100644 --- a/models/esrgan-medium/umd-names.json +++ b/models/esrgan-medium/umd-names.json @@ -1,7 +1,7 @@ { ".": "ESRGANMedium", - "./2x": "ESRGANMedium2x", - "./3x": "ESRGANMedium3x", - "./4x": "ESRGANMedium4x", - "./8x": "ESRGANMedium8x" + "./2x": { "index": "x2", "direct": "ESRGANMedium2x"}, + "./3x": { "index": "x3", "direct": "ESRGANMedium3x"}, + "./4x": { "index": "x4", "direct": "ESRGANMedium4x"}, + "./8x": { "index": "x8", "direct": "ESRGANMedium8x"} } diff --git a/models/esrgan-slim/src/umd.ts b/models/esrgan-slim/src/umd.ts index 98f51d128..7cbf90372 100644 --- a/models/esrgan-slim/src/umd.ts +++ b/models/esrgan-slim/src/umd.ts @@ -1,4 +1,4 @@ -export { default as ESRGANSlim2x, } from './x2'; -export { default as ESRGANSlim3x, } from './x3'; -export { default as ESRGANSlim4x, } from './x4'; -export { default as ESRGANSlim8x, } from './x8'; +export { default as x2, } from './x2'; +export { default as x3, } from './x3'; +export { default as x4, } from './x4'; +export { default as x8, } from './x8'; diff --git a/models/esrgan-slim/umd-names.json b/models/esrgan-slim/umd-names.json index eb4239aff..905f18a26 100644 --- a/models/esrgan-slim/umd-names.json +++ b/models/esrgan-slim/umd-names.json @@ -1,7 +1,7 @@ { ".": "ESRGANSlim", - "./2x": "ESRGANSlim2x", - "./3x": "ESRGANSlim3x", - "./4x": "ESRGANSlim4x", - "./8x": "ESRGANSlim8x" + "./2x": { "index": "x2", "direct": "ESRGANSlim2x"}, + "./3x": { "index": "x3", "direct": "ESRGANSlim3x"}, + "./4x": { "index": "x4", "direct": "ESRGANSlim4x"}, + "./8x": { "index": "x8", "direct": "ESRGANSlim8x"} } diff --git a/models/esrgan-thick/src/umd.ts b/models/esrgan-thick/src/umd.ts index 54d4b0455..7cbf90372 100644 --- a/models/esrgan-thick/src/umd.ts +++ b/models/esrgan-thick/src/umd.ts @@ -1,4 +1,4 @@ -export { default as ESRGANThick2x, } from './x2'; -export { 
default as ESRGANThick3x, } from './x3'; -export { default as ESRGANThick4x, } from './x4'; -export { default as ESRGANThick8x, } from './x8'; +export { default as x2, } from './x2'; +export { default as x3, } from './x3'; +export { default as x4, } from './x4'; +export { default as x8, } from './x8'; diff --git a/models/esrgan-thick/umd-names.json b/models/esrgan-thick/umd-names.json index aaedf4b5c..0c1e8f5be 100644 --- a/models/esrgan-thick/umd-names.json +++ b/models/esrgan-thick/umd-names.json @@ -1,7 +1,19 @@ { ".": "ESRGANThick", - "./2x": "ESRGANThick2x", - "./3x": "ESRGANThick3x", - "./4x": "ESRGANThick4x", - "./8x": "ESRGANThick8x" + "./2x": { + "index": "x2", + "direct": "ESRGANThick2x" + }, + "./3x": { + "index": "x3", + "direct": "ESRGANThick3x" + }, + "./4x": { + "index": "x4", + "direct": "ESRGANThick4x" + }, + "./8x": { + "index": "x8", + "direct": "ESRGANThick8x" + } } diff --git a/models/pixel-upsampler/src/umd.ts b/models/pixel-upsampler/src/umd.ts index 8901c3f80..b364466fc 100644 --- a/models/pixel-upsampler/src/umd.ts +++ b/models/pixel-upsampler/src/umd.ts @@ -1,3 +1,3 @@ -export { default as PixelUpsampler2x, } from './x2'; -export { default as PixelUpsampler3x, } from './x3'; -export { default as PixelUpsampler4x, } from './x4'; +export { default as x2, } from './x2'; +export { default as x3, } from './x3'; +export { default as x4, } from './x4'; diff --git a/models/pixel-upsampler/umd-names.json b/models/pixel-upsampler/umd-names.json index 20a2096cd..536f47e4b 100644 --- a/models/pixel-upsampler/umd-names.json +++ b/models/pixel-upsampler/umd-names.json @@ -1,6 +1,15 @@ { ".": "PixelUpsampler", - "./x2": "PixelUpsampler2x", - "./x3": "PixelUpsampler3x", - "./x4": "PixelUpsampler4x" + "./x2": { + "index": "x2", + "direct": "PixelUpsampler2x" + }, + "./x3": { + "index": "x3", + "direct": "PixelUpsampler3x" + }, + "./x4": { + "index": "x4", + "direct": "PixelUpsampler4x" + } } diff --git a/models/tsconfig.cjs.json b/models/tsconfig.cjs.json index 5a05d67dc..2e9d6ff8b 100644 --- a/models/tsconfig.cjs.json +++ b/models/tsconfig.cjs.json @@ -2,7 +2,7 @@ "extends": "./tsconfig.json", "compilerOptions": { "moduleResolution": "Node16", - "module": "CommonJS", - "target": "ES5", - }, + "module": "Node16", + "target": "ES5" + } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ff0e93162..21cf88014 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -842,66 +842,42 @@ importers: specifier: workspace:* version: link:../upscalerjs - scripts: + tmp/bundlers/esbuild: dependencies: - '@types/glob': - specifier: ^8.1.0 - version: 8.1.0 + '@tensorflow/tfjs': + specifier: ~4.11.0 + version: 4.11.0(seedrandom@3.0.5) '@upscalerjs/default-model': specifier: workspace:* - version: link:../models/default-model + version: link:../../../models/default-model '@upscalerjs/esrgan-legacy': specifier: workspace:* - version: link:../models/esrgan-legacy - '@upscalerjs/pixel-upsampler': - specifier: workspace:* - version: link:../models/pixel-upsampler - front-matter: - specifier: ^4.0.2 - version: 4.0.2 - glob: - specifier: ^10.3.3 - version: 10.3.10 - tsc-alias: - specifier: ^1.8.7 - version: 1.8.8 - typedoc: - specifier: ^0.24.8 - version: 0.24.8(typescript@5.1.6) - upscaler: - specifier: workspace:* - version: link:../packages/upscalerjs - devDependencies: - '@internals/bundlers': + version: link:../../../models/esrgan-legacy + '@upscalerjs/esrgan-medium': specifier: workspace:* - version: link:../internals/bundlers - '@internals/common': + version: link:../../../models/esrgan-medium + 
'@upscalerjs/esrgan-slim': specifier: workspace:* - version: link:../internals/common - '@internals/http-server': + version: link:../../../models/esrgan-slim + '@upscalerjs/esrgan-thick': specifier: workspace:* - version: link:../internals/http-server - '@internals/test-runner': + version: link:../../../models/esrgan-thick + '@upscalerjs/pixel-upsampler': specifier: workspace:* - version: link:../internals/test-runner - '@internals/webdriver': + version: link:../../../models/pixel-upsampler + upscaler: specifier: workspace:* - version: link:../internals/webdriver - '@types/inquirer': - specifier: ^9.0.3 - version: 9.0.3 - '@types/yargs': - specifier: ^17.0.24 - version: 17.0.24 - crimson-progressbar: - specifier: ^1.3.0 - version: 1.3.0 - inquirer: - specifier: ^8.2.4 - version: 8.2.6 - yargs: - specifier: ^17.7.2 - version: 17.7.2 + version: link:../../../packages/upscalerjs + devDependencies: + '@babel/plugin-transform-modules-commonjs': + specifier: 7.22.5 + version: 7.22.5(@babel/core@7.23.2) + '@babel/preset-typescript': + specifier: 7.22.5 + version: 7.22.5(@babel/core@7.23.2) + seedrandom: + specifier: 3.0.5 + version: 3.0.5 tmp/bundlers/node/cjs: dependencies: @@ -1130,7 +1106,6 @@ packages: /@babel/cli@7.23.0(@babel/core@7.23.2): resolution: {integrity: sha512-17E1oSkGk2IwNILM4jtfAvgjt+ohmpfBky8aLerUfYZhiPNg7ca+CRCxZn8QDxwNhV/upsc2VHBCqGFIR+iBfA==} engines: {node: '>=6.9.0'} - hasBin: true peerDependencies: '@babel/core': ^7.0.0-0 dependencies: @@ -1884,6 +1859,18 @@ packages: '@babel/helper-module-transforms': 7.23.3(@babel/core@7.23.2) '@babel/helper-plugin-utils': 7.22.5 + /@babel/plugin-transform-modules-commonjs@7.22.5(@babel/core@7.23.2): + resolution: {integrity: sha512-B4pzOXj+ONRmuaQTg05b3y/4DuFz3WcCNAXPLb2Q0GT0TrGKGxNKV4jwsXts+StaM0LQczZbOpj8o1DLPDJIiA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.23.2 + '@babel/helper-module-transforms': 7.23.3(@babel/core@7.23.2) + '@babel/helper-plugin-utils': 7.22.5 + '@babel/helper-simple-access': 7.22.5 + dev: true + /@babel/plugin-transform-modules-commonjs@7.23.0(@babel/core@7.23.2): resolution: {integrity: sha512-32Xzss14/UVc7k9g775yMIvkVK8xwKE0DPdP5JTapr3+Z9w4tzeOuLNY6BXDQR6BdnzIlXnCGAzsk/ICHBLVWQ==} engines: {node: '>=6.9.0'} @@ -2345,6 +2332,20 @@ packages: '@babel/plugin-transform-react-jsx-development': 7.22.5(@babel/core@7.23.2) '@babel/plugin-transform-react-pure-annotations': 7.23.3(@babel/core@7.23.2) + /@babel/preset-typescript@7.22.5(@babel/core@7.23.2): + resolution: {integrity: sha512-YbPaal9LxztSGhmndR46FmAbkJ/1fAsw293tSU+I5E5h+cnJ3d4GTwyUgGYmOXJYdGA+uNePle4qbaRzj2NISQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.23.2 + '@babel/helper-plugin-utils': 7.22.5 + '@babel/helper-validator-option': 7.22.15 + '@babel/plugin-syntax-jsx': 7.23.3(@babel/core@7.23.2) + '@babel/plugin-transform-modules-commonjs': 7.23.0(@babel/core@7.23.2) + '@babel/plugin-transform-typescript': 7.23.3(@babel/core@7.23.2) + dev: true + /@babel/preset-typescript@7.23.2(@babel/core@7.23.2): resolution: {integrity: sha512-u4UJc1XsS1GhIGteM8rnGiIvf9rJpiVgMEeCnwlLA7WJPC+jcXWJAGxYmeqs5hOZD8BbAfnV5ezBOxQbb4OUxA==} engines: {node: '>=6.9.0'} @@ -2523,7 +2524,6 @@ packages: /@docusaurus/core@2.4.3(@docusaurus/types@2.4.3)(esbuild@0.19.2)(eslint@8.47.0)(react-dom@18.2.0)(react@18.2.0)(typescript@5.2.2)(uglify-js@3.17.4): resolution: {integrity: 
sha512-dWH5P7cgeNSIg9ufReX6gaCl/TmrGKD38Orbwuz05WPhAQtFXHd5B8Qym1TiXfvUNvwoYKkAJOJuGe8ou0Z7PA==} engines: {node: '>=16.14'} - hasBin: true peerDependencies: react: ^16.8.4 || ^17.0.0 react-dom: ^16.8.4 || ^17.0.0 @@ -4151,6 +4151,7 @@ packages: strip-ansi-cjs: /strip-ansi@6.0.1 wrap-ansi: 8.1.0 wrap-ansi-cjs: /wrap-ansi@7.0.0 + dev: true /@istanbuljs/load-nyc-config@1.1.0: resolution: {integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==} @@ -4524,6 +4525,7 @@ packages: resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} requiresBuild: true + dev: true optional: true /@polka/url@0.5.0: @@ -5024,7 +5026,6 @@ packages: /@types/commander@2.12.2: resolution: {integrity: sha512-0QEFiR8ljcHp9bAbWxecjVRuAMr16ivPiGOw6KFQBVrVd0RQIcM3xKdRisH2EDWgVWujiYtHwhSkSUoAAGzH7Q==} - deprecated: This is a stub types definition for commander (https://github.com/tj/commander.js). commander provides its own type definitions, so you don't need @types/commander installed! dependencies: commander: 11.1.0 dev: false @@ -5096,13 +5097,6 @@ packages: '@types/node': 20.5.1 dev: true - /@types/glob@8.1.0: - resolution: {integrity: sha512-IO+MJPVhoqz+28h1qLAcBEH2+xHMK6MTyHJc7MTnnYb6wsoLR29POVGJ7LycmVXIqyy/4/2ShP5sUwTXuOwb/w==} - dependencies: - '@types/minimatch': 5.1.2 - '@types/node': 20.5.1 - dev: false - /@types/graceful-fs@4.1.9: resolution: {integrity: sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==} dependencies: @@ -5204,10 +5198,6 @@ packages: /@types/mime@3.0.4: resolution: {integrity: sha512-iJt33IQnVRkqeqC7PzBHPTC6fDlRNRW8vjrgqtScAhrmMwe8c4Eo7+fUGTa+XdWrpEgpyKWMYmi2dIwMAYRzPw==} - /@types/minimatch@5.1.2: - resolution: {integrity: sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==} - dev: false - /@types/mocha@10.0.1: resolution: {integrity: sha512-/fvYntiO1GeICvqbQ3doGDIP97vWmvFt83GKguJ6prmQM2iXZfFcq6YE8KteFyRtX2/h5Hf91BYvPodJKFYv5Q==} dev: true @@ -6182,7 +6172,6 @@ packages: /autoprefixer@10.4.16(postcss@8.4.31): resolution: {integrity: sha512-7vd3UC6xKp0HLfua5IjZlcXvGAGy7cBAXTg2lyQ/8WpNhd6SiZ8Be+xm3FyBSYJx5GKcpRCzBh7RH4/0dnY+uQ==} engines: {node: ^10 || ^12 || >=14} - hasBin: true peerDependencies: postcss: ^8.1.0 dependencies: @@ -6960,11 +6949,6 @@ packages: optionalDependencies: '@colors/colors': 1.5.0 - /cli-width@3.0.0: - resolution: {integrity: sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==} - engines: {node: '>= 10'} - dev: true - /cli-width@4.1.0: resolution: {integrity: sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==} engines: {node: '>= 12'} @@ -7060,11 +7044,6 @@ packages: /colorette@2.0.20: resolution: {integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==} - /colors@1.4.0: - resolution: {integrity: sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==} - engines: {node: '>=0.1.90'} - dev: true - /combine-promises@1.2.0: resolution: {integrity: sha512-VcQB1ziGD0NXrhKxiwyNbCDmRzs/OShMs2GqW2DlU2A/Sd0nQxE1oWDAE5O0ygSx5mgQOn9eIFh7yKPgFRVkPQ==} engines: {node: '>=10'} @@ -7107,11 +7086,6 @@ packages: resolution: {integrity: sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==} engines: {node: '>= 12'} - /commander@9.5.0: - 
resolution: {integrity: sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==} - engines: {node: ^12.20.0 || >=14} - dev: false - /comment-parser@1.4.0: resolution: {integrity: sha512-QLyTNiZ2KDOibvFPlZ6ZngVsZ/0gYnE6uTXi5aoDg8ed3AkJAz4sEje3Y8a29hQ1s6A99MZXe47fLAXQ1rTqaw==} engines: {node: '>= 12.0.0'} @@ -7377,12 +7351,6 @@ packages: /create-require@1.1.1: resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} - /crimson-progressbar@1.3.0: - resolution: {integrity: sha512-Rp94buue805sZauvbqBOmh9TpQ4hhDu+39Gi67TyboW2lcV+fljAM5C+ZsKTXtBdTxRtgSXIuby4PjLuh9gVIg==} - dependencies: - colors: 1.4.0 - dev: true - /cross-fetch@3.1.8: resolution: {integrity: sha512-cvA+JwZoU0Xq+h6WkMvAUqPEYy92Obet6UdKLfW60qn99ftItKjB5T+BkyWOFWe2pUyfQ+IJHmpOTznqk1M6Kg==} dependencies: @@ -8296,7 +8264,6 @@ packages: /eslint-config-prettier@9.0.0(eslint@8.47.0): resolution: {integrity: sha512-IcJsTkJae2S35pRsRAwoCE+925rJJStOdkKnLVgtE+tEpqU0EVVM7OqrwxqgptKdX29NUwC82I5pXsGFIgSevw==} - hasBin: true peerDependencies: eslint: '>=7.0.0' dependencies: @@ -8665,13 +8632,6 @@ packages: xml-js: 1.6.11 dev: false - /figures@3.2.0: - resolution: {integrity: sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==} - engines: {node: '>=8'} - dependencies: - escape-string-regexp: 1.0.5 - dev: true - /figures@5.0.0: resolution: {integrity: sha512-ej8ksPF4x6e5wvK9yevct0UCXh8TTFlWGVLlgjZuoBH1HwjIfKE/IdL5mq89sFA7zELi1VhKpmtDnrs7zWyeyg==} engines: {node: '>=14'} @@ -8815,6 +8775,7 @@ packages: dependencies: cross-spawn: 7.0.3 signal-exit: 4.1.0 + dev: true /fork-ts-checker-webpack-plugin@6.5.3(eslint@8.47.0)(typescript@5.2.2)(webpack@5.89.0): resolution: {integrity: sha512-SbH/l9ikmMWycd5puHJKTkZJKddF4iRLyW3DeZ08HTI7NGyLS38MXd/KGgeWumQO7YNQbW2u/NtPT2YowbPaGQ==} @@ -8866,12 +8827,6 @@ packages: resolution: {integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==} engines: {node: '>= 0.6'} - /front-matter@4.0.2: - resolution: {integrity: sha512-I8ZuJ/qG92NWX8i5x1Y8qyj3vizhXS31OxjKDu3LKP+7/qBgfIKValiZIEwoVoJKUHlhWtYrktkxV1XsX+pPlg==} - dependencies: - js-yaml: 3.14.1 - dev: false - /fs-extra@10.1.0: resolution: {integrity: sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==} engines: {node: '>=12'} @@ -9058,6 +9013,7 @@ packages: minimatch: 9.0.3 minipass: 5.0.0 path-scurry: 1.10.1 + dev: true /glob@7.2.0: resolution: {integrity: sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==} @@ -9656,27 +9612,6 @@ packages: /inline-style-parser@0.1.1: resolution: {integrity: sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==} - /inquirer@8.2.6: - resolution: {integrity: sha512-M1WuAmb7pn9zdFRtQYk26ZBoY043Sse0wVDdk4Bppr+JOXyQYybdtvK+l9wUibhtjdjvtoiNy8tk+EgsYIUqKg==} - engines: {node: '>=12.0.0'} - dependencies: - ansi-escapes: 4.3.2 - chalk: 4.1.2 - cli-cursor: 3.1.0 - cli-width: 3.0.0 - external-editor: 3.1.0 - figures: 3.2.0 - lodash: 4.17.21 - mute-stream: 0.0.8 - ora: 5.4.1 - run-async: 2.4.1 - rxjs: 7.8.1 - string-width: 4.2.3 - strip-ansi: 6.0.1 - through: 2.3.8 - wrap-ansi: 6.2.0 - dev: true - /inquirer@9.2.10: resolution: {integrity: sha512-tVVNFIXU8qNHoULiazz612GFl+yqNfjMTbLuViNJE/d860Qxrd3NMrse8dm40VUQLOQeULvaQF8lpAhvysjeyA==} engines: {node: '>=14.18.0'} @@ -10128,6 +10063,7 @@ packages: '@isaacs/cliui': 8.0.2 
optionalDependencies: '@pkgjs/parseargs': 0.11.0 + dev: true /jake@10.8.7: resolution: {integrity: sha512-ZDi3aP+fG/LchyBzUM804VjddnwfSfsdeYkwt8NcbKRvo4rFkjhs456iLFn3k2ZUWvNe4i48WACDbza8fhq2+w==} @@ -10180,7 +10116,6 @@ packages: /jest-cli@29.7.0(@types/node@20.5.1)(ts-node@10.9.1): resolution: {integrity: sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - hasBin: true peerDependencies: node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 peerDependenciesMeta: @@ -10534,7 +10469,6 @@ packages: /jest@29.6.3(@types/node@20.5.1)(ts-node@10.9.1): resolution: {integrity: sha512-alueLuoPCDNHFcFGmgETR4KpQ+0ff3qVaiJwxQM4B5sC0CvXcgg4PEi7xrDkxuItDmdz/FVc7SSit4KEu8GRvw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - hasBin: true peerDependencies: node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 peerDependenciesMeta: @@ -10875,6 +10809,7 @@ packages: engines: {node: 14 || >=16.14} dependencies: semver: 7.5.4 + dev: true /lru-cache@5.1.1: resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} @@ -11494,20 +11429,11 @@ packages: resolution: {integrity: sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==} dev: true - /mute-stream@0.0.8: - resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==} - dev: true - /mute-stream@1.0.0: resolution: {integrity: sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} dev: true - /mylas@2.1.13: - resolution: {integrity: sha512-+MrqnJRtxdF+xngFfUUkIMQrUUL0KsxbADUkn23Z/4ibGg192Q+z+CQyiYwvWTsYjJygmMR8+w3ZDa98Zh6ESg==} - engines: {node: '>=12.0.0'} - dev: false - /nanoid@3.3.3: resolution: {integrity: sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} @@ -11516,7 +11442,6 @@ packages: /nanoid@3.3.7: resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} - hasBin: true /nanoid@4.0.2: resolution: {integrity: sha512-7ZtY5KTCNheRGfEFxnedV5zFiORN1+Y1N6zvPTnHQd8ENUvfaDBeuJDZb2bN/oXwXxu3qkTXDzy57W5vAmDTBw==} @@ -12028,6 +11953,7 @@ packages: dependencies: lru-cache: 10.0.2 minipass: 5.0.0 + dev: true /path-to-regexp@0.1.7: resolution: {integrity: sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==} @@ -12158,13 +12084,6 @@ packages: v8-to-istanbul: 9.1.3 dev: true - /plimit-lit@1.6.1: - resolution: {integrity: sha512-B7+VDyb8Tl6oMJT9oSO2CW8XC/T4UcJGrwOVoNGwOQsQYhlpfajmrMj5xeejqaASq3V/EqThyOeATEOMuSEXiA==} - engines: {node: '>=12'} - dependencies: - queue-lit: 1.5.2 - dev: false - /pngjs@6.0.0: resolution: {integrity: sha512-TRzzuFRRmEoSW/p1KVAmiOgPco2Irlah+bGFCeNfJXxxYGwSw7YwAOAcd7X28K/m5bjBWKsC29KyoMfHbypayg==} engines: {node: '>=12.13.0'} @@ -12779,11 +12698,6 @@ packages: resolution: {integrity: sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==} dev: true - /queue-lit@1.5.2: - resolution: {integrity: sha512-tLc36IOPeMAubu8BkW8YDBV+WyIgKlYU7zUNs0J5Vk9skSZ4JfGlPOqplP0aHdfv7HL0B2Pg6nwiq60Qc6M2Hw==} - engines: {node: '>=12'} - dev: false - /queue-microtask@1.2.3: resolution: {integrity: 
sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} @@ -13447,11 +13361,6 @@ packages: strip-json-comments: 3.1.1 dev: false - /run-async@2.4.1: - resolution: {integrity: sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==} - engines: {node: '>=0.12.0'} - dev: true - /run-async@3.0.0: resolution: {integrity: sha512-540WwVDOMxA6dN6We19EcT9sc3hkXPw5mzRNGM3FkdN/vtE9NFvj5lFAPNwUDmJjXidm3v7TC1cTE7t17Ulm1Q==} engines: {node: '>=0.12.0'} @@ -13827,6 +13736,7 @@ packages: /signal-exit@4.1.0: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} + dev: true /simple-update-notifier@2.0.0: resolution: {integrity: sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==} @@ -14476,7 +14386,6 @@ packages: /ts-jest@29.1.1(@babel/core@7.23.2)(esbuild@0.19.2)(jest@29.6.3)(typescript@5.1.6): resolution: {integrity: sha512-D6xjnnbP17cC85nliwGiL+tpoKN0StpgE0TeOjXQTU6MVCfsB4v7aW05CgQ/1OywGb0x/oy9hHFnN+sczTiRaA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - hasBin: true peerDependencies: '@babel/core': '>=7.0.0-beta.0 <8' '@jest/types': ^29.0.0 @@ -14510,7 +14419,6 @@ packages: /ts-node@10.9.1(@types/node@20.5.1)(typescript@5.1.6): resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==} - hasBin: true peerDependencies: '@swc/core': '>=1.2.50' '@swc/wasm': '>=1.2.50' @@ -14541,7 +14449,6 @@ packages: /ts-node@10.9.1(@types/node@20.5.1)(typescript@5.2.2): resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==} - hasBin: true peerDependencies: '@swc/core': '>=1.2.50' '@swc/wasm': '>=1.2.50' @@ -14570,23 +14477,11 @@ packages: yn: 3.1.1 dev: false - /tsc-alias@1.8.8: - resolution: {integrity: sha512-OYUOd2wl0H858NvABWr/BoSKNERw3N9GTi3rHPK8Iv4O1UyUXIrTTOAZNHsjlVpXFOhpJBVARI1s+rzwLivN3Q==} - dependencies: - chokidar: 3.5.3 - commander: 9.5.0 - globby: 11.1.0 - mylas: 2.1.13 - normalize-path: 3.0.0 - plimit-lit: 1.6.1 - dev: false - /tslib@2.6.1: resolution: {integrity: sha512-t0hLfiEKfMUoqhG+U1oid7Pva4bbDPHYfJNiB7BiIjRkj1pyC++4N3huJfqY6aRH6VTB0rvtzQwjM4K6qpfOig==} /tunnelmole@2.2.9: resolution: {integrity: sha512-Pnrb5CJoczQJ43VzNP5qy8/GOi4OPaE+ametW/VtTDWWT9HDZRFLil21e1CwhzBDNje/IJ/WcnMoxz97jLcAyg==} - hasBin: true requiresBuild: true dependencies: '@types/commander': 2.12.2 @@ -14676,7 +14571,6 @@ packages: /typedoc@0.24.8(typescript@5.1.6): resolution: {integrity: sha512-ahJ6Cpcvxwaxfu4KtjA8qZNqS43wYt6JL27wYiIgl1vd38WW/KWX11YuAeZhuz9v+ttrutSsgK+XO1CjL1kA3w==} engines: {node: '>= 14.14'} - hasBin: true peerDependencies: typescript: 4.6.x || 4.7.x || 4.8.x || 4.9.x || 5.0.x || 5.1.x dependencies: @@ -14690,7 +14584,6 @@ packages: /typedoc@0.24.8(typescript@5.2.2): resolution: {integrity: sha512-ahJ6Cpcvxwaxfu4KtjA8qZNqS43wYt6JL27wYiIgl1vd38WW/KWX11YuAeZhuz9v+ttrutSsgK+XO1CjL1kA3w==} engines: {node: '>= 14.14'} - hasBin: true peerDependencies: typescript: 4.6.x || 4.7.x || 4.8.x || 4.9.x || 5.0.x || 5.1.x dependencies: @@ -14906,7 +14799,6 @@ packages: /update-browserslist-db@1.0.13(browserslist@4.22.1): resolution: {integrity: sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==} - hasBin: true peerDependencies: browserslist: '>= 4.21.0' dependencies: @@ -15138,7 +15030,6 @@ packages: /vite@4.4.9(@types/node@20.5.1): 
resolution: {integrity: sha512-2mbUn2LlUmNASWwSCNSJ/EG2HuSRTnVNaydp6vMCm5VIqJsjMfbIWtbH2kDuwUVW5mMUKKZvGPX/rqeqVvv1XA==} engines: {node: ^14.18.0 || >=16.0.0} - hasBin: true peerDependencies: '@types/node': '>= 14' less: '*' @@ -15183,7 +15074,6 @@ packages: /vitest@0.34.6(jsdom@22.1.0): resolution: {integrity: sha512-+5CALsOvbNKnS+ZHMXtuUC7nL8/7F1F2DnHGjSsszX8zCjWSSviphCb/NuS9Nzf4Q03KyyDRBAXhF/8lffME4Q==} engines: {node: '>=v14.18.0'} - hasBin: true peerDependencies: '@edge-runtime/vm': '*' '@vitest/browser': '*' @@ -15343,7 +15233,6 @@ packages: /webpack-dev-server@4.15.1(webpack@5.89.0): resolution: {integrity: sha512-5hbAst3h3C3L8w6W4P96L5vaV0PxSmJhxZvWKYIdgxOQm8pNZ5dEOmmSLBVpP85ReeyRt6AS1QJNyo/oFFPeVA==} engines: {node: '>= 12.13.0'} - hasBin: true peerDependencies: webpack: ^4.37.0 || ^5.0.0 webpack-cli: '*' @@ -15412,7 +15301,6 @@ packages: /webpack@5.89.0(esbuild@0.19.2)(uglify-js@3.17.4): resolution: {integrity: sha512-qyfIC10pOr70V+jkmud8tMfajraGCZMBWJtrmuBymQKCrLTRejBI8STDp1MCyZu/QTdZSeacCQYpYNQVOzX5kw==} engines: {node: '>=10.13.0'} - hasBin: true peerDependencies: webpack-cli: '*' peerDependenciesMeta: diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 834b1742b..f8e51b4c7 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -3,7 +3,6 @@ packages: - 'packages/**' - 'internals/**' - 'models/**' - - 'scripts/**' - 'docs/**' - 'tmp/bundlers/**' # exclude packages that are inside demo directories diff --git a/scripts/.npmrc b/scripts/.npmrc deleted file mode 100644 index b6f27f135..000000000 --- a/scripts/.npmrc +++ /dev/null @@ -1 +0,0 @@ -engine-strict=true diff --git a/scripts/package-scripts/create-model-demo.ts b/scripts/package-scripts/create-model-demo.ts deleted file mode 100644 index ff80d4c4b..000000000 --- a/scripts/package-scripts/create-model-demo.ts +++ /dev/null @@ -1,186 +0,0 @@ -import { writeFile, copy, mkdirp, } from 'fs-extra'; -import path from 'path'; -import yargs from 'yargs'; -import { ifDefined as _ifDefined } from './prompt/ifDefined'; -import { MODELS_DIR } from './utils/constants'; - -/*** - * Types - */ - -/**** - * Utility functions - */ - -const makeDemoFolder = async (model: string) => { - await mkdirp(path.resolve(MODELS_DIR, model, 'demo')); -}; - -const copyFixture = async (model: string) => { - const modelRoot = path.resolve(MODELS_DIR, model); - await copy(path.resolve(modelRoot, 'assets/fixture.png'), path.resolve(modelRoot, 'demo', 'fixture.png')); -}; - -const getIndexJSFile = (model: string) => ` -import Upscaler from "upscaler"; -import * as models from '@upscalerjs/${model}'; -import fixture from "./fixture.png"; - -const upscaler = new Upscaler({ - model: models.small, -}); - -upscaler.upscale(fixture).then((upscaledImgSrc) => { - const img = document.createElement("img"); - img.src = upscaledImgSrc; - document.getElementById("target").appendChild(img); -}); -`; - -const getIndexHTMLFile = (model: string) => ` - - - @upscalerjs/${model} - - - - - - - - - - - - -
-    <td>Original</td>
-    <td>Upscaled</td>
- - - - -`; - -const getStackBlitz = () => ` -{ - "installDependencies": true, - "startCommand": "npm run dev" -} -`; -const getPackageJSONFile = (model: string) => ` -{ - "name": "@upscalerjs/demo.${model}", - "private": true, - "version": "1.0.0-beta.1", - "main": "index.js", - "scripts": { - "dev": "vite" - }, - "devDependencies": { - "vite": "*" - }, - "author": "Kevin Scott", - "license": "MIT", - "dependencies": { - "@tensorflow/tfjs": "~4.11.0", - "seedrandom": "^3.0.5", - "@upscalerjs/${model}": "^0.1.0", - "upscaler": "^1.0.0-beta.8" - }, - "engines": { - "npm": ">8.0.0" - } -}`; - -const writeDemoFile = async (model: string, file: string, contents: string) => { - const demoRoot = path.resolve(MODELS_DIR, model, 'demo'); - await writeFile(path.resolve(demoRoot, file), contents.trim(), 'utf-8'); -} - -const writeDemoFiles = async (model: string) => { - return Promise.all([ - ['index.js', getIndexJSFile(model)], - ['index.html', getIndexHTMLFile(model)], - ['package.json', getPackageJSONFile(model)], - ['.stackblitzrc', getStackBlitz()], - - ].map(([file, contents]) => writeDemoFile(model, file, contents))); -}; - -/**** - * Main function - */ - -const createModelDemo = async ( - model: string, -) => { - await makeDemoFolder(model); - await copyFixture(model); - await writeDemoFiles(model); -} - -export default createModelDemo; - -/**** - * Functions to expose the main function as a CLI tool - */ - -interface Answers { - model: string; -} - -const getArgs = async (): Promise => { - const argv = await yargs.command('create model demo', '', yargs => { - yargs.positional('model', { - describe: 'The model demo to create', - }); - }) - .help() - .argv; - - return { - model: argv._[0] as string, - } - -} - -if (require.main === module) { - (async () => { - const { model } = await getArgs(); - await createModelDemo(model); - })(); -} diff --git a/scripts/package-scripts/docs/build-api.ts b/scripts/package-scripts/docs/build-api.ts deleted file mode 100644 index 14464eb6f..000000000 --- a/scripts/package-scripts/docs/build-api.ts +++ /dev/null @@ -1,892 +0,0 @@ -import path from 'path'; -import { - mkdirp, - writeFile, -} from 'fs-extra'; -import { - Application, - DeclarationReflection as TypedocDeclarationReflection, - TSConfigReader, - TypeDocReader, - ReflectionKind, -} from 'typedoc'; -import { DOCS_DIR, UPSCALER_DIR } from '../utils/constants'; -import { clearOutMarkdownFiles } from './utils/clear-out-markdown-files'; -import { SharedArgs } from './types'; -import { - CommentDisplayPart, - CommentTag, - ParameterReflection, - ArrayType, - UnionType, - IntersectionType, - IntrinsicType, - LiteralType, - ReferenceType, - SomeType, - Comment, - SignatureReflection, - SourceReference, - DeclarationReflection, - TypeParameterReflection, -} from 'typedoc/dist/lib/serialization/schema'; -import { Platform } from '../scaffold-dependencies'; - -/**** - * Types - */ -type DecRef = DeclarationReflection | PlatformSpecificDeclarationReflection; -interface Definitions { - constructors: Record; - methods: Record; - interfaces: Record; - types: Record; - classes: Record; - functions: Record; - enums: Record; - variables: Record; -} - -interface PlatformSpecificDeclarationReflection extends Omit { - kind: 'Platform Specific Type'; - node: DeclarationReflection; - browser: DeclarationReflection; -} - -/**** - * Constants - */ -const REPO_ROOT = 'https://github.com/thekevinscott/UpscalerJS'; -const UPSCALER_TSCONFIG_PATH = path.resolve(UPSCALER_DIR, 'tsconfig.browser.esm.json'); -const 
UPSCALER_SRC_PATH = path.resolve(UPSCALER_DIR, 'src/browser/esm'); -const EXAMPLES_DOCS_DEST = path.resolve(DOCS_DIR, 'docs/documentation/api'); -const VALID_EXPORTS_FOR_WRITING_DOCS = ['default']; -const VALID_METHODS_FOR_WRITING_DOCS = [ - 'constructor', - 'upscale', - 'execute', - 'warmup', - 'abort', - 'dispose', - 'getModel', -]; -const INTRINSIC_TYPES = [ - 'string', - 'number', - 'boolean', -]; -const TYPES_TO_EXPAND: Record = { - 'upscale': ['Input', 'Progress'], - 'warmup': ['WarmupSizes'], -} -const EXPANDED_TYPE_CONTENT: Record) => string> = { - 'Input': (definitions) => writePlatformSpecificDefinitions(definitions), - 'WarmupSizes': () => ([ - `- \`number\` - a number representing both the size (width and height) of the patch.`, - `- \`{patchSize: number; padding?: number}\` - an object with the \`patchSize\` and optional \`padding\` properties.`, - `- \`number[]\` - an array of numbers representing the size (width and height) of the patch.`, - `- \`{patchSize: number; padding?: number}[]\` - an array of objects with the \`patchSize\` and optional \`padding\` properties.`, - ].join('\n')), - 'Progress': () => ([ - 'The progress callback function has the following four parameters:', - '- `progress` - a number between 0 and 1 representing the progress of the upscale.', - '- `slice` - a string or 3D tensor representing the current slice of the image being processed. The type returned is specified by the `progressOutput` option, or if not present, the `output` option, or if not present, string for the browser and tensor for node.', - '- `row` - the row of the image being processed.', - '- `col` - the column of the image being processed.', - '', - '[See the guide on progress for more information.](/documentation/guides/browser/usage/progress)', - ].join('\n')), -}; -// define special type information that is external -const makeNewExternalType = (name: string, url: string): DeclarationReflection => { - const type = new TypedocDeclarationReflection(name, ReflectionKind['SomeType']); - // const source = new SourceReference('', 0, 0); - // source.url = url; - type.sources = []; - return type as DeclarationReflection; -}; - -const EXTERNALLY_DEFINED_TYPES: Record = { - 'AbortSignal': makeNewExternalType( - 'AbortSignal', - 'https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal' - ), - 'SerializableConstructor': makeNewExternalType( - 'SerializableConstructor', - 'https://github.com/tensorflow/tfjs/blob/38f8462fe642011ff1b7bcbb52e018f3451be58b/tfjs-core/src/serialization.ts#L54', - ), -} - -/**** - * Utility functions - */ -const getPackageAsTree = (entryPoint: string, tsconfig: string, projectRoot: string) => { - const app = new Application(); - - app.options.addReader(new TSConfigReader()); - app.options.addReader(new TypeDocReader()); - - app.bootstrap({ - entryPoints: [entryPoint], - tsconfig, - }); - - const project = app.convert(); - - if (project) { - return app.serializer.projectToObject(project, projectRoot); - } - throw new Error('No project was converted.') -} - -const getTypeFromPlatformSpecificFiles = async (fileName: string, typeName: string) => { - const platforms: Platform[] = ['browser', 'node']; - const platformSpecificTypes: DeclarationReflection[] = []; - for (let i = 0; i < platforms.length; i++) { - const platform = platforms[i]; - const imageBrowser = getPackageAsTree( - path.resolve(UPSCALER_DIR, 'src', platform, `${fileName}.${platform}.ts`), - path.resolve(UPSCALER_DIR, `tsconfig.docs.${platform}.json`), - UPSCALER_DIR, - ); - const matchingType = 
imageBrowser.children?.filter(child => child.name === typeName).pop(); - if (!matchingType) { - throw new Error(`Could not find input from ${fileName}.${platform}.ts`); - } - platformSpecificTypes.push(matchingType); - } - - const platformSpecificType: PlatformSpecificDeclarationReflection = { - name: typeName, - variant: 'declaration', - kind: 'Platform Specific Type', - browser: platformSpecificTypes[0], - node: platformSpecificTypes[1], - children: [], - type: platformSpecificTypes[0].type, - } - - return platformSpecificType; -} - -const getTypesFromPlatformSpecificFiles = async (): Promise<{ - children: PlatformSpecificDeclarationReflection[]; -}> => { - return { - children: await Promise.all([ - getTypeFromPlatformSpecificFiles('image', 'Input'), - ]), - }; -} - -function getAsObj (arr: T[], getKey: (item: T) => string) { - return arr.reduce((obj, item) => ({ - ...obj, - [getKey(item)]: item, - }), {} as Record); -} - -const getKindStringKey = (kindString: 'Platform Specific Type' | ReflectionKind) => { - switch (kindString) { - case 'Platform Specific Type': - return 'types'; - case ReflectionKind.Constructor: - return 'constructors'; - case ReflectionKind.Method: - return 'methods'; - case ReflectionKind.Interface: - return 'interfaces'; - case ReflectionKind.TypeAlias: - return 'types'; - case ReflectionKind.Class: - return 'classes'; - case ReflectionKind.Function: - return 'functions'; - case ReflectionKind.Enum: - return 'enums'; - case ReflectionKind.Variable: - return 'variables'; - default: - throw new Error(`Unexpected kind string: ${kindString}`); - } -} - -const getDefinitions = async (): Promise => { - const upscalerTree = getPackageAsTree( - UPSCALER_SRC_PATH, - UPSCALER_TSCONFIG_PATH, - UPSCALER_DIR, - ); - const platformSpecificTypes = await getTypesFromPlatformSpecificFiles(); - if (!upscalerTree.children) { - throw new Error('No children were found on upscaler tree object. 
Indicates an error in the returned structure from getPackageAsTree'); - } - const children = [ - ...upscalerTree.children, - ...(platformSpecificTypes.children || []), - ]; - - const parsedChildren = children.reduce((obj, child) => { - const { kind } = child; - try { - const key = getKindStringKey(kind); - if (!key) { - throw new Error(`Unexpected kind string: ${kind}`); - } - return { - ...obj, - [key]: obj[key].concat(child), - }; - } catch (err) { - for (var enumMember in ReflectionKind) { - console.log("enum member: ", enumMember, ReflectionKind[enumMember]); - } - throw new Error(`Could not get key for ${JSON.stringify(child)}: ${(err as Error).message}`) - - } - }, { - constructors: [] as DecRef[], - methods: [] as DecRef[], - functions: [] as DecRef[], - interfaces: [] as DecRef[], - types: [] as DecRef[], - classes: [] as DecRef[], - enums: [] as DecRef[], - variables: [] as DecRef[], - }); - - return { - methods: getAsObj(parsedChildren.methods, i => i.name), - constructors: getAsObj(parsedChildren.constructors, i => i.name), - functions: getAsObj(parsedChildren.functions, i => i.name), - types: getAsObj(parsedChildren.types, i => i.name), - interfaces: getAsObj(parsedChildren.interfaces, i => i.name), - classes: getAsObj(parsedChildren.classes, i => i.name), - enums: getAsObj(parsedChildren.enums, i => i.name), - variables: getAsObj(parsedChildren.variables, i => i.name), - }; -} - -const getSummary = (comment?: Comment) => comment?.summary.map(({ text }) => text).join(''); - -const getTextSummary = (name: string, comment?: Comment): { - codeSnippet?: string; - description?: string; - blockTags?: Record; -} => { - if (comment === undefined) { - return {}; - } - const { summary, blockTags } = comment; - const expectedCodeSnippet = summary.pop(); - if (expectedCodeSnippet?.kind !== 'code') { - throw new Error(`Expected code snippet not found for ${name}`); - } - // const { text, code } = summary.reduce((obj, item) => { - // return { - // ...obj, - // [item.kind]: item.text.trim(), - // } - // }, { - // text: '', - // code: '', - // }); - const text = summary.map(({ text }) => text).join(''); - return { - blockTags: blockTags?.reduce((obj, blockTag) => { - return { - ...obj, - [blockTag.tag]: blockTag.content, - }; - }, {}), - description: text.trim(), - codeSnippet: expectedCodeSnippet.text.trim(), - } -}; - -const getSource = ([source]: SourceReference[]) => { - let { - fileName, - line, - // character, - url, - } = source; - url = `${REPO_ROOT}/blob/main/${fileName}#L${line}`; - // if (!url) { - // throw new Error(`No URL defined for source ${fileName} at line ${line}`); - // } - const prettyFileName = fileName.split('packages/upscalerjs/src/shared/').pop(); - return `Defined in ${prettyFileName}:${line}`; -}; - -const rewriteURL = (url: string) => { - const parts = url.split(/blob\/(?[^/]+)/) - if (parts.length !== 3) { - throw new Error(`Error with the regex: ${url}`); - } - return [ - parts[0], - 'tree/main', - parts[2], - ].join(''); -} - -const isDeclarationReflection = (reflection?: DecRef): reflection is DeclarationReflection => reflection?.kind !== 'Platform Specific Type'; -const isArrayType = (type: SomeType): type is ArrayType => type.type === 'array'; -const isReferenceType = (type: SomeType): type is ReferenceType => type.type === 'reference'; -const isLiteralType = (type: SomeType): type is LiteralType => type.type === 'literal'; -const isInstrinsicType = (type: SomeType): type is IntrinsicType => type.type === 'intrinsic'; -const isUnionType = (type: SomeType): 
type is UnionType => type.type === 'union'; -const isIntersectionType = (type: SomeType): type is IntersectionType => type.type === 'intersection'; - -const getLiteralTypeValue = (type: LiteralType): string => { - const { value } = type; - if (typeof value === 'number') { - return `${value}`; - } else if (typeof value === 'string') { - return value; - } - - throw new Error('Not yet implemented for literal'); -} - -const getReferenceTypeOfParameter = (_type?: SomeType, definitions?: Definitions): { - type: 'reference' | 'array' | 'literal' | 'intrinsic' | 'union', - name: string; - includeURL?: boolean; -} => { - if (!_type) { - throw new Error('Define a type'); - } - if (isArrayType(_type)) { - const { elementType } = _type; - if (isReferenceType(elementType)) { - return { - type: _type.type, - name: elementType.name, - } - } else if (isUnionType(elementType)) { - return { - type: 'union', - name: elementType.types.map(t => { - if ('name' in t) { - return t.name; - } - throw new Error('unimplemented'); - }).join(' | '), - }; - } - - console.error(_type); - - throw new Error('Not yet implemented'); - } - - if (isReferenceType(_type)) { - const { name } = _type; - if (name === 'ModelDefinitionObjectOrFn') { - return { - type: _type.type, - name: "ModelDefinition", - }; - } - return { - type: _type.type, - name, - }; - } - - if (isLiteralType(_type)) { - return { - type: 'literal', - name: getLiteralTypeValue(_type), - }; - } - - if (isInstrinsicType(_type)) { - return { - type: 'intrinsic', - name: _type.name, - } - } - - if (isIntersectionType(_type)) { - const refType = _type.types.filter(t => t.type === 'reference').pop(); - if (!refType || !isReferenceType(refType)) { - throw new Error('No reference type found on intersection type.'); - } - // if (definitions === undefined) { - // throw new Error('Intersection type was provided and a reference type was found in the union, but no definitions are present.') - // } - const t = refType.typeArguments?.filter(t => t.type === 'reference').pop(); - if (!t || !('name' in t)) { - throw new Error('No type arguments found on intersection type.'); - } - return { - type: 'literal', - name: t.name, - }; - } - - if (isUnionType(_type)) { - let includeURL = true; - - const getNameFromUnionType = (type: UnionType): string => type.types.map(t => { - if (isReferenceType(t)) { - if (definitions === undefined) { - console.warn('Union type was provided and a reference type was found in the union, but no definitions are present.'); - return t.name; - } - const { interfaces, types } = definitions; - const matchingType = interfaces[t.name] || types[t.name]; - if (!isDeclarationReflection(matchingType)) { - throw new Error('Is a platform specific type'); - } - if (!matchingType?.type) { - return t.name; - // throw new Error(`No matching type found for literal ${t.name} in union`); - } - const matchingTypeType = matchingType.type; - if (isLiteralType(matchingTypeType)) { - // if any literal types are included, don't include the URL - includeURL = false; - return JSON.stringify(matchingTypeType.value); - } - if (matchingTypeType.type === 'reflection') { - // Ignore reflection types - return t.name; - } - if (matchingTypeType.type === 'union') { - return getNameFromUnionType(matchingTypeType); - } - if (matchingTypeType.type === 'tuple') { - console.log('matchingTypeType tuple', matchingTypeType); - return `[${matchingTypeType.elements?.map(e => { - if ('name' in e) { - return e.name; - } - throw new Error('Array type not yet implemented'); - }).join(',')}]`; - } - 
console.error('matchingTypeType', JSON.stringify(matchingTypeType, null, 2)); - - throw new Error(`Unsupported type of matching type ${matchingTypeType.type} in reference type of union type ${t.name}.`); - } else if (isInstrinsicType(t)) { - if (t.name === 'undefined') { - // ignore an explicit undefined type; this should be better represented to the user as an optional flag. - return undefined; - } - return t.name; - } else if (isLiteralType(t)) { - return `${t.value}`; - } else if (t.type === 'indexedAccess') { - const objectType = t.objectType; - if ('name' in objectType) { - return objectType.name; - } - return ''; - } else if (t.type === 'array') { - if ('name' in t.elementType) { - return `${t.elementType.name}[]`; - } - console.warn('Unknown element type', t); - // throw new Error('Unknown element type'); - return ''; - } - console.error(t); - throw new Error(`Unsupported type in union type: ${t.type}`); - }).filter(Boolean).join(' | '); - - const name = getNameFromUnionType(_type); - - return { - type: 'literal', - includeURL, - name, - }; - } - - console.error(_type) - - throw new Error(`Unsupported type: ${_type.type}`) -}; - -const getURLFromSources = (matchingType: undefined | DecRef | TypeParameterReflection) => { - if (!matchingType) { - return undefined; - } - if ('sources' in matchingType) { - const sources = matchingType.sources; - if (sources?.length) { - const { url } = sources?.[0] || {}; - if (url?.startsWith(REPO_ROOT)) { - return rewriteURL(url); - } - return url; - } - } - - return undefined; -}; - -function sortChildrenByLineNumber(children: T[]) { - return children.sort(({ sources: aSrc }, { sources: bSrc }) => { - if (!aSrc?.length) { - return 1; - } - if (!bSrc?.length) { - return -1; - } - return aSrc[0].line - bSrc[0].line; - }); -}; - -const isTypeParameterReflection = (reflection: DecRef | TypeParameterReflection): reflection is TypeParameterReflection => { - return 'parent' in reflection; -} - -const writeParameter = (methodName: string, parameter: ParameterReflection | DeclarationReflection, matchingType: undefined | DecRef | TypeParameterReflection, definitions: Definitions, childParameters: string) => { - // if (matchingType !== undefined && !isTypeParameterReflection(matchingType) && !isDeclarationReflection(matchingType)) { - // // this is a platform-specify type specification. likely it is the input definition. - // const comment = getSummary(parameter.comment); - // const { type, name } = getReferenceTypeOfParameter(parameter.type, definitions); - // const parsedName = `\`${name}${type === 'array' ? '[]' : ''}\``; - // return [ - // '-', - // `**${parameter.name}${parameter.flags?.isOptional ? '?' : ''}**:`, - // childParameters ? undefined : `[${parsedName}](#${name.toLowerCase()})`, // only show the type information if we're not expanding it - // comment ? ` - ${comment}` : undefined, - // ].filter(Boolean).join(' '); - // } - const comment = getSummary(parameter.comment); - const { type, name, includeURL = true } = getReferenceTypeOfParameter(parameter.type, definitions); - const parsedName = `${name}${type === 'array' ? '[]' : ''}`; - - let url: string | undefined = undefined; - const typesToExpand = TYPES_TO_EXPAND[methodName === 'constructor' ? '_constructor' : methodName] || []; - if (typesToExpand.includes(name)) { - url = `#${name.toLowerCase()}`; - } else if (includeURL) { - url = getURLFromSources(matchingType); - } - const linkedName = url ? 
`[\`${parsedName}\`](${url})` : `\`${parsedName}\``; - return [ - '-', - `**${parameter.name}${parameter.flags?.isOptional ? '?' : ''}**:`, - childParameters === '' ? linkedName : undefined, // only show the type information if we're not expanding it - comment ? ` - ${comment.split('\n').join(" ")}` : undefined, - ].filter(Boolean).join(' '); -}; - -const writePlatformSpecificParameter = (platform: string, parameter: DeclarationReflection, definitions: Definitions) => { - const comment = getSummary(parameter.comment); - const { type, name } = getReferenceTypeOfParameter(parameter.type, definitions); - const url = getURLFromSources(parameter); - const parsedName = `${name}${type === 'array' ? '[]' : ''}`; - return [ - '-', - `**[${platform}](${url})**:`, - `\`${parsedName}\``, - comment ? ` - ${comment}` : undefined, - ].filter(Boolean).join(' '); -}; - -const writePlatformSpecificDefinitions = (definitions: Definitions): string => { - const platformSpecificTypes: PlatformSpecificDeclarationReflection[] = []; - for (let i = 0; i< Object.values(definitions.types).length; i++) { - const type = Object.values(definitions.types)[i]; - if (!isDeclarationReflection(type)) { - platformSpecificTypes.push(type); - } - } - return platformSpecificTypes.map(parameter => [ - writePlatformSpecificParameter('Browser', parameter.browser, definitions), - writePlatformSpecificParameter('Node', parameter.node, definitions), - ].join('\n')).join('\n'); -}; - -const getMatchingType = (parameter: ParameterReflection | DeclarationReflection, definitions: Definitions, typeParameters: Record = {}) => { - const { classes, interfaces, types } = definitions; - let { name: nameOfTypeDefinition } = getReferenceTypeOfParameter(parameter.type, definitions); - let matchingType: undefined | PlatformSpecificDeclarationReflection | DeclarationReflection | TypeParameterReflection = undefined; - if (!INTRINSIC_TYPES.includes(nameOfTypeDefinition) && parameter.type !== undefined && !isLiteralType(parameter.type)) { - // first, check if it is a specially defined external type - matchingType = EXTERNALLY_DEFINED_TYPES[nameOfTypeDefinition] || interfaces[nameOfTypeDefinition] || types[nameOfTypeDefinition]; - // console.log('matchingType', matchingType); - if (!matchingType) { - // it's possible that this type is a generic type; in which case, replace the generic with the actual type it's extending - matchingType = typeParameters[nameOfTypeDefinition]; - if (matchingType) { - nameOfTypeDefinition = (matchingType as any).type.name; - matchingType = interfaces[nameOfTypeDefinition] || types[nameOfTypeDefinition]; - parameter.type = matchingType.type; - } - } - if (!matchingType && (parameter.type === undefined || !isUnionType(parameter.type))) { - console.warn('------') - console.warn(parameter.type); - console.warn([ - `No matching type could be found for ${nameOfTypeDefinition}.`, - `- Available interfaces: ${Object.keys(interfaces).join(', ')}`, - `- Available types: ${Object.keys(types).join(', ')}`, - `- Available classes: ${Object.keys(classes).join(', ')}` - ].join('\n')); - console.warn('------') - } - } - return matchingType; -} - -const getParameters = (methodName: string, parameters: (ParameterReflection | DeclarationReflection)[], definitions: Definitions, typeParameters: Record = {}, depth = 0): string => { - if (depth > 5) { - throw new Error('Too many levels of depth'); - } - return parameters.map((parameter) => { - const matchingType = getMatchingType(parameter, definitions, typeParameters); - const { children = [] } = 
matchingType || {}; - const childParameters = getParameters(methodName, sortChildrenByLineNumber(children), definitions, typeParameters, depth + 1); - return [ - writeParameter(methodName, parameter, matchingType, definitions, childParameters), - childParameters, - ].filter(Boolean).map(line => Array(depth * 2).fill(' ').join('') + line).join('\n'); - }).filter(Boolean).join('\n'); -}; - -const getReturnType = (signatures: (SignatureReflection & { typeParameter?: TypeParameterReflection[] })[], blockTags?: Record) => { - if (signatures.length === 1) { - const { type } = signatures[0]; - if (type === undefined) { - return 'void'; - } - - if (isReferenceType(type)) { - const { name, typeArguments } = type; - let nameOfType = name; - if (typeArguments?.length) { - nameOfType = `${nameOfType}<${typeArguments.map(t => getReferenceTypeOfParameter(t)).map(({ name }) => name).join(', ')}>`; - } - const returnDescription = blockTags?.['@returns']?.map(({ text }) => text).join(''); - return `\`${nameOfType}\`${returnDescription ? ` - ${returnDescription}` : ''}`; - } - - if (isInstrinsicType(type)) { - let nameOfType = type.name; - const returnDescription = blockTags?.['@returns']?.map(({ text }) => text).join(''); - return `\`${nameOfType}\`${returnDescription ? ` - ${returnDescription}` : ''}`; - } - - console.error(type); - throw new Error(`Return Type function not yet implemented for type ${type.type}`) - } - - let comment: Comment; - const validReturnTypes = new Set(); - let returnType = ''; - signatures.forEach(signature => { - if (signature.comment) { - if (comment !== undefined) { - throw new Error('Multiple comments defined for return signatures'); - } - comment = signature.comment; - } - const { type } = signature; - if (type === undefined) { - throw new Error('No type defined for signature'); - } - if (!isReferenceType(type)) { - throw new Error(`Unsupported type: ${type.type}`); - } - if (returnType !== '' && returnType !== type.name) { - throw new Error(`Conflicting return types in signatures: ${returnType} vs ${type.name}}`) - } - returnType = type.name; - if (!('typeArguments' in type)) { - throw new Error('No type arguments defined for type'); - } - const { typeArguments } = type; - typeArguments?.forEach(type => { - if (isUnionType(type)) { - type.types.forEach(t => { - if (isInstrinsicType(t) || isReferenceType(t)) { - validReturnTypes.add(t.name); - } else { - throw new Error(`Unsupported type when trying to handle union type while collecting valid signatures: ${type.type} ${t.type}`); - } - }); - } else if (isInstrinsicType(type)) { - validReturnTypes.add(type.name); - } else if (isReferenceType(type)) { - validReturnTypes.add(type.name); - } else { - throw new Error(`Unsupported type when trying to collect valid signatures: ${type.type}`); - } - }); - }) - - const nameOfType = `${returnType}<${Array.from(validReturnTypes).join(' | ')}>`; - const returnDescription = blockTags?.['@returns']?.map(({ text }) => text).join(''); - return `\`${nameOfType}\`${returnDescription ? ` - ${returnDescription}` : ''}`; -} - -const writeExpandedTypeDefinitions = (methodName: string, definitions: Definitions, typeParameters: Record = {}): string => { - // this method is for writing out additional information on the types, below the parameters - const typesToExpand = TYPES_TO_EXPAND[methodName === 'constructor' ? 
'_constructor' : methodName] || []; - return typesToExpand.map(type => [ - `### \`${type}\``, - EXPANDED_TYPE_CONTENT[type](definitions, typeParameters), - ].join('\n')).join('\n'); -} - -const getContentForMethod = (method: DeclarationReflection, definitions: Definitions, i: number) => { - const { - name, - signatures, - sources, - } = method; - - if (name === 'upscale') { - return [ - [ - '---', - `title: ${name}`, - `sidebar_position: ${i}`, - `sidebar_label: ${name}`, - '---', - ].join('\n'), - - `# ${name}`, - `Alias for [\`execute\`](execute)`, - ].filter(Boolean).join('\n\n'); - - } - - if (!sources?.length) { - throw new Error(`No sources found for ${name}`); - } - if (!signatures?.length) { - const { type, ...m } = method; - console.log(JSON.stringify(m, null, 2)) - throw new Error(`No signatures found in ${name}`); - } - const signature = signatures[0] as SignatureReflection & { typeParameter?: TypeParameterReflection[] }; - const { comment, parameters, typeParameter: typeParameters } = signature; - // if (!comment) { - // throw new Error(`No comment found in method ${name}`); - // } - - const { description, codeSnippet, blockTags } = getTextSummary(name, comment); - let source; - try { - source = getSource(sources); - } catch(e) { - console.error(JSON.stringify(method, null, 2)); - throw e; - } - - const content = [ - [ - '---', - `title: ${name}`, - `sidebar_position: ${i}`, - `sidebar_label: ${name}`, - '---', - ].join('\n'), -`# \`${name}\``, - description, - ...(codeSnippet ? [ - `## Example`, - codeSnippet, - ] : []), - source, - ...(parameters ? [ - `## Parameters`, - getParameters(name, parameters, definitions, getAsObj(typeParameters || [], t => t.name)), - ] : []), - writeExpandedTypeDefinitions(name, definitions, getAsObj(typeParameters || [], t => t.name)), - `## Returns`, - getReturnType(signatures, blockTags), - ].filter(Boolean).join('\n\n'); - return content; -}; - -const getSortedMethodsForWriting = async (definitions: Definitions) => { - console.log(JSON.stringify(definitions.variables, null, 2)) - const exports = Object.values(definitions.classes); - const methods: DeclarationReflection[] = []; - for (let i = 0; i < exports.length; i++) { - const xport = exports[i]; - if (VALID_EXPORTS_FOR_WRITING_DOCS.includes(xport.name)) { - const { children } = xport; - if (!children) { - throw new Error(`No methods found in export ${xport.name}`); - } - sortChildrenByLineNumber(children).forEach(method => { - if (VALID_METHODS_FOR_WRITING_DOCS.includes(method.name)) { - methods.push(method); - } else { - console.log(`** Ignoring method ${method.name}`); - } - }); - } - } - return methods; -}; - -const writeAPIDocumentationFiles = async (methods: DeclarationReflection[], definitions: Definitions) => { - await Promise.all(methods.map(async (method, i) => { - const content = getContentForMethod(method, definitions, i); - if (content) { - const target = path.resolve(EXAMPLES_DOCS_DEST, `${method.name}.md`); - await mkdirp(path.dirname(target)); - await writeFile(target, content.trim(), 'utf-8'); - } else { - throw new Error(`No content for method ${method.name}`); - } - })) -}; - -const writeIndexFile = async (methods: DeclarationReflection[]) => { - const contents = [ - '# API', - '', - 'API Documentation for UpscalerJS.', - '', - 'Available methods:', - '', - ...methods.map(method => `- [\`${method.name}\`](./${method.name})`), - ].join('\n') - await writeFile(path.resolve(EXAMPLES_DOCS_DEST, 'index.md'), contents, 'utf-8'); -} - -/**** - * Main function - */ -async function 
main({ shouldClearMarkdown }: SharedArgs = {}) { - await mkdirp(EXAMPLES_DOCS_DEST); - if (shouldClearMarkdown) { - await clearOutMarkdownFiles(EXAMPLES_DOCS_DEST); - } - - const definitions = await getDefinitions(); - const methods = await getSortedMethodsForWriting(definitions); - if (methods.length === 0) { - throw new Error('No methods were found') - } - - await Promise.all([ - writeAPIDocumentationFiles(methods, definitions), - writeIndexFile(methods), - ]); -} - -/**** - * Functions to expose the main function as a CLI tool - */ - -if (require.main === module) { - (async () => { - return; - // const sharedArgs = await getSharedArgs(); - // await main({ ...sharedArgs }); - })(); -} diff --git a/scripts/package-scripts/docs/build-guides.ts b/scripts/package-scripts/docs/build-guides.ts deleted file mode 100644 index 08e5cfd8a..000000000 --- a/scripts/package-scripts/docs/build-guides.ts +++ /dev/null @@ -1,283 +0,0 @@ -import path from 'path'; -import { copyFile, existsSync, mkdirp, readdir, readdirSync, readFile, readFileSync, statSync, writeFile } from 'fs-extra'; -import { DOCS_DIR, EXAMPLES_DIR } from '../utils/constants'; -import { getPackageJSON } from '../utils/packages'; -import fm from 'front-matter'; -import { clearOutMarkdownFiles } from './utils/clear-out-markdown-files'; -import { getSharedArgs, SharedArgs } from './types'; - -/**** - * Types - */ -interface FrontMatter { - [index: string]: string | number | FrontMatter; -} -interface ExampleContent { - title: string; - frontmatter: FrontMatter; -} -type Category = 'browser' | 'node' | 'other'; - -/**** - * Constants - */ -const DEFAULT_EMBED_FOR_NODE = 'codesandbox'; -const DEFAULT_EMBED_FOR_BROWSER = 'codesandbox'; -// const DEFAULT_EMBED_FOR_BROWSER = 'stackblitz'; -const EXAMPLES_DOCS_DEST = path.resolve(DOCS_DIR, 'docs/documentation/guides'); - -/**** - * Utility functions - */ -const isCategory = (category: unknown): category is Category => typeof category === 'string' && ['browser', 'node', 'other'].includes(category); -const isDirectory = (root: string) => (folder: string) => statSync(path.resolve(root, folder)).isDirectory(); -const getExampleFolders = (root: string) => readdirSync(root).filter(isDirectory(root)); - -const getDefaultCodeEmbedParameters = (category: Category, params: Record = {}) => { - if (category === 'node') { - return 'view=split,preview&module=index.js&hidenavigation=1'; - }; - return Object.entries({ - embed: 1, - file: 'index.js', - hideExplorer: 1, - ...params, - }).map(([key, val]) => `${key}=${val}`).join('&'); -} - -const getFrontmatter = (key: string): ExampleContent => { - const packageJSON = getPackageJSON(path.resolve(EXAMPLES_DIR, key, 'package.json')); - const readmePath = path.resolve(EXAMPLES_DIR, key, 'README.md'); - const readmeContents = readFileSync(readmePath, 'utf-8'); - const { body } = fm(readmeContents); - const bodyParts = body.split('\n'); - let title: undefined | string; - for (let i = 0; i < bodyParts.length; i++) { - const line = bodyParts[i]; - if (line.startsWith('#')) { - title = line.split('#')?.pop()?.trim() || ''; - break; - } - } - - if (!title) { - throw new Error(`No title found in file ${readmePath}`); - } - - const { - category = 'browser', - code_embed, - ...frontmatter - } = packageJSON['@upscalerjs']?.guide?.frontmatter || {}; - - const codeEmbed = code_embed !== false ? { - params: getDefaultCodeEmbedParameters(category, frontmatter.params), - type: category ? 
DEFAULT_EMBED_FOR_NODE : DEFAULT_EMBED_FOR_BROWSER, - url: `/examples/${key}`, - ...code_embed, - } : {}; - - return { - frontmatter: { - category, - hide_table_of_contents: true, - ...frontmatter, - code_embed: codeEmbed, - }, - title, - } -}; - -const getExamplesWithFrontmatter = (): ({ key: string; } & ExampleContent)[] => getExampleFolders(EXAMPLES_DIR).filter(key => { - const readmePath = path.resolve(EXAMPLES_DIR, key, 'README.md'); - return existsSync(readmePath); -}).map(key => { - return { - key, - ...getFrontmatter(key), - }; -}); - -const getExampleOrder = (examples: ({ key: string; } & ExampleContent)[]) => { - return examples.sort((a, b) => { - const aPos = Number(a.frontmatter.sidebar_position); - const bPos = Number(b.frontmatter.sidebar_position); - if (Number.isNaN(aPos)) { - return 1; - } - if (Number.isNaN(bPos)) { - return -1; - } - return aPos - bPos; - }).map(({ key }) => key); -} - -const getExamplesByName = () => { - const examplesWithFrontmatter = getExamplesWithFrontmatter(); - const exampleOrder = getExampleOrder(examplesWithFrontmatter); - - return { - examplesByName: examplesWithFrontmatter.reduce((obj, { key, ...rest }) => { - if (obj[key]) { - throw new Error(`Example already exists for key ${key}`); - } - return { - ...obj, - [key]: rest, - }; - }, {} as Record), - exampleOrder, - }; -} - -const indent = (str: string, depth = 0) => [...Array(depth * 2).fill(''), str].join(' '); -const uppercase = (str: string) => str[0].toUpperCase() + str.slice(1); - -const buildFrontmatter = (frontmatter: FrontMatter = {}, depth = 0): string[] => Object.entries(frontmatter).reduce((arr, [key, val]) => { - if (typeof val === 'object') { - return arr.concat(...[ - `${key}:`, - ...buildFrontmatter(val, depth + 1), - ].map(str => indent(str, depth))); - } - return arr.concat(indent(`${key}: ${val}`, depth)); -}, [] as string[]); - -const parseContents = async (key: string, frontmatter: FrontMatter = {}) => { - const readmePath = path.resolve(EXAMPLES_DIR, key, 'README.md'); - const contents = await readFile(readmePath, 'utf-8'); - const frontmatterContents = [ - ...buildFrontmatter(frontmatter), - ]; - return [ - '---', - ...frontmatterContents, - '---', - '', - contents, - ].filter(Boolean).join('\n'); -} - -const copyAssets = async (targetDir: string, key: string) => { - const srcAssetsDir = path.resolve(EXAMPLES_DIR, key, 'assets'); - if (existsSync(srcAssetsDir)) { - const targetAssetsDir = path.resolve(targetDir, 'assets'); - await mkdirp(targetAssetsDir); - const assets = await readdir(srcAssetsDir); - await Promise.all(assets.map(async asset => { - const assetPath = path.resolve(srcAssetsDir, asset); - await copyFile(assetPath, path.resolve(targetAssetsDir, asset)); - })); - } -} - -const copyReadmesToDocs = async (exampleOrder: string[], examplesByName: Record, dest: string) => { - await Promise.all(exampleOrder.map(async (key) => { - const example = examplesByName[key]; - if (!example) { - throw new Error(`No example found for key ${key}`); - } - const { - frontmatter, - } = example; - - const { - parent, - category, - } = frontmatter; - if (!isCategory(category)) { - throw new Error(`Category is not valid: ${category}, for key ${key}`); - } - if (parent !== undefined && typeof parent !== 'string') { - throw new Error(`Parent is not of type string: ${parent}`); - } - const targetDir = path.resolve(...[dest, category, parent].filter(Boolean)); - - // copy assets - await copyAssets(targetDir, key); - - // write readme - const targetPath = path.resolve(targetDir, 
`${key}.md`); - await mkdirp(path.dirname(targetPath)); - const fileContents = await parseContents(key, frontmatter); - await writeFile(targetPath, fileContents, 'utf-8'); - })); -} - -const writeIndexFile = async (exampleOrder: string[], examplesByName: Record, dest: string) => { - const examplesByCategory = exampleOrder.reduce((obj, example) => { - const { frontmatter: { parent, category } } = examplesByName[example]; - if (!isCategory(category)) { - throw new Error(`Category is not valid: ${category}, for key ${example}`); - } - if (parent !== undefined && typeof parent !== 'string') { - throw new Error(`Parent is not of type string: ${parent}`); - } - return { - ...obj, - [category]: (obj[category] || []).concat([[parent ? uppercase(parent) : undefined, example]]), - } - }, {} as Record>); - - const content = [ - '---', - 'hide_table_of_contents: true', - '---', - `# Guides`, - `This page contains a list of guides and examples for using various features of UpscalerJS.`, - ``, - `The first two guides discuss the basics of UpscalerJS and how to use it in a project. The [Models](browser/models) and [Working with Tensors](browser/tensors) guides discuss useful configuration options of UpscalerJS.`, - ``, - `There are also guides on [improving the performance](#performance) of UpscalerJS, [specific examples of implementations](#implementations), and [Node.js-specific](#node) guides.`, - ``, - ...Object.entries(examplesByCategory).map(([category, examples]) => { - let activeParent: undefined | string; - return `\n## ${uppercase(category)}\n\n${examples.map(([parent, example], i) => { - const { title } = examplesByName[example]; - const url = [ - '/documentation', - 'guides', - category, - parent, - example - ].filter(Boolean).join('/'); - let strings: string[] = []; - if (activeParent !== parent) { - activeParent = parent; - strings.push(`- ### ${parent}`); - } - strings.push(indent(`- [${title}](${url})`, activeParent ? 
1 : 0)); - return strings.join('\n'); - }).join('\n')}`; - }), - ].join('\n'); - - await writeFile(path.resolve(dest, 'index.md'), content, 'utf-8'); -} - -/**** - * Main function - */ -export const buildGuides = async (dest: string, { shouldClearMarkdown }: SharedArgs = {}) => { - await mkdirp(dest) - if (shouldClearMarkdown) { - await clearOutMarkdownFiles(dest); - } - const { exampleOrder, examplesByName } = getExamplesByName(); - - await Promise.all([ - copyReadmesToDocs(exampleOrder, examplesByName, dest), - writeIndexFile(exampleOrder, examplesByName, dest), - ]); -} - -/**** - * Functions to expose the main function as a CLI tool - */ - -if (require.main === module) { - (async () => { - const sharedArgs = await getSharedArgs(); - await buildGuides(EXAMPLES_DOCS_DEST, { ...sharedArgs }); - })(); -} diff --git a/scripts/package-scripts/docs/link-model-readmes.ts b/scripts/package-scripts/docs/link-model-readmes.ts deleted file mode 100644 index 608039f59..000000000 --- a/scripts/package-scripts/docs/link-model-readmes.ts +++ /dev/null @@ -1,235 +0,0 @@ -/***** - * Script for linking model readmes locally in docs folder - */ -import path from 'path'; -import { copy, existsSync, mkdirp, readFile, unlinkSync, writeFile, writeFileSync } from 'fs-extra'; -import { DOCS_DIR, MODELS_DIR } from '../utils/constants'; -import { getAllAvailableModelPackages } from "../utils/getAllAvailableModels"; -import { getSharedArgs, SharedArgs } from './types'; -import { clearOutMarkdownFiles } from './utils/clear-out-markdown-files'; - -/**** - * Types - */ - -interface PackageWithMetadata { - description: string; - sidebarPosition: number; - enhancedSrc: string; - unenhancedSrc: string; - category: string; - packageName: string; -} - -/**** - * Utility functions - */ - -const copyAssets = async (packageName: string, targetDir: string) => { - const packagePath = path.resolve(MODELS_DIR, packageName, 'assets'); - const targetPath = path.resolve(targetDir, packageName); - await copy(packagePath, targetPath); -} - -const createMarkdown = async (contents: string, targetPath: string) => writeFile(targetPath, contents, 'utf-8'); - -const linkAllModelReadmes = async (packages: string[], targetAssetDir: string, targetDocDir: string, verbose?: boolean) => { - for (let i = 0; i < packages.length; i++) { - const packageName = packages[i]; - const packagePath = path.resolve(MODELS_DIR, packageName); - const docMdxPath = path.resolve(packagePath, 'DOC.mdx'); - - if (existsSync(docMdxPath)) { - const docMdxContents = await readFile(docMdxPath, 'utf-8'); - const category = getCategory(packageName, docMdxContents); - - const targetPath = path.resolve(targetDocDir, category, `${packageName}.mdx`); - await mkdirp(path.dirname(targetPath)); - // try { - // unlinkSync(targetPath); - // } catch (err) { } - await copyAssets(packageName, targetAssetDir); - await createMarkdown(await readFile(docMdxPath, 'utf-8'), targetPath); - if (verbose) { - console.log(`** Linked: ${packageName}`); - } - } else if (verbose) { - console.log(`** Does not have a DOC.mdx file: ${packageName}`) - } - } -}; - -const getDescription = (readmeContents: string) => { - const lines = readmeContents.split('\n'); - let description = ''; - let startedDescription = false; - for (const line of lines) { - if (line.startsWith('# ')) { - startedDescription = true; - } else if (line.startsWith('## ')) { - startedDescription = false; - break; - } else if (!line.startsWith(' `${part[0].toUpperCase()}${part.slice(1)}`; - -const getSidebarPosition = (packageName: 
string, readmeContents: string) => { - const lines = readmeContents.split('\n'); - for (const line of lines) { - if (line.startsWith('sidebar_position: ')) { - const pos = line.split('sidebar_position: ').pop() || ''; - return parseInt(pos, 10); - } - } - throw new Error(`Could not find sidebar position for package name ${packageName}`); -}; - -const getEnhancedSrc = (packageName: string, readmeContents: string) => { - const lines = readmeContents.split('\n'); - for (const line of lines) { - if (line.startsWith('enhanced_src: ')) { - return line.split('enhanced_src: ').pop() || ''; - } - } - - throw new Error(`Could not find enhanced_src for package name ${packageName}`); -}; - -const getCategory = (packageName: string, readmeContents: string) => { - const lines = readmeContents.split('\n'); - for (const line of lines) { - if (line.startsWith('category: ')) { - return line.split('category: ').pop() || ''; - } - } - - throw new Error(`Could not find category for package name ${packageName}`); -}; - -const getPackageMetadata = async (packageName: string) => { - const packagePath = path.resolve(MODELS_DIR, packageName); - const docMdxPath = path.resolve(packagePath, 'DOC.mdx'); - const docMdxContents = await readFile(docMdxPath, 'utf-8'); - return { - description: getDescription(docMdxContents), - sidebarPosition: getSidebarPosition(packageName, docMdxContents), - enhancedSrc: getEnhancedSrc(packageName, docMdxContents), - unenhancedSrc: `${packageName}/fixture.png`, - category: getCategory(packageName, docMdxContents), - }; -}; - -const getAllPackagesOrganizedByCategory = async (packageNames: string[]): Promise<{ category: string, packages: PackageWithMetadata[] }[]> => { - const packages = await getAllPackagesWithMetadata(packageNames); - - const packagesByCategory = packages.reduce>>((obj, pkg) => { - const { category, sidebarPosition } = pkg; - if (!obj[category]) { - obj[category] = {}; - } - obj[category][sidebarPosition] = pkg; - return obj; - }, {}); - - return Object.keys(packagesByCategory).map(category => { - const packageSidebarPositions = Object.keys(packagesByCategory[category]).sort(); - const packages = packagesByCategory[category]; - - return { - category, - packages: packageSidebarPositions.map(position => packages[position]), - } - }); -}; - -const getAllPackagesWithMetadata = async (packageNames: string[]): Promise => { - const packagesWithValidReadme = packageNames.filter(packageName => { - const packagePath = path.resolve(MODELS_DIR, packageName); - const readmePath = path.resolve(packagePath, 'DOC.mdx'); - return existsSync(readmePath); - }); - const packagesWithMetadata = await Promise.all(packagesWithValidReadme.map(async (packageName) => ({ - packageName, - ...(await getPackageMetadata(packageName)), - }))); - - return packagesWithMetadata; -}; - -const writeModelIndexFile = async (packageNames: string[], targetAssetDir: string) => { - const packagesByCategory = getAllPackagesOrganizedByCategory(packageNames); - const contents = ` ---- -title: Models -description: An overview of available UpscalerJS Models -sidebar_position: 1 -sidebar_label: Overview -pagination_next: null -pagination_prev: null -hide_title: true ---- -View this page on the UpscalerJS website - -# Models - -UpscalerJS offers a number of available models. With the exception of \`default-model\`, these models must be explicitly installed alongside UpscalerJS. 
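Editor's note: the getSidebarPosition, getEnhancedSrc, and getCategory helpers in the deleted link-model-readmes.ts above all follow the same convention — scan the DOC.mdx contents line by line for a `key: ` prefix and return whatever follows it. A minimal sketch of that shared pattern as a single generic reader is below; the helper name `readFrontmatterField` and the usage lines are illustrative, not part of the original script.

```ts
// Sketch: generic frontmatter-field reader following the line-prefix convention
// used by getSidebarPosition / getEnhancedSrc / getCategory above.
const readFrontmatterField = (packageName: string, contents: string, key: string): string => {
  for (const line of contents.split('\n')) {
    if (line.startsWith(`${key}: `)) {
      // everything after the first "key: " is the value
      return line.slice(`${key}: `.length).trim();
    }
  }
  throw new Error(`Could not find ${key} for package name ${packageName}`);
};

// Hypothetical usage, mirroring the fields the script reads from each DOC.mdx:
// const category = readFrontmatterField('esrgan-slim', docMdxContents, 'category');
// const enhancedSrc = readFrontmatterField('esrgan-slim', docMdxContents, 'enhanced_src');
// const sidebarPosition = parseInt(readFrontmatterField('esrgan-slim', docMdxContents, 'sidebar_position'), 10);
```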
- -import ModelCard from '@site/src/components/modelCards/modelCard/modelCard'; -import ModelCards from '@site/src/components/modelCards/modelCards'; - -${(await packagesByCategory).map(({ category, packages }) => ` -## ${category.split('-').map(uppercase)} - - - ${packages.map(({ packageName, description, unenhancedSrc, enhancedSrc } ) => ` - - `).join('\n')} - -`).join('\n')} - - `; - await writeFile(path.resolve(DOCS_DIR, 'docs', 'models', 'index.md'), contents.trim(), 'utf-8'); -}; - -/**** - * Main function - */ -const linkModelReadmes = async ({ shouldClearMarkdown, verbose }: SharedArgs = {}) => { - const packages = getAllAvailableModelPackages(); - const targetAssetDir = path.resolve(DOCS_DIR, `assets/assets/sample-images`); - const targetDocDir = path.resolve(DOCS_DIR, `docs/models/available`); - if (shouldClearMarkdown) { - await clearOutMarkdownFiles(targetDocDir, verbose); - } - - await writeModelIndexFile(packages, targetAssetDir); - if (verbose) { - console.log('Wrote model index file'); - } - await linkAllModelReadmes(packages, targetAssetDir, targetDocDir, verbose); -}; - -/**** - * Functions to expose the main function as a CLI tool - */ - -if (require.main === module) { - (async () => { - const sharedArgs = await getSharedArgs(); - await linkModelReadmes({ ...sharedArgs }); - })(); -} diff --git a/scripts/package-scripts/docs/tense-checks.ts b/scripts/package-scripts/docs/tense-checks.ts deleted file mode 100644 index aa964d3ca..000000000 --- a/scripts/package-scripts/docs/tense-checks.ts +++ /dev/null @@ -1,112 +0,0 @@ -/***** - * Script for checking tense in docs markdown files - */ -import { sync } from 'glob'; -import { readFile } from 'fs-extra'; -import path from 'path'; -import { DOCS_DIR, MODELS_DIR } from '../utils/constants'; -import { getAllAvailableModelPackages } from "../utils/getAllAvailableModels"; - -/**** - * Constants - */ - -const EXCLUDED_DIRECTORIES = [ - 'node_modules', - 'blog', -]; - -/**** - * Utility functions - */ - -const getDocumentationFiles = (): string[] => { - return sync(path.resolve(DOCS_DIR, `**/*.{md,mdx}`)).filter(file => { - return EXCLUDED_DIRECTORIES.reduce((include, dir) => { - return !include ? 
false : !file.includes(dir);
-    }, true);
-  });
-};
-
-// split a markdown file's contents into two concatenated strings,
-// one containing the main content of the file, the other containing
-// just the asides
-const splitFileContents = (contents: string): [string, string] => {
-  const nonAsides = [];
-  const asides = [];
-  let isAside = false;
-  for (const line of contents.split('\n')) {
-    if (line.startsWith(':::')) {
-      isAside = !isAside;
-    } else {
-      if (isAside) {
-        asides.push(line);
-      } else {
-        nonAsides.push(line);
-      }
-    }
-  }
-  return [nonAsides.join('\n'), asides.join('\n')];
-};
-
-// check that a chunk of text matches a specific tense
-const checkTense = (contents: string, expectedTense: 'third' | 'second') => {
-  if (expectedTense === 'third') {
-    // const matches = contents.match(/(Y|y)ou|(Y|y)our|(M|m)ine|(M|m)y/g);
-    return contents.match(/\b(I |I'm|me|my|mine|you|your|yours|yourself|yourselves)\b/g);
-  } else if (expectedTense === 'second') {
-    return contents.match(/\b(I |I'm|me|my|mine|we|us|our|ours|ourselves)\b/g);
-  }
-  throw new Error(`Unexpected tense: ${expectedTense}`);
-}
-
-const checkFileForTense = async (file: string) => {
-  const contents = await readFile(file, 'utf-8');
-  if (file.includes('documentation/api') || file.includes('troubleshooting')) {
-    const matches = checkTense(contents, 'second');
-    if (matches !== null) {
-      return [
-        `Found inconsistent tenses in file ${file}:`,
-        '',
-        `Main content should be second person, found following keywords: ${matches.join('|')}`,
-      ].join('\n');
-    }
-  } else {
-    const [mainContents, asides] = splitFileContents(contents);
-    const mainMatches = checkTense(mainContents, 'third');
-    const asidesMatches = checkTense(asides, 'second');
-    if (mainMatches !== null || asidesMatches !== null) {
-      return [
-        `Found inconsistent tenses in file ${file}:`,
-        '',
-        ...(mainMatches !== null ? [
-          `Main content should be third person, found following keywords: ${mainMatches.join('|')}`,
-        ] : []),
-        ...(asidesMatches !== null ?
[ - `Asides content should be second person, found following keywords: ${asidesMatches.join('|')}`, - ] : []), - ].join('\n'); - } - } - return undefined; -} - -/**** - * Main function - */ -const tenseChecks = async () => { - const files = getDocumentationFiles(); - const errors = (await Promise.all(files.map(checkFileForTense))).filter(Boolean); - - if (errors.length) { - throw new Error(errors.join('\n\n\n')); - } -} - -/**** - * Functions to expose the main function as a CLI tool - */ - -if (require.main === module) { - tenseChecks(); -} diff --git a/scripts/package-scripts/docs/types.ts b/scripts/package-scripts/docs/types.ts deleted file mode 100644 index 77766cb56..000000000 --- a/scripts/package-scripts/docs/types.ts +++ /dev/null @@ -1,17 +0,0 @@ -import yargs from 'yargs'; - -export interface SharedArgs { - shouldClearMarkdown?: boolean; - verbose?: boolean; -} - -export const getSharedArgs = async (): Promise => { - const argv = await yargs(process.argv.slice(2)).options({ - shouldClearMarkdown: { type: 'boolean' }, - verbose: { type: 'boolean' }, - }).argv; - - return { - ...argv, - } -}; diff --git a/scripts/package-scripts/docs/utils/clear-out-markdown-files.ts b/scripts/package-scripts/docs/utils/clear-out-markdown-files.ts deleted file mode 100644 index 08d120ed5..000000000 --- a/scripts/package-scripts/docs/utils/clear-out-markdown-files.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { glob } from 'glob'; -import { unlink } from 'fs-extra'; - -const getAllMarkdownFiles = (target: string) => glob(`${target}/**/*.md?(x)`); - -export const clearOutMarkdownFiles = async (target: string, verbose?: boolean) => { - const files = await getAllMarkdownFiles(target); - if (files.length > 0) { - await Promise.all(files.map(file => unlink(file))); - if (verbose) { - console.log([ - `Cleared out ${files.length} markdown files, including:`, - ...files.map(file => file.split(/docs\/documentation\//gi).pop()).map(file => `- ${file}`), - ].join('\n')); - } - } -}; diff --git a/scripts/package-scripts/find-all-packages.ts b/scripts/package-scripts/find-all-packages.ts deleted file mode 100644 index 54853561c..000000000 --- a/scripts/package-scripts/find-all-packages.ts +++ /dev/null @@ -1,39 +0,0 @@ -import fs from 'fs'; -import path from 'path'; - -const ROOT = path.resolve(__dirname, '../..'); -const EXCLUDED = ['node_modules', 'scratch']; -const MAX_DEPTH = 100; - -const findAllPackages = (dir: string, excluded: string[] = [], depth = 0): Array => { - let packages: Array = []; - if (depth > MAX_DEPTH) { - throw new Error('Maximum depth reached'); - } - const files = fs.readdirSync(dir); - for (let i = 0; i < files.length; i++) { - const file = files[i]; - const fullFile = path.resolve(dir, file); - if (file === 'package.json') { - const strippedFile = fullFile.split(`${ROOT}/`).pop(); - if (!strippedFile) { - throw new Error(`Error with file ${fullFile}`); - } - packages.push(strippedFile); - } else if (!EXCLUDED.includes(file) && !excluded.includes(fullFile)) { - const stat = fs.statSync(fullFile); - if (stat && stat.isDirectory()) { - const dirFiles = findAllPackages(fullFile, excluded, depth + 1); - packages = packages.concat(dirFiles); - } - } - } - return packages; -}; - -export default findAllPackages; - -if (require.main === module) { - const packages = findAllPackages(ROOT); - console.log(packages); -} diff --git a/scripts/package-scripts/prompt/getModel.ts b/scripts/package-scripts/prompt/getModel.ts deleted file mode 100644 index 2d27b60ad..000000000 --- 
a/scripts/package-scripts/prompt/getModel.ts +++ /dev/null @@ -1,31 +0,0 @@ -import inquirer from 'inquirer'; -import { getAllAvailableModelPackages } from '../utils/getAllAvailableModels'; - -export const AVAILABLE_MODELS = getAllAvailableModelPackages(); - -export const getModel = async (model?: string | number | (string | number)[], all?: unknown) => { - if (all === true) { - const modelPackages = getAllAvailableModelPackages(); - return modelPackages; - } - - if (typeof model == 'string') { - return [model]; - } - - if (Array.isArray(model)) { - return model.map(m => `${m}`); - } - - const { models } = await inquirer.prompt<{ - models: string[] - }>([ - { - type: 'checkbox', - name: 'models', - message: 'Which models do you want to build?', - choices: AVAILABLE_MODELS, - }, - ]); - return models; -} diff --git a/scripts/package-scripts/prompt/getNumber.ts b/scripts/package-scripts/prompt/getNumber.ts deleted file mode 100644 index 0a068a5ee..000000000 --- a/scripts/package-scripts/prompt/getNumber.ts +++ /dev/null @@ -1,17 +0,0 @@ -import inquirer from 'inquirer'; - -export const getNumber = async (message: string, arg?: unknown) => { - if (typeof arg == 'number') { - return arg; - } - - const response = await inquirer.prompt<{ - arg: number; - }>([ - { - name: 'arg', - message, - }, - ]); - return response.arg; -}; diff --git a/scripts/package-scripts/prompt/getPlatform.ts b/scripts/package-scripts/prompt/getPlatform.ts deleted file mode 100644 index 0a38bc087..000000000 --- a/scripts/package-scripts/prompt/getPlatform.ts +++ /dev/null @@ -1,25 +0,0 @@ -import inquirer from 'inquirer'; -import { Platform } from "./types"; - -export const AVAILABLE_PLATFORMS = ['node', 'node-gpu', 'browser']; - -const isValidPlatform = (platform?: string | number): platform is Platform => { - return typeof platform === 'string' && AVAILABLE_PLATFORMS.includes(platform); -}; - -export const getPlatform = async (platforms?: string | number) => { - if (isValidPlatform(platforms)) { - return platforms; - } - - const { value } = await inquirer.prompt([ - { - type: 'list', - name: 'value', - message: 'Which platforms do you want to build for?', - choices: AVAILABLE_PLATFORMS, - }, - ]); - return value; -} - diff --git a/scripts/package-scripts/prompt/getString.ts b/scripts/package-scripts/prompt/getString.ts deleted file mode 100644 index c26100580..000000000 --- a/scripts/package-scripts/prompt/getString.ts +++ /dev/null @@ -1,37 +0,0 @@ -import inquirer from 'inquirer'; - -export const getString = async (message: string, arg?: unknown) => { - if (typeof arg == 'string') { - return arg; - } - - const response = await inquirer.prompt<{ - arg: string - }>([ - { - name: 'arg', - message, - }, - ]); - return response.arg; -} - -export const getStringArray = async (message: string, arg?: unknown) => { - if (typeof arg === 'string') { - return [arg]; - } - - if (Array.isArray(arg) && arg.length > 0) { - return arg; - } - - const response = await inquirer.prompt<{ - arg: string - }>([ - { - name: 'arg', - message, - }, - ]); - return response.arg.split(' ').filter(Boolean); -} diff --git a/scripts/package-scripts/prompt/ifDefined.ts b/scripts/package-scripts/prompt/ifDefined.ts deleted file mode 100644 index 678ac9a99..000000000 --- a/scripts/package-scripts/prompt/ifDefined.ts +++ /dev/null @@ -1 +0,0 @@ -export function ifDefined(argv: Record, key: string, type: string) { return typeof argv[key] === type ? 
argv[key] as T: undefined; } diff --git a/scripts/package-scripts/prompt/prompt.ts b/scripts/package-scripts/prompt/prompt.ts deleted file mode 100644 index 197e0ba25..000000000 --- a/scripts/package-scripts/prompt/prompt.ts +++ /dev/null @@ -1,43 +0,0 @@ -import yargs, { Options } from 'yargs'; -import inquirer, { QuestionCollection } from 'inquirer'; - -export interface PromptOption extends Options { - name: string; - isValidType?: IsValidType; - prompt?: QuestionCollection; -}; - -async function prompt(...options: PromptOption[]) { - const yargsOptions: { - [key: string]: Options; - } = options.reduce((obj, option) => { - const { name, isValidType, ...yargsOption } = option; - return { - ...obj, - [name]: yargsOption, - }; - }, {}); - const argv = await yargs(process.argv.slice(2)).options(yargsOptions).argv; - - for (let i = 0; i < options.length; i++) { - const option = options[i]; - if (option.isValidType && option.prompt) { - argv[option.name] = await getArg(argv[option.name], option.isValidType, option.prompt); - } - } - return argv; -} - -export type IsValidType = (arg: any) => arg is ExpectedType; - -async function getArg(defaultArg: ExpectedType, isValidType: IsValidType, promptOption: QuestionCollection): Promise { - if (isValidType(defaultArg)) { - return defaultArg; - } - const { arg } = await inquirer.prompt<{ - arg: ExpectedType; - }>(promptOption); - return arg; -} - -export default prompt; diff --git a/scripts/package-scripts/scaffold-dependencies.ts b/scripts/package-scripts/scaffold-dependencies.ts deleted file mode 100644 index e3a1ab770..000000000 --- a/scripts/package-scripts/scaffold-dependencies.ts +++ /dev/null @@ -1,176 +0,0 @@ -import yargs from 'yargs'; -import fs from 'fs'; -import path from 'path'; -import { getPackageJSON } from './utils/packages'; -import { JSONSchemaForNPMPackageJsonFiles } from '@schemastore/package'; - -/**** - * Type Definitions - */ -export type Platform = 'browser' | 'node' | 'node-gpu'; - -export type TFJSDependency = '@tensorflow/tfjs' | '@tensorflow/tfjs-node' | '@tensorflow/tfjs-node-gpu'; - -type ContentFn = (arg: { - packageJSON: JSONSchemaForNPMPackageJsonFiles; -}) => string; -type Content = string | ContentFn; -interface File { - name: string; - contents: Content[]; -} - -export interface ScaffoldDependenciesConfig { - files: File[]; -} - -/**** - * Constants - */ -const ROOT = path.resolve(__dirname, `../..`); - -/**** - * Dependency-specific utility functions - */ -export const getPlatformSpecificTensorflow = (platform?: Platform): TFJSDependency | undefined => { - if (platform === undefined) { - return undefined; - } - if (platform === 'node') { - return '@tensorflow/tfjs-node'; - } - if (platform === 'node-gpu') { - return '@tensorflow/tfjs-node-gpu'; - } - return '@tensorflow/tfjs'; -} - -/**** - * File OS utility functions - */ - -const writeFile = (filename: string, content: string) => fs.writeFileSync(filename, content); - -const writeLines = (filename: string, content: Array) => writeFile(filename, `${content.map(l => l.trim()).join('\n')}\n`); - -/**** - * Functions for scaffolding platform-specific files - */ -const getFilePath = (file: string, platform: Platform) => `${file}.${platform === 'browser' ? 
'browser' : 'node'}.ts`; - -const findPlatformSpecificFiles = (folder: string) => new Set(fs.readdirSync(folder).filter(file => { - return /(.*).(browser|node).ts$/.test(file) -}).map(file => file.split('.').slice(0, -2).join('.'))); - - -export const scaffoldPlatformSpecificFiles = (folderSrc: string, platform: Platform, { verbose }: { verbose?: boolean } = {}) => { - const files = findPlatformSpecificFiles(folderSrc); - if (verbose) { - console.log([ - 'Scaffolding the following files:', - ...Array.from(files).map(file => `- ${file}.generated.ts`), - ].join('\n')) - } - files.forEach(file => scaffoldPlatformSpecificFile(folderSrc, file, platform)); -} - -const scaffoldPlatformSpecificFile = (src: string, file: string, platform: Platform) => { - const srcFile = path.resolve(src, getFilePath(file, platform)); - if (!fs.existsSync(srcFile)) { - throw new Error(`File ${srcFile} does not exist`) - } - const targetFile = path.resolve(src, `${file}.generated.ts`); - try { fs.unlinkSync(targetFile); } catch(err) {} - fs.symlinkSync(srcFile, targetFile, 'file'); -}; - -/**** - * Utility methods - */ -export function loadScaffoldDependenciesConfig(filePath: string): Promise<{ - default: ScaffoldDependenciesConfig -}> { - return import(filePath); -} - -/**** - * Main function - */ - -type ScaffoldDependencies = ( - packageRoot: string, - config: ScaffoldDependenciesConfig, -) => Promise; -const scaffoldDependencies: ScaffoldDependencies = async ( - packageRoot, - { - files, - }, - ) => { - const PACKAGE_ROOT = path.resolve(ROOT, packageRoot); - const PACKAGE_SRC = path.resolve(PACKAGE_ROOT, 'src'); - const packageJSON = getPackageJSON(PACKAGE_ROOT); - files.forEach(({ name, contents }) => { - const filePath = path.resolve(PACKAGE_SRC, `${name}.generated.ts`); - const lines = contents.map(line => typeof line === 'string' ? 
line : line({ - packageJSON, - })); - writeLines(filePath, lines); - }); -} - -export default scaffoldDependencies; - -/**** - * Functions to expose the main function as a CLI tool - */ - -interface Args { - targetPackage: string; - platform?: Platform; - config: string; -} - -const isPlatform = (platform?: unknown): platform is Platform => typeof platform === 'string' && ['browser', 'node', 'node-gpu'].includes(platform); - -const getPlatform = (platform?: unknown): Platform | undefined => { - if (isPlatform(platform)) { - return platform; - } -} - -const getArgs = async (): Promise => { - const argv = await yargs.command('scaffold-dependencies [platform]', 'scaffold dependencies for a specific platform', yargs => { - yargs.positional('platform', { - describe: 'The platform to target', - }).options({ - src: { type: 'string', demandOption: true }, - config: { type: 'string', demandOption: true }, - }); - }) - .help() - .argv; - - if (typeof argv.src !== 'string') { - throw new Error(`Invalid src, should be a string: ${argv.src}`); - } - - if (typeof argv.config !== 'string') { - throw new Error(`Invalid config, should be a string: ${argv.config}`); - } - - return { - targetPackage: argv.src, - config: argv.config, - platform: getPlatform(argv['_'][0]), - } -} - - -if (require.main === module) { - (async () => { - const argv = await getArgs(); - const { default: config } = await loadScaffoldDependenciesConfig(path.resolve(ROOT, argv.config)); - await scaffoldDependencies(argv.targetPackage, config); - })(); -} diff --git a/scripts/package-scripts/update-dependency.ts b/scripts/package-scripts/update-dependency.ts deleted file mode 100644 index 3b13aba96..000000000 --- a/scripts/package-scripts/update-dependency.ts +++ /dev/null @@ -1,191 +0,0 @@ -import yargs from 'yargs'; -import inquirer from 'inquirer'; -import isValidVersion from './utils/isValidVersion'; -import { - AVAILABLE_PACKAGES, - DIRECTORIES, - getPackageJSONPath, - getPackageJSONValue, - getPreparedFolderName, - Package, - TransformPackageJsonFn, - updateMultiplePackages, - updatePackageJSONForKey, - updateSinglePackage, - } from './utils/packages'; -import { Dependency } from '@schemastore/package'; - -/**** - * Utility functions - */ - -class Logger { - updates: Array = []; - constructor(msg: string) { - this.push(msg); - } - - push(msg: string) { - this.updates.push(msg); - } - - write() { - if (this.updates.length) { - this.updates.forEach(message => console.log(message)) - } - } -} - -const makeSetVersionForPackageJSON = (dependencies: string[], version: string): TransformPackageJsonFn => (packageJSON, dir) => { - const packageJSONKeys = ['dependencies', 'peerDependencies', 'devDependencies', 'pnpm.overrides']; - const logger = new Logger(`- Updated ${getPreparedFolderName(getPackageJSONPath(dir))}`); - for (let i = 0; i < packageJSONKeys.length; i++) { - const packageJSONKey = packageJSONKeys[i]; - const packageJSONListOfDependencies = getPackageJSONValue(packageJSON, packageJSONKey); - if (packageJSONListOfDependencies) { - const gen = getMatchingDependency(dependencies, packageJSONListOfDependencies); - let value = gen.next().value; - while (value) { - const [key] = value; - if (!key) { - throw new Error(`No key found in ${JSON.stringify(value)}`) - } - packageJSONListOfDependencies[key] = version; - value = gen.next().value; - logger.push(` - ${packageJSONKey}: ${key}`); - } - packageJSON = updatePackageJSONForKey(packageJSON, packageJSONKey, packageJSONListOfDependencies) - } - } - logger.write(); - return 
packageJSON; -} - -export function* getMatchingDependency(matchingDependencies: string[], packageJSONListOfDependencies?: Dependency) { - if (packageJSONListOfDependencies) { - const entries = Object.entries(packageJSONListOfDependencies); - for (let i = 0; i < entries.length; i++) { - const [key, val] = entries[i]; - for (let j = 0; j < matchingDependencies.length; j++) { - const matchingDependency = matchingDependencies[j]; - if (key === matchingDependency) { - yield [key, val]; - break; - } - } - } - } -} - -/**** - * Main function - */ -const updateDependency = async (dependencies: string[], version: string, packages: string[]) => { - if (!isValidVersion(version)) { - throw new Error(`Version is not in the format x.x.x. You specified: ${version}`); - } - if (packages.length === 0) { - console.log('No packages selected, nothing to do.') - return; - } - - const setVersionForPackageJSON = makeSetVersionForPackageJSON(dependencies, version); - - await Promise.all(packages.map(packageKey => { - const pkg = DIRECTORIES[packageKey]; - if (pkg === undefined) { - throw new Error(`Package ${packageKey} is not defined.`); - } - const { multiple, directory } = pkg; - const fn = multiple ? updateMultiplePackages : updateSinglePackage; - return fn(directory, setVersionForPackageJSON); - })); -}; - -export default updateDependency; - -/**** - * Functions to expose the main function as a CLI tool - */ - -interface Args { - dependency: string; - version: string; - packages: Package[]; -} - -const getDependency = (dependency?: unknown) => { - if (typeof dependency === 'string') { - return dependency; - } - - return inquirer.prompt([ - { - name: 'dependency', - message: `Specify the dependency to update`, - }, - ]).then(r => r.dependency); -} - -const getVersion = (version?: unknown) => { - if (typeof version === 'string') { - return version; - } - - return inquirer.prompt([ - { - name: 'version', - message: `Specify the version to update to`, - }, - ]).then(r => r.version); -}; - -const isPackages = (packages?: unknown): packages is Package[] => { - return !!(Array.isArray(packages) && packages.length && typeof packages[0] === 'string'); -} - -const getPackages = (packages?: unknown) => { - if (isPackages(packages)) { - return packages; - } - - return inquirer.prompt([ - { - type: 'checkbox', - name: 'packages', - message: 'Which packages do you want to update?', - choices: AVAILABLE_PACKAGES, - }, - ]).then(r => r.packages); -} - -const getArgs = async (): Promise => { - const argv = await yargs.command('update-dependency ', 'update dependency', yargs => { - yargs.positional('dependency', { - describe: 'The dependency to update', - }).positional('version', { - describe: 'The version to update to', - }).options({ - packages: { type: 'string' }, - }); - }) - .help() - .argv; - - const dependency = await getDependency(argv._[0]); - const version = await getVersion(argv.version); - const packages = await getPackages(argv.packages); - - return { - dependency, - version, - packages, - } -} - -if (require.main === module) { - (async () => { - const { dependency, version, packages } = await getArgs(); - await updateDependency([dependency], version, packages); - })(); -} diff --git a/scripts/package-scripts/update-npm-dependencies.ts b/scripts/package-scripts/update-npm-dependencies.ts deleted file mode 100644 index aeddf54fd..000000000 --- a/scripts/package-scripts/update-npm-dependencies.ts +++ /dev/null @@ -1,118 +0,0 @@ -import { exec as _exec, ExecOptions } from 'child_process'; -import yargs from 'yargs'; 
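Editor's note: the update-dependency script above rewrites every matching entry across a package.json's dependencies, peerDependencies, devDependencies, and pnpm.overrides sections by draining the getMatchingDependency generator. A self-contained sketch of that consumption pattern follows; it assumes a plain dependency map rather than the script's package helpers, and the package names and versions in the usage example are placeholders.

```ts
type Dependency = Record<string, string>;

// Yield [name, version] pairs for dependencies whose names appear in the given list.
function* matchingDependencies(names: string[], deps: Dependency = {}) {
  for (const [key, val] of Object.entries(deps)) {
    if (names.includes(key)) {
      yield [key, val] as const;
    }
  }
}

// Rewrite every matching entry to the target version, echoing how
// makeSetVersionForPackageJSON consumes the generator above.
const setVersions = (deps: Dependency, names: string[], version: string): Dependency => {
  const next = { ...deps };
  for (const [key] of matchingDependencies(names, deps)) {
    next[key] = version;
  }
  return next;
};

// Illustrative usage: bump every TFJS entry in a dependency map.
const updated = setVersions(
  { '@tensorflow/tfjs': '4.1.0', 'some-other-dep': '1.0.0' },
  ['@tensorflow/tfjs', '@tensorflow/tfjs-node'],
  '4.2.0',
);
// updated['@tensorflow/tfjs'] === '4.2.0'; unmatched entries are untouched.
```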
-import path from 'path'; -import { ifDefined as _ifDefined } from './prompt/ifDefined'; -import { glob } from 'glob'; -import { ROOT_DIR } from './utils/constants'; -import asyncPool from "tiny-async-pool"; - -/**** - * Types - */ -interface PNPMLSItem { - name: string; - version: string; - path: string; - private: boolean; -} - -/**** - * Constants - */ -const NUMBER_OF_CONCURRENT_THREADS = 5; - -/**** - * Utility functions - */ -const exec = async (cmd: string, { verbose, ...opts }: { verbose?: boolean; } & ExecOptions = {}) => new Promise((resolve, reject) => { - let output = ''; - const spawnedProcess = _exec(cmd, opts, (error) => { - if (error) { - reject(error); - } else { - resolve(output); - } - }); - - if (verbose) { - spawnedProcess.stdout?.pipe(process.stdout); - } -}); -const getOutput = async (cmd: string, { ...opts }: ExecOptions = {}) => new Promise((resolve, reject) => { - let output = ''; - const spawnedProcess = _exec(cmd, opts, (error) => { - if (error) { - reject(error); - } else { - resolve(output); - } - }); - - spawnedProcess.stdout?.on('data', chunk => { - output += chunk; - }); -}); - -const getPNPMPackages = async (): Promise => JSON.parse(await getOutput('pnpm m ls --json --depth=-1')); - -const getAllNonPNPMPackages = async () => { - const packages = new Set((await getPNPMPackages()).map((pkg) => `${pkg.path.split(`${ROOT_DIR}/`).pop()}/package.json`)); - const files = await glob('**/package.json', { - ignore: [ - 'node_modules/**', - '**/node_modules/**', - '**/scratch/**', - '**/dev/browser/public/**', - '**/examples/react/**', - ], - }); - return files.filter(file => !packages.has(file) && file !== 'package.json'); -} - -/**** - * Main function - */ -const updateNPMDependencies = async ({ verbose }: Args) => { - const filteredFiles = await getAllNonPNPMPackages(); - for await (const _ of asyncPool(NUMBER_OF_CONCURRENT_THREADS, filteredFiles, async (file: string) => { - const output = await exec('npm update --save', { - cwd: path.resolve(ROOT_DIR, path.dirname(file)), - verbose, - }); - })) { } -}; - -export default updateNPMDependencies; - -/**** - * Functions to expose the main function as a CLI tool - */ - -interface Args { - verbose?: boolean; -} - -const getArgs = async (): Promise => { - const argv = await yargs.command('update-dependency ', 'update dependency', yargs => { - yargs.option('v', { - alias: 'verbose', - type: 'boolean', - packages: { type: 'string' }, - }); - }) - .help() - .argv; - - function ifDefined(key: string, type: string) { return _ifDefined(argv, key, type) as T; } - - return { - verbose: ifDefined('v', 'boolean'), - } -} - -if (require.main === module) { - (async () => { - const { verbose } = await getArgs(); - await updateNPMDependencies({ verbose }); - })(); -} diff --git a/scripts/package-scripts/update-tfjs.ts b/scripts/package-scripts/update-tfjs.ts deleted file mode 100644 index cea364e46..000000000 --- a/scripts/package-scripts/update-tfjs.ts +++ /dev/null @@ -1,101 +0,0 @@ -import yargs from 'yargs'; -import path from 'path'; -import inquirer, { DistinctQuestion } from 'inquirer'; -import { - AVAILABLE_PACKAGES, - getPackageJSON, - Package, -} from './utils/packages'; -import updateDependency, { getMatchingDependency } from './update-dependency'; -import { ROOT_DIR } from './utils/constants'; - -/**** - * Constants - */ -const TFJS_PACKAGES = [ - '@tensorflow/tfjs', - '@tensorflow/tfjs-node', - '@tensorflow/tfjs-node-gpu', - '@tensorflow/tfjs-layers', - '@tensorflow/tfjs-core', -]; - -/**** - * Main function - */ - -const 
updateTFJS = (version: string, packages: Package[]) => updateDependency(TFJS_PACKAGES, version, packages); - -export default updateTFJS; - -/**** - * Functions to expose the main function as a CLI tool - */ -const getTFJSVersion = (dir: string = ROOT_DIR): string => { - const packageJSON = getPackageJSON(dir); - const deps = packageJSON.peerDependencies; - const gen = getMatchingDependency(TFJS_PACKAGES, deps); - const matchingTFJS = gen.next().value; - if (!matchingTFJS) { - throw new Error(`Could not find a dependency matching @tensorflow/tfjs in ${dir}`); - } - const [_, val] = matchingTFJS; - if (!val) { - throw new Error(`No value was found in ${JSON.stringify(matchingTFJS)}`); - } - return val; -}; - -type TypecheckFunction = (value?: unknown) => value is T; -function getArg(typecheckFunction: TypecheckFunction, question: { name: string } & DistinctQuestion) { - return (value?: unknown) => typecheckFunction(value) ? value : inquirer.prompt(question).then(r => r[question.name]); -}; - -const isVersion = (version?: unknown): version is string => typeof version === 'string'; -const getVersion = getArg(isVersion, { - name: 'value', - message: `Specify the version to update to`, - default: getTFJSVersion(), -}); - -const isPackages = (packages?: unknown): packages is Package[] => { - return !!(Array.isArray(packages) && packages.length && typeof packages[0] === 'string'); -} -const getPackages = getArg(isPackages, { - type: 'checkbox', - name: 'packages', - message: 'Which packages do you want to update?', - choices: AVAILABLE_PACKAGES, -}); - -interface Args { - version: string; - packages: Package[]; -} - -const getArgs = async (): Promise => { - const argv = await yargs.command('update-dependency ', 'update dependency', yargs => { - yargs.positional('version', { - describe: 'The version to update to', - }).options({ - packages: { type: 'string' }, - }); - }) - .help() - .argv; - - const version = await getVersion(argv.version); - const packages = await getPackages(argv.packages); - - return { - version, - packages, - } -} - -if (require.main === module) { - (async () => { - const { version, packages } = await getArgs(); - await updateTFJS(version, packages); - })(); -} diff --git a/scripts/package-scripts/update-version.ts b/scripts/package-scripts/update-version.ts deleted file mode 100644 index 59f09a464..000000000 --- a/scripts/package-scripts/update-version.ts +++ /dev/null @@ -1,128 +0,0 @@ -import inquirer from 'inquirer'; -import isValidVersion from './utils/isValidVersion'; -import { AVAILABLE_PACKAGES, DIRECTORIES, EXAMPLES, getPackageJSON, getPackageJSONPath, getPreparedFolderName, Package, PackageUpdaterLogger, ROOT, TransformPackageJsonFn, updateMultiplePackages, updateSinglePackage, UPSCALER_JS, WRAPPER } from './utils/packages'; -import { ROOT_DIR } from './utils/constants'; - -/**** - * Type Definitions - */ -type Answers = { packages: Array, version: string, updateDependencies?: boolean, } - -/**** - * Constants - */ -const logger: PackageUpdaterLogger = (file: string) => { - return `- Updated ${getPreparedFolderName(getPackageJSONPath(file))}`; -} - -const makeSetVersionForPackageJSON = (version: string): TransformPackageJsonFn => (packageJSON) => { - packageJSON.version = version; - return packageJSON; -} - -const getVersion = (dir: string) => { - return getPackageJSON(dir).version; -}; - -const getCurrentVersions = () => { - const upscalerJSVersion = getVersion(DIRECTORIES[UPSCALER_JS].directory); - const rootVersion = getVersion(DIRECTORIES[ROOT].directory); - 
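Editor's note: the update-tfjs script above resolves each CLI argument with the same pattern several of these scripts use — accept the yargs-provided value when it passes a type guard, otherwise fall back to an inquirer prompt. A stand-alone sketch of that pattern follows; the helper name `argOrPrompt` and the prompt wording are illustrative, not exports of the deleted script.

```ts
import inquirer from 'inquirer';

type Guard<T> = (value: unknown) => value is T;

// Take the CLI-supplied value when it passes the guard; otherwise ask interactively.
const argOrPrompt = <T>(guard: Guard<T>, message: string) =>
  async (value?: unknown): Promise<T> => {
    if (guard(value)) {
      return value;
    }
    const { answer } = await inquirer.prompt<{ answer: T }>([{ name: 'answer', message }]);
    return answer;
  };

const isVersion = (value: unknown): value is string => typeof value === 'string';
const getVersion = argOrPrompt(isVersion, 'Specify the version to update to');

// getVersion(argv.version) resolves immediately when --version was supplied,
// and falls back to an interactive prompt otherwise.
```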
return [ - `root: ${rootVersion}`, - `upscaler: ${upscalerJSVersion}`, - ].join(' | '); -}; - -const updateVersion = (): Promise => new Promise(resolve => { - inquirer.prompt([ - { - name: 'version', - message: `Specify the version you wish to change to:\n(${getCurrentVersions()})\n`, - default: getVersion(ROOT_DIR), - }, - { - type: 'checkbox', - name: 'packages', - message: 'Which packages do you want to update?', - choices: AVAILABLE_PACKAGES, - }, - { - name: 'updateDependencies', - message: `Since UpscalerJS's version will be updated, do you also want to update packages (like examples) that reference it?`, - type: 'confirm', - default: true, - when: ({ packages }: Omit) => packages.includes('UpscalerJS'), - }, - // { - // name: 'commit', - // message: `Do you wish to commit changes`, - // type: 'confirm', - // default: true, - // }, - ]).then(async ({ version, packages, - // commit, - updateDependencies }) => { - if (!isValidVersion(version)) { - throw new Error(`Version is not in the format x.x.x. You specified: ${version}`); - } - if (packages.length === 0) { - console.log('No packages selected, nothing to do.') - return; - } - - const setVersionForPackageJSON = makeSetVersionForPackageJSON(version); - - for (let i = 0; i < packages.length; i++) { - const packageKey = packages[i]; - const pkg = DIRECTORIES[packageKey]; - if (pkg === undefined) { - throw new Error(`Package ${packageKey} is not defined.`); - } - const { multiple, directory } = pkg; - const fn = multiple ? updateMultiplePackages : updateSinglePackage; - - await fn(directory, setVersionForPackageJSON, logger); - } - if (updateDependencies) { - const dependencyDirectories = [DIRECTORIES[EXAMPLES], DIRECTORIES[WRAPPER]]; - const dependencyUpdater: TransformPackageJsonFn = packageJSON => { - const deps = packageJSON.dependencies; - if (deps) { - return { - ...packageJSON, - dependencies: { - ...deps, - 'upscaler': version, - } - }; - } - return packageJSON; - } - const dependencyLogger: PackageUpdaterLogger = dir => { - return `- Updated "upscaler" dependency in ${getPreparedFolderName(dir)}`; - }; - for (let i = 0; i < dependencyDirectories.length; i++) { - const { directory, multiple } = dependencyDirectories[i]; - const fn = multiple ? 
updateMultiplePackages : updateSinglePackage; - fn(directory, dependencyUpdater, dependencyLogger); - } - } - // if (commit) { - // const cmd = `git commit -m "Updated version to ${version} for ${formatArray(packages)}"`; - // await new Promise(resolve => setTimeout(resolve, 100)); - // try { - // await execute(cmd); - // } catch(err) { - // console.error('*******', err) - // throw err; - // } - // } - resolve(); - }); -}); - -export default updateVersion; - -if (require.main === module) { - updateVersion(); -} diff --git a/scripts/package-scripts/utils/ProgressBar.ts b/scripts/package-scripts/utils/ProgressBar.ts deleted file mode 100644 index 423c913ad..000000000 --- a/scripts/package-scripts/utils/ProgressBar.ts +++ /dev/null @@ -1,28 +0,0 @@ -const crimsonProgressBar = require("crimson-progressbar"); - -export class ProgressBar { - total: number; - i: number = 0; - - constructor(total: number) { - this.total = total; - crimsonProgressBar.renderProgressBar(0, total); - } - - update(i?: number) { - if (i !== undefined) { - if (i < 1) { - crimsonProgressBar.renderProgressBar(i * this.total, this.total); - } else { - crimsonProgressBar.renderProgressBar(i, this.total); - } - } else { - this.i += 1; - crimsonProgressBar.renderProgressBar(this.i, this.total); - } - } - - end() { - console.log('\n'); - } -} diff --git a/scripts/package-scripts/utils/babelTransform.ts b/scripts/package-scripts/utils/babelTransform.ts deleted file mode 100644 index 9a4505154..000000000 --- a/scripts/package-scripts/utils/babelTransform.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { transformAsync } from '@babel/core'; -import fs from 'fs'; -import { getAllFilesRecursively } from "./getAllFilesRecursively"; - -export const babelTransform = async (directory: string) => { - const files = getAllFilesRecursively(directory, file => file.endsWith('.js')); - - await Promise.all(files.map(async filePath => { - const contents = fs.readFileSync(filePath, 'utf-8'); - const transformedCode = await transformAsync(contents, { - plugins: [ - "@babel/plugin-transform-modules-commonjs", - "babel-plugin-add-module-exports", - "@babel/plugin-proposal-export-namespace-from", - ], - }); - fs.writeFileSync(filePath, transformedCode?.code || ''); - })); -}; diff --git a/scripts/package-scripts/utils/compile.ts b/scripts/package-scripts/utils/compile.ts deleted file mode 100644 index 52183b7fb..000000000 --- a/scripts/package-scripts/utils/compile.ts +++ /dev/null @@ -1,48 +0,0 @@ -import path from 'path'; -// import ts, { ProjectReference } from 'typescript'; -import callExec from '../../../test/lib/utils/callExec'; -import { OutputFormat } from '../prompt/types'; - -// export function _old_compile(rootNames: string[], options: ts.CompilerOptions, projectReferences?: Array) { -// let program = ts.createProgram({ -// rootNames, -// options, -// projectReferences, -// }); -// let emitResult = program.emit(); - -// let allDiagnostics = ts -// .getPreEmitDiagnostics(program) -// .concat(emitResult.diagnostics); - -// allDiagnostics.forEach(diagnostic => { -// if (diagnostic.file) { -// let { line, character } = ts.getLineAndCharacterOfPosition(diagnostic.file, diagnostic.start!); -// let message = ts.flattenDiagnosticMessageText(diagnostic.messageText, "\n"); -// console.log(`${diagnostic.file.fileName} (${line + 1},${character + 1}): ${message}`); -// } else { -// console.log(ts.flattenDiagnosticMessageText(diagnostic.messageText, "\n")); -// } -// }); - -// return emitResult; -// }; - -type CompileTypescriptOpts = { - outDir?: string; - 
verbose?: boolean; -} -type CompileTypescript = (modelFolder: string, outputFormat: OutputFormat, opts?: CompileTypescriptOpts) => Promise; - -export const compileTypescript: CompileTypescript = (modelFolder: string, outputFormat: OutputFormat, { - outDir, - verbose, -} = {}) => callExec([ - `tsc`, - `-p`, - path.resolve(modelFolder, `tsconfig.${outputFormat}.json`), - outDir ? `--outDir ${outDir}` : undefined, -].filter(Boolean).join(' '), { - verbose, - cwd: modelFolder, -}); diff --git a/scripts/package-scripts/utils/constants.ts b/scripts/package-scripts/utils/constants.ts deleted file mode 100644 index 15e184042..000000000 --- a/scripts/package-scripts/utils/constants.ts +++ /dev/null @@ -1,18 +0,0 @@ -import path from 'path'; - -export const ROOT_DIR = path.resolve(__dirname, '../../../'); -export const MODELS_DIR = path.resolve(ROOT_DIR, 'models'); -export const EXAMPLES_DIR = path.resolve(ROOT_DIR, 'examples'); -export const TEST_DIR = path.resolve(ROOT_DIR, 'test'); -export const FIXTURES_DIR = path.resolve(TEST_DIR, '__fixtures__'); - -export const DOCS_DIR = path.resolve(ROOT_DIR, 'docs'); -export const ASSETS_DIR = path.resolve(DOCS_DIR, 'assets/assets'); - -export const TMP_DIR = path.resolve(ROOT_DIR, 'tmp'); -export const DEV_DIR = path.resolve(ROOT_DIR, 'dev'); -export const PACKAGES_DIR = path.resolve(ROOT_DIR, 'packages'); - -export const UPSCALER_DIR = path.resolve(PACKAGES_DIR, 'upscalerjs'); -export const SHARED_DIR = path.resolve(PACKAGES_DIR, 'shared'); -export const WRAPPER_DIR = path.resolve(PACKAGES_DIR, 'upscalerjs-wrapper'); diff --git a/scripts/package-scripts/utils/execute.ts b/scripts/package-scripts/utils/execute.ts deleted file mode 100644 index 78f668e0d..000000000 --- a/scripts/package-scripts/utils/execute.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { exec } from 'child_process'; -const execute = (cmd: string, { cwd, }: { cwd?: string } = {}): Promise => new Promise((resolve, reject) => { - const spawnedProcess = exec(cmd, { - cwd, - }, (error: Error | null) => { - if (error) { - reject(error.message); - } else { - resolve(); - } - }); - spawnedProcess.stderr?.pipe(process.stderr); - spawnedProcess.stdout?.pipe(process.stdout); -}) -export default execute; - diff --git a/scripts/package-scripts/utils/getAllAvailableModels.js b/scripts/package-scripts/utils/getAllAvailableModels.js deleted file mode 100644 index 6fce535d8..000000000 --- a/scripts/package-scripts/utils/getAllAvailableModels.js +++ /dev/null @@ -1,123 +0,0 @@ -"use strict"; -var __assign = (this && this.__assign) || function () { - __assign = Object.assign || function(t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) - t[p] = s[p]; - } - return t; - }; - return __assign.apply(this, arguments); -}; -var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getFilteredModels = exports.getAllAvailableModels = exports.getAllAvailableModelPackages = void 0; -var fs_extra_1 = require("fs-extra"); -var path_1 = __importDefault(require("path")); -var getPackageJSONExports_1 = require("./getPackageJSONExports"); -var ROOT = path_1.default.resolve(__dirname, '../../../'); -var MODELS_DIR = path_1.default.resolve(ROOT, 'models'); -var EXCLUDED = ['dist', 'types', 'node_modules', 'docs']; -var jsonParse = function (fileName) { return JSON.parse((0, fs_extra_1.readFileSync)(fileName, 'utf-8')); }; -var getAllAvailableModelPackages = function (includeExperimental) { - if (includeExperimental === void 0) { includeExperimental = false; } - return (0, fs_extra_1.readdirSync)(MODELS_DIR).filter(function (file) { - var _a, _b; - var modelDir = path_1.default.resolve(MODELS_DIR, file); - if (EXCLUDED.includes(file) || !(0, fs_extra_1.lstatSync)(modelDir).isDirectory()) { - return false; - } - var packageJSONPath = path_1.default.resolve(modelDir, 'package.json'); - if (!(0, fs_extra_1.existsSync)(packageJSONPath)) { - return false; - } - if (includeExperimental === false) { - var packageJSON = JSON.parse((0, fs_extra_1.readFileSync)(packageJSONPath, 'utf-8')); - var experimental = (_b = (_a = packageJSON['@upscalerjs']) === null || _a === void 0 ? void 0 : _a['model']) === null || _b === void 0 ? void 0 : _b['experimental']; - return experimental !== true; - } - return true; - }); -}; -exports.getAllAvailableModelPackages = getAllAvailableModelPackages; -var getAllAvailableModels = function (packageName) { - var modelPackageDir = path_1.default.resolve(MODELS_DIR, packageName); - var umdNames = jsonParse(path_1.default.resolve(modelPackageDir, 'umd-names.json')); - var packageJSONPath = path_1.default.resolve(modelPackageDir, 'package.json'); - var packageJSON = JSON.parse((0, fs_extra_1.readFileSync)(packageJSONPath, 'utf8')); - return (0, getPackageJSONExports_1.getPackageJSONExports)(modelPackageDir).map(function (_a) { - var key = _a[0], pathName = _a[1]; - var umdName = umdNames[key]; - if (umdName === undefined) { - throw new Error("No UMD name defined for ".concat(packageName, "/umd-names.json for ").concat(key)); - } - var availableModel = { - export: key, - esm: key.substring(2), - cjs: key.substring(2), - umd: umdName, - pathName: pathName, - 'umd:main': packageJSON['umd:main'], - mainUMDName: umdNames['.'], - }; - return availableModel; - }); -}; -exports.getAllAvailableModels = getAllAvailableModels; -var getFilteredModels = function (_a) { - var _b = _a === void 0 ? {} : _a, specificModel = _b.specificModel, specificPackage = _b.specificPackage, _c = _b.filter, filter = _c === void 0 ? function () { return true; } : _c, _d = _b.includeExperimental, includeExperimental = _d === void 0 ? 
false : _d; - var filteredPackagesAndModels = (0, exports.getAllAvailableModelPackages)(includeExperimental).reduce(function (arr, packageName) { - var models = (0, exports.getAllAvailableModels)(packageName); - return arr.concat(models.map(function (model) { - return [packageName, model]; - })); - }, []) - .filter(function (_a) { - var packageName = _a[0], model = _a[1]; - if (specificPackage !== undefined) { - return packageName === specificPackage; - } - return true; - }) - .filter(function (_a) { - var _ = _a[0], model = _a[1]; - if (specificModel !== undefined) { - return model.esm === specificModel; - } - return true; - }) - .filter(function (_a) { - var packageName = _a[0], model = _a[1]; - return filter(packageName, model); - }); - if (filteredPackagesAndModels.length === 0) { - var allPackages = (0, exports.getAllAvailableModelPackages)().map(function (packageName) { - return __spreadArray([ - "- ".concat(packageName) - ], (0, exports.getAllAvailableModels)(packageName).map(function (m) { return " - ".concat(m.esm); }), true).join('\n'); - }); - throw new Error(__spreadArray([ - 'No models were found for filter', - 'Available models:' - ], allPackages, true).join('\n')); - } - var filteredPackagesAndModelsObj = filteredPackagesAndModels.reduce(function (obj, _a) { - var _b; - var packageName = _a[0], model = _a[1]; - return (__assign(__assign({}, obj), (_b = {}, _b[packageName] = (obj[packageName] || []).concat([model]), _b))); - }, {}); - return Object.entries(filteredPackagesAndModelsObj); -}; -exports.getFilteredModels = getFilteredModels; diff --git a/scripts/package-scripts/utils/getAllAvailableModels.ts b/scripts/package-scripts/utils/getAllAvailableModels.ts deleted file mode 100644 index e232b5aee..000000000 --- a/scripts/package-scripts/utils/getAllAvailableModels.ts +++ /dev/null @@ -1,117 +0,0 @@ -import { readdirSync, lstatSync, readFileSync, existsSync } from 'fs-extra'; -import path from 'path'; -import { getPackageJSONExports, PackageJSONExport } from './getPackageJSONExports'; - -const ROOT = path.resolve(__dirname, '../../../'); -const MODELS_DIR = path.resolve(ROOT, 'models'); -const EXCLUDED = ['dist', 'types', 'node_modules', 'docs']; - -const jsonParse = (fileName: string) => JSON.parse(readFileSync(fileName, 'utf-8')) - -export const getAllAvailableModelPackages = (includeExperimental = false): Array => readdirSync(MODELS_DIR).filter(file => { - const modelDir = path.resolve(MODELS_DIR, file); - if (EXCLUDED.includes(file) || !lstatSync(modelDir).isDirectory()) { - return false; - } - - const packageJSONPath = path.resolve(modelDir, 'package.json'); - - if (!existsSync(packageJSONPath)) { - return false; - } - - if (includeExperimental === false) { - const packageJSON = JSON.parse(readFileSync(packageJSONPath, 'utf-8')); - const experimental = packageJSON['@upscalerjs']?.['model']?.['experimental']; - return experimental !== true; - } - - return true; -}); - -export interface AvailableModel { - export: string; - esm: string; - cjs: string; - umd: string; - pathName: string | PackageJSONExport; - 'umd:main': string; - mainUMDName: string; -} - -export const getAllAvailableModels = (packageName: string): AvailableModel[] => { - const modelPackageDir = path.resolve(MODELS_DIR, packageName); - const umdNames = jsonParse(path.resolve(modelPackageDir, 'umd-names.json')); - const packageJSONPath = path.resolve(modelPackageDir, 'package.json'); - const packageJSON = JSON.parse(readFileSync(packageJSONPath, 'utf8')); - return 
getPackageJSONExports(modelPackageDir).map(([key, pathName]) => { - const umdName = umdNames[key]; - if (umdName === undefined) { - throw new Error(`No UMD name defined for ${packageName}/umd-names.json for ${key}`); - } - const availableModel: AvailableModel = { - export: key, - esm: key.substring(2), - cjs: key.substring(2), - umd: umdName, - pathName, - 'umd:main': packageJSON['umd:main'], - mainUMDName: umdNames['.'], - }; - return availableModel; - }); -}; - -export const getFilteredModels = ({ - specificModel, - specificPackage, - filter = () => true, - includeExperimental = false, -}: { - specificPackage?: string; - specificModel?: string; - filter?: (packageName: string, model: AvailableModel) => boolean; - includeExperimental?: boolean; -} = {}): [string, AvailableModel[]][] => { - const filteredPackagesAndModels = getAllAvailableModelPackages(includeExperimental).reduce((arr, packageName) => { - const models = getAllAvailableModels(packageName); - return arr.concat(models.map(model => { - return [packageName, model]; - })); - }, [] as ([string, AvailableModel])[]) - .filter(([packageName, model]) => { - if (specificPackage !== undefined) { - return packageName === specificPackage; - } - return true; - }) - .filter(([_, model]) => { - if (specificModel !== undefined) { - return model.esm === specificModel; - } - return true; - }) - .filter(([packageName, model]) => { - return filter(packageName, model); - }); - if (filteredPackagesAndModels.length === 0) { - const allPackages = getAllAvailableModelPackages().map(packageName => { - return [ - `- ${packageName}`, - ...getAllAvailableModels(packageName).map(m => ` - ${m.esm}`), - ].join('\n'); - }); - throw new Error([ - 'No models were found for filter', - 'Available models:', - ...allPackages, - ].join('\n')); - } - - const filteredPackagesAndModelsObj = filteredPackagesAndModels.reduce>((obj, [packageName, model]) => ({ - ...obj, - [packageName]: (obj[packageName] || []).concat([model]), - }), {}); - - return Object.entries(filteredPackagesAndModelsObj); -}; diff --git a/scripts/package-scripts/utils/getAllFilesRecursively.ts b/scripts/package-scripts/utils/getAllFilesRecursively.ts deleted file mode 100644 index 6e79d60a5..000000000 --- a/scripts/package-scripts/utils/getAllFilesRecursively.ts +++ /dev/null @@ -1,17 +0,0 @@ -import path from 'path'; -import fs from 'fs'; - -type Filter = (file: string) => boolean; - -export const getAllFilesRecursively = (directory: string, filter: Filter = () => true): string[] => { - return fs.readdirSync(directory).map(file => path.resolve(directory, file)).reduce((arr, file) => { - const stat = fs.statSync(file); - if (stat && stat.isDirectory()) { - return arr.concat(getAllFilesRecursively(file, filter)); - } - if (filter(file)) { - return arr.concat([file]); - } - return arr; - }, [] as string[]) -} diff --git a/scripts/package-scripts/utils/getHashedName.ts b/scripts/package-scripts/utils/getHashedName.ts deleted file mode 100644 index 52cb11a3e..000000000 --- a/scripts/package-scripts/utils/getHashedName.ts +++ /dev/null @@ -1,3 +0,0 @@ -import crypto from 'crypto'; - -export const getHashedName = (contents: string) => crypto.createHash('md5').update(contents).digest('hex'); diff --git a/scripts/package-scripts/utils/getPackageJSONExports.js b/scripts/package-scripts/utils/getPackageJSONExports.js deleted file mode 100644 index 09bb20690..000000000 --- a/scripts/package-scripts/utils/getPackageJSONExports.js +++ /dev/null @@ -1,46 +0,0 @@ -"use strict"; -var __importDefault = (this && 
this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getPackageJSONExports = void 0;
-var path_1 = __importDefault(require("path"));
-var fs_1 = __importDefault(require("fs"));
-var shouldIncludeExportName = function (exportName) {
-    if (exportName === '.') {
-        return false;
-    }
-    // TODO: Rethink whether we should deselect any node or node-gpu exports.
-    // It seems like the exports field is doing double duty.
-    if (exportName.endsWith('node') || exportName.endsWith('node-gpu')) {
-        return false;
-    }
-    return true;
-};
-var isPackageJSONExports = function (exports) {
-    if (typeof exports !== 'object' || exports === null) {
-        return false;
-    }
-    ;
-    return Object.entries(exports).reduce(function (isValid, _a) {
-        var exportName = _a[0], exportValue = _a[1];
-        return isValid === false ? false : typeof exportValue === 'string' || (typeof exportValue === 'object' && 'require' in exportValue && 'import' in exportValue);
-    }, true);
-};
-var getPackageJSONExports = function (modelFolder) {
-    var packageJSONPath = path_1.default.resolve(modelFolder, 'package.json');
-    var packageJSON = fs_1.default.readFileSync(packageJSONPath, 'utf8');
-    var exports = JSON.parse(packageJSON).exports;
-    if (isPackageJSONExports(exports)) {
-        var entries = Object.entries(exports);
-        if (entries.length === 1) {
-            return entries;
-        }
-        return entries.filter(function (_a) {
-            var exportName = _a[0];
-            return shouldIncludeExportName(exportName);
-        });
-    }
-    throw new Error("Invalid exports field in package json for ".concat(modelFolder, "}: ").concat(JSON.stringify(exports)));
-};
-exports.getPackageJSONExports = getPackageJSONExports;
diff --git a/scripts/package-scripts/utils/getPackageJSONExports.ts b/scripts/package-scripts/utils/getPackageJSONExports.ts
deleted file mode 100644
index 5fdccef47..000000000
--- a/scripts/package-scripts/utils/getPackageJSONExports.ts
+++ /dev/null
@@ -1,47 +0,0 @@
-import path from 'path';
-import fs from 'fs';
-
-const shouldIncludeExportName = (exportName: string) => {
-  if (exportName === '.') {
-    return false;
-  }
-
-  // TODO: Rethink whether we should deselect any node or node-gpu exports.
-  // It seems like the exports field is doing double duty.
-  if (exportName.endsWith('node') || exportName.endsWith('node-gpu')) {
-    return false;
-  }
-
-  return true;
-}
-
-export type PackageJSONExport = string | {
-  umd: string;
-  require: string;
-  import: string;
-};
-
-const isPackageJSONExports = (exports: unknown): exports is {
-  [index: string]: PackageJSONExport;
-} => {
-  if (typeof exports !== 'object' || exports === null) {
-    return false;
-  };
-  return Object.entries(exports).reduce((isValid, [exportName, exportValue]) => {
-    return isValid === false ? false : typeof exportValue === 'string' || (typeof exportValue === 'object' && 'require' in exportValue && 'import' in exportValue);
-  }, true);
-}
-
-export const getPackageJSONExports = (modelFolder: string): Array<[string, PackageJSONExport]> => {
-  const packageJSONPath = path.resolve(modelFolder, 'package.json');
-  const packageJSON = fs.readFileSync(packageJSONPath, 'utf8');
-  const { exports } = JSON.parse(packageJSON);
-  if (isPackageJSONExports(exports)) {
-    const entries = Object.entries(exports);
-    if (entries.length === 1) {
-      return entries;
-    }
-    return entries.filter(([exportName]) => shouldIncludeExportName(exportName));
-  }
-  throw new Error(`Invalid exports field in package json for ${modelFolder}}: ${JSON.stringify(exports)}`);
-};
diff --git a/scripts/package-scripts/utils/isValidVersion.ts b/scripts/package-scripts/utils/isValidVersion.ts
deleted file mode 100644
index c58d50596..000000000
--- a/scripts/package-scripts/utils/isValidVersion.ts
+++ /dev/null
@@ -1,23 +0,0 @@
-const splitParts = (version: string) => {
-  try {
-    const firstPart = version.split('-')[0];
-    return firstPart.split(".");
-  } catch(err) {
-    console.error(`Could not split version ${version}`);
-    throw err;
-  }
-}
-export default (version: string) => {
-  const parts = splitParts(version);
-  if (parts.length !== 3) {
-    return false;
-  }
-  for (let i = 0; i < 3; i++) {
-    try {
-      parseInt(parts[i], 10);
-    } catch(err) {
-      return false;
-    }
-  }
-  return true;
-}
diff --git a/scripts/package-scripts/utils/packages.ts b/scripts/package-scripts/utils/packages.ts
deleted file mode 100644
index 3268f141f..000000000
--- a/scripts/package-scripts/utils/packages.ts
+++ /dev/null
@@ -1,107 +0,0 @@
-import fs from 'fs';
-import path from 'path';
-import findAllPackages from '../find-all-packages';
-import { JSONSchemaForNPMPackageJsonFiles } from '@schemastore/package';
-import { DEV_DIR, DOCS_DIR, EXAMPLES_DIR, MODELS_DIR, PACKAGES_DIR, ROOT_DIR, TEST_DIR, UPSCALER_DIR, WRAPPER_DIR } from './constants';
-
-interface FakeExports {
-  [index: string]: string | FakeExports;
-}
-
-export type JSONSchema = JSONSchemaForNPMPackageJsonFiles & {
-  exports: FakeExports;
-};
-
-const DIRNAME = __dirname;
-
-export type Package = 'UpscalerJS' | 'Models' | 'Test' | 'Examples' | 'Root' | 'Wrapper' | 'Dev' | 'Packages';
-export type TransformPackageJsonFn = (packageJSON: JSONSchema, dir: string) => JSONSchema;
-export type PackageUpdaterLogger = (file: string) => (string | undefined);
-
-export const UPSCALER_JS = 'UpscalerJS';
-export const ROOT = 'Root';
-export const WRAPPER = 'Wrapper';
-export const EXAMPLES = 'Examples';
-export const MODELS = 'Models';
-export const TEST = 'Test';
-export const DEV = 'Development';
-export const DOCS = 'Docs';
-export const PACKAGES = 'Packages';
-
-export const AVAILABLE_PACKAGES = [ ROOT, UPSCALER_JS, MODELS, EXAMPLES, TEST, WRAPPER, DEV, DOCS, PACKAGES ];
-
-export const DIRECTORIES: Record = {
-  [ROOT]: { directory: ROOT_DIR },
-  [UPSCALER_JS]: { directory: UPSCALER_DIR },
-  [MODELS]: { directory: MODELS_DIR, multiple: true },
-  [EXAMPLES]: { directory: EXAMPLES_DIR, multiple: true },
-  [TEST]: { directory: TEST_DIR, multiple: true },
-  [WRAPPER]: { directory: WRAPPER_DIR },
-  [DEV]: { directory: DEV_DIR, multiple: true },
-  [DOCS]: { directory: DOCS_DIR, multiple: true },
-  [PACKAGES]: { directory: PACKAGES_DIR, multiple: true },
-}
-
-export const getPreparedFolderName = (file: string) => {
-  return file.split(`${ROOT_DIR}/`).pop();
-};
-
-export const getPackageJSONPath = 
(file: string) => { - if (file.endsWith('package.json')) { - return file; - } - return path.resolve(file, 'package.json'); -} - -export const writePackageJSON = (file: string, contents: Record>) => { - const stringifiedContents = `${JSON.stringify(contents, null, 2)}\n`; - fs.writeFileSync(getPackageJSONPath(file), stringifiedContents); -}; - -export const getPackageJSON = (file: string): JSONSchema => JSON.parse(fs.readFileSync(getPackageJSONPath(file), 'utf-8')); - -const defaultTransform: TransformPackageJsonFn = (packageJSON) => packageJSON; - -const defaultLogger: PackageUpdaterLogger = (file: string) => undefined; - -export const updateMultiplePackages = async (dir: string, transform: TransformPackageJsonFn = defaultTransform, logger: PackageUpdaterLogger = defaultLogger) => { - const packages = findAllPackages(dir, [path.resolve(DOCS_DIR, 'assets')]); - for (let i = 0; i < packages.length; i++) { - const pkg = path.resolve(ROOT_DIR, packages[i]); - await updateSinglePackage(pkg, transform, logger); - } -}; - -export const updateSinglePackage = async (dir: string, transform: TransformPackageJsonFn = defaultTransform, logger: PackageUpdaterLogger = defaultLogger) => { - const packageJSON = getPackageJSON(dir); - writePackageJSON(dir, transform(packageJSON, dir)); - const message = logger(dir); - if (message) { - console.log(message); - } -}; - -export const getPackageJSONValue = (packageJSON: JSONSchema, depKey: string) => { - return depKey.split('.').reduce((json, key) => json?.[key], packageJSON); -} - -type Value = JSONSchema[keyof JSONSchema]; -export const updatePackageJSONForKey = (packageJSON: JSONSchema, key: string, val: Value): JSONSchema => { - return getObj(packageJSON, key.split('.'), val); -} - -function getObj>(obj: T, parts: string[], val: Value): T { - if (parts.length === 1) { - return { - ...obj, - [parts[0]]: { - ...obj[parts[0]], - ...val, - } - }; - } - return { - ...obj, - [parts[0]]: getObj(obj[parts[0]], parts.slice(1), val), - } -} diff --git a/scripts/package-scripts/utils/rollup.ts b/scripts/package-scripts/utils/rollup.ts deleted file mode 100644 index 0158771c8..000000000 --- a/scripts/package-scripts/utils/rollup.ts +++ /dev/null @@ -1,93 +0,0 @@ -import fs from 'fs'; -import path from 'path'; -import { OutputOptions, rollup, RollupBuild, RollupOptions } from 'rollup'; - -export async function rollupBuild(inputOptions: RollupOptions, outputOptionsList: Array, dist: string) { - let bundle: RollupBuild | undefined = undefined; - let buildFailed = false; - try { - // create a bundle - bundle = await rollup({ - ...inputOptions, - onwarn: (warning) => { - if (warning.code === 'MIXED_EXPORTS') { - throw new Error(warning.message); - } else { - console.warn(warning); - throw new Error(warning.message); - } - } - }); - - // // an array of file names this bundle depends on - // console.log(bundle.watchFiles); - - await generateOutputs(bundle, outputOptionsList, dist); - } catch (error) { - buildFailed = true; - // do some error reporting - console.error(error); - } - if (bundle) { - // closes the bundle - await bundle.close(); - } - if (buildFailed) { - throw new Error('build failed'); - } -} - -async function generateOutputs(bundle: RollupBuild, outputOptionsList: Array, dist: string) { - for (const outputOptions of outputOptionsList) { - // generate output specific code in-memory - // you can call this function multiple times on the same bundle object - // replace bundle.generate with bundle.write to directly write to disk - const { output } = await 
bundle.generate(outputOptions); - - for (const chunkOrAsset of output) { - // console.log('chunk or asset', chunkOrAsset) - if (chunkOrAsset.type === 'asset') { - // For assets, this contains - // { - // fileName: string, // the asset file name - // source: string | Uint8Array // the asset source - // type: 'asset' // signifies that this is an asset - // } - // fs.writeFileSync(fileN) - console.log('Asset', chunkOrAsset); - } else { - // For chunks, this contains - // { - // code: string, // the generated JS code - // dynamicImports: string[], // external modules imported dynamically by the chunk - // exports: string[], // exported variable names - // facadeModuleId: string | null, // the id of a module that this chunk corresponds to - // fileName: string, // the chunk file name - // implicitlyLoadedBefore: string[]; // entries that should only be loaded after this chunk - // imports: string[], // external modules imported statically by the chunk - // importedBindings: {[imported: string]: string[]} // imported bindings per dependency - // isDynamicEntry: boolean, // is this chunk a dynamic entry point - // isEntry: boolean, // is this chunk a static entry point - // isImplicitEntry: boolean, // should this chunk only be loaded after other chunks - // map: string | null, // sourcemaps if present - // modules: { // information about the modules in this chunk - // [id: string]: { - // renderedExports: string[]; // exported variable names that were included - // removedExports: string[]; // exported variable names that were removed - // renderedLength: number; // the length of the remaining code in this module - // originalLength: number; // the original length of the code in this module - // code: string | null; // remaining code in this module - // }; - // }, - // name: string // the name of this chunk as used in naming patterns - // referencedFiles: string[] // files referenced via import.meta.ROLLUP_FILE_URL_ - // type: 'chunk', // signifies that this is a chunk - // } - // console.log(chunkOrAsset.fileName) - // console.log(chunkOrAsset.code) - fs.writeFileSync(path.resolve(dist, chunkOrAsset.fileName), chunkOrAsset.code, 'utf-8'); - // console.log('Chunk', chunkOrAsset.modules); - } - } - } -} diff --git a/scripts/package-scripts/utils/runDocker.ts b/scripts/package-scripts/utils/runDocker.ts deleted file mode 100644 index 4c3e38d6d..000000000 --- a/scripts/package-scripts/utils/runDocker.ts +++ /dev/null @@ -1,21 +0,0 @@ -import callExec from "../../../test/lib/utils/callExec"; - -interface Volume { - external: string; - internal: string; - -} -interface DockerOptions { - volumes?: Volume[]; -} -type RunDocker = (dockerImage: string, cmd: string, opts?: DockerOptions) => Promise; -export const runDocker: RunDocker = (dockerImage, cmd, { volumes = [] } = {}) => { - return callExec([ - "docker run --rm", - ...volumes.map(({ internal, external }) => { - return `-v "${external}:${internal}"`; - }), - dockerImage, - cmd, - ].join(' ')); -} diff --git a/scripts/package-scripts/utils/uglify.ts b/scripts/package-scripts/utils/uglify.ts deleted file mode 100644 index d3ae85657..000000000 --- a/scripts/package-scripts/utils/uglify.ts +++ /dev/null @@ -1,13 +0,0 @@ -import fs from 'fs'; -import path from 'path'; -const Uglify = require('uglify-js'); - -export const uglify = (folder: string, filename: string) => { - const filenameWithoutExt = filename.split('.').slice(0, -1).join('.'); - const { code, map } = Uglify.minify(fs.readFileSync(path.resolve(folder, `${filenameWithoutExt}.js`), 'utf-8'), 
{
-    sourceMap: true,
-    // comments: true,
-  });
-  fs.writeFileSync(path.resolve(folder, `${filenameWithoutExt}.min.js`), code, 'utf-8');
-  fs.writeFileSync(path.resolve(folder, `${filenameWithoutExt}.min.js.map`), map, 'utf-8');
-}
diff --git a/scripts/package-scripts/utils/withTmpDir.ts b/scripts/package-scripts/utils/withTmpDir.ts
deleted file mode 100644
index 662751d7b..000000000
--- a/scripts/package-scripts/utils/withTmpDir.ts
+++ /dev/null
@@ -1,39 +0,0 @@
-import path from 'path';
-import { sync as rimraf } from 'rimraf';
-import { existsSync, mkdirpSync } from 'fs-extra';
-import { getHashedName } from './getHashedName';
-import { TMP_DIR } from './constants';
-
-interface WithTmpDirOpts {
-  rootDir?: string;
-  removeTmpDir?: boolean;
-}
-type WithTmpDir = (callback: WithTmpDirFn, opts?: WithTmpDirOpts) => (Promise | void);
-type WithTmpDirFn = (tmp: string) => Promise;
-export const withTmpDir: WithTmpDir = async (callback, { rootDir, removeTmpDir = true } = {}) => {
-  const tmpDir = makeTmpDir(rootDir);
-
-  try {
-    await callback(tmpDir);
-  }
-  finally {
-    try {
-      if (removeTmpDir) {
-        rimraf(tmpDir);
-      }
-    }
-    catch (e) {
-      console.error(`An error has occurred while removing the temp folder at ${tmpDir}. Please remove it manually. Error: ${e}`);
-    }
-  }
-};
-
-export const makeTmpDir = (root = TMP_DIR): string => {
-  const hashedName = getHashedName(`${Math.random()}`);
-  const folder = path.resolve(root, hashedName);
-  mkdirpSync(folder);
-  if (!existsSync(folder)) {
-    throw new Error(`Tmp directory ${folder} was not created`);
-  }
-  return folder;
-};
diff --git a/scripts/package-scripts/validate-build.ts b/scripts/package-scripts/validate-build.ts
deleted file mode 100644
index e5c7004d0..000000000
--- a/scripts/package-scripts/validate-build.ts
+++ /dev/null
@@ -1,124 +0,0 @@
-import path from 'path';
-import yargs from 'yargs';
-import fs from 'fs';
-import { getPackageJSON, JSONSchema } from './utils/packages';
-import { sync } from 'glob';
-
-const ROOT = path.resolve(__dirname, '../..');
-
-/****
- * Utility methods
- */
-const getKeysOfObj = (json: JSONSchema, keys: string[]): Partial => {
-  return keys.reduce((obj, jsonKey) => {
-    if (json[jsonKey]) {
-      return {
-        ...obj,
-        [jsonKey]: json[jsonKey],
-      }
-    };
-    return obj;
-  }, {});
-};
-const getObjAsArray = (obj: Partial): string[] => {
-  return Object.values(obj).reduce((arr, file) => {
-    if (typeof file === 'string') {
-      return arr.concat(file);
-    }
-    return arr.concat(getObjAsArray(file));
-  }, [] as string[]);
-};
-
-export const extractAllFilesFromPackageJSON = (packagePath: string): string[] => {
-  const packageJSON = getPackageJSON(packagePath);
-  return getObjAsArray(getKeysOfObj(packageJSON, [
-    'exports',
-    'main',
-    'module',
-    'types',
-    'umd:main',
-  ]));
-};
-
-/****
- * Main function
- */
-
-const validateBuild = async (packageName: string, include: string[] = [], {
-  includeFilesFromPackageJSON = true,
-}: {
-  includeFilesFromPackageJSON?: Boolean;
-} = { }): Promise> => {
-  const packagePath = path.resolve(ROOT, packageName);
-  const files = new Set([
-    ...(includeFilesFromPackageJSON ? 
extractAllFilesFromPackageJSON(packagePath) : []), - ...include, - ].map(file => path.resolve(packagePath, file))); - const packageDistPath = path.resolve(packagePath, 'dist'); - files.forEach(file => { - if (!fs.existsSync(path.resolve(packageDistPath, file))) { - const existingFiles: string[] = sync(path.resolve(packageDistPath, '**/*')); - // console.log('files that we checked', files); - throw new Error([ - `File ${file} was not built or does not exist.`, - existingFiles.length === 0 ? 'No existing files were found' : `Existing files include: \n${existingFiles.map(f => ` - ${f}`).join('\n')}`, - `Files we are checking include: \n${Array.from(files).map(f => ` - ${f}`).join('\n')}`, - ].join('\n')); - } - }); - return files; -}; - -export default validateBuild; - -/**** - * Functions to expose the main function as a CLI tool - */ - -interface Args { - src: string; - include?: string[]; -} - -const isValidStringArray = (arr: unknown): arr is string[] => Array.isArray(arr) && typeof arr[0] === 'string'; - -const getArgs = async (): Promise => { - const argv = await yargs.command('validate-build [platform]', 'validate a build', yargs => { - yargs.positional('src', { - describe: 'The package to validate', - }).options({ - include: { alias: 'c', type: 'string', demandOption: true }, - }).nargs('include', 0); - }) - .help() - .argv; - - const src = argv['_'][0]; - if (typeof src !== 'string') { - throw new Error('Invalid src'); - } - - const include = argv.c; - if (include !== undefined && !isValidStringArray(include)) { - throw new Error('Is not a valid array') - } - - return { - src, - include: include as string[] | undefined, - } -} - -if (require.main === module) { - (async () => { - const argv = await getArgs(); - const checkedFiles = Array.from(await validateBuild(argv.src, argv.include)); - console.log([ - 'The following files are present: ', - ...checkedFiles.map(file => { - return ` - ${file}`; - }), - ].join('\n')) - })(); -} - diff --git a/scripts/package-scripts/write-model-docs.ts b/scripts/package-scripts/write-model-docs.ts deleted file mode 100644 index a9765f01e..000000000 --- a/scripts/package-scripts/write-model-docs.ts +++ /dev/null @@ -1,171 +0,0 @@ -import { existsSync, readdirSync, readFile, readFileSync, writeFile } from 'fs-extra'; -import path from 'path'; -import yargs from 'yargs'; -import { ifDefined as _ifDefined } from './prompt/ifDefined'; -import { AVAILABLE_MODELS, getModel } from './prompt/getModel'; -import { SHARED_DIR, MODELS_DIR } from './utils/constants'; -import { getPackageJSON, JSONSchema } from './utils/packages'; - -/*** - * Types - */ - -interface Opts { - verbose?: boolean; -} - -/**** - * Utility functions - */ - -const getModelFamily = (packageJSON: JSONSchema) => { - return packageJSON['@upscalerjs']?.['modelFamily']; -}; - -const getSharedDoc = async (modelFamily: string) => { - const sharedDoc = path.resolve(SHARED_DIR, 'src', modelFamily, 'DOC.mdx'); - if (!existsSync(sharedDoc)) { - throw new Error(`File does not exist: ${sharedDoc}`) - } - return await readFile(sharedDoc, 'utf-8'); -}; - -const getSnippets = (model: string): Record => { - const snippets: Record = {}; - const docSnippetsPath = path.resolve(MODELS_DIR, model, 'doc-snippets'); - if (!existsSync(docSnippetsPath)) { - throw new Error(`doc snippets folder does not exist at "${docSnippetsPath}"`) - } - const snippetPaths = readdirSync(docSnippetsPath); - - for (const snippetPath of snippetPaths) { - const snippet = readFileSync(path.resolve(docSnippetsPath, snippetPath), 
'utf-8') || ''; - const snippetKey = snippetPath.split('.').slice(0, -1).join('.'); - if (typeof snippetKey !== 'string') { - throw new Error(`Bad snippet key: ${snippetKey}`) - } - snippets[`snippets/${snippetKey}`] = snippet.trim(); - } - return snippets; - -} - -const getPackageJSONArgs = (model: string, packageJSON: JSONSchema): Record => { - const name = packageJSON.name; - if (!name) { - throw new Error(`No name defined for packageJSON for model ${model}`); - } - return { - key: name.split("@upscalerjs/").pop(), - description: `Overview of @upscalerjs/${model} model`, - title: packageJSON['@upscalerjs']?.title, - ...getSnippets(model) - }; -}; - -const getKey = (match: string) => match.match(/<%(.*)%>/)?.[1].trim(); - -const getPreparedDoc = async (model: string, { verbose }: Opts) => { - const packageJSON = getPackageJSON(path.resolve(MODELS_DIR, model, 'package.json')); - const modelFamily = getModelFamily(packageJSON); - if (!modelFamily) { - throw new Error(`No explicit model family defined in package JSON: ${model}`) - } - - const sharedDoc = await getSharedDoc(modelFamily); - const args = getPackageJSONArgs(model, packageJSON); - const matches = sharedDoc.matchAll(/\<\%.+?\%\>/g); - const chunks: string[] = []; - let start = 0; - for (const match of matches) { - const key = getKey(match[0]); - if (key === undefined) { - throw new Error(`An undefined key was returned from the match "${match[0]}" for model ${model}`); - } else if (!(key in args)) { - throw new Error(`Key "${key}" for model family ${modelFamily} and model ${model} was not found in args. Did you mean to prepend it with 'snippets/'? Args is: ${JSON.stringify(args, null, 2)}}`); - } else if (typeof args[key] !== 'string') { - throw new Error(`Key "${key}" for model family ${modelFamily} and model ${model} is not a string, it is: ${typeof args[key]}`) - } else { - const matchStart = match?.index || 0; - const matchEnd = matchStart + (match[0]?.length || 0); - - chunks.push(sharedDoc.slice(start, matchStart)); - chunks.push(args[key]) - start = matchEnd; - - if (verbose) { - console.log( - `Found ${match[0]} (${key}) start=${match?.index} end=${(match?.index || 0) + match[0]?.length - }.`, - ); - } - } - } - chunks.push(sharedDoc.slice(start)); - return chunks.join(''); -} - -/**** - * Main function - */ - -const writeModelDocs = async ( - models: Array = AVAILABLE_MODELS, - { - verbose = false, - }: Opts = {}, -) => { - if (models.length === 0) { - console.log('No models selected, nothing to do.') - return; - } - - await Promise.all(models.map(async model => { - const updatedDoc = await getPreparedDoc(model, { verbose }); - const targetPath = path.resolve(MODELS_DIR, model, 'DOC.mdx'); - - await readFile(targetPath, 'utf-8'); - - await writeFile(targetPath, updatedDoc); - })); -} - -export default writeModelDocs; - -/**** - * Functions to expose the main function as a CLI tool - */ - -interface Answers extends Opts { - models: Array; -} - -const getArgs = async (): Promise => { - const argv = await yargs.command('build models', 'build models', yargs => { - yargs.positional('model', { - describe: 'The model to build', - array: true, - }).option('v', { - alias: 'verbose', - type: 'boolean', - }); - }) - .help() - .argv; - - const models = await getModel(argv._, argv.model); - - function ifDefined(key: string, type: string) { return _ifDefined(argv, key, type) as T; } - - return { - models, - verbose: ifDefined('v', 'boolean'), - } -} - -if (require.main === module) { - (async () => { - const { models, ...opts } = await 
getArgs(); - await writeModelDocs(models, opts); - })(); -} diff --git a/scripts/package.json b/scripts/package.json deleted file mode 100644 index 30a42ce4e..000000000 --- a/scripts/package.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "name": "@upscalerjs/scripts", - "private": true, - "engines": { - "node": ">=20.0.0" - }, - "scripts": { - "__run_command": "ts-node --esm --project ./tsconfig.json" - }, - "dependencies": { - "@types/glob": "^8.1.0", - "@upscalerjs/default-model": "workspace:*", - "@upscalerjs/esrgan-legacy": "workspace:*", - "@upscalerjs/pixel-upsampler": "workspace:*", - "front-matter": "^4.0.2", - "glob": "^10.3.3", - "tsc-alias": "^1.8.7", - "typedoc": "^0.24.8", - "upscaler": "workspace:*" - }, - "devDependencies": { - "@types/inquirer": "^9.0.3", - "@types/yargs": "^17.0.24", - "crimson-progressbar": "^1.3.0", - "@internals/common": "workspace:*", - "@internals/webdriver": "workspace:*", - "@internals/http-server": "workspace:*", - "@internals/test-runner": "workspace:*", - "@internals/bundlers": "workspace:*", - "inquirer": "^8.2.4", - "yargs": "^17.7.2" - } -} diff --git a/scripts/tsconfig.json b/scripts/tsconfig.json deleted file mode 100644 index e20f466ce..000000000 --- a/scripts/tsconfig.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "compilerOptions": { - "skipLibCheck": true, - "strict": true, - "experimentalDecorators": true, - "moduleResolution": "node", - "target": "ES2022", - "esModuleInterop": true, - "resolveJsonModule": true, - "module": "nodenext", - } -} diff --git a/test/integration/model/tests/all-models.clientside.mts b/test/integration/model/tests/all-models.clientside.mts index 15235dcc2..e02fbed71 100644 --- a/test/integration/model/tests/all-models.clientside.mts +++ b/test/integration/model/tests/all-models.clientside.mts @@ -3,14 +3,12 @@ */ import Upscaler, { ModelDefinition } from 'upscaler'; import * as tf from '@tensorflow/tfjs'; -import { AvailableModel, getFilteredModels } from '../../../../scripts/package-scripts/utils/getAllAvailableModels.js'; +// import { AvailableModel, getFilteredModels } from '../../../../scripts/package-scripts/utils/getAllAvailableModels.js'; import path from 'path'; import { MODELS_DIR } from '@internals/common/constants'; -import { getPackageJSON } from '../../../../scripts/package-scripts/utils/packages.js'; import { ClientsideTestRunner } from '@internals/test-runner/clientside'; - -const SPECIFIC_PACKAGE: string | undefined = undefined; -const SPECIFIC_MODEL: string | undefined = undefined; +import { getPackagesAndModelsForEnvironment, getUMDNames } from '@internals/common/models'; +import { PackageJSONExport, getPackageJSON } from '@internals/common/package-json'; const getEnv = (key: string): string => { const value = process.env[key]; @@ -22,34 +20,8 @@ const getEnv = (key: string): string => { const ESBUILD_DIST_FOLDER = getEnv('ESBUILD_DIST_FOLDER'); const UMD_DIST_FOLDER = getEnv('UMD_DIST_FOLDER'); -const filteredPackagesAndModels = getFilteredModels({ - specificPackage: SPECIFIC_PACKAGE, - specificModel: SPECIFIC_MODEL, - filter: (packageName, model) => { - if (packageName === 'default-model') { - return false; - } - const packagePath = path.resolve(MODELS_DIR, packageName); - const packageJSON = getPackageJSON(packagePath); - const supportedPlatforms = packageJSON['@upscalerjs']?.models?.[model.export]?.supportedPlatforms; - - return supportedPlatforms === undefined || supportedPlatforms.includes('browser'); - }, -}).reduce<[string, AvailableModel[]][]>((arr, [packageName, models]) => { - const 
preparedModels: AvailableModel[] = models.map(({ esm, ...model }) => { - return { - ...model, - esm: esm === '' ? 'index' : esm, - }; - }); - return arr.concat([[ - packageName, - preparedModels, - ]]); -}, []); - describe('Clientside model integration tests', () => { - describe('ESM', () => { + describe('ESM', async () => { const testRunner = new ClientsideTestRunner({ name: 'esm', mock: true, @@ -72,29 +44,35 @@ describe('Clientside model integration tests', () => { await testRunner.afterEach(); }); - describe.each(filteredPackagesAndModels)('%s', (packageName, preparedModels) => { - test.each(preparedModels.map(({ esm }) => esm || 'index'))(`upscales with ${packageName}/%s as esm`, async (modelName) => { - const fixturePath = `${await testRunner.getFixturesServerURL()}/${packageName}/test/__fixtures__/fixture.png`; - const result = await testRunner.page.evaluate(({ fixturePath, packageName, modelName }) => { - const model = window[`@upscalerjs/${packageName}/${modelName}`] as unknown as ModelDefinition; - if (!model) { - throw new Error('No model found') - } - const upscaler = new window['Upscaler']({ - model, - }); - return upscaler.execute(fixturePath, { - patchSize: 64, - padding: 2, - }); - }, { fixturePath, packageName, modelName }); - const FIXTURE_PATH = path.resolve(MODELS_DIR, packageName, `test/__fixtures__${modelName === 'index' ? '' : `/${modelName}`}`, 'result.png'); - expect(result).toMatchImage(FIXTURE_PATH); - }); + const packagesWithModels = getPackagesAndModelsForEnvironment('clientside'); + + const models = (await packagesWithModels).map(({ packageDirectoryName, modelName }) => ([ + path.join('@upscalerjs', packageDirectoryName, modelName), + packageDirectoryName, + modelName, + ])); + + test.each(models)('%s', async (windowModelPath, packageDirectoryName, modelName) => { + const fixturePath = `${await testRunner.getFixturesServerURL()}/${packageDirectoryName}/test/__fixtures__/fixture.png`; + const result = await testRunner.page.evaluate(({ fixturePath, windowModelPath }) => { + const model = window[windowModelPath] as unknown as ModelDefinition; + if (!model) { + throw new Error('No model found') + } + const upscaler = new window['Upscaler']({ + model, + }); + return upscaler.execute(fixturePath, { + patchSize: 64, + padding: 2, + }); + }, { fixturePath, windowModelPath }); + const FIXTURE_PATH = path.resolve(MODELS_DIR, packageDirectoryName, `test/__fixtures__${modelName === 'index' ? 
'' : `/${modelName}`}`, 'result.png'); + expect(result).toMatchImage(FIXTURE_PATH); }); }); - describe('UMD', () => { + describe('UMD', async () => { const UMD_PORT = 8096; const testRunner = new ClientsideTestRunner({ name: 'umd', @@ -120,84 +98,81 @@ describe('Clientside model integration tests', () => { await testRunner.afterEach(); }); - describe.each(filteredPackagesAndModels)('%s', (packageName, preparedModels) => { - test.each(preparedModels.map(({ - umd, - esm, - mainUMDName, - pathName, + const packagesWithModels = getPackagesAndModelsForEnvironment('clientside'); + const modelsWithMainUMDs = await Promise.all((await packagesWithModels).map(async ({ packageDirectoryName, modelName, ...rest }) => { + const [ + packageJSON, + umdNames, + ] = await Promise.all([ + getPackageJSON(path.resolve(MODELS_DIR, packageDirectoryName)), + getUMDNames(path.resolve(MODELS_DIR, packageDirectoryName)), + ]); + const umdModelDefinition = umdNames[modelName]; + if (typeof umdModelDefinition === 'string') { + throw new Error(`Expected umdModelDefinition to be an object for ${packageDirectoryName}/${modelName}`) + } + return { + packageDirectoryName, + // packageJSON, + mainUMDScriptPath: packageJSON['umd:main'], + mainUMDName: umdNames['.'], + + modelName, + + modelUMDScriptPath: packageJSON['exports'][modelName].umd, + modelUMDIndexName: umdModelDefinition.index, + modelUMDDirectName: umdModelDefinition.direct, ...rest - }) => [ - umd || 'index', - esm || 'index', - mainUMDName, - rest['umd:main'], - ]))(`upscales with ${packageName}/%s as umd from index`, async ( - modelName, - esmName, - mainUMDName, - umdMain, - ) => { - const fixturePath = `${await testRunner.getFixturesServerURL()}/${packageName}/test/__fixtures__/fixture.png`; - const modelScriptPath = `${await testRunner.getFixturesServerURL()}/${packageName}/${umdMain}`; - const umdName = mainUMDName; - const result = await testRunner.page.evaluate(async ({ fixturePath, modelScriptPath, umdName, modelName }) => { - await window['loadScript'](modelScriptPath); - const model = window[umdName][modelName]; - if (!model) { - throw new Error(`No model for ${umdName} ${modelName}`); - } - const upscaler = new window['Upscaler']({ - model, - }); - return upscaler.execute(fixturePath, { - patchSize: 64, - padding: 2, - }); - }, { modelScriptPath, umdName, modelName, fixturePath }); - const FIXTURE_PATH = path.resolve(MODELS_DIR, packageName, `test/__fixtures__${esmName === 'index' ? 
'' : `/${esmName}`}`, 'result.png'); - expect(result).toMatchImage(FIXTURE_PATH); + } + })); + + test.each(modelsWithMainUMDs)('%s (from index)', async ({ packageDirectoryName, modelName, mainUMDName, mainUMDScriptPath, modelUMDIndexName, modelUMDDirectName, ...rest }) => { + const fixturePath = `${await testRunner.getFixturesServerURL()}/${packageDirectoryName}/test/__fixtures__/fixture.png`; + const modelScriptPath = `${await testRunner.getFixturesServerURL()}/${packageDirectoryName}/${mainUMDScriptPath}`; + await testRunner.page.evaluate(async ({ modelScriptPath, mainUMDName, modelUMDIndexName }) => { + await window['loadScript'](modelScriptPath); + const model = window[mainUMDName][modelUMDIndexName]; + if (!model) { + throw new Error(`No model for ${mainUMDName}.${modelUMDIndexName}`); + } + window['model'] = model; + }, { modelScriptPath, mainUMDName, modelUMDIndexName }); + const result = await testRunner.page.evaluate(async ({ fixturePath }) => { + const upscaler = new window['Upscaler']({ + model: window['model'], + }); + return upscaler.execute(fixturePath, { + patchSize: 64, + padding: 2, }); + }, { fixturePath, }); + const FIXTURE_PATH = path.resolve(MODELS_DIR, packageDirectoryName, `test/__fixtures__${modelName === 'index' ? '' : `/${modelName}`}`, 'result.png'); + expect(result).toMatchImage(FIXTURE_PATH); + }); - test.each(preparedModels.map(({ - umd, - esm, - mainUMDName, - pathName, - }) => [ - umd || 'index', - esm || 'index', - mainUMDName, - pathName, - ]))(`upscales with ${packageName}/%s as umd directly`, async ( - modelName, - esmName, - mainUMDName, - pathName, - ) => { - const fixturePath = `${await testRunner.getFixturesServerURL()}/${packageName}/test/__fixtures__/fixture.png`; - const modelScriptPath = `${await testRunner.getFixturesServerURL()}/${packageName}/${pathName.umd}`; - const umdName = mainUMDName; - await testRunner.page.evaluate(async ({ modelScriptPath, fixturePath, umdName, modelName }) => { - await window['loadScript'](modelScriptPath); - const model = window[modelName]; - if (!model) { - throw new Error(`No model for ${modelName}`); - } - window['model'] = model; - }, { modelScriptPath, fixturePath, umdName, modelName }); - const result = await testRunner.page.evaluate(async ({ modelScriptPath, fixturePath, umdName, modelName }) => { - const upscaler = new window['Upscaler']({ - model: window['model'], - }); - return upscaler.execute(fixturePath, { - patchSize: 64, - padding: 2, - }); - }, { modelScriptPath, fixturePath, umdName, modelName }); - const FIXTURE_PATH = path.resolve(MODELS_DIR, packageName, `test/__fixtures__${esmName === 'index' ? 
'' : `/${esmName}`}`, 'result.png'); - expect(result).toMatchImage(FIXTURE_PATH); + test.each(modelsWithMainUMDs)('%s (direct)', async ({ modelUMDScriptPath, packageDirectoryName, modelName, mainUMDName, mainUMDScriptPath, modelUMDIndexName, modelUMDDirectName, ...rest }) => { + const fixturePath = `${await testRunner.getFixturesServerURL()}/${packageDirectoryName}/test/__fixtures__/fixture.png`; + const modelScriptPath = `${await testRunner.getFixturesServerURL()}/${packageDirectoryName}/${modelUMDScriptPath}`; + /* await new Promise((resolve) => setTimeout(resolve, 1000)); */ + await testRunner.page.evaluate(async ({ modelScriptPath, fixturePath, modelUMDDirectName, modelName }) => { + await window['loadScript'](modelScriptPath); + const model = window[modelUMDDirectName]; + if (!model) { + throw new Error(`No model for ${modelUMDDirectName}`); + } + window['model'] = model; + }, { modelScriptPath, fixturePath, modelUMDDirectName, modelName }); + const result = await testRunner.page.evaluate(async ({ modelScriptPath, fixturePath, modelUMDDirectName, modelName }) => { + const upscaler = new window['Upscaler']({ + model: window['model'], + }); + return upscaler.execute(fixturePath, { + patchSize: 64, + padding: 2, }); + }, { modelScriptPath, fixturePath, modelUMDDirectName, modelName }); + const FIXTURE_PATH = path.resolve(MODELS_DIR, packageDirectoryName, `test/__fixtures__${modelName === 'index' ? '' : `/${modelName}`}`, 'result.png'); + expect(result).toMatchImage(FIXTURE_PATH); }); }); }); diff --git a/test/integration/model/tests/all-models.serverside.mts b/test/integration/model/tests/all-models.serverside.mts index 71b059b52..9dafed983 100644 --- a/test/integration/model/tests/all-models.serverside.mts +++ b/test/integration/model/tests/all-models.serverside.mts @@ -1,19 +1,15 @@ /**** * Tests that different approaches to loading a model all load correctly */ -import { AvailableModel, getFilteredModels } from '../../../../scripts/package-scripts/utils/getAllAvailableModels.js'; import path from 'path'; import { MODELS_DIR } from '@internals/common/constants'; -import { getPackageJSON } from '@internals/common/package-json'; import { ServersideTestRunner } from '@internals/test-runner/serverside'; import { getTemplate } from '@internals/common/get-template'; +import { ALL_MODELS } from '@internals/common/models'; const VERBOSE = false; const USE_GPU = process.env.useGPU === '1'; -const SPECIFIC_PACKAGE: string | undefined = undefined; -const SPECIFIC_MODEL: string | undefined = undefined; - const getEnv = (key: string): string => { const value = process.env[key]; if (typeof value !== 'string') { @@ -23,32 +19,6 @@ const getEnv = (key: string): string => { }; const NODE_DIST_FOLDER = getEnv('NODE_DIST_FOLDER'); -const filteredPackagesAndModels = getFilteredModels({ - specificPackage: SPECIFIC_PACKAGE, - specificModel: SPECIFIC_MODEL, - filter: (packageName, model) => { - if (packageName === 'default-model') { - return false; - } - const packagePath = path.resolve(MODELS_DIR, packageName); - const packageJSON = getPackageJSON(packagePath); - const supportedPlatforms = packageJSON['@upscalerjs']?.models?.[model.export]?.supportedPlatforms; - - return supportedPlatforms === undefined || supportedPlatforms.includes('browser'); - }, -}).reduce<[ string, AvailableModel[] ][]>((arr, [packageName, models]) => { - const preparedModels: AvailableModel[] = models.map(({ esm, ...model }) => { - return { - ...model, - esm: esm === '' ? 
'index' : esm, - }; - }); - return arr.concat([[ - packageName, - preparedModels, - ]]); -}, []); - if (VERBOSE) { if (USE_GPU) { console.log('**** USING GPU in Node') @@ -64,24 +34,21 @@ describe('Serverside model integration tests', () => { trackTime: false, }); - describe.each(filteredPackagesAndModels)('%s', (packageName, preparedModels) => { - test.each(preparedModels.map(({ cjs }) => cjs || 'index'))(`upscales with ${packageName}/%s as cjs`, async (modelName) => { - const importPath = path.join('@upscalerjs', packageName, modelName === 'index' ? '' : `/${modelName}`); - const modelPackageDir = path.resolve(MODELS_DIR, packageName, 'test/__fixtures__'); - const fixturePath = path.resolve(modelPackageDir, 'fixture.png'); - const script = await getTemplate(path.resolve(__dirname, '../_templates/cjs.js.ejs'), { - tf: USE_GPU ? `@tensorflow/tfjs-node-gpu` : `@tensorflow/tfjs-node`, - customModel: importPath, - fixturePath, - }); - const buffer = await cjsTestRunner.run(script); - const result = `data:image/png;base64,${buffer.toString('utf-8')}` - expect(result).not.toEqual(''); - const formattedResult = `data:image/png;base64,${result}`; - const resultPath = path.resolve(MODELS_DIR, packageName, `test/__fixtures__${modelName === 'index' ? '' : `/${modelName}`}`, "result.png"); - expect(formattedResult).toMatchImage(resultPath); - + test.each(await ALL_MODELS)('%s', async ({ packageName, modelName }) => { + const importPath = path.join('@upscalerjs', packageName, modelName === 'index' ? '' : `/${modelName}`); + const modelPackageDir = path.resolve(MODELS_DIR, packageName, 'test/__fixtures__'); + const fixturePath = path.resolve(modelPackageDir, 'fixture.png'); + const script = await getTemplate(path.resolve(__dirname, '../_templates/cjs.js.ejs'), { + tf: USE_GPU ? `@tensorflow/tfjs-node-gpu` : `@tensorflow/tfjs-node`, + customModel: importPath, + fixturePath, }); + const buffer = await cjsTestRunner.run(script); + const result = `data:image/png;base64,${buffer.toString('utf-8')}` + expect(result).not.toEqual(''); + const formattedResult = `data:image/png;base64,${result}`; + const resultPath = path.resolve(MODELS_DIR, packageName, `test/__fixtures__${modelName === 'index' ? '' : `/${modelName}`}`, "result.png"); + expect(formattedResult).toMatchImage(resultPath); }); }); });