diff --git a/TESTING.md b/TESTING.md deleted file mode 100644 index 08074b7ff..000000000 --- a/TESTING.md +++ /dev/null @@ -1,91 +0,0 @@ -Testing strategy for MCP components - -Overview --------- -This document describes the testing approach, coverage goals, test matrix, recommended libraries, and instructions to run tests locally and in CI for the mcp-min (MCP minimal) components. - -Goals ------ -- Provide deterministic unit tests for API wrappers, auth/config resolution, tools, and transport implementations (stdio, SSE). -- Provide integration tests for HTTP endpoints and JSON-RPC compatibility (/call, /call-stream) against a running mcp-min server. -- Achieve coverage thresholds for the mcp-min package and global project coverage. - -Coverage targets ----------------- -- Global: branches 70%, functions 75%, lines 80%, statements 80% -- mcp-min package: branches 80%, functions 85%, lines 90%, statements 90% - -Test matrix (CI) ----------------- -- Node versions: 18.x, 20.x -- OS: ubuntu-latest, macos-latest, windows-latest - -Test types and cases --------------------- -Unit tests -- API wrapper classes: mock network responses, assert request shape, retries/error handling. -- Auth resolution: env vars, explicit params, .pos config precedence, missing auth errors. -- single-file helpers: normalizeLocalPath, computeRemotePath, isAssetsPath, masking tokens. -- Proxy-wrapper behavior: mock lib/proxy to ensure calls for sync/delete flow are invoked. -- Tools: echo (simple), list-envs (.pos parsing), sync.singleFile (dry-run path, validation failure, auth missing). -- stdio transport: parsing well-formed JSON-line, invalid JSON handling, unknown method errors. -- sse utilities: sseHandler framing, writeSSE escaping multiline data, heartbeat timing with fake timers. 
- -Integration tests -- Start mcp-min HTTP server in tests and exercise endpoints: /health, /tools, /call (success, 400, 404), /call-stream (JSON-RPC initialize/tools/list/tools/call) -- SSE streaming behavior: GET / with Accept: text/event-stream handshake, POST /call-stream streaming response bodies and initial endpoint events. -- Full tool chaining: tools that call other libs (sync.singleFile) with proxy mocked and asserting writer events. -- Error recovery: simulate gateway errors and ensure server responds with appropriate error payloads. - -End-to-end -- stdio + HTTP combined scenario where an external client uses JSON-RPC initialize, tools/list, and tools/call over HTTP and verifies SSE messages (using eventsource in real runs). - -Mock framework & fixtures -------------------------- -- Use nock to mock HTTP calls to platformOS endpoints and S3 presign/upload flows. -- Use jest.mock for internal libs (lib/proxy, lib/s3UploadFile, lib/presignUrl, lib/files) to create deterministic responses. -- Use test/utils/fixtures.js for managing temporary .pos configs. -- Use tmp or fs-extra for temp directories and files. - -Libraries recommended ---------------------- -- jest (testing framework) -- supertest (HTTP assertions) - optional in existing tests; current code uses http.request -- eventsource (EventSource polyfill) or eventsource package for SSE client tests -- nock (HTTP mocking) -- tmp / fs-extra (filesystem helpers) -- jest fake timers for heartbeat and SSE tests - -Jest config and coverage ------------------------- -- collectCoverage true, target mcp-min and lib. -- Set coverage thresholds (see Coverage targets section). -- Add test path ignore for heavy gui/next etc. 
- -CI job ------- -- GitHub Actions workflow at .github/workflows/ci.yml -- Matrix: node 18, 20; OS: ubuntu, macos, windows -- Steps: checkout, setup-node, npm ci, npm test, upload coverage artifact - -Files to add (initial PR) -------------------------- -- mcp-min/__tests__/http.test.js -- mcp-min/__tests__/sse.test.js -- mcp-min/__tests__/stdio.test.js -- mcp-min/__tests__/tools.test.js -- test/utils/fixtures.js -- .github/workflows/ci.yml -- TESTING.md -- package.json jest config updated with coverage settings - -Running tests locally ---------------------- -- npm ci -- npm test - -Maintainer notes ----------------- -- Expand tests to cover lib/proxy and network interactions using jest.mock + nock. -- Add integration tests that spin up a mocked S3 service if needed. -- Use supertest for more ergonomic HTTP assertions in future. diff --git a/bin/pos-cli-modules-install.js b/bin/pos-cli-modules-install.js index 4635faaf1..5f6e5278b 100755 --- a/bin/pos-cli-modules-install.js +++ b/bin/pos-cli-modules-install.js @@ -2,29 +2,13 @@ import { program } from '../lib/program.js'; import logger from '../lib/logger.js'; -import { posConfigDirectory, posModulesFilePath, posModulesLockFilePath, readLocalModules, writePosModules, writePosModulesLock } from '../lib/modules/configFiles.js'; -import { findModuleVersion, resolveDependencies } from '../lib/modules/dependencies.js'; -import { downloadAllModules } from '../lib/modules/downloadModule.js'; +import { posConfigDirectory, posModulesFilePath, readLocalModules, readRepositoryUrl, writePosModules } from '../lib/modules/configFiles.js'; +import { addNewModule, resolveAndDownload } from '../lib/modules/installModule.js'; import Portal from '../lib/portal.js'; import path from 'path'; import { createDirectory } from '../lib/utils/create-directory.js'; import ora from 'ora'; -const addNewModule = async (moduleName, moduleVersion, localModules, getVersions) => { - const newModule = await findModuleVersion(moduleName, 
moduleVersion, getVersions); - let modules; - if(newModule){ - if (moduleVersion || !localModules[moduleName]) { - modules = {...localModules, ...newModule}; - } else { - modules = {...localModules }; - } - return modules; - } else { - throw new Error(`Can't find module ${moduleName} with version ${moduleVersion}`); - } -}; - program .name('pos-cli modules install') .arguments('[module-name]', 'name of the module. Example: core. You can also pass version number: core@1.0.0') @@ -37,29 +21,26 @@ program spinner.start(); try { + const repositoryUrl = readRepositoryUrl(); + const getVersions = (names) => Portal.moduleVersions(names, repositoryUrl); let localModules = readLocalModules(); if(moduleNameWithVersion){ const [moduleName, moduleVersion] = moduleNameWithVersion.split('@'); - localModules = await addNewModule(moduleName, moduleVersion, localModules, Portal.moduleVersions); - writePosModules(localModules); - spinner.succeed(`Added module: ${moduleName}@${localModules[moduleName]} to ${posModulesFilePath}`); + const updated = await addNewModule(moduleName, moduleVersion, localModules, getVersions, repositoryUrl); + if (updated) { + localModules = updated; + writePosModules(localModules, repositoryUrl); + spinner.succeed(`Added module: ${moduleName}@${localModules[moduleName]} to ${posModulesFilePath}`); + } } if(Object.keys(localModules).length === 0) { spinner.stop(); } else { - spinner.start('Resolving module dependencies'); - const modulesLocked = await resolveDependencies(localModules, Portal.moduleVersions); - writePosModulesLock(modulesLocked); - spinner.succeed(`Modules lock file updated: ${posModulesLockFilePath}`); - - spinner.start('Downloading modules'); - await downloadAllModules(modulesLocked); - spinner.succeed('Modules downloaded successfully'); + await resolveAndDownload(spinner, localModules, repositoryUrl, getVersions); } } catch(e) { logger.Debug(e); - spinner.stopAndPersist(); spinner.fail(e.message); } } catch { diff --git 
a/bin/pos-cli-modules-update.js b/bin/pos-cli-modules-update.js index 2d4b98ff2..00e3d3c94 100755 --- a/bin/pos-cli-modules-update.js +++ b/bin/pos-cli-modules-update.js @@ -2,28 +2,16 @@ import { program } from '../lib/program.js'; import logger from '../lib/logger.js'; -import { posConfigDirectory, posModulesLockFilePath, readLocalModules, writePosModules, writePosModulesLock } from '../lib/modules/configFiles.js'; -import { findModuleVersion, resolveDependencies } from '../lib/modules/dependencies.js'; -import { downloadAllModules } from '../lib/modules/downloadModule.js'; +import { posConfigDirectory, readLocalModules, readRepositoryUrl, writePosModules } from '../lib/modules/configFiles.js'; +import { updateModule, updateAllModules, resolveAndDownload } from '../lib/modules/installModule.js'; import Portal from '../lib/portal.js'; import path from 'path'; import { createDirectory } from '../lib/utils/create-directory.js'; import ora from 'ora'; -const updateModule = async (moduleName, moduleVersion, localModules, getVersions) => { - const newModule = await findModuleVersion(moduleName, moduleVersion, getVersions); - if(newModule){ - const modules = {...localModules, ...newModule}; - return modules; - } else { - throw new Error(`Can't find module ${moduleName} with version ${moduleVersion}`); - } -}; - - program .name('pos-cli modules update') - .arguments('', 'name of the module. Example: core. You can also pass version number: core@1.0.0') + .arguments('[module-name]', 'name of the module. Example: core. You can also pass version number: core@1.0.0. 
Omit to update all modules.') .action(async (moduleNameWithVersion) => { try { await createDirectory(path.join(process.cwd(), posConfigDirectory)); @@ -32,29 +20,28 @@ program spinner.start(); try{ + const repositoryUrl = readRepositoryUrl(); + const getVersions = (names) => Portal.moduleVersions(names, repositoryUrl); let localModules = readLocalModules(); - if(moduleNameWithVersion){ + + if (moduleNameWithVersion) { const [moduleName, moduleVersion] = moduleNameWithVersion.split('@'); - localModules = await updateModule(moduleName, moduleVersion, localModules, Portal.moduleVersions); - writePosModules(localModules); + localModules = await updateModule(moduleName, moduleVersion, localModules, getVersions, repositoryUrl); + writePosModules(localModules, repositoryUrl); spinner.succeed(`Updated module: ${moduleName}@${localModules[moduleName]}`); - } - - if(Object.keys(localModules).length === 0) { - spinner.stop(); } else { - spinner.start('Resolving module dependencies'); - const modulesLocked = await resolveDependencies(localModules, Portal.moduleVersions); - writePosModulesLock(modulesLocked); - spinner.succeed(`Modules lock file generated: ${posModulesLockFilePath}`); - - spinner.start('Downloading modules'); - await downloadAllModules(modulesLocked); - spinner.succeed('Modules downloaded successfully'); + if (Object.keys(localModules).length === 0) { + spinner.warn('No modules to update'); + return; + } + localModules = await updateAllModules(localModules, getVersions, repositoryUrl); + writePosModules(localModules, repositoryUrl); + spinner.succeed('Updated all modules to latest versions'); } + + await resolveAndDownload(spinner, localModules, repositoryUrl, getVersions); } catch(e) { logger.Debug(e); - spinner.stopAndPersist(); spinner.fail(e.message); } } catch { diff --git a/lib/modules/configFiles.js b/lib/modules/configFiles.js index a680e97eb..657a7d100 100644 --- a/lib/modules/configFiles.js +++ b/lib/modules/configFiles.js @@ -5,24 +5,41 @@ import 
path from 'path'; const posConfigDirectory = 'app'; const posModulesFilePath = `${posConfigDirectory}/pos-modules.json`; const posModulesLockFilePath = `${posConfigDirectory}/pos-modules.lock.json`; +const DEFAULT_REPOSITORY_URL = 'https://partners.platformos.com'; const readLocalModules = () => { const config = files.readJSON(posModulesFilePath, { throwDoesNotExistError: false }); return config['modules'] || {}; }; -const writePosModules = (modules) => { +/** + * Returns the repository URL for the module registry. + * Reads from pos-modules.json, falling back to the default Partners Portal URL. + * The PARTNER_PORTAL_HOST environment variable takes precedence over both. + */ +const readRepositoryUrl = () => { + if (process.env.PARTNER_PORTAL_HOST) return process.env.PARTNER_PORTAL_HOST; + const config = files.readJSON(posModulesFilePath, { throwDoesNotExistError: false }); + return config['repository_url'] || DEFAULT_REPOSITORY_URL; +}; + +const readPosModulesLock = () => { + const config = files.readJSON(posModulesLockFilePath, { throwDoesNotExistError: false }); + return config['modules'] || {}; +}; + +const writePosModules = (modules, repositoryUrl = DEFAULT_REPOSITORY_URL) => { fs.writeFileSync( path.join(process.cwd(), posModulesFilePath), - JSON.stringify({ modules: modules }, null, 2) + JSON.stringify({ repository_url: repositoryUrl, modules }, null, 2) ); }; -const writePosModulesLock = (modules) => { +const writePosModulesLock = (modules, repositoryUrl = DEFAULT_REPOSITORY_URL) => { fs.writeFileSync( path.join(process.cwd(), posModulesLockFilePath), - JSON.stringify({ modules: modules }, null, 2) + JSON.stringify({ repository_url: repositoryUrl, modules }, null, 2) ); }; -export { posModulesFilePath, posModulesLockFilePath, readLocalModules, writePosModules, writePosModulesLock, posConfigDirectory }; +export { posModulesFilePath, posModulesLockFilePath, DEFAULT_REPOSITORY_URL, readLocalModules, readRepositoryUrl, readPosModulesLock, writePosModules, 
writePosModulesLock, posConfigDirectory }; diff --git a/lib/modules/dependencies.js b/lib/modules/dependencies.js index 4a8037127..20c80cd9f 100644 --- a/lib/modules/dependencies.js +++ b/lib/modules/dependencies.js @@ -1,76 +1,271 @@ -import flatten from 'lodash.flatten'; -import uniq from 'lodash.uniq'; import semver from 'semver'; import logger from '../logger.js'; -const resolveBestVersion = async (dependencyList, getVersions, topLevelModulesRequirments) => { - const dependenciesNames = uniq(dependencyList.map(dep => Object.keys(dep)[0])); - if(dependenciesNames.length === 0) return {}; - const versions = await getVersions(dependenciesNames); - - const dependenciesVersions = dependenciesNames.map(depName => { - const versionsAvailable = Object.keys(versions.find(m => m.module === depName)?.versions || {}); - let versionsRequested = []; - if (topLevelModulesRequirments[depName]) { - versionsRequested = [topLevelModulesRequirments[depName]]; - } else { - versionsRequested = dependencyList - .filter(dep => Object.keys(dep)[0] === depName) - .map(dep => Object.values(dep)[0]); +/** + * Wraps getVersions to cache registry responses within a single resolution run. + * Each module's data is fetched at most once, regardless of how many times it's needed. + */ +const withCache = (getVersions) => { + const cache = new Map(); + return async (names) => { + const uncached = names.filter(n => !cache.has(n)); + if (uncached.length > 0) { + const results = await getVersions(uncached); + for (const entry of results) cache.set(entry.module, entry); } + return names.map(n => cache.get(n)).filter(Boolean); + }; +}; + +/** + * Removes every constraint entry whose `requiredBy` matches the given key. + * + * Called when a module is downgraded to a different version: the constraints + * that the old version contributed are no longer valid and must not pollute + * resolution of transitive deps. 
Without this cleanup, a dep shared between + * the old and new version (with different range requirements) may end up with + * two mutually exclusive constraints → false "no satisfying version" error. + */ +const removeConstraintsFrom = (constraints, requiredBy) => { + for (const list of constraints.values()) { + let i = list.length; + while (i--) { + if (list[i].requiredBy === requiredBy) list.splice(i, 1); + } + } +}; + +/** + * Extracts the module name from a "name@version" requiredBy key. + * Uses lastIndexOf to handle the unlikely case of a name that contains '@'. + */ +const moduleNameFrom = (requiredBy) => requiredBy.slice(0, requiredBy.lastIndexOf('@')); + +/** + * Returns the highest available version satisfying all constraints simultaneously. + * + * When pinnedVersion is provided (the dep appears in root pos-modules.json) it is + * validated against all constraints rather than auto-resolved — the user owns that pin. + * + * rootModuleNames is used only for error attribution: constraints coming from root + * modules are annotated and a hint is appended when multiple root modules conflict. + * + * Note: pre-release versions are excluded from range matching by the semver library + * unless the range itself contains a pre-release tag (standard npm behaviour). + */ +const pickBestVersion = (depName, allConstraints, versionsAvailable, pinnedVersion, rootModuleNames = new Set()) => { + if (versionsAvailable.length === 0) { + throw new Error(`Module "${depName}" has no published versions`); + } + + if (pinnedVersion) { + const conflicts = allConstraints.filter(({ constraint }) => !semver.satisfies(pinnedVersion, constraint)); + if (conflicts.length > 0) { + const detail = conflicts + .map(({ constraint, requiredBy }) => `${constraint} (required by ${requiredBy})`) + .join(', '); + throw new Error( + `Version conflict: "${depName}@${pinnedVersion}" does not satisfy: ${detail}. 
` + + `Update "${depName}" in pos-modules.json to a compatible version.` + ); + } + return pinnedVersion; + } + + const constraintList = allConstraints.map(({ constraint }) => constraint); + const best = versionsAvailable + .filter(v => constraintList.every(c => semver.satisfies(v, c))) + .sort(semver.compare) + .at(-1); - const versionsMaxSatisfying = versionsRequested.map(version => semver.maxSatisfying(versionsAvailable, version)); - const depBestVersion = versionsMaxSatisfying.sort(semver.compare)[0]; - return [depName, depBestVersion]; - }); + if (!best) { + const detail = allConstraints + .map(({ constraint, requiredBy }) => { + const isRoot = rootModuleNames.has(moduleNameFrom(requiredBy)); + return `${constraint} (required by ${requiredBy}${isRoot ? ', root module' : ''})`; + }) + .join(', '); - return Object.fromEntries(dependenciesVersions); + const rootsInvolved = [...new Set( + allConstraints + .map(({ requiredBy }) => moduleNameFrom(requiredBy)) + .filter(name => rootModuleNames.has(name)) + )]; + const hint = rootsInvolved.length >= 2 + ? ` Conflicting root modules: ${rootsInvolved.join(', ')}. 
Try updating them one at a time.` + : ''; + + throw new Error(`No version of "${depName}" satisfies all constraints: ${detail}.${hint}`); + } + + return best; }; -const resolveDependencies = async (modules, getVersions, rootModules) => { - if(!rootModules) rootModules = modules; - if(Object.keys(modules).length === 0) return {}; - - const deps = Object.assign({}, modules); - const modulesNames = Object.keys(modules); - const modulesVersions = await getVersions(modulesNames); - logger.Debug(`modulesVersions: ${JSON.stringify(modulesVersions)}`); - const dependenciesList = flatten( - modulesVersions.map(module => { - const versionName = modules[module.module]; - const version = module.versions[versionName]; - if(!version) throw new Error(`Can't find any version for module ${module.module}`); - - return Object.entries(version.dependencies).map(dependency=> { - const [dependencyName, dependencyVersion] = dependency; - return { [dependencyName]: dependencyVersion }; +/** + * Resolves the full flat dependency tree using BFS with a global constraint map. + * + * Key properties: + * - All constraints from all tree levels are accumulated before a version is chosen, + * so conflicts between requirements at different depths are always detected. + * - When a new constraint forces a dep to a lower version, stale constraints from + * the old version are purged to prevent false conflicts with the new version's deps. + * - Stale cleanup and version picking are separated into distinct passes so the + * iteration order of deps within a BFS round never affects correctness. + * - A post-BFS reachability walk over the final version graph removes phantom deps: + * modules that were tentatively resolved but are unreachable from root because the + * version that required them was later downgraded away. + * - Registry data is fetched at most once per module (memoised via withCache). 
+ * - Because constraints only accumulate (never relax), version picks are monotonically + * non-increasing — the algorithm is guaranteed to converge. + * + * @param {Object} rootModules - { name: exactVersion } from pos-modules.json + * @param {Function} getVersions - async (names[]) => moduleVersionData[] + */ +const resolveDependencies = async (rootModules, getVersions) => { + if (Object.keys(rootModules).length === 0) return {}; + + const fetch = withCache(getVersions); + const resolved = { ...rootModules }; // name → exact version (refined as tree is walked) + const constraints = new Map(); // name → [{ constraint, requiredBy }] + const visited = new Set(); // "name@version" whose deps have been collected + const rootModuleNames = new Set(Object.keys(rootModules)); + + let queue = Object.entries(rootModules); + + while (queue.length > 0) { + const names = [...new Set(queue.map(([name]) => name))]; + const versionData = await fetch(names); + logger.Debug(`modulesVersions: ${JSON.stringify(versionData)}`); + const versionMap = new Map(versionData.map(m => [m.module, m])); + + // Phase 1: walk every module in this batch and collect their dependency constraints + const newDeps = new Set(); + + for (const [name, version] of queue) { + const key = `${name}@${version}`; + if (visited.has(key)) continue; + visited.add(key); + + const moduleEntry = versionMap.get(name); + if (!moduleEntry) throw new Error(`Module "${name}" not found in the registry`); + + const versionEntry = moduleEntry.versions[version]; + if (!versionEntry) throw new Error(`Version "${version}" not found for module "${name}"`); + + for (const [depName, constraint] of Object.entries(versionEntry.dependencies ?? 
{})) { + if (!constraints.has(depName)) constraints.set(depName, []); + constraints.get(depName).push({ constraint, requiredBy: key }); + newDeps.add(depName); + } + } + + if (newDeps.size === 0) break; + + // Phase 2 — three passes to ensure stale-constraint cleanup never affects a + // version pick computed in the same round (order of depNames must not matter). + + const depNames = [...newDeps]; + const availData = await fetch(depNames); + const availMap = new Map(availData.map(m => [m.module, m])); + + // Pass A: compute every new version using the constraint map as-is. + const newVersions = new Map(); // depName → { newVersion, prevVersion } + for (const depName of depNames) { + const depConstraints = constraints.get(depName); + if (!depConstraints?.length) continue; + + const moduleEntry = availMap.get(depName); + if (!moduleEntry) throw new Error(`Module "${depName}" not found in the registry`); + + const versionsAvailable = Object.keys(moduleEntry.versions); + newVersions.set(depName, { + newVersion: pickBestVersion(depName, depConstraints, versionsAvailable, rootModules[depName], rootModuleNames), + prevVersion: resolved[depName], }); - }) + } + + // Pass B: apply all stale-constraint cleanup for version changes in one sweep. + // Must happen before Pass C so that any dep whose only constraints came from a + // downgraded version is excluded from resolved / nextQueue (phantom dep prevention). + for (const [depName, { newVersion, prevVersion }] of newVersions) { + if (prevVersion && prevVersion !== newVersion) { + removeConstraintsFrom(constraints, `${depName}@${prevVersion}`); + visited.delete(`${depName}@${prevVersion}`); + } + } + + // Pass C: commit resolved versions and build next queue. + // Re-read constraint lists after cleanup: a dep that lost all its constraints was + // required only by a version that was just downgraded — don't install it. 
+ const nextQueue = []; + for (const [depName, { newVersion }] of newVersions) { + const currentConstraints = constraints.get(depName); + if (!currentConstraints?.length && !rootModules[depName]) continue; + + resolved[depName] = newVersion; + const depKey = `${depName}@${newVersion}`; + if (!visited.has(depKey)) { + nextQueue.push([depName, newVersion]); + } + } + + queue = nextQueue; + } + + // Post-BFS reachability pruning: constraints accumulate monotonically so resolved + // may contain deps required only by a version that was later downgraded away. + // Walk the final version graph from root modules and remove anything unreachable. + // All registry data is already cached so this fetch is free. + const finalMap = new Map( + (await fetch(Object.keys(resolved))).map(m => [m.module, m]) ); - const dependenciesVersions = await resolveBestVersion(dependenciesList, getVersions, rootModules); - const dependenciesDependencies = await resolveDependencies(dependenciesVersions, getVersions, rootModules); + const reachable = new Set(Object.keys(rootModules)); + const stack = [...Object.keys(rootModules)]; + while (stack.length > 0) { + const name = stack.pop(); + const deps = finalMap.get(name)?.versions[resolved[name]]?.dependencies ?? {}; + for (const depName of Object.keys(deps)) { + if (depName in resolved && !reachable.has(depName)) { + reachable.add(depName); + stack.push(depName); + } + } + } + for (const name of Object.keys(resolved)) { + if (!reachable.has(name)) delete resolved[name]; + } - return {...{...deps, ...dependenciesVersions}, ...dependenciesDependencies}; + return resolved; }; +/** + * Finds the version to install for a single named module. + * + * - If moduleVersion is provided, verifies it exists and returns it. + * - If moduleVersion is null/undefined, returns the highest stable (non-prerelease) version. + * - Returns null when the specific requested version doesn't exist. + * - Throws when the module itself is not found in the registry. 
+ */ const findModuleVersion = async (moduleName, moduleVersion, getVersions) => { - const modules = await getVersions([moduleName]); - logger.Debug(`find modulesVersions: ${JSON.stringify(modules)}`); - const versions = Object.keys(modules.find(m => m.module === moduleName)?.versions || {}); - let version; - if(moduleVersion){ - version = versions.find(v => v === moduleVersion); - } else { - version = versions - .filter(version => !semver.prerelease(version)) - .sort(semver.compare).slice(-1)[0]; - } + const results = await getVersions([moduleName]); + logger.Debug(`find modulesVersions: ${JSON.stringify(results)}`); + + const moduleEntry = results.find(m => m.module === moduleName); + if (!moduleEntry) throw new Error(`Can't find module ${moduleName}`); + + const versions = Object.keys(moduleEntry.versions); - if(version){ - return { [moduleName]: version }; - } else { - return null; + if (moduleVersion) { + const match = versions.find(v => v === moduleVersion); + return match ? { [moduleName]: match } : null; } + + const latest = versions + .filter(v => !semver.prerelease(v)) + .sort(semver.compare) + .at(-1); + + return latest ? 
{ [moduleName]: latest } : null; }; export { resolveDependencies, findModuleVersion }; diff --git a/lib/modules/downloadModule.js b/lib/modules/downloadModule.js index 10e8b13fd..9c2dec8c5 100644 --- a/lib/modules/downloadModule.js +++ b/lib/modules/downloadModule.js @@ -6,12 +6,12 @@ import fs from 'fs'; import path from 'path'; import os from 'os'; -const downloadModule = async (moduleName, version) => { +const downloadModule = async (moduleName, version, registryUrl) => { const moduleWithVersion = `${moduleName}@${version}`; const tmpFile = path.join(os.tmpdir(), `pos-module-${moduleName}-${Date.now()}.zip`); try { logger.Info(`Downloading ${moduleWithVersion}...`); - const moduleVersion = await Portal.moduleVersionsSearch(moduleWithVersion); + const moduleVersion = await Portal.moduleVersionsSearch(moduleWithVersion, registryUrl); const modulePath = path.join(process.cwd(), 'modules', moduleName); await fs.promises.rm(modulePath, { recursive: true, force: true }); await downloadFile(moduleVersion['public_archive'], tmpFile); @@ -27,10 +27,28 @@ const downloadModule = async (moduleName, version) => { } }; -const downloadAllModules = async (modules) => { +const downloadAllModules = async (modules, registryUrl) => { for (const [moduleName, version] of Object.entries(modules)) { - await downloadModule(moduleName, version); + await downloadModule(moduleName, version, registryUrl); } }; -export { downloadModule, downloadAllModules }; +/** + * Returns the subset of modulesLocked that actually needs to be downloaded. + * + * A module is skipped when BOTH of the following are true: + * 1. Its version in previousLock matches the newly resolved version (no change). + * 2. Its directory already exists on disk (a previous download succeeded). + * + * The disk check catches the case where the lock file is up-to-date but the + * module directory was deleted manually — in that case we must re-download. 
+ */ +const modulesToDownload = (modulesLocked, previousLock) => + Object.fromEntries( + Object.entries(modulesLocked).filter(([name, version]) => { + if (previousLock[name] !== version) return true; + return !fs.existsSync(path.join(process.cwd(), 'modules', name)); + }) + ); + +export { downloadModule, downloadAllModules, modulesToDownload }; diff --git a/lib/modules/formatModulesDiff.js b/lib/modules/formatModulesDiff.js new file mode 100644 index 000000000..a20fb07d6 --- /dev/null +++ b/lib/modules/formatModulesDiff.js @@ -0,0 +1,17 @@ +const formatModulesDiff = (previousModules, newModules) => { + const allNames = new Set([...Object.keys(previousModules), ...Object.keys(newModules)]); + const lines = []; + + for (const name of [...allNames].sort()) { + const prev = previousModules[name]; + const next = newModules[name]; + + if (!prev) lines.push(` + ${name}@${next}`); + else if (!next) lines.push(` - ${name}@${prev}`); + else if (prev !== next) lines.push(` ~ ${name}: ${prev} → ${next}`); + } + + return lines; +}; + +export { formatModulesDiff }; diff --git a/lib/modules/installModule.js b/lib/modules/installModule.js new file mode 100644 index 000000000..65e42266a --- /dev/null +++ b/lib/modules/installModule.js @@ -0,0 +1,65 @@ +import { findModuleVersion, resolveDependencies } from './dependencies.js'; +import { readPosModulesLock, writePosModulesLock, posModulesLockFilePath } from './configFiles.js'; +import { downloadAllModules, modulesToDownload } from './downloadModule.js'; +import { formatModulesDiff } from './formatModulesDiff.js'; + +// Wraps findModuleVersion to surface a consistent error message that includes +// which registry was queried, making it easier to diagnose wrong-registry issues. 
+const findVersionWithContext = async (moduleName, moduleVersion, getVersions, registryUrl) => { + let result; + try { + result = await findModuleVersion(moduleName, moduleVersion, getVersions); + } catch (e) { + throw new Error(`${e.message} (registry: ${registryUrl})`); + } + if (!result) { + const versionClause = moduleVersion ? ` with version ${moduleVersion}` : ''; + throw new Error(`Can't find module ${moduleName}${versionClause} (registry: ${registryUrl})`); + } + return result; +}; + +// Returns the updated modules map, or null when the module is already installed +// and no explicit version was requested (install is conditional, unlike update). +const addNewModule = async (moduleName, moduleVersion, localModules, getVersions, registryUrl) => { + if (!moduleVersion && localModules[moduleName]) return null; + const newModule = await findVersionWithContext(moduleName, moduleVersion, getVersions, registryUrl); + return { ...localModules, ...newModule }; +}; + +// Always replaces the module version in the map (update is unconditional). +const updateModule = async (moduleName, moduleVersion, localModules, getVersions, registryUrl) => { + const newModule = await findVersionWithContext(moduleName, moduleVersion, getVersions, registryUrl); + return { ...localModules, ...newModule }; +}; + +// Updates every root module to its latest stable version. +const updateAllModules = async (localModules, getVersions, registryUrl) => { + const updated = { ...localModules }; + for (const moduleName of Object.keys(localModules)) { + const newModule = await findVersionWithContext(moduleName, undefined, getVersions, registryUrl); + Object.assign(updated, newModule); + } + return updated; +}; + +// Resolves dependencies, updates the lock file, downloads changed modules, and prints a diff. 
+const resolveAndDownload = async (spinner, localModules, repositoryUrl, getVersions) => { + spinner.start('Resolving module dependencies'); + const previousLock = readPosModulesLock(); + const modulesLocked = await resolveDependencies(localModules, getVersions); + writePosModulesLock(modulesLocked, repositoryUrl); + spinner.succeed(`Modules lock file updated: ${posModulesLockFilePath}`); + + const toDownload = modulesToDownload(modulesLocked, previousLock); + const skipCount = Object.keys(modulesLocked).length - Object.keys(toDownload).length; + const skipNote = skipCount > 0 ? ` (${skipCount} already up-to-date)` : ''; + spinner.start('Downloading modules'); + await downloadAllModules(toDownload, repositoryUrl); + spinner.succeed(`Modules downloaded successfully${skipNote}`); + + const diffLines = formatModulesDiff(previousLock, modulesLocked); + if (diffLines.length > 0) process.stdout.write(diffLines.join('\n') + '\n'); +}; + +export { addNewModule, updateModule, updateAllModules, resolveAndDownload }; diff --git a/lib/portal.js b/lib/portal.js index fd8806c24..c73f253f6 100644 --- a/lib/portal.js +++ b/lib/portal.js @@ -28,9 +28,10 @@ const Portal = { headers: { Authorization: `Bearer ${token}` } }); }, - moduleVersions(modules) { + moduleVersions(modules, registryUrl) { + const base = registryUrl || Portal.url(); return apiRequest({ - uri: `${Portal.url()}/api/pos_modules?modules=${modules.join(',')}` + uri: `${base}/api/pos_modules?modules=${modules.join(',')}` }); }, createVersion: (token, url, name, posModuleId) => { @@ -48,10 +49,11 @@ const Portal = { headers: { Authorization: `Bearer ${token}` } }); }, - moduleVersionsSearch: (moduleVersionName) => { + moduleVersionsSearch: (moduleVersionName, registryUrl) => { + const base = registryUrl || Portal.url(); return apiRequest({ method: 'GET', - uri: `${Portal.url()}/api/pos_module_version?name=${moduleVersionName}` + uri: `${base}/api/pos_module_version?name=${moduleVersionName}` }); }, 
requestDeviceAuthorization: (instanceDomain) => { diff --git a/test/fixtures/deploy/modules_test/app/pos-modules.json b/test/fixtures/deploy/modules_test/app/pos-modules.json index 51f7fdbbf..7b8f8fce1 100644 --- a/test/fixtures/deploy/modules_test/app/pos-modules.json +++ b/test/fixtures/deploy/modules_test/app/pos-modules.json @@ -1,5 +1,6 @@ { + "repository_url": "https://partners.platformos.com", "modules": { "tests": "0.0.3" } -} \ No newline at end of file +} diff --git a/test/fixtures/deploy/modules_test/app/pos-modules.lock.json b/test/fixtures/deploy/modules_test/app/pos-modules.lock.json index 51f7fdbbf..7b8f8fce1 100644 --- a/test/fixtures/deploy/modules_test/app/pos-modules.lock.json +++ b/test/fixtures/deploy/modules_test/app/pos-modules.lock.json @@ -1,5 +1,6 @@ { + "repository_url": "https://partners.platformos.com", "modules": { "tests": "0.0.3" } -} \ No newline at end of file +} diff --git a/test/fixtures/deploy/modules_test_with_old_files/app/pos-modules.json b/test/fixtures/deploy/modules_test_with_old_files/app/pos-modules.json index 51f7fdbbf..7b8f8fce1 100644 --- a/test/fixtures/deploy/modules_test_with_old_files/app/pos-modules.json +++ b/test/fixtures/deploy/modules_test_with_old_files/app/pos-modules.json @@ -1,5 +1,6 @@ { + "repository_url": "https://partners.platformos.com", "modules": { "tests": "0.0.3" } -} \ No newline at end of file +} diff --git a/test/fixtures/deploy/modules_test_with_old_files/app/pos-modules.lock.json b/test/fixtures/deploy/modules_test_with_old_files/app/pos-modules.lock.json index 51f7fdbbf..9c68ff7a0 100644 --- a/test/fixtures/deploy/modules_test_with_old_files/app/pos-modules.lock.json +++ b/test/fixtures/deploy/modules_test_with_old_files/app/pos-modules.lock.json @@ -1,5 +1,6 @@ { + "repository_url": "https://partners.platformos.com", "modules": { - "tests": "0.0.3" + "tests": "0.0.2" } -} \ No newline at end of file +} diff --git a/test/fixtures/deploy/modules_update/app/pos-modules.json 
b/test/fixtures/deploy/modules_update/app/pos-modules.json index 55aee05b2..a2156fc2a 100644 --- a/test/fixtures/deploy/modules_update/app/pos-modules.json +++ b/test/fixtures/deploy/modules_update/app/pos-modules.json @@ -1,5 +1,6 @@ { + "repository_url": "https://partners.platformos.com", "modules": { "core": "2.0.7" } -} \ No newline at end of file +} diff --git a/test/fixtures/deploy/modules_update/app/pos-modules.lock.json b/test/fixtures/deploy/modules_update/app/pos-modules.lock.json index 55aee05b2..a2156fc2a 100644 --- a/test/fixtures/deploy/modules_update/app/pos-modules.lock.json +++ b/test/fixtures/deploy/modules_update/app/pos-modules.lock.json @@ -1,5 +1,6 @@ { + "repository_url": "https://partners.platformos.com", "modules": { "core": "2.0.7" } -} \ No newline at end of file +} diff --git a/test/fixtures/deploy/modules_user/app/pos-modules.json b/test/fixtures/deploy/modules_user/app/pos-modules.json index 4eb9c3b3f..6497bbf3d 100644 --- a/test/fixtures/deploy/modules_user/app/pos-modules.json +++ b/test/fixtures/deploy/modules_user/app/pos-modules.json @@ -1,5 +1,6 @@ { + "repository_url": "https://partners.platformos.com", "modules": { "user": "3.0.8" } -} \ No newline at end of file +} diff --git a/test/fixtures/deploy/modules_user/app/pos-modules.lock.json b/test/fixtures/deploy/modules_user/app/pos-modules.lock.json index d9f9123ab..103c1e13d 100644 --- a/test/fixtures/deploy/modules_user/app/pos-modules.lock.json +++ b/test/fixtures/deploy/modules_user/app/pos-modules.lock.json @@ -1,6 +1,7 @@ { + "repository_url": "https://partners.platformos.com", "modules": { "user": "3.0.8", "core": "1.5.5" } -} \ No newline at end of file +} diff --git a/test/integration/modules-install.test.js b/test/integration/modules-install.test.js index 941815ce2..0e888495a 100644 --- a/test/integration/modules-install.test.js +++ b/test/integration/modules-install.test.js @@ -4,7 +4,8 @@ import exec from '#test/utils/exec'; import cliPath from '#test/utils/cliPath'; 
import fs from 'fs'; import path from 'path'; -import { requireRealCredentials, noCredentials, applyCredentials } from '#test/utils/credentials'; +import { requireRealCredentials, noCredentials, applyCredentials, saveCredentials } from '#test/utils/credentials'; +import { plainMessages } from '#test/utils/parseOutput'; const cwd = name => path.join(process.cwd(), 'test', 'fixtures', name); @@ -20,7 +21,8 @@ describe('Successful install', () => { try { const { stdout } = await run('deploy/modules_test', 'tests'); - expect(stdout).toContain('Downloading tests@0.0.3'); + const msgs = plainMessages(stdout); + expect(msgs.find(m => m.startsWith('Downloading tests@'))).toBe('Downloading tests@0.0.3...'); expect(fs.existsSync(pathToModuleJson)).toBeTruthy(); const moduleJson = JSON.parse(fs.readFileSync(pathToModuleJson, 'utf8')); @@ -42,7 +44,8 @@ describe('Successful install', () => { try { const { stdout } = await run('deploy/modules_test_with_old_files', 'tests'); - expect(stdout).toContain('Downloading tests@0.0.3'); + const msgs = plainMessages(stdout); + expect(msgs.find(m => m.startsWith('Downloading tests@'))).toBe('Downloading tests@0.0.3...'); expect(fs.existsSync(pathToModuleJson)).toBeTruthy(); expect(fs.existsSync(pathToModuleLeftoverFile)).toBeFalsy(); } finally { @@ -66,7 +69,8 @@ describe('Successful install', () => { try { const { stdout } = await run('deploy/modules_test', 'tests@1.0.0'); - expect(stdout).toContain('Downloading tests@1.0.0'); + const msgs = plainMessages(stdout); + expect(msgs.find(m => m.startsWith('Downloading tests@'))).toBe('Downloading tests@1.0.0...'); expect(fs.existsSync(pathToModuleJson)).toBeTruthy(); const moduleJson = JSON.parse(fs.readFileSync(pathToModuleJson, 'utf8')); @@ -88,8 +92,10 @@ describe('Successful install', () => { try { const { stdout } = await run('deploy/modules_user', 'user'); - expect(stdout).toContain('Downloading user@'); - expect(stdout).toContain('Downloading core@'); + const msgs = 
plainMessages(stdout); + // user version is pinned in the fixture; core version is resolved from the registry + expect(msgs.find(m => m.startsWith('Downloading user@'))).toBe('Downloading user@3.0.8...'); + expect(msgs.find(m => m.startsWith('Downloading core@'))).toMatch(/^Downloading core@\d+\.\d+\.\d+\.\.\.$/); expect(fs.existsSync(pathToUserModuleJson)).toBeTruthy(); expect(fs.existsSync(pathToCoreModuleJson)).toBeTruthy(); } finally { @@ -107,23 +113,50 @@ describe('Successful install', () => { try { const { stdout } = await run('deploy/modules_test', ''); - expect(stdout).toContain('Downloading tests@0.0.3'); + const msgs = plainMessages(stdout); + expect(msgs.find(m => m.startsWith('Downloading tests@'))).toBe('Downloading tests@0.0.3...'); expect(fs.existsSync(pathToModuleJson)).toBeTruthy(); } finally { await fs.promises.rm(pathToDirectory, { recursive: true }); fs.writeFileSync(lockFilePath, originalLockContent); } }); + + test('skips re-downloading modules already at the locked version with files on disk', async () => { + requireRealCredentials(); + const pathToDirectory = `${cwd('deploy/modules_test')}/modules`; + const lockFilePath = `${cwd('deploy/modules_test')}/app/pos-modules.lock.json`; + const originalLockContent = fs.readFileSync(lockFilePath, 'utf8'); + + try { + // First run: module not on disk → must download + const { stdout: firstRun } = await run('deploy/modules_test', ''); + expect(plainMessages(firstRun).find(m => m.startsWith('Downloading tests@'))).toBe('Downloading tests@0.0.3...'); + + // Second run: same version, directory now exists → skip download + const { stdout: secondRun } = await run('deploy/modules_test', ''); + const secondMsgs = plainMessages(secondRun); + expect(secondMsgs.find(m => m.startsWith('Downloading tests@'))).toBeUndefined(); + expect(secondMsgs.find(m => m.startsWith('Modules downloaded'))).toBe('Modules downloaded successfully (1 already up-to-date)'); + } finally { + await fs.promises.rm(pathToDirectory, { 
recursive: true, force: true }); + fs.writeFileSync(lockFilePath, originalLockContent); + } + }); }); describe('Failed install', () => { test('Module not found - non-existing module', async () => { - const savedCreds = applyCredentials(noCredentials); + const savedCreds = saveCredentials(); const savedPortalHost = process.env.PARTNER_PORTAL_HOST; + applyCredentials(noCredentials); delete process.env.PARTNER_PORTAL_HOST; try { const { stdout } = await run('deploy/modules_test', 'moduleNotFound'); - expect(stdout).toContain("Can't find module moduleNotFound"); + const msgs = plainMessages(stdout); + expect(msgs.find(m => m.startsWith("Can't find"))).toBe( + "Can't find module moduleNotFound (registry: https://partners.platformos.com)" + ); } finally { applyCredentials(savedCreds); if (savedPortalHost) { @@ -132,3 +165,92 @@ describe('Failed install', () => { } }); }); + +describe('repository_url persistence', () => { + test('pos-modules.json written by install contains repository_url', async () => { + requireRealCredentials(); + const posModulesPath = `${cwd('deploy/modules_test')}/app/pos-modules.json`; + const lockFilePath = `${cwd('deploy/modules_test')}/app/pos-modules.lock.json`; + const originalModulesContent = fs.readFileSync(posModulesPath, 'utf8'); + const originalLockContent = fs.readFileSync(lockFilePath, 'utf8'); + const pathToDirectory = `${cwd('deploy/modules_test')}/modules`; + + try { + await run('deploy/modules_test', 'tests'); + + const posModules = JSON.parse(fs.readFileSync(posModulesPath, 'utf8')); + expect(posModules).toHaveProperty('repository_url'); + expect(typeof posModules.repository_url).toBe('string'); + expect(posModules.repository_url.length).toBeGreaterThan(0); + } finally { + await fs.promises.rm(pathToDirectory, { recursive: true, force: true }); + fs.writeFileSync(posModulesPath, originalModulesContent); + fs.writeFileSync(lockFilePath, originalLockContent); + } + }); + + test('pos-modules.lock.json written by install contains 
repository_url', async () => { + requireRealCredentials(); + const posModulesPath = `${cwd('deploy/modules_test')}/app/pos-modules.json`; + const lockFilePath = `${cwd('deploy/modules_test')}/app/pos-modules.lock.json`; + const originalModulesContent = fs.readFileSync(posModulesPath, 'utf8'); + const originalLockContent = fs.readFileSync(lockFilePath, 'utf8'); + const pathToDirectory = `${cwd('deploy/modules_test')}/modules`; + + try { + await run('deploy/modules_test', 'tests'); + + const lockFile = JSON.parse(fs.readFileSync(lockFilePath, 'utf8')); + expect(lockFile).toHaveProperty('repository_url'); + expect(typeof lockFile.repository_url).toBe('string'); + expect(lockFile.repository_url.length).toBeGreaterThan(0); + } finally { + await fs.promises.rm(pathToDirectory, { recursive: true, force: true }); + fs.writeFileSync(posModulesPath, originalModulesContent); + fs.writeFileSync(lockFilePath, originalLockContent); + } + }); + + test('repository_url in pos-modules.json and pos-modules.lock.json match after install', async () => { + requireRealCredentials(); + const posModulesPath = `${cwd('deploy/modules_test')}/app/pos-modules.json`; + const lockFilePath = `${cwd('deploy/modules_test')}/app/pos-modules.lock.json`; + const originalModulesContent = fs.readFileSync(posModulesPath, 'utf8'); + const originalLockContent = fs.readFileSync(lockFilePath, 'utf8'); + const pathToDirectory = `${cwd('deploy/modules_test')}/modules`; + + try { + await run('deploy/modules_test', 'tests'); + + const posModules = JSON.parse(fs.readFileSync(posModulesPath, 'utf8')); + const lockFile = JSON.parse(fs.readFileSync(lockFilePath, 'utf8')); + expect(posModules.repository_url).toBe(lockFile.repository_url); + } finally { + await fs.promises.rm(pathToDirectory, { recursive: true, force: true }); + fs.writeFileSync(posModulesPath, originalModulesContent); + fs.writeFileSync(lockFilePath, originalLockContent); + } + }); +}); + +describe('Idempotency', () => { + test('running install twice 
produces identical lock file content', async () => { + requireRealCredentials(); + const lockFilePath = `${cwd('deploy/modules_test')}/app/pos-modules.lock.json`; + const originalLockContent = fs.readFileSync(lockFilePath, 'utf8'); + const pathToDirectory = `${cwd('deploy/modules_test')}/modules`; + + try { + await run('deploy/modules_test', ''); + const lockAfterFirst = fs.readFileSync(lockFilePath, 'utf8'); + + await run('deploy/modules_test', ''); + const lockAfterSecond = fs.readFileSync(lockFilePath, 'utf8'); + + expect(JSON.parse(lockAfterFirst)).toEqual(JSON.parse(lockAfterSecond)); + } finally { + await fs.promises.rm(pathToDirectory, { recursive: true, force: true }); + fs.writeFileSync(lockFilePath, originalLockContent); + } + }); +}); diff --git a/test/integration/modules-update.test.js b/test/integration/modules-update.test.js index 5f759e254..ab247c0ad 100644 --- a/test/integration/modules-update.test.js +++ b/test/integration/modules-update.test.js @@ -4,7 +4,8 @@ import exec from '#test/utils/exec'; import cliPath from '#test/utils/cliPath'; import fs from 'fs'; import path from 'path'; -import { requireRealCredentials, noCredentials, applyCredentials } from '#test/utils/credentials'; +import { requireRealCredentials, noCredentials, applyCredentials, saveCredentials } from '#test/utils/credentials'; +import { plainMessages } from '#test/utils/parseOutput'; const cwd = name => path.join(process.cwd(), 'test', 'fixtures', name); const run = async (fixtureName, options) => await exec(`${cliPath} modules update ${options}`, { cwd: cwd(fixtureName), env: process.env }); @@ -20,13 +21,15 @@ describe('Successful update', () => { try { const { stdout } = await run('deploy/modules_update', 'core'); - expect(stdout).toMatch('Updating module'); + const msgs = plainMessages(stdout); + expect(msgs.find(m => m === 'Updating module')).toBe('Updating module'); const fileContent = fs.readFileSync(pathToLockFile, { encoding: 'utf8' }); const lockFile = 
JSON.parse(fileContent); expect(lockFile['modules']['core']).not.toEqual('1.0.0'); - expect(stdout).toContain('Downloading core@'); + // Core is updated to the latest version from the registry; exact version is not known in advance + expect(msgs.find(m => m.startsWith('Downloading core@'))).toMatch(/^Downloading core@\d+\.\d+\.\d+\.\.\.$/); expect(fs.existsSync(path.join(pathToDirectory, 'core', 'template-values.json'))).toBeTruthy(); } finally { await fs.promises.rm(pathToDirectory, { recursive: true, force: true }); @@ -34,16 +37,45 @@ describe('Successful update', () => { fs.writeFileSync(pathToLockFile, originalLockContent); } }, 30000); + + test('skips re-downloading modules that have not changed after update', async () => { + requireRealCredentials(); + const pathToLockFile = `${cwd('deploy/modules_update')}/app/pos-modules.lock.json`; + const posModulesPath = `${cwd('deploy/modules_update')}/app/pos-modules.json`; + const originalModulesContent = fs.readFileSync(posModulesPath, 'utf8'); + const originalLockContent = fs.readFileSync(pathToLockFile, 'utf8'); + const pathToDirectory = `${cwd('deploy/modules_update')}/modules`; + + try { + // First update: resolves latest core, downloads it + const { stdout: firstRun } = await run('deploy/modules_update', 'core'); + expect(plainMessages(firstRun).find(m => m.startsWith('Downloading core@'))).toMatch(/^Downloading core@\d+\.\d+\.\d+\.\.\.$/); + + // Second update with the same module: already at latest, directory on disk → skip + const { stdout: secondRun } = await run('deploy/modules_update', 'core'); + const secondMsgs = plainMessages(secondRun); + expect(secondMsgs.find(m => m.startsWith('Downloading core@'))).toBeUndefined(); + expect(secondMsgs.find(m => m.startsWith('Modules downloaded'))).toBe('Modules downloaded successfully (1 already up-to-date)'); + } finally { + await fs.promises.rm(pathToDirectory, { recursive: true, force: true }); + fs.writeFileSync(posModulesPath, originalModulesContent); + 
fs.writeFileSync(pathToLockFile, originalLockContent); + } + }, 30000); }); describe('Failed update', () => { test('Module not found - non-existing module', async () => { - const savedCreds = applyCredentials(noCredentials); + const savedCreds = saveCredentials(); const savedPortalHost = process.env.PARTNER_PORTAL_HOST; + applyCredentials(noCredentials); delete process.env.PARTNER_PORTAL_HOST; try { const { stdout } = await run('deploy/modules_update', 'moduleNotFound'); - expect(stdout).toMatch("Can't find module moduleNotFound"); + const msgs = plainMessages(stdout); + expect(msgs.find(m => m.startsWith("Can't find"))).toBe( + "Can't find module moduleNotFound (registry: https://partners.platformos.com)" + ); } finally { applyCredentials(savedCreds); if (savedPortalHost) { @@ -51,18 +83,115 @@ describe('Failed update', () => { } } }); - test('Module not found - no name for module', async () => { - const savedCreds = applyCredentials(noCredentials); - const savedPortalHost = process.env.PARTNER_PORTAL_HOST; - delete process.env.PARTNER_PORTAL_HOST; + + test('no modules to update when pos-modules.json is empty', async () => { + const savedCreds = saveCredentials(); + applyCredentials(noCredentials); try { - const { stderr } = await run('deploy/modules_update', ''); - expect(stderr).toMatch("error: missing required argument 'module-name'"); + const { stdout } = await exec(`${cliPath} modules update`, { cwd: cwd('test/without-tests-module'), env: process.env }); + expect(plainMessages(stdout).find(m => m === 'No modules to update')).toBe('No modules to update'); } finally { applyCredentials(savedCreds); - if (savedPortalHost) { - process.env.PARTNER_PORTAL_HOST = savedPortalHost; - } } }); }); + +describe('repository_url persistence', () => { + test('pos-modules.json written by update contains repository_url', async () => { + requireRealCredentials(); + const posModulesPath = `${cwd('deploy/modules_update')}/app/pos-modules.json`; + const pathToLockFile = 
`${cwd('deploy/modules_update')}/app/pos-modules.lock.json`; + const originalModulesContent = fs.readFileSync(posModulesPath, 'utf8'); + const originalLockContent = fs.readFileSync(pathToLockFile, 'utf8'); + const pathToDirectory = `${cwd('deploy/modules_update')}/modules`; + + try { + await run('deploy/modules_update', 'core'); + + const posModules = JSON.parse(fs.readFileSync(posModulesPath, 'utf8')); + expect(posModules).toHaveProperty('repository_url'); + expect(typeof posModules.repository_url).toBe('string'); + expect(posModules.repository_url.length).toBeGreaterThan(0); + } finally { + await fs.promises.rm(pathToDirectory, { recursive: true, force: true }); + fs.writeFileSync(posModulesPath, originalModulesContent); + fs.writeFileSync(pathToLockFile, originalLockContent); + } + }, 30000); + + test('pos-modules.lock.json written by update contains repository_url', async () => { + requireRealCredentials(); + const posModulesPath = `${cwd('deploy/modules_update')}/app/pos-modules.json`; + const pathToLockFile = `${cwd('deploy/modules_update')}/app/pos-modules.lock.json`; + const originalModulesContent = fs.readFileSync(posModulesPath, 'utf8'); + const originalLockContent = fs.readFileSync(pathToLockFile, 'utf8'); + const pathToDirectory = `${cwd('deploy/modules_update')}/modules`; + + try { + await run('deploy/modules_update', 'core'); + + const lockFile = JSON.parse(fs.readFileSync(pathToLockFile, 'utf8')); + expect(lockFile).toHaveProperty('repository_url'); + expect(typeof lockFile.repository_url).toBe('string'); + expect(lockFile.repository_url.length).toBeGreaterThan(0); + } finally { + await fs.promises.rm(pathToDirectory, { recursive: true, force: true }); + fs.writeFileSync(posModulesPath, originalModulesContent); + fs.writeFileSync(pathToLockFile, originalLockContent); + } + }, 30000); +}); + +describe('Update vs install distinction', () => { + test('modules update always replaces version even if already present', async () => { + 
requireRealCredentials(); + const posModulesPath = `${cwd('deploy/modules_update')}/app/pos-modules.json`; + const pathToLockFile = `${cwd('deploy/modules_update')}/app/pos-modules.lock.json`; + const originalModulesContent = fs.readFileSync(posModulesPath, 'utf8'); + const originalLockContent = fs.readFileSync(pathToLockFile, 'utf8'); + const pathToDirectory = `${cwd('deploy/modules_update')}/modules`; + + try { + // First update to establish latest + await run('deploy/modules_update', 'core'); + const afterFirst = JSON.parse(fs.readFileSync(posModulesPath, 'utf8')); + const versionAfterFirst = afterFirst.modules.core; + + // Second update — should still run and confirm (or re-confirm) the latest version + const { stdout } = await run('deploy/modules_update', 'core'); + const afterSecond = JSON.parse(fs.readFileSync(posModulesPath, 'utf8')); + + expect(afterSecond.modules.core).toBe(versionAfterFirst); + expect(plainMessages(stdout).find(m => m === 'Updating module')).toBe('Updating module'); + } finally { + await fs.promises.rm(pathToDirectory, { recursive: true, force: true }); + fs.writeFileSync(posModulesPath, originalModulesContent); + fs.writeFileSync(pathToLockFile, originalLockContent); + } + }, 30000); + + test('update to specific version pins that version in pos-modules.json', async () => { + requireRealCredentials(); + const posModulesPath = `${cwd('deploy/modules_update')}/app/pos-modules.json`; + const pathToLockFile = `${cwd('deploy/modules_update')}/app/pos-modules.lock.json`; + const originalModulesContent = fs.readFileSync(posModulesPath, 'utf8'); + const originalLockContent = fs.readFileSync(pathToLockFile, 'utf8'); + const pathToDirectory = `${cwd('deploy/modules_update')}/modules`; + // Use the version already pinned in the fixture — it is guaranteed to exist in the registry. 
+ const pinnedVersion = JSON.parse(originalModulesContent).modules.core; + + try { + await run('deploy/modules_update', `core@${pinnedVersion}`); + + const posModules = JSON.parse(fs.readFileSync(posModulesPath, 'utf8')); + const lockFile = JSON.parse(fs.readFileSync(pathToLockFile, 'utf8')); + + expect(posModules.modules.core).toBe(pinnedVersion); + expect(lockFile.modules.core).toBe(pinnedVersion); + } finally { + await fs.promises.rm(pathToDirectory, { recursive: true, force: true }); + fs.writeFileSync(posModulesPath, originalModulesContent); + fs.writeFileSync(pathToLockFile, originalLockContent); + } + }, 30000); +}); diff --git a/test/unit/configFiles.test.js b/test/unit/configFiles.test.js new file mode 100644 index 000000000..162409df7 --- /dev/null +++ b/test/unit/configFiles.test.js @@ -0,0 +1,183 @@ +import { describe, test, expect, beforeEach, afterEach, vi } from 'vitest'; +import fs from 'fs'; +import path from 'path'; +import os from 'os'; +import { readPosModulesLock, readRepositoryUrl, readLocalModules, writePosModules, writePosModulesLock, DEFAULT_REPOSITORY_URL } from '#lib/modules/configFiles.js'; + +vi.mock('#lib/logger.js', () => ({ + default: { Debug: vi.fn(), Error: vi.fn(), Info: vi.fn(), Warn: vi.fn(), Success: vi.fn() } +})); + +// All tests use a temporary directory so the real project files are never touched. +let tmpDir; +let originalCwd; + +beforeEach(() => { + originalCwd = process.cwd(); + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'pos-cli-test-')); + process.chdir(tmpDir); +}); + +afterEach(() => { + process.chdir(originalCwd); + fs.rmSync(tmpDir, { recursive: true, force: true }); + delete process.env.PARTNER_PORTAL_HOST; +}); + +// readPosModulesLock reads from process.cwd()/app/pos-modules.lock.json. 
+describe('readPosModulesLock', () => { + test('returns empty object when the lock file does not exist', () => { + expect(readPosModulesLock()).toEqual({}); + }); + + test('returns the modules map from an existing lock file', () => { + fs.mkdirSync(path.join(tmpDir, 'app')); + fs.writeFileSync( + path.join(tmpDir, 'app', 'pos-modules.lock.json'), + JSON.stringify({ modules: { core: '2.0.6', user: '5.1.2' } }, null, 2) + ); + + expect(readPosModulesLock()).toEqual({ core: '2.0.6', user: '5.1.2' }); + }); + + test('returns empty object when the lock file exists but has no modules key', () => { + fs.mkdirSync(path.join(tmpDir, 'app')); + fs.writeFileSync(path.join(tmpDir, 'app', 'pos-modules.lock.json'), JSON.stringify({})); + + expect(readPosModulesLock()).toEqual({}); + }); +}); + +// readRepositoryUrl reads from process.cwd()/app/pos-modules.json. +describe('readRepositoryUrl', () => { + test('returns DEFAULT_REPOSITORY_URL when pos-modules.json does not exist', () => { + expect(readRepositoryUrl()).toBe(DEFAULT_REPOSITORY_URL); + }); + + test('returns the repository_url from pos-modules.json when present', () => { + fs.mkdirSync(path.join(tmpDir, 'app')); + fs.writeFileSync( + path.join(tmpDir, 'app', 'pos-modules.json'), + JSON.stringify({ repository_url: 'https://custom.registry.example.com', modules: {} }, null, 2) + ); + + expect(readRepositoryUrl()).toBe('https://custom.registry.example.com'); + }); + + test('returns DEFAULT_REPOSITORY_URL when pos-modules.json has no repository_url', () => { + fs.mkdirSync(path.join(tmpDir, 'app')); + fs.writeFileSync( + path.join(tmpDir, 'app', 'pos-modules.json'), + JSON.stringify({ modules: { core: '2.0.6' } }, null, 2) + ); + + expect(readRepositoryUrl()).toBe(DEFAULT_REPOSITORY_URL); + }); + + test('PARTNER_PORTAL_HOST env var takes precedence over pos-modules.json', () => { + fs.mkdirSync(path.join(tmpDir, 'app')); + fs.writeFileSync( + path.join(tmpDir, 'app', 'pos-modules.json'), + JSON.stringify({ repository_url: 
'https://custom.registry.example.com', modules: {} }, null, 2) + ); + process.env.PARTNER_PORTAL_HOST = 'https://env-override.example.com'; + + expect(readRepositoryUrl()).toBe('https://env-override.example.com'); + }); +}); + +// writePosModules writes to process.cwd()/app/pos-modules.json. +describe('writePosModules', () => { + test('writes repository_url and modules to pos-modules.json', () => { + fs.mkdirSync(path.join(tmpDir, 'app')); + + writePosModules({ core: '2.0.6' }); + + const written = JSON.parse(fs.readFileSync(path.join(tmpDir, 'app', 'pos-modules.json'), 'utf8')); + expect(written).toEqual({ repository_url: DEFAULT_REPOSITORY_URL, modules: { core: '2.0.6' } }); + }); + + test('writes a custom repository_url when provided', () => { + fs.mkdirSync(path.join(tmpDir, 'app')); + + writePosModules({ core: '2.0.6' }, 'https://custom.registry.example.com'); + + const written = JSON.parse(fs.readFileSync(path.join(tmpDir, 'app', 'pos-modules.json'), 'utf8')); + expect(written.repository_url).toBe('https://custom.registry.example.com'); + }); + + test('written file is readable by readLocalModules', () => { + fs.mkdirSync(path.join(tmpDir, 'app')); + + writePosModules({ core: '2.0.6', user: '5.1.2' }); + + expect(readLocalModules()).toEqual({ core: '2.0.6', user: '5.1.2' }); + }); + + test('written repository_url is readable by readRepositoryUrl', () => { + fs.mkdirSync(path.join(tmpDir, 'app')); + + writePosModules({ core: '2.0.6' }, 'https://custom.registry.example.com'); + + expect(readRepositoryUrl()).toBe('https://custom.registry.example.com'); + }); +}); + +// writePosModulesLock writes to process.cwd()/app/pos-modules.lock.json. 
+describe('writePosModulesLock', () => { + test('writes repository_url and modules to pos-modules.lock.json', () => { + fs.mkdirSync(path.join(tmpDir, 'app')); + + writePosModulesLock({ core: '2.0.6', user: '5.1.2' }); + + const written = JSON.parse(fs.readFileSync(path.join(tmpDir, 'app', 'pos-modules.lock.json'), 'utf8')); + expect(written).toEqual({ + repository_url: DEFAULT_REPOSITORY_URL, + modules: { core: '2.0.6', user: '5.1.2' } + }); + }); + + test('writes a custom repository_url when provided', () => { + fs.mkdirSync(path.join(tmpDir, 'app')); + + writePosModulesLock({ core: '2.0.6' }, 'https://custom.registry.example.com'); + + const written = JSON.parse(fs.readFileSync(path.join(tmpDir, 'app', 'pos-modules.lock.json'), 'utf8')); + expect(written.repository_url).toBe('https://custom.registry.example.com'); + }); + + test('round-trip: written lock file is readable by readPosModulesLock', () => { + fs.mkdirSync(path.join(tmpDir, 'app')); + + writePosModulesLock({ core: '2.0.6', user: '5.1.2' }); + + expect(readPosModulesLock()).toEqual({ core: '2.0.6', user: '5.1.2' }); + }); +}); + +// readLocalModules reads from process.cwd()/app/pos-modules.json. 
+describe('readLocalModules', () => { + test('returns empty object when pos-modules.json does not exist', () => { + expect(readLocalModules()).toEqual({}); + }); + + test('returns the modules map from an existing file', () => { + fs.mkdirSync(path.join(tmpDir, 'app')); + fs.writeFileSync( + path.join(tmpDir, 'app', 'pos-modules.json'), + JSON.stringify({ repository_url: 'https://partners.platformos.com', modules: { core: '2.0.6', user: '5.1.2' } }, null, 2) + ); + + expect(readLocalModules()).toEqual({ core: '2.0.6', user: '5.1.2' }); + }); + + test('returns empty object when file exists but has no modules key', () => { + fs.mkdirSync(path.join(tmpDir, 'app')); + fs.writeFileSync( + path.join(tmpDir, 'app', 'pos-modules.json'), + JSON.stringify({ repository_url: 'https://partners.platformos.com' }) + ); + + expect(readLocalModules()).toEqual({}); + }); +}); diff --git a/test/unit/dependencies.test.js b/test/unit/dependencies.test.js index 65112a980..eac081bed 100644 --- a/test/unit/dependencies.test.js +++ b/test/unit/dependencies.test.js @@ -1,117 +1,414 @@ import { resolveDependencies, findModuleVersion } from '#lib/modules/dependencies'; -import { isDeepStrictEqual } from 'node:util'; - -test('resolveDependencies ok', async () => { - const core = {'module':'core','versions':{'1.0.0':{'dependencies':{}}, '1.5.0':{'dependencies':{}}, '1.6.0':{'dependencies':{}}, '1.8.0':{'dependencies':{}}}}; - const modulesVersions = async (modulesNames) => { - if(isDeepStrictEqual(modulesNames, ['payments_stripe', 'tests', 'a'])) { - return [ - {'module':'payments_stripe','versions':{'1.0.6':{'dependencies':{'payments':'^1.0.0', 'core':'^1.0.0'}}}}, - {'module':'tests','versions':{'1.0.7':{'dependencies':{'core':'^1.5.0'}}}}, - {'module':'a','versions':{'1.0.0':{'dependencies':{'b':'1.0.0'}}}} - ]; - } else if(isDeepStrictEqual(modulesNames, ['payments', 'core', 'b'])){ - return [ - {'module':'payments','versions':{'1.0.0':{'dependencies':{'core':'1.6.0'}}}}, - 
{'module':'b','versions':{'1.0.0':{'dependencies':{'c':'1.0.0'}}}} - ].concat(core); - } else if(isDeepStrictEqual(modulesNames, ['core', 'c'])){ - return [ - {'module':'c','versions':{'1.0.0':{'dependencies':{}}}} - ].concat(core); - } - throw new Error(`Unexpected modulesNames: ${JSON.stringify(modulesNames)}`); - }; - const rootModules = { - 'payments_stripe': '1.0.6', - 'tests': '1.0.7', - 'a': '1.0.0' - }; - - const data = await resolveDependencies(rootModules, modulesVersions); - - expect(data).toEqual( - { - 'payments_stripe': '1.0.6', - 'tests': '1.0.7', - 'payments': '1.0.0', - 'core': '1.6.0', - 'a': '1.0.0', - 'b': '1.0.0', - 'c': '1.0.0' - } +import { mod, makeRegistry } from '#test/utils/moduleRegistry.js'; + +// spyRegistry wraps makeRegistry to record every batch of names requested, +// enabling assertions about how many registry fetches were made (memoisation tests). +const spyRegistry = (...modules) => { + const inner = makeRegistry(...modules); + const calls = []; + const fn = async (names) => { calls.push([...names]); return inner(names); }; + fn.calls = calls; + return fn; +}; + +// --------------------------------------------------------------------------- +// resolveDependencies — happy paths +// --------------------------------------------------------------------------- + +test('resolves a simple two-level dependency chain', async () => { + const core = mod('core', { '1.0.0': {}, '1.2.0': {} }); + const app = mod('app', { '1.0.0': { core: '^1.0.0' } }); + + const data = await resolveDependencies({ app: '1.0.0' }, makeRegistry(app, core)); + + expect(data).toEqual({ app: '1.0.0', core: '1.2.0' }); +}); + +test('resolves diamond dependency — all constraints from all levels satisfied simultaneously', async () => { + // payments_stripe and tests both need core (different lower bounds). + // payments@1.0.0 pins core to exactly 1.6.0 — that must win for everyone. 
+ const core = mod('core', { '1.0.0': {}, '1.5.0': {}, '1.6.0': {}, '1.8.0': {} }); + const payments = mod('payments', { '1.0.0': { core: '1.6.0' } }); + const payments_stripe = mod('payments_stripe', { '1.0.6': { payments: '^1.0.0', core: '^1.0.0' } }); + const tests = mod('tests', { '1.0.7': { core: '^1.5.0' } }); + const a = mod('a', { '1.0.0': { b: '1.0.0' } }); + const b = mod('b', { '1.0.0': { c: '1.0.0' } }); + const c = mod('c', { '1.0.0': {} }); + + const data = await resolveDependencies( + { payments_stripe: '1.0.6', tests: '1.0.7', a: '1.0.0' }, + makeRegistry(payments_stripe, tests, a, payments, core, b, c) ); + + expect(data).toEqual({ + payments_stripe: '1.0.6', tests: '1.0.7', a: '1.0.0', + payments: '1.0.0', core: '1.6.0', b: '1.0.0', c: '1.0.0' + }); +}); + +test('picks the highest version satisfying all constraints when multiple modules require the same dep', async () => { + const core = mod('core', { '1.5.0': {}, '1.6.0': {}, '1.8.0': {} }); + const moduleA = mod('module-a', { '1.0.0': { core: '^1.5.0' } }); + const moduleB = mod('module-b', { '1.0.0': { core: '^1.6.0' } }); + + const data = await resolveDependencies( + { 'module-a': '1.0.0', 'module-b': '1.0.0' }, + makeRegistry(moduleA, moduleB, core) + ); + + expect(data['core']).toBe('1.8.0'); +}); + +test('respects a root-pinned version that satisfies all transitive constraints', async () => { + // root pins core@1.6.1; tests requires ^1.6.0 — 1.6.1 satisfies it, keep the pin + const core = mod('core', { '1.6.0': {}, '1.6.1': {}, '1.8.0': {} }); + const tests = mod('tests', { '1.0.7': { core: '^1.6.0' } }); + + const data = await resolveDependencies({ tests: '1.0.7', core: '1.6.1' }, makeRegistry(tests, core)); + + expect(data).toEqual({ tests: '1.0.7', core: '1.6.1' }); +}); + +test('resolves a deep transitive chain (4 levels)', async () => { + const d = mod('d', { '1.0.0': {} }); + const c = mod('c', { '1.0.0': { d: '1.0.0' } }); + const b = mod('b', { '1.0.0': { c: '1.0.0' } }); + const a 
= mod('a', { '1.0.0': { b: '1.0.0' } }); + + const data = await resolveDependencies({ a: '1.0.0' }, makeRegistry(a, b, c, d)); + + expect(data).toEqual({ a: '1.0.0', b: '1.0.0', c: '1.0.0', d: '1.0.0' }); +}); + +test('resolves nothing extra when root module has no dependencies', async () => { + const leaf = mod('leaf', { '1.0.0': {} }); + + const data = await resolveDependencies({ leaf: '1.0.0' }, makeRegistry(leaf)); + + expect(data).toEqual({ leaf: '1.0.0' }); +}); + +test('returns empty object for empty input', async () => { + expect(await resolveDependencies({}, makeRegistry())).toEqual({}); +}); + +// --------------------------------------------------------------------------- +// resolveDependencies — BFS global constraint resolution +// (cases the old level-by-level recursive approach could not handle correctly) +// --------------------------------------------------------------------------- + +test('detects conflict between constraints at different levels of the tree', async () => { + // A requires D@^1.5.0 directly AND B@^1.0.0 which requires D@1.3.0 (exact). + // The two constraints on D have no intersection → error. + const d = mod('d', { '1.3.0': {}, '1.5.0': {}, '1.8.0': {} }); + const b = mod('b', { '1.5.0': { d: '1.3.0' } }); + const a = mod('a', { '1.0.0': { b: '^1.0.0', d: '^1.5.0' } }); + + await expect( + resolveDependencies({ a: '1.0.0' }, makeRegistry(a, b, d)) + ).rejects.toThrow(/No version of "d" satisfies all constraints/); +}); + +test('downgrades a transitive dep when a later-discovered constraint narrows the range', async () => { + // A requires D@^1.2.0 → initially resolves to 1.8.0 (highest satisfying). + // B@1.5.0 (dep of A) then requires D@>=1.4.0 <1.8.0. + // Combined constraints eliminate 1.8.0 → 1.7.0 is the correct answer. 
+ const d = mod('d', { '1.2.0': {}, '1.4.0': {}, '1.7.0': {}, '1.8.0': {} }); + const b = mod('b', { '1.5.0': { d: '>=1.4.0 <1.8.0' } }); + const a = mod('a', { '1.0.0': { b: '^1.0.0', d: '^1.2.0' } }); + + const data = await resolveDependencies({ a: '1.0.0' }, makeRegistry(a, b, d)); + + expect(data['d']).toBe('1.7.0'); }); -test('resolveDependencies do not use newest available version but the one defined in root', async () => { - const core = {'module':'core','versions':{'1.6.0':{'dependencies':{}}, '1.6.1':{'dependencies':{}}, '1.8.0':{'dependencies':{}}}}; - const tests = {'module':'tests','versions':{'1.0.7':{'dependencies':{'core':'^1.6.0'}}}}; - const modulesVersions = async (modulesNames) => { - if(isDeepStrictEqual(modulesNames, ['tests', 'core'])) { - return [tests, core]; - } else if(isDeepStrictEqual(modulesNames, ['tests'])) { - return [tests]; - } else if(isDeepStrictEqual(modulesNames, ['core'])) { - return [core]; - } - throw new Error(`Unexpected modulesNames: ${JSON.stringify(modulesNames)}`); - }; - const rootModules = { - 'tests': '1.0.7', - 'core': '1.6.1' - }; - - const data = await resolveDependencies(rootModules, modulesVersions, rootModules); - - expect(data).toEqual( - { - 'tests': '1.0.7', - 'core': '1.6.1' - } +test('cleans up stale constraints after a version downgrade', async () => { + // D@1.8.0 requires E@^2.0.0. D@1.7.0 requires E@^1.0.0. + // The algorithm initially resolves D to 1.8.0, then B's constraint (>=1.4.0 <1.8.0) + // forces a downgrade to 1.7.0. Without stale-constraint cleanup, the constraints + // map would contain both {^2.0.0 from D@1.8.0} and {^1.0.0 from D@1.7.0} for E — + // these ranges don't intersect → false "no satisfying version" error. + // With cleanup, only {^1.0.0 from D@1.7.0} remains → E@1.0.0. 
+ const e = mod('e', { '1.0.0': {}, '2.0.0': {} }); + const d = mod('d', { '1.4.0': {}, '1.7.0': { e: '^1.0.0' }, '1.8.0': { e: '^2.0.0' } }); + const b = mod('b', { '1.5.0': { d: '>=1.4.0 <1.8.0' } }); + const a = mod('a', { '1.0.0': { b: '^1.0.0', d: '^1.4.0' } }); + + const data = await resolveDependencies({ a: '1.0.0' }, makeRegistry(a, b, d, e)); + + expect(data['d']).toBe('1.7.0'); + expect(data['e']).toBe('1.0.0'); +}); + +test('does not install a phantom dep when its only requiring version is downgraded away (same round)', async () => { + // D@1.8.0 requires E, but D gets downgraded to 1.0.0 which has no E dependency. + // The downgrade and E's tentative resolution happen in the same BFS round — + // the three-pass Phase 2 ordering must prevent E from being committed. + const e = mod('e', { '1.0.0': {} }); + const d = mod('d', { '1.0.0': {}, '1.8.0': { e: '^1.0.0' } }); + const b = mod('b', { '1.5.0': { d: '<1.5.0' } }); + const a = mod('a', { '1.0.0': { b: '^1.0.0', d: '^1.0.0' } }); + + const data = await resolveDependencies({ a: '1.0.0' }, makeRegistry(a, b, d, e)); + + expect(data['d']).toBe('1.0.0'); + expect(data['e']).toBeUndefined(); +}); + +test('does not install a phantom dep committed in an earlier round when its requirer is later downgraded', async () => { + // Multi-round phantom dep: E is resolved and committed in round 2 (as D@1.8.0's dep), + // but the narrowing constraint that forces D to downgrade arrives only in round 3 + // (via C → F → D@<1.5.0). After the downgrade, D@1.0.0 has no E dep, so E is + // unreachable from root. The post-BFS reachability walk must remove it. 
+ const e = mod('e', { '1.0.0': {} }); + const d = mod('d', { '1.0.0': {}, '1.8.0': { e: '^1.0.0' } }); + const f = mod('f', { '1.0.0': { d: '<1.5.0' } }); + const c = mod('c', { '1.0.0': { f: '^1.0.0' } }); + const a = mod('a', { '1.0.0': { d: '^1.0.0', c: '^1.0.0' } }); + + const data = await resolveDependencies({ a: '1.0.0' }, makeRegistry(a, c, d, e, f)); + + expect(data['d']).toBe('1.0.0'); + expect(data['e']).toBeUndefined(); +}); + +test('combines compatible constraints across all levels to pick the tightest satisfying version', async () => { + // Three modules at different levels each put a lower bound on core. + // All three must be satisfied simultaneously → pick the highest. + const core = mod('core', { '1.0.0': {}, '1.4.0': {}, '1.6.0': {}, '1.9.0': {} }); + const moduleA = mod('module-a', { '1.0.0': { core: '^1.0.0' } }); + const moduleB = mod('module-b', { '1.0.0': { core: '^1.4.0' } }); + const moduleC = mod('module-c', { '1.0.0': { core: '^1.6.0' } }); + + const data = await resolveDependencies( + { 'module-a': '1.0.0', 'module-b': '1.0.0', 'module-c': '1.0.0' }, + makeRegistry(moduleA, moduleB, moduleC, core) ); + + expect(data['core']).toBe('1.9.0'); }); +// --------------------------------------------------------------------------- +// resolveDependencies — pre-release version handling +// --------------------------------------------------------------------------- -test('find module with newest version', async () => { - const modulesVersions = async (_modulesNames) => { - return [{'module':'core','versions':{'1.0.0':{'dependencies':{}}, '1.5.0':{'dependencies':{}}}}]; - }; +test('does not select a pre-release version to satisfy a range constraint', async () => { + // ^1.0.0 must not match 1.5.0-beta.1 even though it is numerically greater than 1.0.0. + // This matches standard npm behaviour: pre-releases are excluded from range matching. 
+ const dep = mod('dep', { '1.0.0': {}, '1.5.0-beta.1': {} }); + const app = mod('app', { '1.0.0': { dep: '^1.0.0' } }); - const data = await findModuleVersion('core', null, modulesVersions); + const data = await resolveDependencies({ app: '1.0.0' }, makeRegistry(app, dep)); - expect(data).toEqual({ 'core': '1.5.0' }); + expect(data['dep']).toBe('1.0.0'); }); -test('find module with newest stable version', async () => { - const modulesVersions = async (_modulesNames) => { - return [{'module':'core','versions':{'1.0.0':{'dependencies':{}}, '1.5.0':{'dependencies':{}}, '1.5.1-beta.1':{'dependencies':{}}}}]; - }; +test('resolves a pre-release version when it is pinned explicitly as a transitive dep', async () => { + // When a module explicitly names an exact pre-release, it should be installed. + const dep = mod('dep', { '0.9.0': {}, '1.0.0-beta.1': {} }); + const app = mod('app', { '1.0.0': { dep: '1.0.0-beta.1' } }); - const data = await findModuleVersion('core', null, modulesVersions); + const data = await resolveDependencies({ app: '1.0.0' }, makeRegistry(app, dep)); - expect(data).toEqual({ 'core': '1.5.0' }); + expect(data['dep']).toBe('1.0.0-beta.1'); }); -test('find module with requested version', async () => { - const modulesVersions = async (_modulesNames) => [{'module':'core','versions':{'1.0.0':{'dependencies':{}}, '1.5.0':{'dependencies':{}}}}]; +// --------------------------------------------------------------------------- +// resolveDependencies — circular dependency guard +// --------------------------------------------------------------------------- + +test('handles a two-module circular dependency without infinite recursion', async () => { + const a = mod('a', { '1.0.0': { b: '1.0.0' } }); + const b = mod('b', { '1.0.0': { a: '1.0.0' } }); - const data = await findModuleVersion('core', '1.0.0', modulesVersions); + const data = await resolveDependencies({ a: '1.0.0' }, makeRegistry(a, b)); - expect(data).toEqual({ 'core': '1.0.0' }); + 
expect(data).toEqual({ a: '1.0.0', b: '1.0.0' }); }); -test('find module with requested version even if it is beta', async () => { - const modulesVersions = async (_modulesNames) => [{'module':'core','versions':{'1.0.0-beta.1':{'dependencies':{}}, '1.5.0':{'dependencies':{}}}}]; +test('handles a three-module circular dependency without infinite recursion', async () => { + const a = mod('a', { '1.0.0': { b: '1.0.0' } }); + const b = mod('b', { '1.0.0': { c: '1.0.0' } }); + const c = mod('c', { '1.0.0': { a: '1.0.0' } }); - const data = await findModuleVersion('core', '1.0.0-beta.1', modulesVersions); + const data = await resolveDependencies({ a: '1.0.0' }, makeRegistry(a, b, c)); - expect(data).toEqual({ 'core': '1.0.0-beta.1' }); + expect(data).toEqual({ a: '1.0.0', b: '1.0.0', c: '1.0.0' }); }); -test('can not find module with requested version', async () => { - const modulesVersions = async (_modulesNames) => [{'module':'core','versions':{'1.0.0':{'dependencies':{}}, '1.5.0':{'dependencies':{}}}}]; +// --------------------------------------------------------------------------- +// resolveDependencies — version conflict errors +// --------------------------------------------------------------------------- + +test('throws when a root-pinned version conflicts with a transitive constraint', async () => { + // community requires core ^2.0.0 but root pins core@1.5.5 + const core = mod('core', { '1.5.5': {}, '2.0.0': {}, '2.0.6': {} }); + const community = mod('community', { '1.3.8': { core: '^2.0.0' } }); + + await expect( + resolveDependencies({ community: '1.3.8', core: '1.5.5' }, makeRegistry(community, core)) + ).rejects.toThrow(/Version conflict.*core@1\.5\.5.*\^2\.0\.0.*community@1\.3\.8/); +}); + +test('throws when two transitive dependencies require mutually incompatible versions', async () => { + const core = mod('core', { '1.5.5': {}, '2.0.0': {} }); + const moduleA = mod('module-a', { '1.0.0': { core: '^2.0.0' } }); + const moduleB = mod('module-b', { 
'1.0.0': { core: '1.5.5' } }); + + await expect( + resolveDependencies({ 'module-a': '1.0.0', 'module-b': '1.0.0' }, makeRegistry(moduleA, moduleB, core)) + ).rejects.toThrow(/No version of "core" satisfies all constraints/); +}); + +test('error message names every module that contributed a conflicting constraint', async () => { + const core = mod('core', { '1.0.0': {}, '2.0.0': {} }); + const moduleA = mod('module-a', { '1.0.0': { core: '^2.0.0' } }); + const moduleB = mod('module-b', { '1.0.0': { core: '1.0.0' } }); + + await expect( + resolveDependencies({ 'module-a': '1.0.0', 'module-b': '1.0.0' }, makeRegistry(moduleA, moduleB, core)) + ).rejects.toThrow(/module-a@1\.0\.0.*module-b@1\.0\.0|module-b@1\.0\.0.*module-a@1\.0\.0/); +}); + +test('annotates root modules in conflict error and appends a hint', async () => { + // Both module-a and module-b are root modules; their conflicting core constraints + // should be labelled "root module" and a hint listing both should appear. + const core = mod('core', { '1.0.0': {}, '2.0.0': {} }); + const moduleA = mod('module-a', { '1.0.0': { core: '^2.0.0' } }); + const moduleB = mod('module-b', { '1.0.0': { core: '1.0.0' } }); + + const err = await resolveDependencies( + { 'module-a': '1.0.0', 'module-b': '1.0.0' }, makeRegistry(moduleA, moduleB, core) + ).catch(e => e); + + expect(err.message).toMatch(/root module/); + expect(err.message).toMatch(/Conflicting root modules:/); + expect(err.message).toMatch(/module-a/); + expect(err.message).toMatch(/module-b/); + expect(err.message).toMatch(/Try updating them one at a time/); +}); + +test('does not add root module hint when the conflict involves only transitive deps', async () => { + // a is the single root; b and c are transitive deps that conflict on d. + // Only one root module is involved (a), so no hint should appear. 
+ const d = mod('d', { '1.0.0': {}, '2.0.0': {} }); + const b = mod('b', { '1.0.0': { d: '^2.0.0' } }); + const c = mod('c', { '1.0.0': { d: '1.0.0' } }); + const a = mod('a', { '1.0.0': { b: '^1.0.0', c: '^1.0.0' } }); + + const err = await resolveDependencies( + { a: '1.0.0' }, makeRegistry(a, b, c, d) + ).catch(e => e); + + expect(err.message).toMatch(/No version of "d" satisfies all constraints/); + expect(err.message).not.toMatch(/Conflicting root modules/); +}); + +test('throws when a required module is absent from the registry', async () => { + const app = mod('app', { '1.0.0': { core: '^1.0.0' } }); + + await expect( + resolveDependencies({ app: '1.0.0' }, makeRegistry(app /* core missing */)) + ).rejects.toThrow(/Module "core" not found in the registry/); +}); + +test('throws when the requested module version does not exist in the registry', async () => { + const app = mod('app', { '1.0.0': {} }); + + await expect( + resolveDependencies({ app: '9.9.9' }, makeRegistry(app)) + ).rejects.toThrow(/Version "9\.9\.9" not found for module "app"/); +}); + +test('throws when a dependency exists in the registry but has no published versions', async () => { + const app = mod('app', { '1.0.0': { core: '^1.0.0' } }); + const core = mod('core', {}); // registered but no versions published yet + + await expect( + resolveDependencies({ app: '1.0.0' }, makeRegistry(app, core)) + ).rejects.toThrow(/Module "core" has no published versions/); +}); + +test('handles a module version whose registry entry has no dependencies field', async () => { + // Some older registry entries may omit the dependencies key entirely. + // The resolver must treat that as an empty dependency list, not throw a TypeError. 
+ const registry = async () => [ + { module: 'app', versions: { '1.0.0': { /* no dependencies key */ } } } + ]; + + const data = await resolveDependencies({ app: '1.0.0' }, registry); + + expect(data).toEqual({ app: '1.0.0' }); +}); + +// --------------------------------------------------------------------------- +// resolveDependencies — memoisation: no redundant registry fetches +// --------------------------------------------------------------------------- + +test('fetches each module from the registry at most once per resolution run', async () => { + // core appears as a dep of both tests and payments — it must be fetched only once + const core = mod('core', { '1.6.0': {}, '1.8.0': {} }); + const tests = mod('tests', { '1.0.0': { core: '^1.6.0' } }); + const payments = mod('payments', { '1.0.0': { core: '^1.0.0' } }); + const registry = spyRegistry(tests, payments, core); + + await resolveDependencies({ tests: '1.0.0', payments: '1.0.0' }, registry); + + const allFetched = registry.calls.flat(); + expect(allFetched.filter(n => n === 'core')).toHaveLength(1); +}); + +// --------------------------------------------------------------------------- +// findModuleVersion +// --------------------------------------------------------------------------- + +test('returns the highest stable version when no version is specified', async () => { + const core = mod('core', { '1.0.0': {}, '1.5.0': {} }); + + expect(await findModuleVersion('core', null, makeRegistry(core))).toEqual({ core: '1.5.0' }); +}); + +test('excludes pre-release versions from automatic resolution', async () => { + const core = mod('core', { '1.0.0': {}, '1.5.0': {}, '1.5.1-beta.1': {} }); + + expect(await findModuleVersion('core', null, makeRegistry(core))).toEqual({ core: '1.5.0' }); +}); + +test('returns null when all available versions are pre-release and none is requested explicitly', async () => { + // The module exists but has no stable release yet + const core = mod('core', { '1.0.0-alpha.1': {}, 
'1.0.0-beta.1': {} }); + + expect(await findModuleVersion('core', null, makeRegistry(core))).toBeNull(); +}); + +test('returns the exact version when explicitly requested', async () => { + const core = mod('core', { '1.0.0': {}, '1.5.0': {} }); + + expect(await findModuleVersion('core', '1.0.0', makeRegistry(core))).toEqual({ core: '1.0.0' }); +}); + +test('returns a pre-release version when explicitly requested', async () => { + const core = mod('core', { '1.0.0-beta.1': {}, '1.5.0': {} }); + + expect(await findModuleVersion('core', '1.0.0-beta.1', makeRegistry(core))).toEqual({ core: '1.0.0-beta.1' }); +}); + +test('returns null when the requested version does not exist', async () => { + const core = mod('core', { '1.0.0': {}, '1.5.0': {} }); + + expect(await findModuleVersion('core', '1.0.1', makeRegistry(core))).toBeNull(); +}); + +test('throws when the module itself is not found in the registry', async () => { + await expect( + findModuleVersion('nonexistent', null, makeRegistry()) + ).rejects.toThrow(/Can't find module nonexistent/); +}); - const data = await findModuleVersion('core', '1.0.1', modulesVersions); +test('returns null when the requested version is a semver range rather than an exact version', async () => { + // Users occasionally pass ranges (e.g. "^1.0.0") when only exact versions are valid + // as installable pins. The function must return null rather than crash or guess. 
+ const core = mod('core', { '1.0.0': {}, '1.5.0': {} }); - expect(data).toEqual(null); + expect(await findModuleVersion('core', '^1.0.0', makeRegistry(core))).toBeNull(); }); diff --git a/test/unit/downloadModule.test.js b/test/unit/downloadModule.test.js new file mode 100644 index 000000000..d092c70b2 --- /dev/null +++ b/test/unit/downloadModule.test.js @@ -0,0 +1,244 @@ +import { describe, test, expect, beforeEach, afterEach, vi } from 'vitest'; +import fs from 'fs'; +import path from 'path'; +import os from 'os'; +import { modulesToDownload, downloadModule, downloadAllModules } from '#lib/modules/downloadModule.js'; + +vi.mock('#lib/logger.js', () => ({ + default: { Debug: vi.fn(), Error: vi.fn(), Info: vi.fn(), Warn: vi.fn(), Success: vi.fn() } +})); + +vi.mock('#lib/portal.js', () => ({ + default: { moduleVersionsSearch: vi.fn() } +})); + +vi.mock('#lib/downloadFile.js', () => ({ + default: vi.fn() +})); + +vi.mock('#lib/unzip.js', () => ({ + unzip: vi.fn() +})); + +// modulesToDownload checks process.cwd()/modules/ for directory existence. +// Tests use a temporary directory to control what's "on disk" without side effects. 
+describe('modulesToDownload', () => { + let tmpDir; + let originalCwd; + + beforeEach(() => { + originalCwd = process.cwd(); + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'pos-cli-test-')); + process.chdir(tmpDir); + }); + + afterEach(() => { + process.chdir(originalCwd); + fs.rmSync(tmpDir, { recursive: true, force: true }); + }); + + test('returns empty object when the locked set is empty', () => { + expect(modulesToDownload({}, {})).toEqual({}); + }); + + test('includes a module when it is new (not in previous lock)', () => { + const result = modulesToDownload({ core: '2.0.6' }, {}); + expect(result).toEqual({ core: '2.0.6' }); + }); + + test('includes a module when its version changed', () => { + const result = modulesToDownload({ core: '2.0.7' }, { core: '2.0.6' }); + expect(result).toEqual({ core: '2.0.7' }); + }); + + test('includes a module when version matches but directory is missing from disk', () => { + // modules/core does not exist in tmpDir + const result = modulesToDownload({ core: '2.0.6' }, { core: '2.0.6' }); + expect(result).toEqual({ core: '2.0.6' }); + }); + + test('skips a module when version matches and directory exists on disk', () => { + fs.mkdirSync(path.join(tmpDir, 'modules', 'core'), { recursive: true }); + + const result = modulesToDownload({ core: '2.0.6' }, { core: '2.0.6' }); + expect(result).toEqual({}); + }); + + test('handles a mix: skips up-to-date, includes changed or missing', () => { + // core: up-to-date and on disk → skip + // user: version bumped → download + // tests: version matches but directory missing → download + fs.mkdirSync(path.join(tmpDir, 'modules', 'core'), { recursive: true }); + + const locked = { core: '2.0.6', user: '5.1.3', tests: '1.2.0' }; + const previous = { core: '2.0.6', user: '5.1.2', tests: '1.2.0' }; + + const result = modulesToDownload(locked, previous); + expect(result).toEqual({ user: '5.1.3', tests: '1.2.0' }); + }); + + test('includes all modules when previous lock is empty (first 
install)', () => { + fs.mkdirSync(path.join(tmpDir, 'modules', 'core'), { recursive: true }); + + // Even though core directory exists, no previous lock → treat as fresh install + const result = modulesToDownload({ core: '2.0.6', user: '5.1.2' }, {}); + expect(result).toEqual({ core: '2.0.6', user: '5.1.2' }); + }); + + test('skips all modules when every version matches and every directory exists', () => { + fs.mkdirSync(path.join(tmpDir, 'modules', 'core'), { recursive: true }); + fs.mkdirSync(path.join(tmpDir, 'modules', 'user'), { recursive: true }); + + const modules = { core: '2.0.6', user: '5.1.2' }; + const result = modulesToDownload(modules, modules); + expect(result).toEqual({}); + }); +}); + +// downloadModule downloads a single module archive and extracts it. +// Uses mocked Portal, downloadFile, and unzip to avoid real network/filesystem ops. +describe('downloadModule', () => { + let tmpDir; + let originalCwd; + let Portal; + let downloadFile; + let unzip; + + beforeEach(async () => { + originalCwd = process.cwd(); + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'pos-cli-test-')); + process.chdir(tmpDir); + + Portal = (await import('#lib/portal.js')).default; + downloadFile = (await import('#lib/downloadFile.js')).default; + unzip = (await import('#lib/unzip.js')).unzip; + + vi.clearAllMocks(); + Portal.moduleVersionsSearch.mockResolvedValue({ public_archive: 'https://example.com/core-2.0.6.zip' }); + downloadFile.mockResolvedValue(undefined); + unzip.mockResolvedValue(undefined); + }); + + afterEach(() => { + process.chdir(originalCwd); + fs.rmSync(tmpDir, { recursive: true, force: true }); + }); + + test('calls Portal.moduleVersionsSearch with name@version and registryUrl', async () => { + await downloadModule('core', '2.0.6', 'https://custom.registry.example.com'); + + expect(Portal.moduleVersionsSearch).toHaveBeenCalledWith( + 'core@2.0.6', + 'https://custom.registry.example.com' + ); + }); + + test('calls downloadFile with public_archive URL', 
async () => { + await downloadModule('core', '2.0.6'); + + expect(downloadFile).toHaveBeenCalledWith( + 'https://example.com/core-2.0.6.zip', + expect.stringContaining('pos-module-core-') + ); + }); + + test('calls unzip to extract to modules/ directory', async () => { + await downloadModule('core', '2.0.6'); + + expect(unzip).toHaveBeenCalledWith( + expect.any(String), + path.join(process.cwd(), 'modules') + ); + }); + + test('throws formatted error message on 404', async () => { + const err = new Error('Not Found'); + err.statusCode = 404; + Portal.moduleVersionsSearch.mockRejectedValue(err); + + await expect(downloadModule('core', '2.0.6')).rejects.toThrow('core@2.0.6: 404 not found'); + }); + + test('throws formatted error message for non-404 errors', async () => { + Portal.moduleVersionsSearch.mockRejectedValue(new Error('Service Unavailable')); + + await expect(downloadModule('core', '2.0.6')).rejects.toThrow('core@2.0.6: Service Unavailable'); + }); + + test('cleans up temp file in finally block even when an error is thrown', async () => { + Portal.moduleVersionsSearch.mockRejectedValue(new Error('Service Unavailable')); + const rmSpy = vi.spyOn(fs.promises, 'rm'); + + await expect(downloadModule('core', '2.0.6')).rejects.toThrow(); + + // The finally block must call fs.promises.rm on the temp file path (force: true). 
+ const cleanupCall = rmSpy.mock.calls.find(([p, opts]) => + typeof p === 'string' && p.includes('pos-module-core-') && opts?.force === true + ); + expect(cleanupCall).toBeDefined(); + + rmSpy.mockRestore(); + }); + + test('removes old module directory before downloading', async () => { + fs.mkdirSync(path.join(tmpDir, 'modules', 'core'), { recursive: true }); + fs.writeFileSync(path.join(tmpDir, 'modules', 'core', 'old-file.txt'), 'old'); + + await downloadModule('core', '2.0.6'); + + // unzip was called, meaning the old directory was removed and download proceeded + expect(unzip).toHaveBeenCalled(); + // The old directory should be gone (removed before download, not re-created by mock) + expect(fs.existsSync(path.join(tmpDir, 'modules', 'core', 'old-file.txt'))).toBe(false); + }); +}); + +// downloadAllModules iterates all modules and calls downloadModule for each. +describe('downloadAllModules', () => { + let Portal; + let downloadFile; + let unzip; + + beforeEach(async () => { + Portal = (await import('#lib/portal.js')).default; + downloadFile = (await import('#lib/downloadFile.js')).default; + unzip = (await import('#lib/unzip.js')).unzip; + + vi.clearAllMocks(); + Portal.moduleVersionsSearch.mockResolvedValue({ public_archive: 'https://example.com/module.zip' }); + downloadFile.mockResolvedValue(undefined); + unzip.mockResolvedValue(undefined); + }); + + test('calls downloadModule for each module in the map', async () => { + await downloadAllModules({ core: '2.0.6', user: '5.1.2' }, 'https://custom.registry.example.com'); + + expect(Portal.moduleVersionsSearch).toHaveBeenCalledTimes(2); + expect(Portal.moduleVersionsSearch).toHaveBeenCalledWith('core@2.0.6', 'https://custom.registry.example.com'); + expect(Portal.moduleVersionsSearch).toHaveBeenCalledWith('user@5.1.2', 'https://custom.registry.example.com'); + }); + + test('propagates error from first failing module and stops', async () => { + const err = new Error('Not Found'); + err.statusCode = 404; + 
Portal.moduleVersionsSearch.mockRejectedValue(err); + + await expect( + downloadAllModules({ core: '2.0.6', user: '5.1.2' }) + ).rejects.toThrow('core@2.0.6: 404 not found'); + + // Only one call because sequential execution stops on first error + expect(Portal.moduleVersionsSearch).toHaveBeenCalledTimes(1); + }); + + test('passes registryUrl to every download call', async () => { + await downloadAllModules( + { core: '2.0.6', user: '5.1.2', tests: '1.0.0' }, + 'https://custom.registry.example.com' + ); + + for (const call of Portal.moduleVersionsSearch.mock.calls) { + expect(call[1]).toBe('https://custom.registry.example.com'); + } + }); +}); diff --git a/test/unit/formatModulesDiff.test.js b/test/unit/formatModulesDiff.test.js new file mode 100644 index 000000000..ddda99479 --- /dev/null +++ b/test/unit/formatModulesDiff.test.js @@ -0,0 +1,63 @@ +import { describe, test, expect } from 'vitest'; +import { formatModulesDiff } from '#lib/modules/formatModulesDiff.js'; + +describe('formatModulesDiff', () => { + test('returns empty array when nothing changed', () => { + const modules = { core: '1.0.0', tests: '2.3.0' }; + expect(formatModulesDiff(modules, modules)).toEqual([]); + }); + + test('marks a new module as added (+)', () => { + const lines = formatModulesDiff({}, { core: '1.0.0' }); + expect(lines).toEqual([' + core@1.0.0']); + }); + + test('marks a missing module as removed (-)', () => { + const lines = formatModulesDiff({ core: '1.0.0' }, {}); + expect(lines).toEqual([' - core@1.0.0']); + }); + + test('marks a version change as updated (~)', () => { + const lines = formatModulesDiff({ core: '1.0.0' }, { core: '2.0.0' }); + expect(lines).toEqual([' ~ core: 1.0.0 → 2.0.0']); + }); + + test('omits modules whose version did not change', () => { + const lines = formatModulesDiff({ core: '1.0.0', tests: '1.0.0' }, { core: '2.0.0', tests: '1.0.0' }); + expect(lines.join('\n')).not.toContain('tests'); + expect(lines).toEqual([' ~ core: 1.0.0 → 
2.0.0']); + }); + + test('sorts output by module name', () => { + const lines = formatModulesDiff({}, { zebra: '1.0.0', alpha: '1.0.0', mango: '1.0.0' }); + expect(lines).toEqual([ + ' + alpha@1.0.0', + ' + mango@1.0.0', + ' + zebra@1.0.0', + ]); + }); + + test('handles a mixed scenario: added, removed, updated, and unchanged in one call', () => { + const prev = { core: '1.0.0', helper: '2.0.0', old: '3.0.0' }; + const next = { core: '1.5.0', helper: '2.0.0', fresh: '1.0.0' }; + + const lines = formatModulesDiff(prev, next); + + expect(lines).toContain(' ~ core: 1.0.0 → 1.5.0'); + expect(lines).toContain(' + fresh@1.0.0'); + expect(lines).toContain(' - old@3.0.0'); + expect(lines.join('\n')).not.toContain('helper'); + expect(lines).toHaveLength(3); + }); + + test('output lines appear in alphabetical order across all change types', () => { + const prev = { bravo: '1.0.0', delta: '1.0.0' }; + const next = { alpha: '1.0.0', bravo: '2.0.0' }; + + const lines = formatModulesDiff(prev, next); + + expect(lines[0]).toMatch(/alpha/); + expect(lines[1]).toMatch(/bravo/); + expect(lines[2]).toMatch(/delta/); + }); +}); diff --git a/test/unit/installModule.test.js b/test/unit/installModule.test.js new file mode 100644 index 000000000..0543c0942 --- /dev/null +++ b/test/unit/installModule.test.js @@ -0,0 +1,102 @@ +import { describe, test, expect } from 'vitest'; +import { addNewModule, updateAllModules } from '#lib/modules/installModule.js'; +import { mod, makeRegistry } from '#test/utils/moduleRegistry.js'; + +const REGISTRY = 'https://partners.platformos.com'; + +// --------------------------------------------------------------------------- +// addNewModule — install is conditional (unlike update) +// --------------------------------------------------------------------------- + +describe('addNewModule', () => { + test('returns null when module is already installed and no version is specified', async () => { + const getVersions = makeRegistry(mod('tests', { 
'0.0.3': {}, '1.0.0': {} })); + + const result = await addNewModule('tests', undefined, { tests: '0.0.3' }, getVersions, REGISTRY); + + expect(result).toBeNull(); + }); + + test('adds the module when it is not yet in localModules and no version is specified', async () => { + const getVersions = makeRegistry(mod('tests', { '0.0.3': {}, '1.0.0': {} })); + + const result = await addNewModule('tests', undefined, {}, getVersions, REGISTRY); + + expect(result).toEqual({ tests: '1.0.0' }); + }); + + test('updates the pinned version when an explicit version is specified, even if already installed', async () => { + const getVersions = makeRegistry(mod('tests', { '0.0.3': {}, '1.0.0': {} })); + + const result = await addNewModule('tests', '0.0.3', { tests: '1.0.0' }, getVersions, REGISTRY); + + expect(result).toEqual({ tests: '0.0.3' }); + }); + + test('throws with exact message including registry URL when module is not found', async () => { + const getVersions = makeRegistry(); // empty registry + + await expect( + addNewModule('nonexistent', undefined, {}, getVersions, REGISTRY) + ).rejects.toMatchObject({ message: `Can't find module nonexistent (registry: ${REGISTRY})` }); + }); + + test('throws with exact message including registry URL when requested version does not exist', async () => { + const getVersions = makeRegistry(mod('tests', { '0.0.3': {} })); + + await expect( + addNewModule('tests', '9.9.9', {}, getVersions, REGISTRY) + ).rejects.toMatchObject({ message: `Can't find module tests with version 9.9.9 (registry: ${REGISTRY})` }); + }); + + test('preserves other existing modules in the returned map', async () => { + const getVersions = makeRegistry(mod('tests', { '1.0.0': {} })); + + const result = await addNewModule('tests', '1.0.0', { core: '2.0.0' }, getVersions, REGISTRY); + + expect(result).toEqual({ core: '2.0.0', tests: '1.0.0' }); + }); +}); + +// --------------------------------------------------------------------------- +// updateAllModules — updates 
every root module to its latest stable version +// --------------------------------------------------------------------------- + +describe('updateAllModules', () => { + test('updates all modules to their latest stable version', async () => { + const getVersions = makeRegistry( + mod('core', { '1.0.0': {}, '2.0.0': {} }), + mod('tests', { '0.0.3': {}, '1.0.0': {} }) + ); + + const result = await updateAllModules({ core: '1.0.0', tests: '0.0.3' }, getVersions, REGISTRY); + + expect(result).toEqual({ core: '2.0.0', tests: '1.0.0' }); + }); + + test('returns empty object unchanged when there are no modules', async () => { + const getVersions = makeRegistry(); + + const result = await updateAllModules({}, getVersions, REGISTRY); + + expect(result).toEqual({}); + }); + + test('skips pre-release versions', async () => { + const getVersions = makeRegistry( + mod('core', { '1.0.0': {}, '2.0.0-beta.1': {} }) + ); + + const result = await updateAllModules({ core: '1.0.0' }, getVersions, REGISTRY); + + expect(result).toEqual({ core: '1.0.0' }); + }); + + test('throws with registry context when a module is not found', async () => { + const getVersions = makeRegistry(); // empty registry + + await expect( + updateAllModules({ core: '1.0.0' }, getVersions, REGISTRY) + ).rejects.toMatchObject({ message: `Can't find module core (registry: ${REGISTRY})` }); + }); +}); diff --git a/test/utils/credentials.js b/test/utils/credentials.js index 2dc350945..234f25780 100644 --- a/test/utils/credentials.js +++ b/test/utils/credentials.js @@ -20,14 +20,13 @@ const noCredentials = { // Apply credentials to process.env const applyCredentials = (creds) => { - if (creds) { - process.env.MPKIT_URL = creds.MPKIT_URL; - process.env.MPKIT_TOKEN = creds.MPKIT_TOKEN; - process.env.MPKIT_EMAIL = creds.MPKIT_EMAIL; - } else { - delete process.env.MPKIT_URL; - delete process.env.MPKIT_TOKEN; - delete process.env.MPKIT_EMAIL; + const keys = ['MPKIT_URL', 'MPKIT_TOKEN', 'MPKIT_EMAIL']; + for (const key of 
keys) { + if (creds && creds[key] !== undefined) { + process.env[key] = creds[key]; + } else { + delete process.env[key]; + } } }; diff --git a/test/utils/moduleRegistry.js b/test/utils/moduleRegistry.js new file mode 100644 index 000000000..6b51a94cb --- /dev/null +++ b/test/utils/moduleRegistry.js @@ -0,0 +1,17 @@ +// Builds a module fixture object in the shape returned by Portal.moduleVersions. +// mod('core', { '1.0.0': {}, '2.0.0': { dep: '^1.0.0' } }) +const mod = (name, versions) => ({ + module: name, + versions: Object.fromEntries( + Object.entries(versions).map(([v, deps = {}]) => [v, { dependencies: deps }]) + ) +}); + +// Returns a getVersions mock that resolves names against the provided module fixtures. +// Names absent from the list return no entry (simulates "not in registry"). +const makeRegistry = (...modules) => { + const map = Object.fromEntries(modules.map(m => [m.module, m])); + return async (names) => names.map(n => map[n]).filter(Boolean); +}; + +export { mod, makeRegistry }; diff --git a/test/utils/parseOutput.js b/test/utils/parseOutput.js new file mode 100644 index 000000000..1bd9283ba --- /dev/null +++ b/test/utils/parseOutput.js @@ -0,0 +1,12 @@ +import stripAnsi from 'strip-ansi'; + +// Returns individual output lines from CLI stdout/stderr with ANSI codes, +// logger timestamps ([HH:MM:SS]), and leading ora symbols stripped, +// leaving only the plain message text. +const plainMessages = (output) => + stripAnsi(output) + .split('\n') + .map(l => l.replace(/^\[\d{2}:\d{2}:\d{2}\] /, '').replace(/^[-✔✖⚠ℹ] /, '').trim()) + .filter(Boolean); + +export { plainMessages };