From f6987ea139b70babe623061b3b9c01a93ea71187 Mon Sep 17 00:00:00 2001
From: Arpad Borsos
Date: Wed, 2 Aug 2023 12:15:14 +0200
Subject: [PATCH] Improve errors and cleanup

- Silence errors and do not create error annotations, fixes #144
- Implement cleanup for new sparse registry
- Do not clean `-sys` dependencies from `registry/src`, hopefully fixes #150
---
 TODO.md               |  1 -
 dist/restore/index.js | 76 ++++++++++++++++++++++++++++++++++------
 dist/save/index.js    | 84 +++++++++++++++++++++++++++++++++++--------
 src/cleanup.ts        | 52 ++++++++++++++++++++++++----
 src/restore.ts        |  4 +--
 src/save.ts           | 14 ++++----
 src/utils.ts          | 38 ++++++++++++++-------
 7 files changed, 213 insertions(+), 56 deletions(-)

diff --git a/TODO.md b/TODO.md
index b15aefb..9f7b742 100644
--- a/TODO.md
+++ b/TODO.md
@@ -1,4 +1,3 @@
 - better .cargo/bin handling:
   - get a list of all the files on "pre"/"restore"
   - move the files out of the way on "post"/"save" and move them back afterwards
-- properly clean sparse registry

diff --git a/dist/restore/index.js b/dist/restore/index.js
index 2d879ff..ce4b6d7 100644
--- a/dist/restore/index.js
+++ b/dist/restore/index.js
@@ -66810,6 +66810,16 @@ var exec = __nccwpck_require__(1514);
+function reportError(e) {
+    const { commandFailed } = e;
+    if (commandFailed) {
+        lib_core.error(`Command failed: ${commandFailed.command}`);
+        lib_core.error(commandFailed.stderr);
+    }
+    else {
+        lib_core.error(`${e.stack}`);
+    }
+}
 async function getCmdOutput(cmd, args = [], options = {}) {
     let stdout = "";
     let stderr = "";
@@ -66828,8 +66838,10 @@ async function getCmdOutput(cmd, args = [], options = {}) {
         });
     }
     catch (e) {
-        lib_core.error(`Command failed: ${cmd} ${args.join(" ")}`);
-        lib_core.error(stderr);
+        e.commandFailed = {
+            command: `${cmd} ${args.join(" ")}`,
+            stderr,
+        };
         throw e;
     }
     return stdout;
 }
@@ -66837,10 +66849,10 @@ async function getCmdOutput(cmd, args = [], options = {}) {
 function getCacheHandler() {
     const cacheProvider = lib_core.getInput("cache-provider");
     switch (cacheProvider) {
-        case 'github':
+        case "github":
             lib_core.info("Using Github Cache.");
             return lib_cache;
-        case 'buildjet':
+        case "buildjet":
             lib_core.info("Using Buildjet Cache.");
             return cache;
         default:
@@ -67259,10 +67271,8 @@ async function cleanBin(oldBins) {
     }
 }
 async function cleanRegistry(packages, crates = true) {
-    // `.cargo/registry/src`
-    // we can remove this completely, as cargo will recreate this from `cache`
-    await rmRF(path.join(CARGO_HOME, "registry", "src"));
     // `.cargo/registry/index`
+    let pkgSet = new Set(packages.map((p) => p.name));
     const indexDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "index"));
     for await (const dirent of indexDir) {
         if (dirent.isDirectory()) {
@@ -67273,15 +67283,35 @@ async function cleanRegistry(packages, crates = true) {
             if (await exists(path.join(dirPath, ".git"))) {
                 await rmRF(path.join(dirPath, ".cache"));
             }
-            // TODO: else, clean `.cache` based on the `packages`
+            else {
+                await cleanRegistryIndexCache(dirPath, pkgSet);
+            }
         }
     }
     if (!crates) {
-        core.debug(`skipping crate cleanup`);
+        core.debug("skipping registry cache and src cleanup");
         return;
     }
-    const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
+    // `.cargo/registry/src`
+    // Cargo usually re-creates these from the `.crate` cache below,
+    // but for some reason that does not work for `-sys` crates that check timestamps
+    // to decide if rebuilds are necessary.
+    pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`));
+    const srcDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "src"));
+    for await (const dirent of srcDir) {
+        if (dirent.isDirectory()) {
+            // eg `.cargo/registry/src/github.com-1ecc6299db9ec823`
+            // or `.cargo/registry/src/index.crates.io-e139d0d48fed7772`
+            const dir = await fs.promises.opendir(path.join(srcDir.path, dirent.name));
+            for await (const dirent of dir) {
+                if (dirent.isDirectory() && !pkgSet.has(dirent.name)) {
+                    await rmRF(path.join(dir.path, dirent.name));
+                }
+            }
+        }
+    }
     // `.cargo/registry/cache`
+    pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
     const cacheDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "cache"));
     for await (const dirent of cacheDir) {
         if (dirent.isDirectory()) {
@@ -67297,6 +67327,30 @@ async function cleanRegistry(packages, crates = true) {
         }
     }
 }
+/// Recursively walks and cleans the index `.cache`
+async function cleanRegistryIndexCache(dirName, keepPkg) {
+    let dirIsEmpty = true;
+    const cacheDir = await fs.promises.opendir(dirName);
+    for await (const dirent of cacheDir) {
+        if (dirent.isDirectory()) {
+            if (await cleanRegistryIndexCache(path.join(dirName, dirent.name), keepPkg)) {
+                await rm(dirName, dirent);
+            }
+            else {
+                dirIsEmpty && (dirIsEmpty = false);
+            }
+        }
+        else {
+            if (keepPkg.has(dirent.name)) {
+                dirIsEmpty && (dirIsEmpty = false);
+            }
+            else {
+                await rm(dirName, dirent);
+            }
+        }
+    }
+    return dirIsEmpty;
+}
 async function cleanGit(packages) {
     const coPath = path.join(CARGO_HOME, "git", "checkouts");
     const dbPath = path.join(CARGO_HOME, "git", "db");
@@ -67466,7 +67520,7 @@ async function run() {
     }
     catch (e) {
         setCacheHitOutput(false);
-        lib_core.error(`${e.stack}`);
+        reportError(e);
     }
 }
 function setCacheHitOutput(cacheHit) {

diff --git a/dist/save/index.js b/dist/save/index.js
index 1979b19..35d72e1 100644
--- a/dist/save/index.js
+++ b/dist/save/index.js
@@ -66810,6 +66810,16 @@ var lib_cache = __nccwpck_require__(7799);
+function reportError(e) {
+    const { commandFailed } = e;
+    if (commandFailed) {
+        core.error(`Command failed: ${commandFailed.command}`);
+        core.error(commandFailed.stderr);
+    }
+    else {
+        core.error(`${e.stack}`);
+    }
+}
 async function getCmdOutput(cmd, args = [], options = {}) {
     let stdout = "";
     let stderr = "";
@@ -66828,8 +66838,10 @@ async function getCmdOutput(cmd, args = [], options = {}) {
         });
     }
     catch (e) {
-        core.error(`Command failed: ${cmd} ${args.join(" ")}`);
-        core.error(stderr);
+        e.commandFailed = {
+            command: `${cmd} ${args.join(" ")}`,
+            stderr,
+        };
         throw e;
     }
     return stdout;
 }
@@ -66837,10 +66849,10 @@ async function getCmdOutput(cmd, args = [], options = {}) {
 function getCacheHandler() {
     const cacheProvider = core.getInput("cache-provider");
     switch (cacheProvider) {
-        case 'github':
+        case "github":
             core.info("Using Github Cache.");
             return lib_cache;
-        case 'buildjet':
+        case "buildjet":
             core.info("Using Buildjet Cache.");
             return cache;
         default:
@@ -67259,10 +67271,8 @@ async function cleanBin(oldBins) {
     }
 }
 async function cleanRegistry(packages, crates = true) {
-    // `.cargo/registry/src`
-    // we can remove this completely, as cargo will recreate this from `cache`
-    await rmRF(external_path_default().join(CARGO_HOME, "registry", "src"));
     // `.cargo/registry/index`
+    let pkgSet = new Set(packages.map((p) => p.name));
     const indexDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "index"));
     for await (const dirent of indexDir) {
         if (dirent.isDirectory()) {
@@ -67273,15 +67283,35 @@ async function cleanRegistry(packages, crates = true) {
             if (await exists(external_path_default().join(dirPath, ".git"))) {
                 await rmRF(external_path_default().join(dirPath, ".cache"));
             }
-            // TODO: else, clean `.cache` based on the `packages`
+            else {
+                await cleanRegistryIndexCache(dirPath, pkgSet);
+            }
         }
     }
     if (!crates) {
-        core.debug(`skipping crate cleanup`);
+        core.debug("skipping registry cache and src cleanup");
         return;
     }
-    const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
+    // `.cargo/registry/src`
+    // Cargo usually re-creates these from the `.crate` cache below,
+    // but for some reason that does not work for `-sys` crates that check timestamps
+    // to decide if rebuilds are necessary.
+    pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`));
+    const srcDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "src"));
+    for await (const dirent of srcDir) {
+        if (dirent.isDirectory()) {
+            // eg `.cargo/registry/src/github.com-1ecc6299db9ec823`
+            // or `.cargo/registry/src/index.crates.io-e139d0d48fed7772`
+            const dir = await external_fs_default().promises.opendir(external_path_default().join(srcDir.path, dirent.name));
+            for await (const dirent of dir) {
+                if (dirent.isDirectory() && !pkgSet.has(dirent.name)) {
+                    await rmRF(external_path_default().join(dir.path, dirent.name));
+                }
+            }
+        }
+    }
     // `.cargo/registry/cache`
+    pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
     const cacheDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "cache"));
     for await (const dirent of cacheDir) {
         if (dirent.isDirectory()) {
@@ -67297,6 +67327,30 @@ async function cleanRegistry(packages, crates = true) {
         }
     }
 }
+/// Recursively walks and cleans the index `.cache`
+async function cleanRegistryIndexCache(dirName, keepPkg) {
+    let dirIsEmpty = true;
+    const cacheDir = await external_fs_default().promises.opendir(dirName);
+    for await (const dirent of cacheDir) {
+        if (dirent.isDirectory()) {
+            if (await cleanRegistryIndexCache(external_path_default().join(dirName, dirent.name), keepPkg)) {
+                await rm(dirName, dirent);
+            }
+            else {
+                dirIsEmpty && (dirIsEmpty = false);
+            }
+        }
+        else {
+            if (keepPkg.has(dirent.name)) {
+                dirIsEmpty && (dirIsEmpty = false);
+            }
+            else {
+                await rm(dirName, dirent);
+            }
+        }
+    }
+    return dirIsEmpty;
+}
 async function cleanGit(packages) {
     const coPath = external_path_default().join(CARGO_HOME, "git", "checkouts");
     const dbPath = external_path_default().join(CARGO_HOME, "git", "db");
@@ -67446,7 +67500,7 @@ async function run() {
             await cleanTargetDir(workspace.target, packages);
         }
         catch (e) {
-            core.error(`${e.stack}`);
+            core.debug(`${e.stack}`);
         }
     }
     try {
@@ -67455,21 +67509,21 @@ async function run() {
         await cleanRegistry(allPackages, crates !== "true");
     }
     catch (e) {
-        core.error(`${e.stack}`);
+        core.debug(`${e.stack}`);
     }
     try {
         core.info(`... Cleaning cargo/bin ...`);
         await cleanBin(config.cargoBins);
     }
     catch (e) {
-        core.error(`${e.stack}`);
+        core.debug(`${e.stack}`);
     }
     try {
         core.info(`... Cleaning cargo git cache ...`);
         await cleanGit(allPackages);
     }
     catch (e) {
-        core.error(`${e.stack}`);
+        core.debug(`${e.stack}`);
     }
     core.info(`... Saving cache ...`);
     // Pass a copy of cachePaths to avoid mutating the original array as reported by:
@@ -67478,7 +67532,7 @@ async function run() {
         await cache.saveCache(config.cachePaths.slice(), config.cacheKey);
     }
     catch (e) {
-        core.error(`${e.stack}`);
+        reportError(e);
     }
 }
 run();

diff --git a/src/cleanup.ts b/src/cleanup.ts
index 9f65366..6f7327c 100644
--- a/src/cleanup.ts
+++ b/src/cleanup.ts
@@ -91,11 +91,8 @@ export async function cleanBin(oldBins: Array) {
 }
 
 export async function cleanRegistry(packages: Packages, crates = true) {
-  // `.cargo/registry/src`
-  // we can remove this completely, as cargo will recreate this from `cache`
-  await rmRF(path.join(CARGO_HOME, "registry", "src"));
-
   // `.cargo/registry/index`
+  let pkgSet = new Set(packages.map((p) => p.name));
   const indexDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "index"));
   for await (const dirent of indexDir) {
     if (dirent.isDirectory()) {
@@ -106,19 +103,38 @@ export async function cleanRegistry(packages: Packages, crates = true) {
       // for a git registry, we can remove `.cache`, as cargo will recreate it from git
       if (await exists(path.join(dirPath, ".git"))) {
         await rmRF(path.join(dirPath, ".cache"));
+      } else {
+        await cleanRegistryIndexCache(dirPath, pkgSet);
       }
-      // TODO: else, clean `.cache` based on the `packages`
     }
   }
 
   if (!crates) {
-    core.debug(`skipping crate cleanup`);
+    core.debug("skipping registry cache and src cleanup");
     return;
   }
 
-  const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
+  // `.cargo/registry/src`
+  // Cargo usually re-creates these from the `.crate` cache below,
+  // but for some reason that does not work for `-sys` crates that check timestamps
+  // to decide if rebuilds are necessary.
+  pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`));
+  const srcDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "src"));
+  for await (const dirent of srcDir) {
+    if (dirent.isDirectory()) {
+      // eg `.cargo/registry/src/github.com-1ecc6299db9ec823`
+      // or `.cargo/registry/src/index.crates.io-e139d0d48fed7772`
+      const dir = await fs.promises.opendir(path.join(srcDir.path, dirent.name));
+      for await (const dirent of dir) {
+        if (dirent.isDirectory() && !pkgSet.has(dirent.name)) {
+          await rmRF(path.join(dir.path, dirent.name));
+        }
+      }
+    }
+  }
 
   // `.cargo/registry/cache`
+  pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
   const cacheDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "cache"));
   for await (const dirent of cacheDir) {
     if (dirent.isDirectory()) {
@@ -135,6 +151,28 @@ export async function cleanRegistry(packages: Packages, crates = true) {
   }
 }
 
+/// Recursively walks and cleans the index `.cache`
+async function cleanRegistryIndexCache(dirName: string, keepPkg: Set) {
+  let dirIsEmpty = true;
+  const cacheDir = await fs.promises.opendir(dirName);
+  for await (const dirent of cacheDir) {
+    if (dirent.isDirectory()) {
+      if (await cleanRegistryIndexCache(path.join(dirName, dirent.name), keepPkg)) {
+        await rm(dirName, dirent);
+      } else {
+        dirIsEmpty &&= false;
+      }
+    } else {
+      if (keepPkg.has(dirent.name)) {
+        dirIsEmpty &&= false;
+      } else {
+        await rm(dirName, dirent);
+      }
+    }
+  }
+  return dirIsEmpty;
+}
+
 export async function cleanGit(packages: Packages) {
   const coPath = path.join(CARGO_HOME, "git", "checkouts");
   const dbPath = path.join(CARGO_HOME, "git", "db");

diff --git a/src/restore.ts b/src/restore.ts
index 2c10a03..577324a 100644
--- a/src/restore.ts
+++ b/src/restore.ts
@@ -2,7 +2,7 @@ import * as core from "@actions/core";
 
 import { cleanTargetDir } from "./cleanup";
 import { CacheConfig } from "./config";
-import { getCacheHandler } from "./utils";
+import { getCacheHandler, reportError } from "./utils";
 
 process.on("uncaughtException", (e) => {
   core.error(e.message);
@@ -62,7 +62,7 @@ async function run() {
     }
   } catch (e) {
     setCacheHitOutput(false);
-    core.error(`${(e as any).stack}`);
+    reportError(e);
   }
 }
 

diff --git a/src/save.ts b/src/save.ts
index cb9113a..77d4e11 100644
--- a/src/save.ts
+++ b/src/save.ts
@@ -3,7 +3,7 @@ import * as exec from "@actions/exec";
 
 import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup";
 import { CacheConfig, isCacheUpToDate } from "./config";
-import { getCacheHandler } from "./utils";
+import { getCacheHandler, reportError } from "./utils";
 
 process.on("uncaughtException", (e) => {
   core.error(e.message);
@@ -42,30 +42,30 @@ async function run() {
         core.info(`... Cleaning ${workspace.target} ...`);
         await cleanTargetDir(workspace.target, packages);
       } catch (e) {
-        core.error(`${(e as any).stack}`);
+        core.debug(`${(e as any).stack}`);
       }
     }
 
     try {
-      const crates = core.getInput("cache-all-crates").toLowerCase() || "false"
+      const crates = core.getInput("cache-all-crates").toLowerCase() || "false";
       core.info(`... Cleaning cargo registry cache-all-crates: ${crates} ...`);
       await cleanRegistry(allPackages, crates !== "true");
     } catch (e) {
-      core.error(`${(e as any).stack}`);
+      core.debug(`${(e as any).stack}`);
     }
 
     try {
       core.info(`... Cleaning cargo/bin ...`);
       await cleanBin(config.cargoBins);
     } catch (e) {
-      core.error(`${(e as any).stack}`);
+      core.debug(`${(e as any).stack}`);
     }
 
     try {
      core.info(`... Cleaning cargo git cache ...`);
      await cleanGit(allPackages);
    } catch (e) {
-      core.error(`${(e as any).stack}`);
+      core.debug(`${(e as any).stack}`);
     }
 
     core.info(`... Saving cache ...`);
@@ -74,7 +74,7 @@ async function run() {
     // TODO: remove this once the underlying bug is fixed.
     await cache.saveCache(config.cachePaths.slice(), config.cacheKey);
   } catch (e) {
-    core.error(`${(e as any).stack}`);
+    reportError(e);
   }
 }

diff --git a/src/utils.ts b/src/utils.ts
index 7dafd9c..3751dd8 100644
--- a/src/utils.ts
+++ b/src/utils.ts
@@ -3,6 +3,16 @@ import * as ghCache from "@actions/cache";
 import * as core from "@actions/core";
 import * as exec from "@actions/exec";
 
+export function reportError(e: any) {
+  const { commandFailed } = e;
+  if (commandFailed) {
+    core.error(`Command failed: ${commandFailed.command}`);
+    core.error(commandFailed.stderr);
+  } else {
+    core.error(`${e.stack}`);
+  }
+}
+
 export async function getCmdOutput(
   cmd: string,
   args: Array = [],
@@ -24,23 +34,25 @@ export async function getCmdOutput(
       ...options,
     });
   } catch (e) {
-    core.error(`Command failed: ${cmd} ${args.join(" ")}`);
-    core.error(stderr);
+    (e as any).commandFailed = {
+      command: `${cmd} ${args.join(" ")}`,
+      stderr,
+    };
     throw e;
   }
   return stdout;
 }
 
 export function getCacheHandler() {
-    const cacheProvider = core.getInput("cache-provider");
-    switch (cacheProvider) {
-    case 'github':
-        core.info ("Using Github Cache.");
-        return ghCache;
-    case 'buildjet':
-        core.info ("Using Buildjet Cache.");
-        return buildjetCache;
-    default:
-        throw new Error("Only currently support github and buildjet caches");
-    }
+  const cacheProvider = core.getInput("cache-provider");
+  switch (cacheProvider) {
+    case "github":
+      core.info("Using Github Cache.");
+      return ghCache;
+    case "buildjet":
+      core.info("Using Buildjet Cache.");
+      return buildjetCache;
+    default:
+      throw new Error("Only currently support github and buildjet caches");
+  }
 }
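The error-handling part of this change works by moving reporting out of `getCmdOutput`: the helper now only tags the thrown error with a `commandFailed` payload, and each entry point decides whether to surface it as an annotation via `reportError` or to downgrade it to a debug log. A minimal sketch of that flow, using `getCmdOutput`/`reportError` from `src/utils.ts` above but a hypothetical caller:

```ts
import * as core from "@actions/core";

import { getCmdOutput, reportError } from "./utils";

// Hypothetical helper: a non-fatal probe that swallows failures quietly,
// so a transient error no longer produces an error annotation (see #144).
async function tryCargoVersion(): Promise<string | null> {
  try {
    return await getCmdOutput("cargo", ["--version"]);
  } catch (e) {
    core.debug(`${(e as any).stack}`);
    return null;
  }
}

async function run() {
  try {
    const version = await tryCargoVersion();
    core.info(version ?? "cargo not found");
  } catch (e) {
    // Fatal path: reportError prefers the attached commandFailed details
    // (command line + captured stderr) over a bare stack trace.
    reportError(e);
  }
}

run();
```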
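For the registry cleanup itself, the new `cleanRegistryIndexCache` walk works because the sparse index `.cache` ultimately contains files named after the crate, so entries can be kept or pruned by comparing `dirent.name` against the package set, while `registry/src` now keeps only the `<name>-<version>` directories of `-sys` crates. A small sketch of driving `cleanRegistry` from a resolved package list; the two-field package shape is a simplification of the real `Packages` type, used here only for illustration:

```ts
import { cleanRegistry } from "./cleanup";

async function main() {
  // Simplified package entries; the real Packages type carries more fields.
  const packages = [
    { name: "serde", version: "1.0.160" },
    // `-sys` crate: its `openssl-sys-0.9.90` dir under registry/src is kept.
    { name: "openssl-sys", version: "0.9.90" },
  ];

  // Second argument `true` mirrors `crates !== "true"` in src/save.ts when
  // cache-all-crates is unset: unused `.crate` files and non-`-sys` sources
  // are pruned, git-backed index dirs drop `.cache`, sparse ones are walked.
  await cleanRegistry(packages as any, true);
}

main().catch((e) => console.error(e));
```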