Improve errors and cleanup

- Silence errors and do not create error annotations, fixes #144
- Implement cleanup for new sparse registry
- Do not clean `-sys` dependencies from `registry/src`, hopefully fixes #150
Arpad Borsos 2023-08-02 12:15:14 +02:00
parent e97a782690
commit f6987ea139
7 changed files with 213 additions and 56 deletions
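The gist of the error changes: getCmdOutput now attaches the failed command and its stderr to the error it throws (as `e.commandFailed`), a new exported reportError helper turns that into a single readable annotation, and the best-effort cleanup passes in the save step only log at debug level. A minimal sketch of the save-side policy, with placeholder functions standing in for the real call sites (not the exact code from this commit):

import * as core from "@actions/core";
import { reportError } from "./utils"; // exported by this commit

// Cleanup passes are best-effort: their failures are logged for debugging
// only and never create error annotations. A failure to actually save the
// cache is still surfaced, once, via reportError().
// `cleanSomething` and `saveCache` are hypothetical placeholders.
async function savePhase(cleanSomething: () => Promise<void>, saveCache: () => Promise<void>) {
  try {
    await cleanSomething();
  } catch (e) {
    core.debug(`${(e as any).stack}`); // silenced: no annotation for cleanup
  }
  try {
    await saveCache();
  } catch (e) {
    reportError(e); // real failures still become a single error annotation
  }
}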

TODO.md

@@ -1,4 +1,3 @@
 - better .cargo/bin handling:
   - get a list of all the files on "pre"/"restore"
   - move the files out of the way on "post"/"save" and move them back afterwards
-- properly clean sparse registry

dist/restore/index.js (vendored, 76 lines changed)

@@ -66810,6 +66810,16 @@ var exec = __nccwpck_require__(1514);
+function reportError(e) {
+    const { commandFailed } = e;
+    if (commandFailed) {
+        lib_core.error(`Command failed: ${commandFailed.command}`);
+        lib_core.error(commandFailed.stderr);
+    }
+    else {
+        lib_core.error(`${e.stack}`);
+    }
+}
 async function getCmdOutput(cmd, args = [], options = {}) {
     let stdout = "";
     let stderr = "";
@@ -66828,8 +66838,10 @@ async function getCmdOutput(cmd, args = [], options = {}) {
         });
     }
     catch (e) {
-        lib_core.error(`Command failed: ${cmd} ${args.join(" ")}`);
-        lib_core.error(stderr);
+        e.commandFailed = {
+            command: `${cmd} ${args.join(" ")}`,
+            stderr,
+        };
         throw e;
     }
     return stdout;
@@ -66837,10 +66849,10 @@ async function getCmdOutput(cmd, args = [], options = {}) {
 function getCacheHandler() {
     const cacheProvider = lib_core.getInput("cache-provider");
     switch (cacheProvider) {
-        case 'github':
+        case "github":
             lib_core.info("Using Github Cache.");
             return lib_cache;
-        case 'buildjet':
+        case "buildjet":
             lib_core.info("Using Buildjet Cache.");
             return cache;
         default:
@@ -67259,10 +67271,8 @@ async function cleanBin(oldBins) {
     }
 }
 async function cleanRegistry(packages, crates = true) {
-    // `.cargo/registry/src`
-    // we can remove this completely, as cargo will recreate this from `cache`
-    await rmRF(path.join(CARGO_HOME, "registry", "src"));
     // `.cargo/registry/index`
+    let pkgSet = new Set(packages.map((p) => p.name));
     const indexDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "index"));
     for await (const dirent of indexDir) {
         if (dirent.isDirectory()) {
@@ -67273,15 +67283,35 @@ async function cleanRegistry(packages, crates = true) {
             if (await exists(path.join(dirPath, ".git"))) {
                 await rmRF(path.join(dirPath, ".cache"));
             }
-            // TODO: else, clean `.cache` based on the `packages`
+            else {
+                await cleanRegistryIndexCache(dirPath, pkgSet);
+            }
         }
     }
     if (!crates) {
-        core.debug(`skipping crate cleanup`);
+        core.debug("skipping registry cache and src cleanup");
         return;
     }
-    const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
+    // `.cargo/registry/src`
+    // Cargo usually re-creates these from the `.crate` cache below,
+    // but for some reason that does not work for `-sys` crates that check timestamps
+    // to decide if rebuilds are necessary.
+    pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`));
+    const srcDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "src"));
+    for await (const dirent of srcDir) {
+        if (dirent.isDirectory()) {
+            // eg `.cargo/registry/src/github.com-1ecc6299db9ec823`
+            // or `.cargo/registry/src/index.crates.io-e139d0d48fed7772`
+            const dir = await fs.promises.opendir(path.join(srcDir.path, dirent.name));
+            for await (const dirent of dir) {
+                if (dirent.isDirectory() && !pkgSet.has(dirent.name)) {
+                    await rmRF(path.join(dir.path, dirent.name));
+                }
+            }
+        }
+    }
     // `.cargo/registry/cache`
+    pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
     const cacheDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "cache"));
     for await (const dirent of cacheDir) {
         if (dirent.isDirectory()) {
@@ -67297,6 +67327,30 @@ async function cleanRegistry(packages, crates = true) {
         }
     }
 }
+/// Recursively walks and cleans the index `.cache`
+async function cleanRegistryIndexCache(dirName, keepPkg) {
+    let dirIsEmpty = true;
+    const cacheDir = await fs.promises.opendir(dirName);
+    for await (const dirent of cacheDir) {
+        if (dirent.isDirectory()) {
+            if (await cleanRegistryIndexCache(path.join(dirName, dirent.name), keepPkg)) {
+                await rm(dirName, dirent);
+            }
+            else {
+                dirIsEmpty && (dirIsEmpty = false);
+            }
+        }
+        else {
+            if (keepPkg.has(dirent.name)) {
+                dirIsEmpty && (dirIsEmpty = false);
+            }
+            else {
+                await rm(dirName, dirent);
+            }
+        }
+    }
+    return dirIsEmpty;
+}
 async function cleanGit(packages) {
     const coPath = path.join(CARGO_HOME, "git", "checkouts");
     const dbPath = path.join(CARGO_HOME, "git", "db");
@@ -67466,7 +67520,7 @@ async function run() {
     }
     catch (e) {
         setCacheHitOutput(false);
-        lib_core.error(`${e.stack}`);
+        reportError(e);
     }
 }
 function setCacheHitOutput(cacheHit) {

dist/save/index.js (vendored, 84 lines changed)

@@ -66810,6 +66810,16 @@ var lib_cache = __nccwpck_require__(7799);
+function reportError(e) {
+    const { commandFailed } = e;
+    if (commandFailed) {
+        core.error(`Command failed: ${commandFailed.command}`);
+        core.error(commandFailed.stderr);
+    }
+    else {
+        core.error(`${e.stack}`);
+    }
+}
 async function getCmdOutput(cmd, args = [], options = {}) {
     let stdout = "";
     let stderr = "";
@@ -66828,8 +66838,10 @@ async function getCmdOutput(cmd, args = [], options = {}) {
         });
     }
     catch (e) {
-        core.error(`Command failed: ${cmd} ${args.join(" ")}`);
-        core.error(stderr);
+        e.commandFailed = {
+            command: `${cmd} ${args.join(" ")}`,
+            stderr,
+        };
         throw e;
     }
     return stdout;
@@ -66837,10 +66849,10 @@ async function getCmdOutput(cmd, args = [], options = {}) {
 function getCacheHandler() {
     const cacheProvider = core.getInput("cache-provider");
     switch (cacheProvider) {
-        case 'github':
+        case "github":
             core.info("Using Github Cache.");
             return lib_cache;
-        case 'buildjet':
+        case "buildjet":
             core.info("Using Buildjet Cache.");
             return cache;
         default:
@@ -67259,10 +67271,8 @@ async function cleanBin(oldBins) {
     }
 }
 async function cleanRegistry(packages, crates = true) {
-    // `.cargo/registry/src`
-    // we can remove this completely, as cargo will recreate this from `cache`
-    await rmRF(external_path_default().join(CARGO_HOME, "registry", "src"));
     // `.cargo/registry/index`
+    let pkgSet = new Set(packages.map((p) => p.name));
     const indexDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "index"));
     for await (const dirent of indexDir) {
         if (dirent.isDirectory()) {
@@ -67273,15 +67283,35 @@ async function cleanRegistry(packages, crates = true) {
             if (await exists(external_path_default().join(dirPath, ".git"))) {
                 await rmRF(external_path_default().join(dirPath, ".cache"));
             }
-            // TODO: else, clean `.cache` based on the `packages`
+            else {
+                await cleanRegistryIndexCache(dirPath, pkgSet);
+            }
         }
     }
     if (!crates) {
-        core.debug(`skipping crate cleanup`);
+        core.debug("skipping registry cache and src cleanup");
         return;
     }
-    const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
+    // `.cargo/registry/src`
+    // Cargo usually re-creates these from the `.crate` cache below,
+    // but for some reason that does not work for `-sys` crates that check timestamps
+    // to decide if rebuilds are necessary.
+    pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`));
+    const srcDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "src"));
+    for await (const dirent of srcDir) {
+        if (dirent.isDirectory()) {
+            // eg `.cargo/registry/src/github.com-1ecc6299db9ec823`
+            // or `.cargo/registry/src/index.crates.io-e139d0d48fed7772`
+            const dir = await external_fs_default().promises.opendir(external_path_default().join(srcDir.path, dirent.name));
+            for await (const dirent of dir) {
+                if (dirent.isDirectory() && !pkgSet.has(dirent.name)) {
+                    await rmRF(external_path_default().join(dir.path, dirent.name));
+                }
+            }
+        }
+    }
     // `.cargo/registry/cache`
+    pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
     const cacheDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "cache"));
     for await (const dirent of cacheDir) {
         if (dirent.isDirectory()) {
@@ -67297,6 +67327,30 @@ async function cleanRegistry(packages, crates = true) {
         }
     }
 }
+/// Recursively walks and cleans the index `.cache`
+async function cleanRegistryIndexCache(dirName, keepPkg) {
+    let dirIsEmpty = true;
+    const cacheDir = await external_fs_default().promises.opendir(dirName);
+    for await (const dirent of cacheDir) {
+        if (dirent.isDirectory()) {
+            if (await cleanRegistryIndexCache(external_path_default().join(dirName, dirent.name), keepPkg)) {
+                await rm(dirName, dirent);
+            }
+            else {
+                dirIsEmpty && (dirIsEmpty = false);
+            }
+        }
+        else {
+            if (keepPkg.has(dirent.name)) {
+                dirIsEmpty && (dirIsEmpty = false);
+            }
+            else {
+                await rm(dirName, dirent);
+            }
+        }
+    }
+    return dirIsEmpty;
+}
 async function cleanGit(packages) {
     const coPath = external_path_default().join(CARGO_HOME, "git", "checkouts");
     const dbPath = external_path_default().join(CARGO_HOME, "git", "db");
@@ -67446,7 +67500,7 @@ async function run() {
                 await cleanTargetDir(workspace.target, packages);
             }
             catch (e) {
-                core.error(`${e.stack}`);
+                core.debug(`${e.stack}`);
             }
         }
         try {
@@ -67455,21 +67509,21 @@ async function run() {
             await cleanRegistry(allPackages, crates !== "true");
         }
         catch (e) {
-            core.error(`${e.stack}`);
+            core.debug(`${e.stack}`);
        }
         try {
             core.info(`... Cleaning cargo/bin ...`);
             await cleanBin(config.cargoBins);
         }
         catch (e) {
-            core.error(`${e.stack}`);
+            core.debug(`${e.stack}`);
         }
         try {
             core.info(`... Cleaning cargo git cache ...`);
             await cleanGit(allPackages);
         }
         catch (e) {
-            core.error(`${e.stack}`);
+            core.debug(`${e.stack}`);
         }
         core.info(`... Saving cache ...`);
         // Pass a copy of cachePaths to avoid mutating the original array as reported by:
@@ -67478,7 +67532,7 @@ async function run() {
         await cache.saveCache(config.cachePaths.slice(), config.cacheKey);
     }
     catch (e) {
-        core.error(`${e.stack}`);
+        reportError(e);
     }
 }
 run();

src/cleanup.ts

@@ -91,11 +91,8 @@ export async function cleanBin(oldBins: Array<string>) {
 }
 
 export async function cleanRegistry(packages: Packages, crates = true) {
-  // `.cargo/registry/src`
-  // we can remove this completely, as cargo will recreate this from `cache`
-  await rmRF(path.join(CARGO_HOME, "registry", "src"));
-
   // `.cargo/registry/index`
+  let pkgSet = new Set(packages.map((p) => p.name));
   const indexDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "index"));
   for await (const dirent of indexDir) {
     if (dirent.isDirectory()) {
@@ -106,19 +103,38 @@ export async function cleanRegistry(packages: Packages, crates = true) {
       // for a git registry, we can remove `.cache`, as cargo will recreate it from git
       if (await exists(path.join(dirPath, ".git"))) {
         await rmRF(path.join(dirPath, ".cache"));
+      } else {
+        await cleanRegistryIndexCache(dirPath, pkgSet);
       }
-      // TODO: else, clean `.cache` based on the `packages`
     }
   }
 
   if (!crates) {
-    core.debug(`skipping crate cleanup`);
+    core.debug("skipping registry cache and src cleanup");
     return;
   }
 
-  const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
+  // `.cargo/registry/src`
+  // Cargo usually re-creates these from the `.crate` cache below,
+  // but for some reason that does not work for `-sys` crates that check timestamps
+  // to decide if rebuilds are necessary.
+  pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`));
+  const srcDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "src"));
+  for await (const dirent of srcDir) {
+    if (dirent.isDirectory()) {
+      // eg `.cargo/registry/src/github.com-1ecc6299db9ec823`
+      // or `.cargo/registry/src/index.crates.io-e139d0d48fed7772`
+      const dir = await fs.promises.opendir(path.join(srcDir.path, dirent.name));
+      for await (const dirent of dir) {
+        if (dirent.isDirectory() && !pkgSet.has(dirent.name)) {
+          await rmRF(path.join(dir.path, dirent.name));
+        }
+      }
+    }
+  }
 
   // `.cargo/registry/cache`
+  pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
   const cacheDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "cache"));
   for await (const dirent of cacheDir) {
     if (dirent.isDirectory()) {
@@ -135,6 +151,28 @@ export async function cleanRegistry(packages: Packages, crates = true) {
   }
 }
 
+/// Recursively walks and cleans the index `.cache`
+async function cleanRegistryIndexCache(dirName: string, keepPkg: Set<string>) {
+  let dirIsEmpty = true;
+  const cacheDir = await fs.promises.opendir(dirName);
+  for await (const dirent of cacheDir) {
+    if (dirent.isDirectory()) {
+      if (await cleanRegistryIndexCache(path.join(dirName, dirent.name), keepPkg)) {
+        await rm(dirName, dirent);
+      } else {
+        dirIsEmpty &&= false;
+      }
+    } else {
+      if (keepPkg.has(dirent.name)) {
+        dirIsEmpty &&= false;
+      } else {
+        await rm(dirName, dirent);
+      }
+    }
+  }
+  return dirIsEmpty;
+}
+
 export async function cleanGit(packages: Packages) {
   const coPath = path.join(CARGO_HOME, "git", "checkouts");
   const dbPath = path.join(CARGO_HOME, "git", "db");
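A note on why cleanRegistryIndexCache is handed plain crate names: the sparse registry keeps its index cache under `<registry-dir>/.cache/` using the crates.io index layout (for example `.cache/se/rd/serde`), so the leaf file names are just crate names with no version attached. A hedged sketch of how cleanRegistry invokes it from inside cleanup.ts; the registry directory hash below is made up for illustration:

import os from "os";
import path from "path";

// CARGO_HOME already exists in cleanup.ts; repeated here only to keep the
// sketch self-contained.
const CARGO_HOME = process.env.CARGO_HOME ?? path.join(os.homedir(), ".cargo");

async function exampleIndexCleanup() {
  // Keep the index entries for crates we still depend on (names only, no
  // versions); files not in this set, and directories left empty, are removed.
  const keep = new Set(["serde", "serde_json", "syn"]);
  const registryDir = path.join(
    CARGO_HOME,
    "registry",
    "index",
    "index.crates.io-0000000000000000", // hypothetical directory name
  );
  await cleanRegistryIndexCache(registryDir, keep);
}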

src/restore.ts

@@ -2,7 +2,7 @@ import * as core from "@actions/core";
 
 import { cleanTargetDir } from "./cleanup";
 import { CacheConfig } from "./config";
-import { getCacheHandler } from "./utils";
+import { getCacheHandler, reportError } from "./utils";
 
 process.on("uncaughtException", (e) => {
   core.error(e.message);
@@ -62,7 +62,7 @@ async function run() {
   } catch (e) {
     setCacheHitOutput(false);
 
-    core.error(`${(e as any).stack}`);
+    reportError(e);
   }
 }

src/save.ts

@@ -3,7 +3,7 @@ import * as exec from "@actions/exec";
 
 import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup";
 import { CacheConfig, isCacheUpToDate } from "./config";
-import { getCacheHandler } from "./utils";
+import { getCacheHandler, reportError } from "./utils";
 
 process.on("uncaughtException", (e) => {
   core.error(e.message);
@@ -42,30 +42,30 @@ async function run() {
        core.info(`... Cleaning ${workspace.target} ...`);
        await cleanTargetDir(workspace.target, packages);
      } catch (e) {
-        core.error(`${(e as any).stack}`);
+        core.debug(`${(e as any).stack}`);
      }
    }
 
    try {
-      const crates = core.getInput("cache-all-crates").toLowerCase() || "false"
+      const crates = core.getInput("cache-all-crates").toLowerCase() || "false";
      core.info(`... Cleaning cargo registry cache-all-crates: ${crates} ...`);
      await cleanRegistry(allPackages, crates !== "true");
    } catch (e) {
-      core.error(`${(e as any).stack}`);
+      core.debug(`${(e as any).stack}`);
    }
 
    try {
      core.info(`... Cleaning cargo/bin ...`);
      await cleanBin(config.cargoBins);
    } catch (e) {
-      core.error(`${(e as any).stack}`);
+      core.debug(`${(e as any).stack}`);
    }
 
    try {
      core.info(`... Cleaning cargo git cache ...`);
      await cleanGit(allPackages);
    } catch (e) {
-      core.error(`${(e as any).stack}`);
+      core.debug(`${(e as any).stack}`);
    }
 
    core.info(`... Saving cache ...`);
@@ -74,7 +74,7 @@ async function run() {
    // TODO: remove this once the underlying bug is fixed.
    await cache.saveCache(config.cachePaths.slice(), config.cacheKey);
  } catch (e) {
-    core.error(`${(e as any).stack}`);
+    reportError(e);
  }
 }

src/utils.ts

@@ -3,6 +3,16 @@ import * as ghCache from "@actions/cache";
 import * as core from "@actions/core";
 import * as exec from "@actions/exec";
 
+export function reportError(e: any) {
+  const { commandFailed } = e;
+  if (commandFailed) {
+    core.error(`Command failed: ${commandFailed.command}`);
+    core.error(commandFailed.stderr);
+  } else {
+    core.error(`${e.stack}`);
+  }
+}
+
 export async function getCmdOutput(
   cmd: string,
   args: Array<string> = [],
@@ -24,8 +34,10 @@ export async function getCmdOutput(
       ...options,
     });
   } catch (e) {
-    core.error(`Command failed: ${cmd} ${args.join(" ")}`);
-    core.error(stderr);
+    (e as any).commandFailed = {
+      command: `${cmd} ${args.join(" ")}`,
+      stderr,
+    };
     throw e;
   }
   return stdout;
@@ -34,11 +46,11 @@ export async function getCmdOutput(
 export function getCacheHandler() {
   const cacheProvider = core.getInput("cache-provider");
   switch (cacheProvider) {
-    case 'github':
-      core.info ("Using Github Cache.");
+    case "github":
+      core.info("Using Github Cache.");
       return ghCache;
-    case 'buildjet':
-      core.info ("Using Buildjet Cache.");
+    case "buildjet":
+      core.info("Using Buildjet Cache.");
       return buildjetCache;
     default:
       throw new Error("Only currently support github and buildjet caches");
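As a usage sketch of the two helpers together (the cargo invocation is only an illustrative example, not a call site from this commit): the caller lets getCmdOutput throw and decides at the top level whether the failure becomes an annotation.

import * as core from "@actions/core";
import { getCmdOutput, reportError } from "./utils";

async function example() {
  try {
    const meta = await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"]);
    core.info(`cargo metadata returned ${meta.length} bytes`);
  } catch (e) {
    // Prints "Command failed: cargo metadata ..." plus the captured stderr,
    // or falls back to the stack trace for non-command errors.
    reportError(e);
  }
}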