Mirror of https://gitea.com/actions/cache.git (synced 2024-11-22 12:22:31 +08:00)
Use zstd instead of gzip if available
Add zstd to cache versioning
parent 9ceee97d99
commit 97f7baa910
CONTRIBUTING.md
@@ -31,4 +31,4 @@ Here are a few things you can do that will increase the likelihood of your pull
 - [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
 - [Using Pull Requests](https://help.github.com/articles/about-pull-requests/)
 - [GitHub Help](https://help.github.com)
__tests__/cacheHttpClient.test.ts
@@ -1,12 +1,12 @@
 import { getCacheVersion } from "../src/cacheHttpClient";
-import { Inputs } from "../src/constants";
+import { CompressionMethod, Inputs } from "../src/constants";
 import * as testUtils from "../src/utils/testUtils";
 
 afterEach(() => {
     testUtils.clearInputs();
 });
 
-test("getCacheVersion with path input returns version", async () => {
+test("getCacheVersion with path input and compression method undefined returns version", async () => {
     testUtils.setInput(Inputs.Path, "node_modules");
 
     const result = getCacheVersion();
@@ -16,6 +16,24 @@ test("getCacheVersion with path input returns version", async () => {
     );
 });
 
+test("getCacheVersion with zstd compression returns version", async () => {
+    testUtils.setInput(Inputs.Path, "node_modules");
+    const result = getCacheVersion(CompressionMethod.Zstd);
+
+    expect(result).toEqual(
+        "273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24"
+    );
+});
+
+test("getCacheVersion with gzip compression does not change version", async () => {
+    testUtils.setInput(Inputs.Path, "node_modules");
+    const result = getCacheVersion(CompressionMethod.Gzip);
+
+    expect(result).toEqual(
+        "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
+    );
+});
+
 test("getCacheVersion with no input throws", async () => {
     expect(() => getCacheVersion()).toThrow();
 });
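Taken together, these three cases pin down the versioning contract: with gzip (or no method at all) the hash must stay byte-identical to the pre-zstd version, so existing caches keep matching, while zstd folds the method into the hashed components and gets a fresh namespace. A minimal sketch of that rule, assuming a salt constant whose actual value is not shown in this diff:

```typescript
import * as crypto from "crypto";

// Assumed salt; the real value is defined elsewhere in cacheHttpClient.ts
// and does not appear in this diff.
const versionSalt = "1.0";

function getCacheVersionSketch(
    pathInput: string,
    compressionMethod?: "gzip" | "zstd"
): string {
    // zstd adds itself to the hashed components; gzip hashes exactly what
    // the previous release hashed, keeping old cache entries valid.
    const components =
        compressionMethod === "zstd"
            ? [pathInput, compressionMethod, versionSalt]
            : [pathInput, versionSalt];
    return crypto
        .createHash("sha256")
        .update(components.join("|"))
        .digest("hex");
}
```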
__tests__/restore.test.ts
@@ -2,7 +2,12 @@ import * as core from "@actions/core";
 import * as path from "path";
 
 import * as cacheHttpClient from "../src/cacheHttpClient";
-import { Events, Inputs } from "../src/constants";
+import {
+    CacheFilename,
+    CompressionMethod,
+    Events,
+    Inputs
+} from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/restore";
 import * as tar from "../src/tar";
@@ -30,6 +35,11 @@ beforeAll(() => {
         const actualUtils = jest.requireActual("../src/utils/actionUtils");
         return actualUtils.getSupportedEvents();
     });
+
+    jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
+        const actualUtils = jest.requireActual("../src/utils/actionUtils");
+        return actualUtils.getCacheFileName(cm);
+    });
 });
 
 beforeEach(() => {
@@ -197,7 +207,7 @@ test("restore with restore keys and no cache found", async () => {
     );
 });
 
-test("restore with cache found", async () => {
+test("restore with gzip compressed cache found", async () => {
     const key = "node-test";
     testUtils.setInputs({
         path: "node_modules",
@@ -227,7 +237,7 @@ test("restore with cache found", async () => {
         return Promise.resolve(tempPath);
     });
 
-    const archivePath = path.join(tempPath, "cache.tgz");
+    const archivePath = path.join(tempPath, CacheFilename.Gzip);
     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
 
@@ -240,10 +250,17 @@ test("restore with cache found", async () => {
     const unlinkFileMock = jest.spyOn(actionUtils, "unlinkFile");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
 
+    const compression = CompressionMethod.Gzip;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));
+
     await run();
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key]);
+    expect(getCacheMock).toHaveBeenCalledWith([key], {
+        compressionMethod: compression
+    });
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
     expect(downloadCacheMock).toHaveBeenCalledWith(
@@ -253,7 +270,7 @@ test("restore with cache found", async () => {
     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
 
     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
 
     expect(unlinkFileMock).toHaveBeenCalledTimes(1);
     expect(unlinkFileMock).toHaveBeenCalledWith(archivePath);
@@ -263,9 +280,10 @@ test("restore with cache found", async () => {
 
     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
-test("restore with a pull request event and cache found", async () => {
+test("restore with a pull request event and zstd compressed cache found", async () => {
     const key = "node-test";
     testUtils.setInputs({
         path: "node_modules",
@@ -297,7 +315,7 @@ test("restore with a pull request event and cache found", async () => {
         return Promise.resolve(tempPath);
     });
 
-    const archivePath = path.join(tempPath, "cache.tgz");
+    const archivePath = path.join(tempPath, CacheFilename.Zstd);
     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
 
@@ -308,11 +326,17 @@ test("restore with a pull request event and cache found", async () => {
 
     const extractTarMock = jest.spyOn(tar, "extractTar");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
+    const compression = CompressionMethod.Zstd;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key]);
+    expect(getCacheMock).toHaveBeenCalledWith([key], {
+        compressionMethod: compression
+    });
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
     expect(downloadCacheMock).toHaveBeenCalledWith(
@@ -323,13 +347,14 @@ test("restore with a pull request event and cache found", async () => {
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
 
     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
 
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
 
     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("restore with cache found for restore key", async () => {
@@ -364,7 +389,7 @@ test("restore with cache found for restore key", async () => {
         return Promise.resolve(tempPath);
     });
 
-    const archivePath = path.join(tempPath, "cache.tgz");
+    const archivePath = path.join(tempPath, CacheFilename.Zstd);
     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
 
@@ -375,11 +400,17 @@ test("restore with cache found for restore key", async () => {
 
     const extractTarMock = jest.spyOn(tar, "extractTar");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
+    const compression = CompressionMethod.Zstd;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]);
+    expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], {
+        compressionMethod: compression
+    });
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
     expect(downloadCacheMock).toHaveBeenCalledWith(
@@ -390,7 +421,7 @@ test("restore with cache found for restore key", async () => {
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
 
     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
 
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
@@ -399,4 +430,5 @@ test("restore with cache found for restore key", async () => {
         `Cache restored from key: ${restoreKey}`
     );
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
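Behind these expectations, the restore entrypoint now threads a single detected compression method through lookup, archive naming, and extraction; the compiled dist/restore/index.js changes further down show the same flow. A condensed sketch, with error handling and logging omitted and import paths matching the repo's src/ layout:

```typescript
import * as path from "path";
import * as cacheHttpClient from "./cacheHttpClient";
import { extractTar } from "./tar";
import * as utils from "./utils/actionUtils";

// Sketch of the new restore flow: detect zstd once, then pass the method to
// the cache lookup (it affects the version hash), the archive file name
// (cache.tzst vs cache.tgz), and the tar extraction arguments.
async function restoreSketch(keys: string[]): Promise<void> {
    const compressionMethod = await utils.getCompressionMethod();
    const cacheEntry = await cacheHttpClient.getCacheEntry(keys, {
        compressionMethod
    });
    if (!cacheEntry?.archiveLocation) {
        return; // no cache found for these keys
    }

    const archivePath = path.join(
        await utils.createTempDirectory(),
        utils.getCacheFileName(compressionMethod)
    );
    await cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath);
    await extractTar(archivePath, compressionMethod);
}
```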
__tests__/save.test.ts
@@ -2,7 +2,12 @@ import * as core from "@actions/core";
 import * as path from "path";
 
 import * as cacheHttpClient from "../src/cacheHttpClient";
-import { CacheFilename, Events, Inputs } from "../src/constants";
+import {
+    CacheFilename,
+    CompressionMethod,
+    Events,
+    Inputs
+} from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/save";
 import * as tar from "../src/tar";
@@ -50,6 +55,11 @@ beforeAll(() => {
     jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => {
         return Promise.resolve("/foo/bar");
     });
+
+    jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
+        const actualUtils = jest.requireActual("../src/utils/actionUtils");
+        return actualUtils.getCacheFileName(cm);
+    });
 });
 
 beforeEach(() => {
@@ -201,20 +211,27 @@ test("save with large cache outputs warning", async () => {
     jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
         return cacheSize;
     });
+    const compression = CompressionMethod.Gzip;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     const archiveFolder = "/foo/bar";
 
     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(archiveFolder, cachePaths);
+    expect(createTarMock).toHaveBeenCalledWith(
+        archiveFolder,
+        cachePaths,
+        compression
+    );
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith(
         "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
     );
 
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("save with reserve cache failure outputs warning", async () => {
@@ -250,13 +267,18 @@ test("save with reserve cache failure outputs warning", async () => {
     });
 
     const createTarMock = jest.spyOn(tar, "createTar");
 
     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
+    const compression = CompressionMethod.Zstd;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
+        compressionMethod: compression
+    });
 
     expect(infoMock).toHaveBeenCalledWith(
         `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
@@ -266,6 +288,7 @@ test("save with reserve cache failure outputs warning", async () => {
     expect(saveCacheMock).toHaveBeenCalledTimes(0);
     expect(logWarningMock).toHaveBeenCalledTimes(0);
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("save with server error outputs warning", async () => {
@@ -308,17 +331,27 @@ test("save with server error outputs warning", async () => {
         .mockImplementationOnce(() => {
             throw new Error("HTTP Error Occurred");
         });
+    const compression = CompressionMethod.Zstd;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
+        compressionMethod: compression
+    });
 
     const archiveFolder = "/foo/bar";
-    const archiveFile = path.join(archiveFolder, CacheFilename);
+    const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
 
     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(archiveFolder, cachePaths);
+    expect(createTarMock).toHaveBeenCalledWith(
+        archiveFolder,
+        cachePaths,
+        compression
+    );
 
     expect(saveCacheMock).toHaveBeenCalledTimes(1);
     expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
@@ -327,6 +360,7 @@ test("save with server error outputs warning", async () => {
     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
 
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("save with valid inputs uploads a cache", async () => {
@@ -364,20 +398,31 @@ test("save with valid inputs uploads a cache", async () => {
     const createTarMock = jest.spyOn(tar, "createTar");
 
     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
+    const compression = CompressionMethod.Zstd;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
+        compressionMethod: compression
+    });
 
     const archiveFolder = "/foo/bar";
-    const archiveFile = path.join(archiveFolder, CacheFilename);
+    const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
 
     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(archiveFolder, cachePaths);
+    expect(createTarMock).toHaveBeenCalledWith(
+        archiveFolder,
+        cachePaths,
+        compression
+    );
 
     expect(saveCacheMock).toHaveBeenCalledTimes(1);
     expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
 
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
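The save side mirrors restore: compression is detected once, passed to reserveCache (so the version hash matches what restore will later look up), and then used to pick the archive name and tar arguments. A condensed sketch, reconstructed from the compiled dist/save/index.js changes at the end of this diff:

```typescript
import * as path from "path";
import * as cacheHttpClient from "./cacheHttpClient";
import { createTar } from "./tar";
import * as utils from "./utils/actionUtils";

// Sketch of the new save flow: the same compressionMethod value must reach
// both reserveCache and createTar, otherwise the uploaded archive's format
// would not match the version the cache entry was reserved under.
async function saveSketch(primaryKey: string, cachePaths: string[]): Promise<void> {
    const compressionMethod = await utils.getCompressionMethod();
    const cacheId = await cacheHttpClient.reserveCache(primaryKey, {
        compressionMethod
    });
    if (cacheId === -1) {
        return; // another job may be creating this cache
    }

    const archiveFolder = await utils.createTempDirectory();
    const archivePath = path.join(
        archiveFolder,
        utils.getCacheFileName(compressionMethod)
    );
    await createTar(archiveFolder, cachePaths, compressionMethod);
    await cacheHttpClient.saveCache(cacheId, archivePath);
}
```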
__tests__/tar.test.ts
@@ -2,14 +2,17 @@ import * as exec from "@actions/exec";
 import * as io from "@actions/io";
 import * as path from "path";
 
-import { CacheFilename } from "../src/constants";
+import { CacheFilename, CompressionMethod } from "../src/constants";
 import * as tar from "../src/tar";
+import * as utils from "../src/utils/actionUtils";
 
 import fs = require("fs");
 
 jest.mock("@actions/exec");
 jest.mock("@actions/io");
 
+const IS_WINDOWS = process.platform === "win32";
+
 function getTempDir(): string {
     return path.join(__dirname, "_temp", "tar");
 }
@@ -28,29 +31,28 @@ afterAll(async () => {
     await jest.requireActual("@actions/io").rmRF(getTempDir());
 });
 
-test("extract BSD tar", async () => {
+test("zstd extract tar", async () => {
     const mkdirMock = jest.spyOn(io, "mkdirP");
     const execMock = jest.spyOn(exec, "exec");
 
-    const IS_WINDOWS = process.platform === "win32";
     const archivePath = IS_WINDOWS
         ? `${process.env["windir"]}\\fakepath\\cache.tar`
         : "cache.tar";
     const workspace = process.env["GITHUB_WORKSPACE"];
 
-    await tar.extractTar(archivePath);
+    await tar.extractTar(archivePath, CompressionMethod.Zstd);
 
     expect(mkdirMock).toHaveBeenCalledWith(workspace);
 
     const tarPath = IS_WINDOWS
         ? `${process.env["windir"]}\\System32\\tar.exe`
         : "tar";
     expect(execMock).toHaveBeenCalledTimes(1);
     expect(execMock).toHaveBeenCalledWith(
-        `"${tarPath}"`,
+        `${tarPath}`,
         [
-            "-xz",
-            "-f",
+            "--use-compress-program",
+            "zstd -d",
+            "-xf",
             IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
             "-P",
             "-C",
@@ -60,24 +62,55 @@ test("extract BSD tar", async () => {
     );
 });
 
-test("extract GNU tar", async () => {
-    const IS_WINDOWS = process.platform === "win32";
+test("gzip extract tar", async () => {
+    const mkdirMock = jest.spyOn(io, "mkdirP");
+    const execMock = jest.spyOn(exec, "exec");
+    const archivePath = IS_WINDOWS
+        ? `${process.env["windir"]}\\fakepath\\cache.tar`
+        : "cache.tar";
+    const workspace = process.env["GITHUB_WORKSPACE"];
+
+    await tar.extractTar(archivePath, CompressionMethod.Gzip);
+
+    expect(mkdirMock).toHaveBeenCalledWith(workspace);
+    const tarPath = IS_WINDOWS
+        ? `${process.env["windir"]}\\System32\\tar.exe`
+        : "tar";
+    expect(execMock).toHaveBeenCalledTimes(1);
+    expect(execMock).toHaveBeenCalledWith(
+        `${tarPath}`,
+        [
+            "-z",
+            "-xf",
+            IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
+            "-P",
+            "-C",
+            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
+        ],
+        { cwd: undefined }
+    );
+});
+
+test("gzip extract GNU tar on windows", async () => {
     if (IS_WINDOWS) {
         jest.spyOn(fs, "existsSync").mockReturnValueOnce(false);
-        jest.spyOn(tar, "isGnuTar").mockReturnValue(Promise.resolve(true));
+
+        const isGnuMock = jest
+            .spyOn(utils, "useGnuTar")
+            .mockReturnValue(Promise.resolve(true));
         const execMock = jest.spyOn(exec, "exec");
         const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`;
         const workspace = process.env["GITHUB_WORKSPACE"];
 
-        await tar.extractTar(archivePath);
+        await tar.extractTar(archivePath, CompressionMethod.Gzip);
 
-        expect(execMock).toHaveBeenCalledTimes(2);
-        expect(execMock).toHaveBeenLastCalledWith(
-            `"tar"`,
+        expect(isGnuMock).toHaveBeenCalledTimes(1);
+        expect(execMock).toHaveBeenCalledTimes(1);
+        expect(execMock).toHaveBeenCalledWith(
+            `tar`,
             [
-                "-xz",
-                "-f",
+                "-z",
+                "-xf",
                 archivePath.replace(/\\/g, "/"),
                 "-P",
                 "-C",
@@ -89,7 +122,7 @@ test("extract GNU tar", async () => {
     }
 });
 
-test("create BSD tar", async () => {
+test("zstd create tar", async () => {
     const execMock = jest.spyOn(exec, "exec");
 
     const archiveFolder = getTempDir();
@@ -98,20 +131,66 @@ test("create BSD tar", async () => {
 
     await fs.promises.mkdir(archiveFolder, { recursive: true });
 
-    await tar.createTar(archiveFolder, sourceDirectories);
+    await tar.createTar(
+        archiveFolder,
+        sourceDirectories,
+        CompressionMethod.Zstd
+    );
 
-    const IS_WINDOWS = process.platform === "win32";
     const tarPath = IS_WINDOWS
         ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";
 
     expect(execMock).toHaveBeenCalledTimes(1);
     expect(execMock).toHaveBeenCalledWith(
-        `"${tarPath}"`,
+        `${tarPath}`,
         [
-            "-cz",
-            "-f",
-            IS_WINDOWS ? CacheFilename.replace(/\\/g, "/") : CacheFilename,
+            "--use-compress-program",
+            "zstd -T0",
+            "-cf",
+            IS_WINDOWS
+                ? CacheFilename.Zstd.replace(/\\/g, "/")
+                : CacheFilename.Zstd,
+            "-P",
+            "-C",
+            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
+            "--files-from",
+            "manifest.txt"
+        ],
+        {
+            cwd: archiveFolder
+        }
+    );
+});
+
+test("gzip create tar", async () => {
+    const execMock = jest.spyOn(exec, "exec");
+
+    const archiveFolder = getTempDir();
+    const workspace = process.env["GITHUB_WORKSPACE"];
+    const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];
+
+    await fs.promises.mkdir(archiveFolder, { recursive: true });
+
+    await tar.createTar(
+        archiveFolder,
+        sourceDirectories,
+        CompressionMethod.Gzip
+    );
+
+    const tarPath = IS_WINDOWS
+        ? `${process.env["windir"]}\\System32\\tar.exe`
+        : "tar";
+
+    expect(execMock).toHaveBeenCalledTimes(1);
+    expect(execMock).toHaveBeenCalledWith(
+        `${tarPath}`,
+        [
+            "-z",
+            "-cf",
+            IS_WINDOWS
+                ? CacheFilename.Gzip.replace(/\\/g, "/")
+                : CacheFilename.Gzip,
             "-P",
             "-C",
             IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
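These tests spell out the exact argument vectors the action now passes to tar: with zstd available, compression is delegated to the external binary via --use-compress-program, and with gzip tar's built-in -z filter is used. Factored out here for readability (the action builds the same array inline, as the compiled tar code below shows):

```typescript
import { CompressionMethod } from "../src/constants";

// Flag selection asserted by the create-side tests. "zstd -T0" uses one
// worker thread per physical core; the file list goes through manifest.txt
// to avoid command-line length limits.
function createArgs(
    method: CompressionMethod,
    cacheFileName: string,
    workspace: string
): string[] {
    return [
        ...(method === CompressionMethod.Zstd
            ? ["--use-compress-program", "zstd -T0"]
            : ["-z"]),
        "-cf",
        cacheFileName,
        "-P",
        "-C",
        workspace,
        "--files-from",
        "manifest.txt"
    ];
}
```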
dist/restore/index.js (vendored, 140 changed lines)
@@ -2236,23 +2236,22 @@ function createHttpClient() {
     const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
     return new http_client_1.HttpClient("actions/cache", [bearerCredentialHandler], getRequestOptions());
 }
-function getCacheVersion() {
+function getCacheVersion(compressionMethod) {
     // Add salt to cache version to support breaking changes in cache entry
-    const components = [
-        core.getInput(constants_1.Inputs.Path, { required: true }),
-        versionSalt
-    ];
+    const components = [core.getInput(constants_1.Inputs.Path, { required: true })].concat(compressionMethod == constants_1.CompressionMethod.Zstd
+        ? [compressionMethod, versionSalt]
+        : versionSalt);
     return crypto
         .createHash("sha256")
         .update(components.join("|"))
         .digest("hex");
 }
 exports.getCacheVersion = getCacheVersion;
-function getCacheEntry(keys) {
-    var _a;
+function getCacheEntry(keys, options) {
+    var _a, _b;
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
-        const version = getCacheVersion();
+        const version = getCacheVersion((_a = options) === null || _a === void 0 ? void 0 : _a.compressionMethod);
         const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version}`;
         const response = yield httpClient.getJson(getCacheApiUrl(resource));
         if (response.statusCode === 204) {
@@ -2262,7 +2261,7 @@ function getCacheEntry(keys) {
             throw new Error(`Cache service responded with ${response.statusCode}`);
         }
         const cacheResult = response.result;
-        const cacheDownloadUrl = (_a = cacheResult) === null || _a === void 0 ? void 0 : _a.archiveLocation;
+        const cacheDownloadUrl = (_b = cacheResult) === null || _b === void 0 ? void 0 : _b.archiveLocation;
         if (!cacheDownloadUrl) {
             throw new Error("Cache not found.");
         }
@@ -2306,17 +2305,17 @@ function downloadCache(archiveLocation, archivePath) {
 }
 exports.downloadCache = downloadCache;
 // Reserve Cache
-function reserveCache(key) {
-    var _a, _b, _c;
+function reserveCache(key, options) {
+    var _a, _b, _c, _d;
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
-        const version = getCacheVersion();
+        const version = getCacheVersion((_a = options) === null || _a === void 0 ? void 0 : _a.compressionMethod);
         const reserveCacheRequest = {
             key,
             version
         };
         const response = yield httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest);
-        return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1);
+        return _d = (_c = (_b = response) === null || _b === void 0 ? void 0 : _b.result) === null || _c === void 0 ? void 0 : _c.cacheId, (_d !== null && _d !== void 0 ? _d : -1);
     });
 }
 exports.reserveCache = reserveCache;
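In the TypeScript these functions compile from, both endpoints now take an optional options object rather than a bare compression argument, which keeps the signatures extensible. The compiled output reveals only the property access `options?.compressionMethod`, so the type name in this sketch is a guess:

```typescript
import { CompressionMethod } from "./constants";

// Hypothetical name: the compiled JS does not show what the source calls
// this interface, only that it carries an optional compressionMethod.
interface CacheRequestOptions {
    compressionMethod?: CompressionMethod;
}

// Both endpoints thread the same optional bag into getCacheVersion, so a
// lookup and a reservation made with the same method hash to the same version.
declare function getCacheEntry(
    keys: string[],
    options?: CacheRequestOptions
): Promise<unknown>;
declare function reserveCache(
    key: string,
    options?: CacheRequestOptions
): Promise<number>;
```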
@@ -3201,6 +3200,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(__webpack_require__(470));
+const exec = __importStar(__webpack_require__(986));
 const glob = __importStar(__webpack_require__(281));
 const io = __importStar(__webpack_require__(1));
 const fs = __importStar(__webpack_require__(747));
@@ -3320,6 +3320,50 @@ function unlinkFile(path) {
     return util.promisify(fs.unlink)(path);
 }
 exports.unlinkFile = unlinkFile;
+function checkVersion(app) {
+    return __awaiter(this, void 0, void 0, function* () {
+        core.debug(`Checking ${app} --version`);
+        let versionOutput = "";
+        try {
+            yield exec.exec(`${app} --version`, [], {
+                ignoreReturnCode: true,
+                silent: true,
+                listeners: {
+                    stdout: (data) => (versionOutput += data.toString()),
+                    stderr: (data) => (versionOutput += data.toString())
+                }
+            });
+        }
+        catch (err) {
+            core.debug(err.message);
+        }
+        versionOutput = versionOutput.trim();
+        core.debug(versionOutput);
+        return versionOutput;
+    });
+}
+function getCompressionMethod() {
+    return __awaiter(this, void 0, void 0, function* () {
+        const versionOutput = yield checkVersion("zstd");
+        return versionOutput.toLowerCase().includes("zstd command line interface")
+            ? constants_1.CompressionMethod.Zstd
+            : constants_1.CompressionMethod.Gzip;
+    });
+}
+exports.getCompressionMethod = getCompressionMethod;
+function getCacheFileName(compressionMethod) {
+    return compressionMethod == constants_1.CompressionMethod.Zstd
+        ? constants_1.CacheFilename.Zstd
+        : constants_1.CacheFilename.Gzip;
+}
+exports.getCacheFileName = getCacheFileName;
+function useGnuTar() {
+    return __awaiter(this, void 0, void 0, function* () {
+        const versionOutput = yield checkVersion("tar");
+        return versionOutput.toLowerCase().includes("gnu tar");
+    });
+}
+exports.useGnuTar = useGnuTar;
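This block is the heart of "use zstd if available": capability is detected by probing `--version` output rather than by platform. Decompiled back to TypeScript, the helpers read roughly as follows (listener typings simplified; import paths as in the repo's src/utils layout):

```typescript
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import { CacheFilename, CompressionMethod } from "../constants";

// Run `<app> --version`, capturing stdout and stderr; a missing binary
// yields an empty string instead of failing the action.
async function checkVersion(app: string): Promise<string> {
    core.debug(`Checking ${app} --version`);
    let versionOutput = "";
    try {
        await exec.exec(`${app} --version`, [], {
            ignoreReturnCode: true,
            silent: true,
            listeners: {
                stdout: (data: Buffer) => (versionOutput += data.toString()),
                stderr: (data: Buffer) => (versionOutput += data.toString())
            }
        });
    } catch (err) {
        core.debug(err.message);
    }
    versionOutput = versionOutput.trim();
    core.debug(versionOutput);
    return versionOutput;
}

export async function getCompressionMethod(): Promise<CompressionMethod> {
    const versionOutput = await checkVersion("zstd");
    return versionOutput.toLowerCase().includes("zstd command line interface")
        ? CompressionMethod.Zstd
        : CompressionMethod.Gzip;
}

export function getCacheFileName(method: CompressionMethod): CacheFilename {
    return method === CompressionMethod.Zstd
        ? CacheFilename.Zstd
        : CacheFilename.Gzip;
}

export async function useGnuTar(): Promise<boolean> {
    const versionOutput = await checkVersion("tar");
    return versionOutput.toLowerCase().includes("gnu tar");
}
```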
 /***/ }),
@@ -3599,12 +3643,6 @@ class HttpClientResponse {
             this.message.on('data', (chunk) => {
                 output = Buffer.concat([output, chunk]);
             });
-            this.message.on('aborted', () => {
-                reject("Request was aborted or closed prematurely");
-            });
-            this.message.on('timeout', (socket) => {
-                reject("Request timed out");
-            });
             this.message.on('end', () => {
                 resolve(output.toString());
             });
@@ -3726,7 +3764,6 @@ class HttpClient {
         let response;
         while (numTries < maxTries) {
             response = await this.requestRaw(info, data);
 
             // Check if it's an authentication challenge
             if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) {
                 let authenticationHandler;
@@ -4490,7 +4527,16 @@ var Events;
     Events["Push"] = "push";
     Events["PullRequest"] = "pull_request";
 })(Events = exports.Events || (exports.Events = {}));
-exports.CacheFilename = "cache.tgz";
+var CacheFilename;
+(function (CacheFilename) {
+    CacheFilename["Gzip"] = "cache.tgz";
+    CacheFilename["Zstd"] = "cache.tzst";
+})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));
+var CompressionMethod;
+(function (CompressionMethod) {
+    CompressionMethod["Gzip"] = "gzip";
+    CompressionMethod["Zstd"] = "zstd";
+})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
 // Socket timeout in milliseconds during download. If no traffic is received
 // over the socket during this period, the socket is destroyed and the download
 // is aborted.
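The single exported CacheFilename string becomes two string enums, so the file extension and the version-salt component are both derived from one CompressionMethod value. The constants.ts source these compile from:

```typescript
// Archive file name per compression method; the .tzst extension marks
// zstd-compressed entries so they are never unpacked with the gzip path.
export enum CacheFilename {
    Gzip = "cache.tgz",
    Zstd = "cache.tzst"
}

export enum CompressionMethod {
    Gzip = "gzip",
    Zstd = "zstd"
}
```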
@@ -4617,13 +4663,16 @@ function run() {
                 return;
             }
         }
+        const compressionMethod = yield utils.getCompressionMethod();
         try {
-            const cacheEntry = yield cacheHttpClient.getCacheEntry(keys);
+            const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, {
+                compressionMethod: compressionMethod
+            });
             if (!((_a = cacheEntry) === null || _a === void 0 ? void 0 : _a.archiveLocation)) {
                 core.info(`Cache not found for input keys: ${keys.join(", ")}`);
                 return;
             }
-            const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz");
+            const archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
             core.debug(`Archive Path: ${archivePath}`);
             // Store the cache result
             utils.setCacheState(cacheEntry);
@@ -4632,7 +4681,7 @@ function run() {
             yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath);
             const archiveFileSize = utils.getArchiveFileSize(archivePath);
             core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
-            yield tar_1.extractTar(archivePath);
+            yield tar_1.extractTar(archivePath, compressionMethod);
         }
         finally {
             // Try to delete the archive to save space
@@ -4993,29 +5042,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
 const exec_1 = __webpack_require__(986);
 const io = __importStar(__webpack_require__(1));
 const fs_1 = __webpack_require__(747);
 const path = __importStar(__webpack_require__(622));
 const constants_1 = __webpack_require__(694);
-function isGnuTar() {
-    return __awaiter(this, void 0, void 0, function* () {
-        core.debug("Checking tar --version");
-        let versionOutput = "";
-        yield exec_1.exec("tar --version", [], {
-            ignoreReturnCode: true,
-            silent: true,
-            listeners: {
-                stdout: (data) => (versionOutput += data.toString()),
-                stderr: (data) => (versionOutput += data.toString())
-            }
-        });
-        core.debug(versionOutput.trim());
-        return versionOutput.toUpperCase().includes("GNU TAR");
-    });
-}
-exports.isGnuTar = isGnuTar;
+const utils = __importStar(__webpack_require__(443));
 function getTarPath(args) {
     return __awaiter(this, void 0, void 0, function* () {
         // Explicitly use BSD Tar on Windows
@@ -5025,7 +5057,7 @@ function getTarPath(args) {
         if (fs_1.existsSync(systemTar)) {
             return systemTar;
         }
-        else if (isGnuTar()) {
+        else if (yield utils.useGnuTar()) {
             args.push("--force-local");
         }
     }
@@ -5036,7 +5068,7 @@ function execTar(args, cwd) {
     var _a;
     return __awaiter(this, void 0, void 0, function* () {
         try {
-            yield exec_1.exec(`"${yield getTarPath(args)}"`, args, { cwd: cwd });
+            yield exec_1.exec(`${yield getTarPath(args)}`, args, { cwd: cwd });
         }
         catch (error) {
             throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}`);
@@ -5047,14 +5079,16 @@ function getWorkingDirectory() {
     var _a;
     return _a = process.env["GITHUB_WORKSPACE"], (_a !== null && _a !== void 0 ? _a : process.cwd());
 }
-function extractTar(archivePath) {
+function extractTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
         const args = [
-            "-xz",
-            "-f",
+            ...(compressionMethod == constants_1.CompressionMethod.Zstd
+                ? ["--use-compress-program", "zstd -d"]
+                : ["-z"]),
+            "-xf",
             archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
             "-P",
             "-C",
@@ -5064,16 +5098,20 @@ function extractTar(archivePath) {
     });
 }
 exports.extractTar = extractTar;
-function createTar(archiveFolder, sourceDirectories) {
+function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Write source directories to manifest.txt to avoid command length limits
         const manifestFilename = "manifest.txt";
+        const cacheFileName = utils.getCacheFileName(compressionMethod);
         fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join("\n"));
+        // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
         const workingDirectory = getWorkingDirectory();
         const args = [
-            "-cz",
-            "-f",
-            constants_1.CacheFilename.replace(new RegExp("\\" + path.sep, "g"), "/"),
+            ...(compressionMethod == constants_1.CompressionMethod.Zstd
+                ? ["--use-compress-program", "zstd -T0"]
+                : ["-z"]),
+            "-cf",
+            cacheFileName.replace(new RegExp("\\" + path.sep, "g"), "/"),
             "-P",
             "-C",
             workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
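Decompiled, the new argument construction in tar.ts reduces to a conditional spread, and the path normalization keeps the same command shape working with the BSD tar shipped in Windows System32. A sketch for the extract side; zstd is invoked as an external filter, likely because the runners' mix of GNU and BSD tar builds offers no common built-in zstd flag, while gzip can rely on tar's own -z in both directions:

```typescript
import * as path from "path";
import { CompressionMethod } from "./constants";

// Builds the tar argument list for extraction, mirroring the compiled
// output above: choose the decompression filter, then normalize path
// separators to "/" so Windows paths work with BSD tar.
function extractArgs(
    archivePath: string,
    compressionMethod: CompressionMethod,
    workingDirectory: string
): string[] {
    return [
        ...(compressionMethod === CompressionMethod.Zstd
            ? ["--use-compress-program", "zstd -d"]
            : ["-z"]),
        "-xf",
        archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
        "-P",
        "-C",
        workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
    ];
}
```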
dist/save/index.js (vendored, 140 changed lines)
@ -2236,23 +2236,22 @@ function createHttpClient() {
|
||||||
const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
|
const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
|
||||||
return new http_client_1.HttpClient("actions/cache", [bearerCredentialHandler], getRequestOptions());
|
return new http_client_1.HttpClient("actions/cache", [bearerCredentialHandler], getRequestOptions());
|
||||||
}
|
}
|
||||||
function getCacheVersion() {
|
function getCacheVersion(compressionMethod) {
|
||||||
// Add salt to cache version to support breaking changes in cache entry
|
// Add salt to cache version to support breaking changes in cache entry
|
||||||
const components = [
|
const components = [core.getInput(constants_1.Inputs.Path, { required: true })].concat(compressionMethod == constants_1.CompressionMethod.Zstd
|
||||||
core.getInput(constants_1.Inputs.Path, { required: true }),
|
? [compressionMethod, versionSalt]
|
||||||
versionSalt
|
: versionSalt);
|
||||||
];
|
|
||||||
return crypto
|
return crypto
|
||||||
.createHash("sha256")
|
.createHash("sha256")
|
||||||
.update(components.join("|"))
|
.update(components.join("|"))
|
||||||
.digest("hex");
|
.digest("hex");
|
||||||
}
|
}
|
||||||
exports.getCacheVersion = getCacheVersion;
|
exports.getCacheVersion = getCacheVersion;
|
||||||
function getCacheEntry(keys) {
|
function getCacheEntry(keys, options) {
|
||||||
var _a;
|
var _a, _b;
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
const httpClient = createHttpClient();
|
const httpClient = createHttpClient();
|
||||||
const version = getCacheVersion();
|
const version = getCacheVersion((_a = options) === null || _a === void 0 ? void 0 : _a.compressionMethod);
|
||||||
const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version}`;
|
const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version}`;
|
||||||
const response = yield httpClient.getJson(getCacheApiUrl(resource));
|
const response = yield httpClient.getJson(getCacheApiUrl(resource));
|
||||||
if (response.statusCode === 204) {
|
if (response.statusCode === 204) {
|
||||||
|
@ -2262,7 +2261,7 @@ function getCacheEntry(keys) {
|
||||||
throw new Error(`Cache service responded with ${response.statusCode}`);
|
throw new Error(`Cache service responded with ${response.statusCode}`);
|
||||||
}
|
}
|
||||||
const cacheResult = response.result;
|
const cacheResult = response.result;
|
||||||
const cacheDownloadUrl = (_a = cacheResult) === null || _a === void 0 ? void 0 : _a.archiveLocation;
|
const cacheDownloadUrl = (_b = cacheResult) === null || _b === void 0 ? void 0 : _b.archiveLocation;
|
||||||
if (!cacheDownloadUrl) {
|
if (!cacheDownloadUrl) {
|
||||||
throw new Error("Cache not found.");
|
throw new Error("Cache not found.");
|
||||||
}
|
}
|
||||||
|
@ -2306,17 +2305,17 @@ function downloadCache(archiveLocation, archivePath) {
|
||||||
}
|
}
|
||||||
exports.downloadCache = downloadCache;
|
exports.downloadCache = downloadCache;
|
||||||
// Reserve Cache
|
// Reserve Cache
|
||||||
function reserveCache(key) {
|
function reserveCache(key, options) {
|
||||||
var _a, _b, _c;
|
var _a, _b, _c, _d;
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
const httpClient = createHttpClient();
|
const httpClient = createHttpClient();
|
||||||
const version = getCacheVersion();
|
const version = getCacheVersion((_a = options) === null || _a === void 0 ? void 0 : _a.compressionMethod);
|
||||||
const reserveCacheRequest = {
|
const reserveCacheRequest = {
|
||||||
key,
|
key,
|
||||||
version
|
version
|
||||||
};
|
};
|
||||||
const response = yield httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest);
|
const response = yield httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest);
|
||||||
return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1);
|
return _d = (_c = (_b = response) === null || _b === void 0 ? void 0 : _b.result) === null || _c === void 0 ? void 0 : _c.cacheId, (_d !== null && _d !== void 0 ? _d : -1);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
exports.reserveCache = reserveCache;
|
exports.reserveCache = reserveCache;
|
||||||
|
@ -3201,6 +3200,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const core = __importStar(__webpack_require__(470));
|
const core = __importStar(__webpack_require__(470));
|
||||||
|
const exec = __importStar(__webpack_require__(986));
|
||||||
const glob = __importStar(__webpack_require__(281));
|
const glob = __importStar(__webpack_require__(281));
|
||||||
const io = __importStar(__webpack_require__(1));
|
const io = __importStar(__webpack_require__(1));
|
||||||
const fs = __importStar(__webpack_require__(747));
|
const fs = __importStar(__webpack_require__(747));
|
||||||
|
@@ -3320,6 +3320,50 @@ function unlinkFile(path) {
     return util.promisify(fs.unlink)(path);
 }
 exports.unlinkFile = unlinkFile;
+function checkVersion(app) {
+    return __awaiter(this, void 0, void 0, function* () {
+        core.debug(`Checking ${app} --version`);
+        let versionOutput = "";
+        try {
+            yield exec.exec(`${app} --version`, [], {
+                ignoreReturnCode: true,
+                silent: true,
+                listeners: {
+                    stdout: (data) => (versionOutput += data.toString()),
+                    stderr: (data) => (versionOutput += data.toString())
+                }
+            });
+        }
+        catch (err) {
+            core.debug(err.message);
+        }
+        versionOutput = versionOutput.trim();
+        core.debug(versionOutput);
+        return versionOutput;
+    });
+}
+function getCompressionMethod() {
+    return __awaiter(this, void 0, void 0, function* () {
+        const versionOutput = yield checkVersion("zstd");
+        return versionOutput.toLowerCase().includes("zstd command line interface")
+            ? constants_1.CompressionMethod.Zstd
+            : constants_1.CompressionMethod.Gzip;
+    });
+}
+exports.getCompressionMethod = getCompressionMethod;
+function getCacheFileName(compressionMethod) {
+    return compressionMethod == constants_1.CompressionMethod.Zstd
+        ? constants_1.CacheFilename.Zstd
+        : constants_1.CacheFilename.Gzip;
+}
+exports.getCacheFileName = getCacheFileName;
+function useGnuTar() {
+    return __awaiter(this, void 0, void 0, function* () {
+        const versionOutput = yield checkVersion("tar");
+        return versionOutput.toLowerCase().includes("gnu tar");
+    });
+}
+exports.useGnuTar = useGnuTar;
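The two substring checks above key off the version banners the tools print. A minimal sketch of why they work, with banner text assumed from typical zstd and GNU tar builds rather than taken from this diff:

// Assumed example output (not part of this change):
//   $ zstd --version
//   *** zstd command line interface 64-bits v1.4.4, by Yann Collet ***
//   $ tar --version
//   tar (GNU tar) 1.30
// checkVersion captures stdout and stderr, and the callers lowercase the
// result, so the match is case-insensitive and version-agnostic:
const banner =
    "*** zstd command line interface 64-bits v1.4.4, by Yann Collet ***";
console.log(banner.toLowerCase().includes("zstd command line interface")); // true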
 /***/ }),

@@ -3599,12 +3643,6 @@ class HttpClientResponse {
         this.message.on('data', (chunk) => {
             output = Buffer.concat([output, chunk]);
         });
-        this.message.on('aborted', () => {
-            reject("Request was aborted or closed prematurely");
-        });
-        this.message.on('timeout', (socket) => {
-            reject("Request timed out");
-        });
         this.message.on('end', () => {
             resolve(output.toString());
         });
@@ -3726,7 +3764,6 @@ class HttpClient {
         let response;
         while (numTries < maxTries) {
             response = await this.requestRaw(info, data);
-
             // Check if it's an authentication challenge
             if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) {
                 let authenticationHandler;
@@ -4511,8 +4548,11 @@ function run() {
             core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`);
             return;
         }
+        const compressionMethod = yield utils.getCompressionMethod();
         core.debug("Reserving Cache");
-        const cacheId = yield cacheHttpClient.reserveCache(primaryKey);
+        const cacheId = yield cacheHttpClient.reserveCache(primaryKey, {
+            compressionMethod: compressionMethod
+        });
         if (cacheId == -1) {
             core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`);
             return;
@@ -4525,9 +4565,9 @@ function run() {
         core.debug("Cache Paths:");
         core.debug(`${JSON.stringify(cachePaths)}`);
         const archiveFolder = yield utils.createTempDirectory();
-        const archivePath = path.join(archiveFolder, constants_1.CacheFilename);
+        const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
         core.debug(`Archive Path: ${archivePath}`);
-        yield tar_1.createTar(archiveFolder, cachePaths);
+        yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
         const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
         const archiveFileSize = utils.getArchiveFileSize(archivePath);
         core.debug(`File Size: ${archiveFileSize}`);
@@ -4576,7 +4616,16 @@ var Events;
     Events["Push"] = "push";
     Events["PullRequest"] = "pull_request";
 })(Events = exports.Events || (exports.Events = {}));
-exports.CacheFilename = "cache.tgz";
+var CacheFilename;
+(function (CacheFilename) {
+    CacheFilename["Gzip"] = "cache.tgz";
+    CacheFilename["Zstd"] = "cache.tzst";
+})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));
+var CompressionMethod;
+(function (CompressionMethod) {
+    CompressionMethod["Gzip"] = "gzip";
+    CompressionMethod["Zstd"] = "zstd";
+})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
 // Socket timeout in milliseconds during download. If no traffic is received
 // over the socket during this period, the socket is destroyed and the download
 // is aborted.
@@ -4970,29 +5019,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
 const exec_1 = __webpack_require__(986);
 const io = __importStar(__webpack_require__(1));
 const fs_1 = __webpack_require__(747);
 const path = __importStar(__webpack_require__(622));
 const constants_1 = __webpack_require__(694);
-function isGnuTar() {
-    return __awaiter(this, void 0, void 0, function* () {
-        core.debug("Checking tar --version");
-        let versionOutput = "";
-        yield exec_1.exec("tar --version", [], {
-            ignoreReturnCode: true,
-            silent: true,
-            listeners: {
-                stdout: (data) => (versionOutput += data.toString()),
-                stderr: (data) => (versionOutput += data.toString())
-            }
-        });
-        core.debug(versionOutput.trim());
-        return versionOutput.toUpperCase().includes("GNU TAR");
-    });
-}
-exports.isGnuTar = isGnuTar;
+const utils = __importStar(__webpack_require__(443));
 function getTarPath(args) {
     return __awaiter(this, void 0, void 0, function* () {
         // Explicitly use BSD Tar on Windows
@@ -5002,7 +5034,7 @@ function getTarPath(args) {
         if (fs_1.existsSync(systemTar)) {
             return systemTar;
         }
-        else if (isGnuTar()) {
+        else if (yield utils.useGnuTar()) {
             args.push("--force-local");
         }
     }
@@ -5013,7 +5045,7 @@ function execTar(args, cwd) {
     var _a;
     return __awaiter(this, void 0, void 0, function* () {
         try {
-            yield exec_1.exec(`"${yield getTarPath(args)}"`, args, { cwd: cwd });
+            yield exec_1.exec(`${yield getTarPath(args)}`, args, { cwd: cwd });
         }
         catch (error) {
             throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}`);
@@ -5024,14 +5056,16 @@ function getWorkingDirectory() {
     var _a;
     return _a = process.env["GITHUB_WORKSPACE"], (_a !== null && _a !== void 0 ? _a : process.cwd());
 }
-function extractTar(archivePath) {
+function extractTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
         const args = [
-            "-xz",
-            "-f",
+            ...(compressionMethod == constants_1.CompressionMethod.Zstd
+                ? ["--use-compress-program", "zstd -d"]
+                : ["-z"]),
+            "-xf",
             archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
             "-P",
             "-C",
@@ -5041,16 +5075,20 @@ function extractTar(archivePath) {
     });
 }
 exports.extractTar = extractTar;
-function createTar(archiveFolder, sourceDirectories) {
+function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Write source directories to manifest.txt to avoid command length limits
         const manifestFilename = "manifest.txt";
+        const cacheFileName = utils.getCacheFileName(compressionMethod);
         fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join("\n"));
+        // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
         const workingDirectory = getWorkingDirectory();
         const args = [
-            "-cz",
-            "-f",
-            constants_1.CacheFilename.replace(new RegExp("\\" + path.sep, "g"), "/"),
+            ...(compressionMethod == constants_1.CompressionMethod.Zstd
+                ? ["--use-compress-program", "zstd -T0"]
+                : ["-z"]),
+            "-cf",
+            cacheFileName.replace(new RegExp("\\" + path.sep, "g"), "/"),
             "-P",
             "-C",
             workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
src/cacheHttpClient.ts

@@ -11,9 +11,10 @@ import * as fs from "fs";
 import * as stream from "stream";
 import * as util from "util";

-import { Inputs, SocketTimeout } from "./constants";
+import { CompressionMethod, Inputs, SocketTimeout } from "./constants";
 import {
     ArtifactCacheEntry,
+    CacheOptions,
     CommitCacheRequest,
     ReserveCacheRequest,
     ReserveCacheResponse
@@ -84,12 +85,13 @@ function createHttpClient(): HttpClient {
     );
 }

-export function getCacheVersion(): string {
+export function getCacheVersion(compressionMethod?: CompressionMethod): string {
     // Add salt to cache version to support breaking changes in cache entry
-    const components = [
-        core.getInput(Inputs.Path, { required: true }),
-        versionSalt
-    ];
+    const components = [core.getInput(Inputs.Path, { required: true })].concat(
+        compressionMethod == CompressionMethod.Zstd
+            ? [compressionMethod, versionSalt]
+            : versionSalt
+    );

     return crypto
         .createHash("sha256")
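Note the asymmetry: for gzip the hashed components are identical to the pre-change list, so existing gzip caches keep their version, while zstd folds the method into the hash and lands in a fresh cache namespace. A minimal sketch of the two component lists (the versionSalt value and the join separator are assumed, not shown in this hunk):

import * as crypto from "crypto";

const versionSalt = "1.0"; // assumed value, for illustration only

// Hypothetical helper mirroring the hashing step above.
function hashVersion(components: string[]): string {
    return crypto
        .createHash("sha256")
        .update(components.join("|")) // separator assumed
        .digest("hex");
}

hashVersion(["node_modules", versionSalt]);         // gzip: hash unchanged by this change
hashVersion(["node_modules", "zstd", versionSalt]); // zstd: brand-new version hash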
@@ -98,10 +100,11 @@ export function getCacheVersion(): string {
 }

 export async function getCacheEntry(
-    keys: string[]
+    keys: string[],
+    options?: CacheOptions
 ): Promise<ArtifactCacheEntry | null> {
     const httpClient = createHttpClient();
-    const version = getCacheVersion();
+    const version = getCacheVersion(options?.compressionMethod);
     const resource = `cache?keys=${encodeURIComponent(
         keys.join(",")
     )}&version=${version}`;
@@ -173,9 +176,12 @@ export async function downloadCache(
 }

 // Reserve Cache
-export async function reserveCache(key: string): Promise<number> {
+export async function reserveCache(
+    key: string,
+    options?: CacheOptions
+): Promise<number> {
     const httpClient = createHttpClient();
-    const version = getCacheVersion();
+    const version = getCacheVersion(options?.compressionMethod);

     const reserveCacheRequest: ReserveCacheRequest = {
         key,
src/constants.ts

@@ -19,7 +19,15 @@ export enum Events {
     PullRequest = "pull_request"
 }

-export const CacheFilename = "cache.tgz";
+export enum CacheFilename {
+    Gzip = "cache.tgz",
+    Zstd = "cache.tzst"
+}
+
+export enum CompressionMethod {
+    Gzip = "gzip",
+    Zstd = "zstd"
+}

 // Socket timeout in milliseconds during download. If no traffic is received
 // over the socket during this period, the socket is destroyed and the download
src/contracts.d.ts (vendored) | 6

@@ -1,3 +1,5 @@
+import { CompressionMethod } from "./constants";
+
 export interface ArtifactCacheEntry {
     cacheKey?: string;
     scope?: string;

@@ -17,3 +19,7 @@ export interface ReserveCacheRequest {
 export interface ReserveCacheResponse {
     cacheId: number;
 }
+
+export interface CacheOptions {
+    compressionMethod?: CompressionMethod;
+}
src/restore.ts

@@ -54,8 +54,12 @@ async function run(): Promise<void> {
         }
     }

+    const compressionMethod = await utils.getCompressionMethod();
+
     try {
-        const cacheEntry = await cacheHttpClient.getCacheEntry(keys);
+        const cacheEntry = await cacheHttpClient.getCacheEntry(keys, {
+            compressionMethod: compressionMethod
+        });
         if (!cacheEntry?.archiveLocation) {
             core.info(`Cache not found for input keys: ${keys.join(", ")}`);
             return;

@@ -63,7 +67,7 @@ async function run(): Promise<void> {

         const archivePath = path.join(
             await utils.createTempDirectory(),
-            "cache.tgz"
+            utils.getCacheFileName(compressionMethod)
         );
         core.debug(`Archive Path: ${archivePath}`);

@@ -84,7 +88,7 @@ async function run(): Promise<void> {
             )} MB (${archiveFileSize} B)`
         );

-        await extractTar(archivePath);
+        await extractTar(archivePath, compressionMethod);
     } finally {
         // Try to delete the archive to save space
         try {
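Condensed, the restore path now threads one detected compression method through lookup, archive naming, and extraction; a sketch of the flow inside the action's async run(), with download and error handling elided:

const compressionMethod = await utils.getCompressionMethod();
const cacheEntry = await cacheHttpClient.getCacheEntry(keys, {
    compressionMethod: compressionMethod
});
const archivePath = path.join(
    await utils.createTempDirectory(),
    utils.getCacheFileName(compressionMethod) // "cache.tzst" or "cache.tgz"
);
// ...download cacheEntry.archiveLocation to archivePath...
await extractTar(archivePath, compressionMethod);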
src/save.ts | 16

@@ -2,7 +2,7 @@ import * as core from "@actions/core";
 import * as path from "path";

 import * as cacheHttpClient from "./cacheHttpClient";
-import { CacheFilename, Events, Inputs, State } from "./constants";
+import { Events, Inputs, State } from "./constants";
 import { createTar } from "./tar";
 import * as utils from "./utils/actionUtils";

@@ -35,8 +35,12 @@ async function run(): Promise<void> {
         return;
     }

+    const compressionMethod = await utils.getCompressionMethod();
+
     core.debug("Reserving Cache");
-    const cacheId = await cacheHttpClient.reserveCache(primaryKey);
+    const cacheId = await cacheHttpClient.reserveCache(primaryKey, {
+        compressionMethod: compressionMethod
+    });
     if (cacheId == -1) {
         core.info(
             `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`

@@ -55,10 +59,14 @@ async function run(): Promise<void> {
     core.debug(`${JSON.stringify(cachePaths)}`);

     const archiveFolder = await utils.createTempDirectory();
-    const archivePath = path.join(archiveFolder, CacheFilename);
+    const archivePath = path.join(
+        archiveFolder,
+        utils.getCacheFileName(compressionMethod)
+    );

     core.debug(`Archive Path: ${archivePath}`);

-    await createTar(archiveFolder, cachePaths);
+    await createTar(archiveFolder, cachePaths, compressionMethod);

     const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
     const archiveFileSize = utils.getArchiveFileSize(archivePath);
src/tar.ts | 50

@@ -1,27 +1,10 @@
-import * as core from "@actions/core";
 import { exec } from "@actions/exec";
 import * as io from "@actions/io";
 import { existsSync, writeFileSync } from "fs";
 import * as path from "path";

-import { CacheFilename } from "./constants";
-
-export async function isGnuTar(): Promise<boolean> {
-    core.debug("Checking tar --version");
-    let versionOutput = "";
-    await exec("tar --version", [], {
-        ignoreReturnCode: true,
-        silent: true,
-        listeners: {
-            stdout: (data: Buffer): string =>
-                (versionOutput += data.toString()),
-            stderr: (data: Buffer): string => (versionOutput += data.toString())
-        }
-    });
-
-    core.debug(versionOutput.trim());
-    return versionOutput.toUpperCase().includes("GNU TAR");
-}
+import { CompressionMethod } from "./constants";
+import * as utils from "./utils/actionUtils";

 async function getTarPath(args: string[]): Promise<string> {
     // Explicitly use BSD Tar on Windows

@@ -30,7 +13,7 @@ async function getTarPath(args: string[]): Promise<string> {
     const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
     if (existsSync(systemTar)) {
         return systemTar;
-    } else if (isGnuTar()) {
+    } else if (await utils.useGnuTar()) {
         args.push("--force-local");
     }
 }

@@ -39,7 +22,7 @@ async function getTarPath(args: string[]): Promise<string> {

 async function execTar(args: string[], cwd?: string): Promise<void> {
     try {
-        await exec(`"${await getTarPath(args)}"`, args, { cwd: cwd });
+        await exec(`${await getTarPath(args)}`, args, { cwd: cwd });
     } catch (error) {
         throw new Error(`Tar failed with error: ${error?.message}`);
     }

@@ -49,13 +32,18 @@ function getWorkingDirectory(): string {
     return process.env["GITHUB_WORKSPACE"] ?? process.cwd();
 }

-export async function extractTar(archivePath: string): Promise<void> {
+export async function extractTar(
+    archivePath: string,
+    compressionMethod: CompressionMethod
+): Promise<void> {
     // Create directory to extract tar into
     const workingDirectory = getWorkingDirectory();
     await io.mkdirP(workingDirectory);
     const args = [
-        "-xz",
-        "-f",
+        ...(compressionMethod == CompressionMethod.Zstd
+            ? ["--use-compress-program", "zstd -d"]
+            : ["-z"]),
+        "-xf",
         archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
         "-P",
         "-C",

@@ -66,20 +54,24 @@ export async function extractTar(archivePath: string): Promise<void> {

 export async function createTar(
     archiveFolder: string,
-    sourceDirectories: string[]
+    sourceDirectories: string[],
+    compressionMethod: CompressionMethod
 ): Promise<void> {
     // Write source directories to manifest.txt to avoid command length limits
     const manifestFilename = "manifest.txt";
+    const cacheFileName = utils.getCacheFileName(compressionMethod);
     writeFileSync(
         path.join(archiveFolder, manifestFilename),
         sourceDirectories.join("\n")
     );
+    // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
     const workingDirectory = getWorkingDirectory();
     const args = [
-        "-cz",
-        "-f",
-        CacheFilename.replace(new RegExp("\\" + path.sep, "g"), "/"),
+        ...(compressionMethod == CompressionMethod.Zstd
+            ? ["--use-compress-program", "zstd -T0"]
+            : ["-z"]),
+        "-cf",
+        cacheFileName.replace(new RegExp("\\" + path.sep, "g"), "/"),
         "-P",
         "-C",
         workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
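For reference, the argument arrays above assemble into tar invocations roughly like the following; a sketch only, with paths and the trailing file-list arguments illustrative rather than copied from this diff:

// zstd available: tar shells out to the external compressor.
//   create:  tar --use-compress-program "zstd -T0" -cf cache.tzst -P -C <workspace> ...
//   extract: tar --use-compress-program "zstd -d"  -xf cache.tzst -P -C <workspace> ...
// zstd missing: fall back to tar's built-in gzip support.
//   create:  tar -z -cf cache.tgz -P -C <workspace> ...
//   extract: tar -z -xf cache.tgz -P -C <workspace> ...
// "zstd -T0" asks zstd to pick one compression worker per physical core.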
src/utils/actionUtils.ts

@@ -1,4 +1,5 @@
 import * as core from "@actions/core";
+import * as exec from "@actions/exec";
 import * as glob from "@actions/glob";
 import * as io from "@actions/io";
 import * as fs from "fs";

@@ -6,7 +7,13 @@ import * as path from "path";
 import * as util from "util";
 import * as uuidV4 from "uuid/v4";

-import { Events, Outputs, State } from "../constants";
+import {
+    CacheFilename,
+    CompressionMethod,
+    Events,
+    Outputs,
+    State
+} from "../constants";
 import { ArtifactCacheEntry } from "../contracts";

 // From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23

@@ -116,3 +123,44 @@ export function isValidEvent(): boolean {
 export function unlinkFile(path: fs.PathLike): Promise<void> {
     return util.promisify(fs.unlink)(path);
 }
+
+async function checkVersion(app: string): Promise<string> {
+    core.debug(`Checking ${app} --version`);
+    let versionOutput = "";
+    try {
+        await exec.exec(`${app} --version`, [], {
+            ignoreReturnCode: true,
+            silent: true,
+            listeners: {
+                stdout: (data: Buffer): string =>
+                    (versionOutput += data.toString()),
+                stderr: (data: Buffer): string =>
+                    (versionOutput += data.toString())
+            }
+        });
+    } catch (err) {
+        core.debug(err.message);
+    }
+
+    versionOutput = versionOutput.trim();
+    core.debug(versionOutput);
+    return versionOutput;
+}
+
+export async function getCompressionMethod(): Promise<CompressionMethod> {
+    const versionOutput = await checkVersion("zstd");
+    return versionOutput.toLowerCase().includes("zstd command line interface")
+        ? CompressionMethod.Zstd
+        : CompressionMethod.Gzip;
+}
+
+export function getCacheFileName(compressionMethod: CompressionMethod): string {
+    return compressionMethod == CompressionMethod.Zstd
+        ? CacheFilename.Zstd
+        : CacheFilename.Gzip;
+}
+
+export async function useGnuTar(): Promise<boolean> {
+    const versionOutput = await checkVersion("tar");
+    return versionOutput.toLowerCase().includes("gnu tar");
+}
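A short usage sketch of the helpers added above, mirroring how the save and restore entry points call them (the wrapper function and logged values are illustrative):

import * as utils from "./utils/actionUtils";

async function example(): Promise<void> {
    const method = await utils.getCompressionMethod();
    // CompressionMethod.Zstd when `zstd --version` prints its CLI banner,
    // CompressionMethod.Gzip otherwise.

    const fileName = utils.getCacheFileName(method);
    // "cache.tzst" for zstd, "cache.tgz" for gzip.

    const gnu = await utils.useGnuTar();
    // true when `tar --version` reports GNU tar (enables --force-local on Windows).

    console.log(method, fileName, gnu);
}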