Merge branch 'main' into ibm-semeru

Jordie 2022-10-31 09:33:48 +01:00
commit a04857f594
34 changed files with 1654 additions and 380 deletions


@ -1,6 +1,6 @@
---
name: "@actions/cache"
version: 3.0.0
version: 3.0.4
type: npm
summary: Actions cache lib
homepage: https://github.com/actions/toolkit/tree/main/packages/cache
@ -17,4 +17,4 @@ licenses:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
notices: []
notices: []


@ -1,6 +1,6 @@
---
name: "@actions/core"
version: 1.9.1
version: 1.10.0
type: npm
summary: Actions core lib
homepage: https://github.com/actions/toolkit/tree/main/packages/core


@ -12,6 +12,7 @@ The `setup-java` action provides the following functionality for GitHub Actions
- Caching dependencies managed by Apache Maven
- Caching dependencies managed by Gradle
- Caching dependencies managed by sbt
- [Maven Toolchains declaration](https://maven.apache.org/guides/mini/guide-using-toolchains.html) for specified JDK versions
This action allows you to work with Java and Scala projects.
@ -22,7 +23,40 @@ This action allows you to work with Java and Scala projects.
## Usage
Inputs `java-version` and `distribution` are mandatory. See the [Supported distributions](#supported-distributions) section for a list of available options.
- `java-version`: _(required)_ The Java version to set up. Takes a whole or [semver](#supported-version-syntax) Java version.
- `distribution`: _(required)_ Java [distribution](#supported-distributions).
- `java-package`: The packaging variant of the chosen distribution. Possible values: `jdk`, `jre`, `jdk+fx`, `jre+fx`. Default value: `jdk`.
- `architecture`: The target architecture of the package. Possible values: `x86`, `x64`, `armv7`, `aarch64`, `ppc64le`. Default value: `x64`.
- `jdkFile`: If a use case requires a custom distribution, setup-java uses the compressed JDK from the location pointed to by this input and takes care of the installation and caching on the VM.
- `check-latest`: Setting this option makes the action check for the latest available version that satisfies the version spec.
- `cache`: Quick [setup caching](#caching-packages-dependencies) for the dependencies managed through one of the predefined package managers. It can be one of `maven`, `gradle` or `sbt` (see the minimal workflow sketch below).
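For illustration, a minimal workflow using these inputs could look like the sketch below; the distribution, version and build command are examples only:
```yaml
steps:
  - uses: actions/checkout@v3
  - uses: actions/setup-java@v3
    with:
      distribution: 'temurin'  # any supported distribution
      java-version: '17'       # whole or semver version
      java-package: 'jdk'      # optional, defaults to 'jdk'
      check-latest: false      # optional
      cache: 'maven'           # optional dependency caching
  - run: mvn --batch-mode verify  # example build command
```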
#### Maven options
The action has a number of inputs to generate Maven's [settings.xml](https://maven.apache.org/settings.html) on the fly and pass the values to the Apache Maven GPG Plugin as well as Apache Maven Toolchains. See [advanced usage](docs/advanced-usage.md) for more details; a publishing sketch follows the list below.
- `overwrite-settings`: By default the action overwrites the settings.xml file. To skip generating the file when it already exists, set this to `false`.
- `server-id`: ID of the distributionManagement repository in the pom.xml file. Default is `github`.
- `server-username`: Environment variable name for the username for authentication to the Apache Maven repository. Default is GITHUB_ACTOR.
- `server-password`: Environment variable name for password or token for authentication to the Apache Maven repository. Default is GITHUB_TOKEN.
- `settings-path`: Maven-related setting pointing to the directory where the settings.xml file will be written. Default is `~/.m2`.
- `gpg-private-key`: GPG private key to import. Default is an empty string.
- `gpg-passphrase`: Environment variable name for the GPG private key passphrase. Default is GPG_PASSPHRASE.
- `mvn-toolchain-id`: Name of Maven Toolchain ID if the default name of `${distribution}_${java-version}` is not wanted.
- `mvn-toolchain-vendor`: Name of Maven Toolchain Vendor if the default name of `${distribution}` is not wanted.
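As a sketch only, a publishing job could combine these inputs as follows; the secret names and environment variable names are placeholders chosen for this example, not values defined by the action:
```yaml
steps:
  - uses: actions/checkout@v3
  - uses: actions/setup-java@v3
    with:
      distribution: 'temurin'
      java-version: '17'
      server-id: 'github'              # distributionManagement repository id from pom.xml
      server-username: MAVEN_USERNAME  # env variable name, resolved when Maven runs
      server-password: MAVEN_PASSWORD  # env variable name, resolved when Maven runs
      gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}  # placeholder secret
      gpg-passphrase: MAVEN_GPG_PASSPHRASE             # env variable name for the passphrase
  - run: mvn --batch-mode deploy
    env:
      MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}        # placeholder secret
      MAVEN_PASSWORD: ${{ secrets.OSSRH_TOKEN }}           # placeholder secret
      MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}  # placeholder secret
```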
### Basic Configuration
@ -75,7 +109,7 @@ Currently, the following distributions are supported:
### Caching packages dependencies
The action has built-in functionality for caching and restoring dependencies. It uses [actions/cache](https://github.com/actions/cache) under the hood but requires fewer configuration settings. Supported package managers are gradle, maven and sbt. The format of the cache key is `setup-java-${{ platform }}-${{ packageManager }}-${{ fileHash }}`, where the hash is based on the following files (a minimal caching sketch follows the list):
- gradle: `**/*.gradle*`, `**/gradle-wrapper.properties`, `buildSrc/**/Versions.kt`, `buildSrc/**/Dependencies.kt`
- gradle: `**/*.gradle*`, `**/gradle-wrapper.properties`, `buildSrc/**/Versions.kt`, `buildSrc/**/Dependencies.kt`, and `gradle/*.versions.toml`
- maven: `**/pom.xml`
- sbt: all sbt build definition files `**/*.sbt`, `**/project/build.properties`, `**/project/**.{scala,sbt}`
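A minimal caching setup, assuming a Gradle project (the build command is illustrative):
```yaml
steps:
  - uses: actions/checkout@v3
  - uses: actions/setup-java@v3
    with:
      distribution: 'temurin'
      java-version: '17'
      cache: 'gradle'     # or 'maven' / 'sbt'
  - run: ./gradlew build  # example build command
```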
@ -135,7 +169,7 @@ steps:
- uses: actions/checkout@v3
- uses: actions/setup-java@v3
with:
distribution: 'adopt'
distribution: 'temurin'
java-version: '17'
check-latest: true
- run: java HelloWorldApp.java
@ -175,7 +209,11 @@ All versions are added to the PATH. The last version will be used and available
15
```
### Using Maven Toolchains
In the example above, multiple JDKs are installed for the same job. The result after the last JDK is installed is a Maven Toolchains declaration containing references to all three JDKs. The values for `id`, `version`, and `vendor` of the individual Toolchain entries are the given input values for `distribution` and `java-version` (`id` being the combination of `${distribution}_${java-version}`) by default.
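A sketch of such a job, installing three JDKs and optionally overriding the toolchain id and vendor for the last one (the versions and override values are illustrative):
```yaml
steps:
  - uses: actions/checkout@v3
  - uses: actions/setup-java@v3
    with:
      distribution: 'temurin'
      java-version: '8'
  - uses: actions/setup-java@v3
    with:
      distribution: 'temurin'
      java-version: '11'
  - uses: actions/setup-java@v3
    with:
      distribution: 'temurin'
      java-version: '17'
      mvn-toolchain-id: 'temurin_latest'  # optional, replaces the default ${distribution}_${java-version}
      mvn-toolchain-vendor: 'eclipse'     # optional, replaces the default ${distribution}
```
Each step adds an entry to `~/.m2/toolchains.xml`, so Maven can select any of the three JDKs through its toolchains mechanism, while the JDK from the last step is the one used on the `PATH`.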
### Advanced Configuration
- [Selecting a Java distribution](docs/advanced-usage.md#Selecting-a-Java-distribution)
- [Eclipse Temurin](docs/advanced-usage.md#Eclipse-Temurin)
- [Adopt](docs/advanced-usage.md#Adopt)
@ -191,6 +229,7 @@ All versions are added to the PATH. The last version will be used and available
- [Publishing using Apache Maven](docs/advanced-usage.md#Publishing-using-Apache-Maven)
- [Publishing using Gradle](docs/advanced-usage.md#Publishing-using-Gradle)
- [Hosted Tool Cache](docs/advanced-usage.md#Hosted-Tool-Cache)
- [Modifying Maven Toolchains](docs/advanced-usage.md#Modifying-Maven-Toolchains)
## License


@ -5,9 +5,10 @@ import * as core from '@actions/core';
import os from 'os';
import * as auth from '../src/auth';
import { M2_DIR, MVN_SETTINGS_FILE } from '../src/constants';
const m2Dir = path.join(__dirname, auth.M2_DIR);
const settingsFile = path.join(m2Dir, auth.SETTINGS_FILE);
const m2Dir = path.join(__dirname, M2_DIR);
const settingsFile = path.join(m2Dir, MVN_SETTINGS_FILE);
describe('auth tests', () => {
let spyOSHomedir: jest.SpyInstance;
@ -38,7 +39,7 @@ describe('auth tests', () => {
const password = 'TOLKIEN';
const altHome = path.join(__dirname, 'runner', 'settings');
const altSettingsFile = path.join(altHome, auth.SETTINGS_FILE);
const altSettingsFile = path.join(altHome, MVN_SETTINGS_FILE);
await io.rmRF(altHome); // ensure it doesn't already exist
await auth.createAuthenticationSettings(id, username, password, altHome, true);


@ -98,7 +98,7 @@ describe('dependency cache', () => {
await expect(restore('gradle')).rejects.toThrowError(
`No file in ${projectRoot(
workspace
)} matched to [**/*.gradle*,**/gradle-wrapper.properties,buildSrc/**/Versions.kt,buildSrc/**/Dependencies.kt], make sure you have checked out the target repository`
)} matched to [**/*.gradle*,**/gradle-wrapper.properties,buildSrc/**/Versions.kt,buildSrc/**/Dependencies.kt,gradle/*.versions.toml], make sure you have checked out the target repository`
);
});
it('downloads cache based on build.gradle', async () => {
@ -112,6 +112,15 @@ describe('dependency cache', () => {
it('downloads cache based on build.gradle.kts', async () => {
createFile(join(workspace, 'build.gradle.kts'));
await restore('gradle');
expect(spyCacheRestore).toBeCalled();
expect(spyWarning).not.toBeCalled();
expect(spyInfo).toBeCalledWith('gradle cache is not found');
});
it('downloads cache based on libs.versions.toml', async () => {
createDirectory(join(workspace, 'gradle'));
createFile(join(workspace, 'gradle', 'libs.versions.toml'));
await restore('gradle');
expect(spyCacheRestore).toBeCalled();
expect(spyWarning).not.toBeCalled();


@ -3,6 +3,8 @@ import { HttpClient } from '@actions/http-client';
import { AdoptDistribution, AdoptImplementation } from '../../src/distributions/adopt/installer';
import { JavaInstallerOptions } from '../../src/distributions/base-models';
import os from 'os';
let manifestData = require('../data/adopt.json') as [];
describe('getAvailableVersions', () => {
@ -128,6 +130,35 @@ describe('getAvailableVersions', () => {
expect(distribution.toolcacheFolderName).toBe(expected);
}
);
it.each([
['amd64', 'x64'],
['arm64', 'aarch64']
])(
'defaults to os.arch(): %s mapped to distro arch: %s',
async (osArch: string, distroArch: string) => {
jest.spyOn(os, 'arch').mockReturnValue(osArch);
const installerOptions: JavaInstallerOptions = {
version: '17',
architecture: '', // to get default value
packageType: 'jdk',
checkLatest: false
};
const expectedParameters = `os=mac&architecture=${distroArch}&image_type=jdk&release_type=ga&jvm_impl=hotspot&page_size=20&page=0`;
const distribution = new AdoptDistribution(installerOptions, AdoptImplementation.Hotspot);
const baseUrl = 'https://api.adoptopenjdk.net/v3/assets/version/%5B1.0,100.0%5D';
const expectedUrl = `${baseUrl}?project=jdk&vendor=adoptopenjdk&heap_size=normal&sort_method=DEFAULT&sort_order=DESC&${expectedParameters}`;
distribution['getPlatformOption'] = () => 'mac';
await distribution['getAvailableVersions']();
expect(spyHttpClient.mock.calls).toHaveLength(1);
expect(spyHttpClient.mock.calls[0][0]).toBe(expectedUrl);
}
);
});
describe('findPackageForDownload', () => {


@ -12,6 +12,8 @@ import {
JavaInstallerResults
} from '../../src/distributions/base-models';
import os from 'os';
class EmptyJavaBase extends JavaBase {
constructor(installerOptions: JavaInstallerOptions) {
super('Empty', installerOptions);
@ -192,6 +194,8 @@ describe('setupJava', () => {
spyCoreSetOutput = jest.spyOn(core, 'setOutput');
spyCoreSetOutput.mockImplementation(() => undefined);
jest.spyOn(os, 'arch').mockReturnValue('x86');
});
afterEach(() => {
@ -212,6 +216,10 @@ describe('setupJava', () => {
[
{ version: '11.0.8', architecture: 'x86', packageType: 'jdk', checkLatest: false },
{ version: installedJavaVersion, path: javaPath }
],
[
{ version: '11', architecture: '', packageType: 'jdk', checkLatest: false },
{ version: installedJavaVersion, path: javaPath }
]
])('should find java locally for %s', (input, expected) => {
mockJavaBase = new EmptyJavaBase(input);
@ -237,6 +245,10 @@ describe('setupJava', () => {
[
{ version: '11', architecture: 'x64', packageType: 'jre', checkLatest: false },
{ path: path.join('toolcache', 'Java_Empty_jre', '11.0.9', 'x64'), version: '11.0.9' }
],
[
{ version: '11', architecture: '', packageType: 'jre', checkLatest: false },
{ path: path.join('toolcache', 'Java_Empty_jre', '11.0.9', 'x86'), version: '11.0.9' }
]
])('download java with configuration %s', async (input, expected) => {
mockJavaBase = new EmptyJavaBase(input);
@ -245,7 +257,7 @@ describe('setupJava', () => {
expect(spyCoreAddPath).toHaveBeenCalled();
expect(spyCoreExportVariable).toHaveBeenCalled();
expect(spyCoreExportVariable).toHaveBeenCalledWith(
`JAVA_HOME_${input.version}_${input.architecture.toLocaleUpperCase()}`,
`JAVA_HOME_${input.version}_${(input.architecture || 'x86').toLocaleUpperCase()}`,
expected.path
);
expect(spyCoreSetOutput).toHaveBeenCalled();
@ -260,6 +272,10 @@ describe('setupJava', () => {
[
{ version: '11.0.9', architecture: 'x86', packageType: 'jdk', checkLatest: true },
{ version: '11.0.9', path: javaPathInstalled }
],
[
{ version: '11.0.9', architecture: '', packageType: 'jdk', checkLatest: true },
{ version: '11.0.9', path: javaPathInstalled }
]
])('should check the latest java version for %s and resolve locally', async (input, expected) => {
mockJavaBase = new EmptyJavaBase(input);
@ -283,6 +299,10 @@ describe('setupJava', () => {
[
{ version: '11.0.x', architecture: 'x86', packageType: 'jdk', checkLatest: true },
{ version: actualJavaVersion, path: javaPathInstalled }
],
[
{ version: '11', architecture: '', packageType: 'jdk', checkLatest: true },
{ version: actualJavaVersion, path: javaPathInstalled }
]
])('should check the latest java version for %s and download', async (input, expected) => {
mockJavaBase = new EmptyJavaBase(input);


@ -3,6 +3,8 @@ import { JavaInstallerOptions } from '../../src/distributions/base-models';
import { CorrettoDistribution } from '../../src/distributions/corretto/installer';
import * as util from '../../src/util';
import os from 'os';
import { isGeneratorFunction } from 'util/types';
const manifestData = require('../data/corretto.json') as [];
@ -142,6 +144,33 @@ describe('getAvailableVersions', () => {
"Could not find satisfied version for SemVer '4'"
);
});
it.each([
['arm64', 'aarch64'],
['amd64', 'x64']
])(
'defaults to os.arch(): %s mapped to distro arch: %s',
async (osArch: string, distroArch: string) => {
jest.spyOn(os, 'arch').mockReturnValue(osArch);
const version = '17';
const installerOptions: JavaInstallerOptions = {
version,
architecture: '', // to get default value
packageType: 'jdk',
checkLatest: false
};
const distribution = new CorrettoDistribution(installerOptions);
mockPlatform(distribution, 'macos');
const expectedLink = `https://corretto.aws/downloads/resources/17.0.2.8.1/amazon-corretto-17.0.2.8.1-macosx-${distroArch}.tar.gz`;
const availableVersion = await distribution['findPackageForDownload'](version);
expect(availableVersion).not.toBeNull();
expect(availableVersion.url).toBe(expectedLink);
}
);
});
const mockPlatform = (distribution: CorrettoDistribution, platform: string) => {


@ -1,6 +1,7 @@
import { LibericaDistributions } from '../../src/distributions/liberica/installer';
import { ArchitectureOptions, LibericaVersion } from '../../src/distributions/liberica/models';
import { HttpClient } from '@actions/http-client';
import os from 'os';
const manifestData = require('../data/liberica.json') as LibericaVersion[];
@ -61,6 +62,39 @@ describe('getAvailableVersions', () => {
expect(spyHttpClient.mock.calls[0][0]).toBe(buildUrl);
});
type DistroArch = {
bitness: string;
arch: string;
};
it.each([
['amd64', { bitness: '64', arch: 'x86' }],
['arm64', { bitness: '64', arch: 'arm' }]
])(
'defaults to os.arch(): %s mapped to distro arch: %s',
async (osArch: string, distroArch: DistroArch) => {
jest.spyOn(os, 'arch').mockReturnValue(osArch);
const distribution = new LibericaDistributions({
version: '17',
architecture: '', // to get default value
packageType: 'jdk',
checkLatest: false
});
const additionalParams =
'&installation-type=archive&fields=downloadUrl%2Cversion%2CfeatureVersion%2CinterimVersion%2C' +
'updateVersion%2CbuildVersion';
distribution['getPlatformOption'] = () => 'macos';
const buildUrl = `https://api.bell-sw.com/v1/liberica/releases?os=macos&bundle-type=jdk&bitness=${distroArch.bitness}&arch=${distroArch.arch}&build-type=all${additionalParams}`;
await distribution['getAvailableVersions']();
expect(spyHttpClient.mock.calls).toHaveLength(1);
expect(spyHttpClient.mock.calls[0][0]).toBe(buildUrl);
}
);
it('load available versions', async () => {
const distribution = new LibericaDistributions({
version: '11',


@ -1,7 +1,13 @@
import { MicrosoftDistributions } from '../../src/distributions/microsoft/installer';
import os from 'os';
import data from '../../src/distributions/microsoft/microsoft-openjdk-versions.json';
import * as httpm from '@actions/http-client';
import * as core from '@actions/core';
describe('findPackageForDownload', () => {
let distribution: MicrosoftDistributions;
let spyGetManifestFromRepo: jest.SpyInstance;
let spyDebug: jest.SpyInstance;
beforeEach(() => {
distribution = new MicrosoftDistributions({
@ -10,12 +16,22 @@ describe('findPackageForDownload', () => {
packageType: 'jdk',
checkLatest: false
});
spyGetManifestFromRepo = jest.spyOn(httpm.HttpClient.prototype, 'getJson');
spyGetManifestFromRepo.mockReturnValue({
result: data,
statusCode: 200,
headers: {}
});
spyDebug = jest.spyOn(core, 'debug');
spyDebug.mockImplementation(() => {});
});
it.each([
[
'17.0.1',
'17.0.1',
'17.0.1+12.1',
'https://aka.ms/download-jdk/microsoft-jdk-17.0.1.12.1-{{OS_TYPE}}-x64.{{ARCHIVE_TYPE}}'
],
[
@ -25,12 +41,12 @@ describe('findPackageForDownload', () => {
],
[
'16.0.x',
'16.0.2',
'16.0.2+7.1',
'https://aka.ms/download-jdk/microsoft-jdk-16.0.2.7.1-{{OS_TYPE}}-x64.{{ARCHIVE_TYPE}}'
],
[
'11.0.13',
'11.0.13',
'11.0.13+8.1',
'https://aka.ms/download-jdk/microsoft-jdk-11.0.13.8.1-{{OS_TYPE}}-x64.{{ARCHIVE_TYPE}}'
],
[
@ -61,38 +77,33 @@ describe('findPackageForDownload', () => {
expect(result.url).toBe(url);
});
it.each([
['amd64', 'x64'],
['arm64', 'aarch64']
])(
'defaults to os.arch(): %s mapped to distro arch: %s',
async (osArch: string, distroArch: string) => {
jest.spyOn(os, 'arch').mockReturnValue(osArch);
jest.spyOn(os, 'platform').mockReturnValue('linux');
const version = '17';
const distro = new MicrosoftDistributions({
version,
architecture: '', // to get default value
packageType: 'jdk',
checkLatest: false
});
const result = await distro['findPackageForDownload'](version);
const expectedUrl = `https://aka.ms/download-jdk/microsoft-jdk-17.0.3-linux-${distroArch}.tar.gz`;
expect(result.url).toBe(expectedUrl);
}
);
it('should throw an error', async () => {
await expect(distribution['findPackageForDownload']('8')).rejects.toThrow(
/Could not find satisfied version for SemVer */
);
});
});
describe('getPlatformOption', () => {
const distributions = new MicrosoftDistributions({
architecture: 'x64',
version: '11',
packageType: 'jdk',
checkLatest: false
});
it.each([
['linux', 'tar.gz', 'linux'],
['darwin', 'tar.gz', 'macos'],
['win32', 'zip', 'windows']
])('os version %s -> %s', (input, expectedArchive, expectedOs) => {
const actual = distributions['getPlatformOption'](input as NodeJS.Platform);
expect(actual.archive).toEqual(expectedArchive);
expect(actual.os).toEqual(expectedOs);
});
it.each(['aix', 'android', 'freebsd', 'openbsd', 'netbsd', 'solaris', 'cygwin'])(
'not support os version %s',
input => {
expect(() => distributions['getPlatformOption'](input as NodeJS.Platform)).toThrow(
/Platform '\w+' is not supported\. Supported platforms: .+/
);
}
);
});


@ -1,5 +1,5 @@
import { HttpClient } from '@actions/http-client';
import os from 'os';
import {
TemurinDistribution,
TemurinImplementation
@ -109,6 +109,35 @@ describe('getAvailableVersions', () => {
expect(distribution.toolcacheFolderName).toBe(expected);
}
);
it.each([
['amd64', 'x64'],
['arm64', 'aarch64']
])(
'defaults to os.arch(): %s mapped to distro arch: %s',
async (osArch: string, distroArch: string) => {
jest.spyOn(os, 'arch').mockReturnValue(distroArch);
const installerOptions: JavaInstallerOptions = {
version: '17',
architecture: '',
packageType: 'jdk',
checkLatest: false
};
const expectedParameters = `os=mac&architecture=${distroArch}&image_type=jdk&release_type=ga&jvm_impl=hotspot&page_size=20&page=0`;
const distribution = new TemurinDistribution(installerOptions, TemurinImplementation.Hotspot);
const baseUrl = 'https://api.adoptium.net/v3/assets/version/%5B1.0,100.0%5D';
const expectedUrl = `${baseUrl}?project=jdk&vendor=adoptium&heap_size=normal&sort_method=DEFAULT&sort_order=DESC&${expectedParameters}`;
distribution['getPlatformOption'] = () => 'mac';
await distribution['getAvailableVersions']();
expect(spyHttpClient.mock.calls).toHaveLength(1);
expect(spyHttpClient.mock.calls[0][0]).toBe(expectedUrl);
}
);
});
describe('findPackageForDownload', () => {


@ -3,6 +3,7 @@ import * as semver from 'semver';
import { ZuluDistribution } from '../../src/distributions/zulu/installer';
import { IZuluVersions } from '../../src/distributions/zulu/models';
import * as utils from '../../src/util';
import os from 'os';
const manifestData = require('../data/zulu-releases-default.json') as [];
@ -72,6 +73,34 @@ describe('getAvailableVersions', () => {
expect(spyHttpClient.mock.calls[0][0]).toBe(buildUrl);
});
type DistroArch = {
bitness: string;
arch: string;
};
it.each([
['amd64', { bitness: '64', arch: 'x86' }],
['arm64', { bitness: '64', arch: 'arm' }]
])(
'defaults to os.arch(): %s mapped to distro arch: %s',
async (osArch: string, distroArch: DistroArch) => {
jest.spyOn(os, 'arch').mockReturnValue(osArch);
const distribution = new ZuluDistribution({
version: '17',
architecture: '', // to get default value
packageType: 'jdk',
checkLatest: false
});
distribution['getPlatformOption'] = () => 'macos';
const buildUrl = `https://api.azul.com/zulu/download/community/v1.0/bundles/?os=macos&ext=tar.gz&bundle_type=jdk&javafx=false&arch=${distroArch.arch}&hw_bitness=${distroArch.bitness}&release_status=ga`;
await distribution['getAvailableVersions']();
expect(spyHttpClient.mock.calls).toHaveLength(1);
expect(spyHttpClient.mock.calls[0][0]).toBe(buildUrl);
}
);
it('load available versions', async () => {
const distribution = new ZuluDistribution({
version: '11',


@ -0,0 +1,292 @@
import * as fs from 'fs';
import os from 'os';
import * as path from 'path';
import * as core from '@actions/core';
import * as io from '@actions/io';
import * as toolchains from '../src/toolchains';
import { M2_DIR, MVN_TOOLCHAINS_FILE } from '../src/constants';
const m2Dir = path.join(__dirname, M2_DIR);
const toolchainsFile = path.join(m2Dir, MVN_TOOLCHAINS_FILE);
describe('toolchains tests', () => {
let spyOSHomedir: jest.SpyInstance;
let spyInfo: jest.SpyInstance;
beforeEach(async () => {
await io.rmRF(m2Dir);
spyOSHomedir = jest.spyOn(os, 'homedir');
spyOSHomedir.mockReturnValue(__dirname);
spyInfo = jest.spyOn(core, 'info');
spyInfo.mockImplementation(() => null);
}, 300000);
afterAll(async () => {
try {
await io.rmRF(m2Dir);
} catch {
console.log('Failed to remove test directories');
}
jest.resetAllMocks();
jest.clearAllMocks();
jest.restoreAllMocks();
}, 100000);
it('creates toolchains.xml in alternate locations', async () => {
const jdkInfo = {
version: '17',
vendor: 'Eclipse Temurin',
id: 'temurin_17',
jdkHome: '/opt/hostedtoolcache/Java_Temurin-Hotspot_jdk/17.0.1-12/x64'
};
const altHome = path.join(__dirname, 'runner', 'toolchains');
const altToolchainsFile = path.join(altHome, MVN_TOOLCHAINS_FILE);
await io.rmRF(altHome); // ensure it doesn't already exist
await toolchains.createToolchainsSettings({
jdkInfo,
settingsDirectory: altHome,
overwriteSettings: true
});
expect(fs.existsSync(m2Dir)).toBe(false);
expect(fs.existsSync(toolchainsFile)).toBe(false);
expect(fs.existsSync(altHome)).toBe(true);
expect(fs.existsSync(altToolchainsFile)).toBe(true);
expect(fs.readFileSync(altToolchainsFile, 'utf-8')).toEqual(
toolchains.generateToolchainDefinition(
'',
jdkInfo.version,
jdkInfo.vendor,
jdkInfo.id,
jdkInfo.jdkHome
)
);
await io.rmRF(altHome);
}, 100000);
it('creates toolchains.xml with minimal configuration', async () => {
const jdkInfo = {
version: '17',
vendor: 'Eclipse Temurin',
id: 'temurin_17',
jdkHome: '/opt/hostedtoolcache/Java_Temurin-Hotspot_jdk/17.0.1-12/x64'
};
const result = `<?xml version="1.0"?>
<toolchains xmlns="https://maven.apache.org/TOOLCHAINS/1.1.0"
xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="https://maven.apache.org/TOOLCHAINS/1.1.0 https://maven.apache.org/xsd/toolchains-1.1.0.xsd">
<toolchain>
<type>jdk</type>
<provides>
<version>17</version>
<vendor>Eclipse Temurin</vendor>
<id>temurin_17</id>
</provides>
<configuration>
<jdkHome>/opt/hostedtoolcache/Java_Temurin-Hotspot_jdk/17.0.1-12/x64</jdkHome>
</configuration>
</toolchain>
</toolchains>`;
await toolchains.createToolchainsSettings({
jdkInfo,
settingsDirectory: m2Dir,
overwriteSettings: true
});
expect(fs.existsSync(m2Dir)).toBe(true);
expect(fs.existsSync(toolchainsFile)).toBe(true);
expect(fs.readFileSync(toolchainsFile, 'utf-8')).toEqual(
toolchains.generateToolchainDefinition(
'',
jdkInfo.version,
jdkInfo.vendor,
jdkInfo.id,
jdkInfo.jdkHome
)
);
expect(
toolchains.generateToolchainDefinition(
'',
jdkInfo.version,
jdkInfo.vendor,
jdkInfo.id,
jdkInfo.jdkHome
)
).toEqual(result);
}, 100000);
it('reuses existing toolchains.xml files', async () => {
const jdkInfo = {
version: '17',
vendor: 'Eclipse Temurin',
id: 'temurin_17',
jdkHome: '/opt/hostedtoolcache/Java_Temurin-Hotspot_jdk/17.0.1-12/x64'
};
const originalFile = `<toolchains>
<toolchain>
<type>jdk</type>
<provides>
<version>1.6</version>
<vendor>Sun</vendor>
<id>sun_1.6</id>
</provides>
<configuration>
<jdkHome>/opt/jdk/sun/1.6</jdkHome>
</configuration>
</toolchain>
</toolchains>`;
const result = `<?xml version="1.0"?>
<toolchains>
<toolchain>
<type>jdk</type>
<provides>
<version>1.6</version>
<vendor>Sun</vendor>
<id>sun_1.6</id>
</provides>
<configuration>
<jdkHome>/opt/jdk/sun/1.6</jdkHome>
</configuration>
</toolchain>
<toolchain>
<type>jdk</type>
<provides>
<version>17</version>
<vendor>Eclipse Temurin</vendor>
<id>temurin_17</id>
</provides>
<configuration>
<jdkHome>/opt/hostedtoolcache/Java_Temurin-Hotspot_jdk/17.0.1-12/x64</jdkHome>
</configuration>
</toolchain>
</toolchains>`;
fs.mkdirSync(m2Dir, { recursive: true });
fs.writeFileSync(toolchainsFile, originalFile);
expect(fs.existsSync(m2Dir)).toBe(true);
expect(fs.existsSync(toolchainsFile)).toBe(true);
await toolchains.createToolchainsSettings({
jdkInfo,
settingsDirectory: m2Dir,
overwriteSettings: true
});
expect(fs.existsSync(m2Dir)).toBe(true);
expect(fs.existsSync(toolchainsFile)).toBe(true);
expect(fs.readFileSync(toolchainsFile, 'utf-8')).toEqual(
toolchains.generateToolchainDefinition(
originalFile,
jdkInfo.version,
jdkInfo.vendor,
jdkInfo.id,
jdkInfo.jdkHome
)
);
expect(
toolchains.generateToolchainDefinition(
originalFile,
jdkInfo.version,
jdkInfo.vendor,
jdkInfo.id,
jdkInfo.jdkHome
)
).toEqual(result);
}, 100000);
it('does not overwrite existing toolchains.xml files', async () => {
const jdkInfo = {
version: '17',
vendor: 'Eclipse Temurin',
id: 'temurin_17',
jdkHome: '/opt/hostedtoolcache/Java_Temurin-Hotspot_jdk/17.0.1-12/x64'
};
const originalFile = `<toolchains>
<toolchain>
<type>jdk</type>
<provides>
<version>1.6</version>
<vendor>Sun</vendor>
<id>sun_1.6</id>
</provides>
<configuration>
<jdkHome>/opt/jdk/sun/1.6</jdkHome>
</configuration>
</toolchain>
</toolchains>`;
fs.mkdirSync(m2Dir, { recursive: true });
fs.writeFileSync(toolchainsFile, originalFile);
expect(fs.existsSync(m2Dir)).toBe(true);
expect(fs.existsSync(toolchainsFile)).toBe(true);
await toolchains.createToolchainsSettings({
jdkInfo,
settingsDirectory: m2Dir,
overwriteSettings: false
});
expect(fs.existsSync(m2Dir)).toBe(true);
expect(fs.existsSync(toolchainsFile)).toBe(true);
expect(fs.readFileSync(toolchainsFile, 'utf-8')).toEqual(originalFile);
}, 100000);
it('generates valid toolchains.xml with minimal configuration', () => {
const jdkInfo = {
version: 'JAVA_VERSION',
vendor: 'JAVA_VENDOR',
id: 'VENDOR_VERSION',
jdkHome: 'JAVA_HOME'
};
const expectedToolchains = `<?xml version="1.0"?>
<toolchains xmlns="https://maven.apache.org/TOOLCHAINS/1.1.0"
xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="https://maven.apache.org/TOOLCHAINS/1.1.0 https://maven.apache.org/xsd/toolchains-1.1.0.xsd">
<toolchain>
<type>jdk</type>
<provides>
<version>${jdkInfo.version}</version>
<vendor>${jdkInfo.vendor}</vendor>
<id>${jdkInfo.id}</id>
</provides>
<configuration>
<jdkHome>${jdkInfo.jdkHome}</jdkHome>
</configuration>
</toolchain>
</toolchains>`;
expect(
toolchains.generateToolchainDefinition(
'',
jdkInfo.version,
jdkInfo.vendor,
jdkInfo.id,
jdkInfo.jdkHome
)
).toEqual(expectedToolchains);
}, 100000);
it('creates toolchains.xml with correct id when none is supplied', async () => {
const version = '17';
const distributionName = 'temurin';
const id = 'temurin_17';
const jdkHome = '/opt/hostedtoolcache/Java_Temurin-Hotspot_jdk/17.0.1-12/x64';
await toolchains.configureToolchains(version, distributionName, jdkHome, undefined);
expect(fs.existsSync(m2Dir)).toBe(true);
expect(fs.existsSync(toolchainsFile)).toBe(true);
expect(fs.readFileSync(toolchainsFile, 'utf-8')).toEqual(
toolchains.generateToolchainDefinition('', version, distributionName, id, jdkHome)
);
}, 100000);
});


@ -14,9 +14,8 @@ inputs:
required: false
default: 'jdk'
architecture:
description: 'The architecture of the package'
description: "The architecture of the package (defaults to the action runner's architecture)"
required: false
default: 'x64'
jdkFile:
description: 'Path to where the compressed JDK is located'
required: false
@ -59,6 +58,15 @@ inputs:
job-status:
description: 'Workaround to pass job status to post job step. This variable is not intended for manual setting'
default: ${{ job.status }}
token:
description: Used to pull java versions from setup-java. Since there is a default value, token is typically not supplied by the user.
default: ${{ github.token }}
mvn-toolchain-id:
description: 'Name of Maven Toolchain ID if the default name of "${distribution}_${java-version}" is not wanted. See examples of supported syntax in Advanced Usage file'
required: false
mvn-toolchain-vendor:
description: 'Name of Maven Toolchain Vendor if the default name of "${distribution}" is not wanted. See examples of supported syntax in Advanced Usage file'
required: false
outputs:
distribution:
description: 'Distribution of Java that has been installed'

dist/cleanup/index.js (vendored, 195 changed lines)

@ -525,7 +525,13 @@ function resolvePaths(patterns) {
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
paths.push(`${relativeFile}`);
if (relativeFile === '') {
// path.relative returns empty string if workspace and file are equal
paths.push('.');
}
else {
paths.push(`${relativeFile}`);
}
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
@ -683,6 +689,7 @@ const util = __importStar(__nccwpck_require__(3837));
const utils = __importStar(__nccwpck_require__(1518));
const constants_1 = __nccwpck_require__(8840);
const requestUtils_1 = __nccwpck_require__(3981);
const abort_controller_1 = __nccwpck_require__(2557);
/**
* Pipes the body of a HTTP response to a stream
*
@ -866,15 +873,24 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
const fd = fs.openSync(archivePath, 'w');
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
const abortSignal = controller.signal;
while (!downloadProgress.isDone()) {
const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
downloadProgress.nextSegment(segmentSize);
const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {
abortSignal,
concurrency: options.downloadConcurrency,
onProgress: downloadProgress.onProgress()
});
fs.writeFileSync(fd, result);
}));
if (result === 'timeout') {
controller.abort();
throw new Error('Aborting cache download as the download time exceeded the timeout.');
}
else if (Buffer.isBuffer(result)) {
fs.writeFileSync(fd, result);
}
}
}
finally {
@ -885,6 +901,16 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
});
}
exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
let timeoutHandle;
const timeoutPromise = new Promise(resolve => {
timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
});
return Promise.race([promise, timeoutPromise]).then(result => {
clearTimeout(timeoutHandle);
return result;
});
});
//# sourceMappingURL=downloadUtils.js.map
/***/ }),
@ -1044,6 +1070,7 @@ const fs_1 = __nccwpck_require__(7147);
const path = __importStar(__nccwpck_require__(1017));
const utils = __importStar(__nccwpck_require__(1518));
const constants_1 = __nccwpck_require__(8840);
const IS_WINDOWS = process.platform === 'win32';
function getTarPath(args, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
switch (process.platform) {
@ -1091,26 +1118,43 @@ function getWorkingDirectory() {
var _a;
return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
}
// Common function for extractTar and listTar to get the compression method
function getCompressionProgram(compressionMethod) {
// -d: Decompress.
// unzstd is equivalent to 'zstd -d'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return [
'--use-compress-program',
IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
default:
return ['-z'];
}
}
function listTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
const args = [
...getCompressionProgram(compressionMethod),
'-tf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P'
];
yield execTar(args, compressionMethod);
});
}
exports.listTar = listTar;
function extractTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
// Create directory to extract tar into
const workingDirectory = getWorkingDirectory();
yield io.mkdirP(workingDirectory);
// --d: Decompress.
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstd -d --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstd -d'];
default:
return ['-z'];
}
}
const args = [
...getCompressionProgram(),
...getCompressionProgram(compressionMethod),
'-xf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P',
@ -1129,15 +1173,19 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
const workingDirectory = getWorkingDirectory();
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// zstdmt is equivalent to 'zstd -T0'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstd -T0 --long=30'];
return [
'--use-compress-program',
IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstd -T0'];
return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
default:
return ['-z'];
}
@ -1159,32 +1207,6 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
});
}
exports.createTar = createTar;
function listTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
// --d: Decompress.
// --long=#: Enables long distance matching with # bits.
// Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstd -d --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstd -d'];
default:
return ['-z'];
}
}
const args = [
...getCompressionProgram(),
'-tf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P'
];
yield execTar(args, compressionMethod);
});
}
exports.listTar = listTar;
//# sourceMappingURL=tar.js.map
/***/ }),
@ -1235,7 +1257,8 @@ function getDownloadOptions(copy) {
const result = {
useAzureSdk: true,
downloadConcurrency: 8,
timeoutInMs: 30000
timeoutInMs: 30000,
segmentTimeoutInMs: 3600000
};
if (copy) {
if (typeof copy.useAzureSdk === 'boolean') {
@ -1247,10 +1270,21 @@ function getDownloadOptions(copy) {
if (typeof copy.timeoutInMs === 'number') {
result.timeoutInMs = copy.timeoutInMs;
}
if (typeof copy.segmentTimeoutInMs === 'number') {
result.segmentTimeoutInMs = copy.segmentTimeoutInMs;
}
}
const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'];
if (segmentDownloadTimeoutMins &&
!isNaN(Number(segmentDownloadTimeoutMins)) &&
isFinite(Number(segmentDownloadTimeoutMins))) {
result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000;
}
core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
core.debug(`Download concurrency: ${result.downloadConcurrency}`);
core.debug(`Request timeout (ms): ${result.timeoutInMs}`);
core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`);
core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
return result;
}
exports.getDownloadOptions = getDownloadOptions;
@ -4831,7 +4865,6 @@ const file_command_1 = __nccwpck_require__(717);
const utils_1 = __nccwpck_require__(5278);
const os = __importStar(__nccwpck_require__(2037));
const path = __importStar(__nccwpck_require__(1017));
const uuid_1 = __nccwpck_require__(8974);
const oidc_utils_1 = __nccwpck_require__(8041);
/**
* The code to exit an action
@ -4861,20 +4894,9 @@ function exportVariable(name, val) {
process.env[name] = convertedVal;
const filePath = process.env['GITHUB_ENV'] || '';
if (filePath) {
const delimiter = `ghadelimiter_${uuid_1.v4()}`;
// These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter.
if (name.includes(delimiter)) {
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
}
if (convertedVal.includes(delimiter)) {
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
}
const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
file_command_1.issueCommand('ENV', commandValue);
}
else {
command_1.issueCommand('set-env', { name }, convertedVal);
return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
}
command_1.issueCommand('set-env', { name }, convertedVal);
}
exports.exportVariable = exportVariable;
/**
@ -4892,7 +4914,7 @@ exports.setSecret = setSecret;
function addPath(inputPath) {
const filePath = process.env['GITHUB_PATH'] || '';
if (filePath) {
file_command_1.issueCommand('PATH', inputPath);
file_command_1.issueFileCommand('PATH', inputPath);
}
else {
command_1.issueCommand('add-path', {}, inputPath);
@ -4932,7 +4954,10 @@ function getMultilineInput(name, options) {
const inputs = getInput(name, options)
.split('\n')
.filter(x => x !== '');
return inputs;
if (options && options.trimWhitespace === false) {
return inputs;
}
return inputs.map(input => input.trim());
}
exports.getMultilineInput = getMultilineInput;
/**
@ -4965,8 +4990,12 @@ exports.getBooleanInput = getBooleanInput;
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
const filePath = process.env['GITHUB_OUTPUT'] || '';
if (filePath) {
return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
}
process.stdout.write(os.EOL);
command_1.issueCommand('set-output', { name }, value);
command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
}
exports.setOutput = setOutput;
/**
@ -5095,7 +5124,11 @@ exports.group = group;
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
command_1.issueCommand('save-state', { name }, value);
const filePath = process.env['GITHUB_STATE'] || '';
if (filePath) {
return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
}
command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
}
exports.saveState = saveState;
/**
@ -5161,13 +5194,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.issueCommand = void 0;
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(__nccwpck_require__(7147));
const os = __importStar(__nccwpck_require__(2037));
const uuid_1 = __nccwpck_require__(8974);
const utils_1 = __nccwpck_require__(5278);
function issueCommand(command, message) {
function issueFileCommand(command, message) {
const filePath = process.env[`GITHUB_${command}`];
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
@ -5179,7 +5213,22 @@ function issueCommand(command, message) {
encoding: 'utf8'
});
}
exports.issueCommand = issueCommand;
exports.issueFileCommand = issueFileCommand;
function prepareKeyValueMessage(key, value) {
const delimiter = `ghadelimiter_${uuid_1.v4()}`;
const convertedValue = utils_1.toCommandValue(value);
// These should realistically never happen, but just in case someone finds a
// way to exploit uuid generation let's not allow keys or values that contain
// the delimiter.
if (key.includes(delimiter)) {
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
}
if (convertedValue.includes(delimiter)) {
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
}
return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
}
exports.prepareKeyValueMessage = prepareKeyValueMessage;
//# sourceMappingURL=file-command.js.map
/***/ }),
@ -68175,7 +68224,8 @@ const supportedPackageManager = [
'**/*.gradle*',
'**/gradle-wrapper.properties',
'buildSrc/**/Versions.kt',
'buildSrc/**/Dependencies.kt'
'buildSrc/**/Dependencies.kt',
'gradle/*.versions.toml'
]
},
{
@ -68406,7 +68456,7 @@ else {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.STATE_GPG_PRIVATE_KEY_FINGERPRINT = exports.INPUT_JOB_STATUS = exports.INPUT_CACHE = exports.INPUT_DEFAULT_GPG_PASSPHRASE = exports.INPUT_DEFAULT_GPG_PRIVATE_KEY = exports.INPUT_GPG_PASSPHRASE = exports.INPUT_GPG_PRIVATE_KEY = exports.INPUT_OVERWRITE_SETTINGS = exports.INPUT_SETTINGS_PATH = exports.INPUT_SERVER_PASSWORD = exports.INPUT_SERVER_USERNAME = exports.INPUT_SERVER_ID = exports.INPUT_CHECK_LATEST = exports.INPUT_JDK_FILE = exports.INPUT_DISTRIBUTION = exports.INPUT_JAVA_PACKAGE = exports.INPUT_ARCHITECTURE = exports.INPUT_JAVA_VERSION = exports.MACOS_JAVA_CONTENT_POSTFIX = void 0;
exports.INPUT_MVN_TOOLCHAIN_VENDOR = exports.INPUT_MVN_TOOLCHAIN_ID = exports.MVN_TOOLCHAINS_FILE = exports.MVN_SETTINGS_FILE = exports.M2_DIR = exports.STATE_GPG_PRIVATE_KEY_FINGERPRINT = exports.INPUT_JOB_STATUS = exports.INPUT_CACHE = exports.INPUT_DEFAULT_GPG_PASSPHRASE = exports.INPUT_DEFAULT_GPG_PRIVATE_KEY = exports.INPUT_GPG_PASSPHRASE = exports.INPUT_GPG_PRIVATE_KEY = exports.INPUT_OVERWRITE_SETTINGS = exports.INPUT_SETTINGS_PATH = exports.INPUT_SERVER_PASSWORD = exports.INPUT_SERVER_USERNAME = exports.INPUT_SERVER_ID = exports.INPUT_CHECK_LATEST = exports.INPUT_JDK_FILE = exports.INPUT_DISTRIBUTION = exports.INPUT_JAVA_PACKAGE = exports.INPUT_ARCHITECTURE = exports.INPUT_JAVA_VERSION = exports.MACOS_JAVA_CONTENT_POSTFIX = void 0;
exports.MACOS_JAVA_CONTENT_POSTFIX = 'Contents/Home';
exports.INPUT_JAVA_VERSION = 'java-version';
exports.INPUT_ARCHITECTURE = 'architecture';
@ -68426,6 +68476,11 @@ exports.INPUT_DEFAULT_GPG_PASSPHRASE = 'GPG_PASSPHRASE';
exports.INPUT_CACHE = 'cache';
exports.INPUT_JOB_STATUS = 'job-status';
exports.STATE_GPG_PRIVATE_KEY_FINGERPRINT = 'gpg-private-key-fingerprint';
exports.M2_DIR = '.m2';
exports.MVN_SETTINGS_FILE = 'settings.xml';
exports.MVN_TOOLCHAINS_FILE = 'toolchains.xml';
exports.INPUT_MVN_TOOLCHAIN_ID = 'mvn-toolchain-id';
exports.INPUT_MVN_TOOLCHAIN_VENDOR = 'mvn-toolchain-vendor';
/***/ }),

dist/setup/index.js (vendored, 529 changed lines)

@ -525,7 +525,13 @@ function resolvePaths(patterns) {
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
paths.push(`${relativeFile}`);
if (relativeFile === '') {
// path.relative returns empty string if workspace and file are equal
paths.push('.');
}
else {
paths.push(`${relativeFile}`);
}
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
@ -683,6 +689,7 @@ const util = __importStar(__nccwpck_require__(3837));
const utils = __importStar(__nccwpck_require__(1518));
const constants_1 = __nccwpck_require__(8840);
const requestUtils_1 = __nccwpck_require__(3981);
const abort_controller_1 = __nccwpck_require__(2557);
/**
* Pipes the body of a HTTP response to a stream
*
@ -866,15 +873,24 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
const fd = fs.openSync(archivePath, 'w');
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
const abortSignal = controller.signal;
while (!downloadProgress.isDone()) {
const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
downloadProgress.nextSegment(segmentSize);
const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {
abortSignal,
concurrency: options.downloadConcurrency,
onProgress: downloadProgress.onProgress()
});
fs.writeFileSync(fd, result);
}));
if (result === 'timeout') {
controller.abort();
throw new Error('Aborting cache download as the download time exceeded the timeout.');
}
else if (Buffer.isBuffer(result)) {
fs.writeFileSync(fd, result);
}
}
}
finally {
@ -885,6 +901,16 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
});
}
exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
let timeoutHandle;
const timeoutPromise = new Promise(resolve => {
timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
});
return Promise.race([promise, timeoutPromise]).then(result => {
clearTimeout(timeoutHandle);
return result;
});
});
//# sourceMappingURL=downloadUtils.js.map
/***/ }),
@ -1044,6 +1070,7 @@ const fs_1 = __nccwpck_require__(7147);
const path = __importStar(__nccwpck_require__(1017));
const utils = __importStar(__nccwpck_require__(1518));
const constants_1 = __nccwpck_require__(8840);
const IS_WINDOWS = process.platform === 'win32';
function getTarPath(args, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
switch (process.platform) {
@ -1091,26 +1118,43 @@ function getWorkingDirectory() {
var _a;
return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
}
// Common function for extractTar and listTar to get the compression method
function getCompressionProgram(compressionMethod) {
// -d: Decompress.
// unzstd is equivalent to 'zstd -d'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return [
'--use-compress-program',
IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
default:
return ['-z'];
}
}
function listTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
const args = [
...getCompressionProgram(compressionMethod),
'-tf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P'
];
yield execTar(args, compressionMethod);
});
}
exports.listTar = listTar;
function extractTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
// Create directory to extract tar into
const workingDirectory = getWorkingDirectory();
yield io.mkdirP(workingDirectory);
// --d: Decompress.
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstd -d --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstd -d'];
default:
return ['-z'];
}
}
const args = [
...getCompressionProgram(),
...getCompressionProgram(compressionMethod),
'-xf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P',
@ -1129,15 +1173,19 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
const workingDirectory = getWorkingDirectory();
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// zstdmt is equivalent to 'zstd -T0'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstd -T0 --long=30'];
return [
'--use-compress-program',
IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstd -T0'];
return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
default:
return ['-z'];
}
@ -1159,32 +1207,6 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
});
}
exports.createTar = createTar;
function listTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
// --d: Decompress.
// --long=#: Enables long distance matching with # bits.
// Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstd -d --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstd -d'];
default:
return ['-z'];
}
}
const args = [
...getCompressionProgram(),
'-tf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P'
];
yield execTar(args, compressionMethod);
});
}
exports.listTar = listTar;
//# sourceMappingURL=tar.js.map
/***/ }),
@ -1235,7 +1257,8 @@ function getDownloadOptions(copy) {
const result = {
useAzureSdk: true,
downloadConcurrency: 8,
timeoutInMs: 30000
timeoutInMs: 30000,
segmentTimeoutInMs: 3600000
};
if (copy) {
if (typeof copy.useAzureSdk === 'boolean') {
@ -1247,10 +1270,21 @@ function getDownloadOptions(copy) {
if (typeof copy.timeoutInMs === 'number') {
result.timeoutInMs = copy.timeoutInMs;
}
if (typeof copy.segmentTimeoutInMs === 'number') {
result.segmentTimeoutInMs = copy.segmentTimeoutInMs;
}
}
const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'];
if (segmentDownloadTimeoutMins &&
!isNaN(Number(segmentDownloadTimeoutMins)) &&
isFinite(Number(segmentDownloadTimeoutMins))) {
result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000;
}
core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
core.debug(`Download concurrency: ${result.downloadConcurrency}`);
core.debug(`Request timeout (ms): ${result.timeoutInMs}`);
core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`);
core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
return result;
}
exports.getDownloadOptions = getDownloadOptions;
@ -4831,7 +4865,6 @@ const file_command_1 = __nccwpck_require__(717);
const utils_1 = __nccwpck_require__(5278);
const os = __importStar(__nccwpck_require__(2037));
const path = __importStar(__nccwpck_require__(1017));
const uuid_1 = __nccwpck_require__(8974);
const oidc_utils_1 = __nccwpck_require__(8041);
/**
* The code to exit an action
@ -4861,20 +4894,9 @@ function exportVariable(name, val) {
process.env[name] = convertedVal;
const filePath = process.env['GITHUB_ENV'] || '';
if (filePath) {
const delimiter = `ghadelimiter_${uuid_1.v4()}`;
// These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter.
if (name.includes(delimiter)) {
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
}
if (convertedVal.includes(delimiter)) {
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
}
const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
file_command_1.issueCommand('ENV', commandValue);
}
else {
command_1.issueCommand('set-env', { name }, convertedVal);
return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
}
command_1.issueCommand('set-env', { name }, convertedVal);
}
exports.exportVariable = exportVariable;
/**
@ -4892,7 +4914,7 @@ exports.setSecret = setSecret;
function addPath(inputPath) {
const filePath = process.env['GITHUB_PATH'] || '';
if (filePath) {
file_command_1.issueCommand('PATH', inputPath);
file_command_1.issueFileCommand('PATH', inputPath);
}
else {
command_1.issueCommand('add-path', {}, inputPath);
@ -4932,7 +4954,10 @@ function getMultilineInput(name, options) {
const inputs = getInput(name, options)
.split('\n')
.filter(x => x !== '');
return inputs;
if (options && options.trimWhitespace === false) {
return inputs;
}
return inputs.map(input => input.trim());
}
exports.getMultilineInput = getMultilineInput;
/**
@ -4965,8 +4990,12 @@ exports.getBooleanInput = getBooleanInput;
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
const filePath = process.env['GITHUB_OUTPUT'] || '';
if (filePath) {
return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
}
process.stdout.write(os.EOL);
command_1.issueCommand('set-output', { name }, value);
command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
}
exports.setOutput = setOutput;
/**
@ -5095,7 +5124,11 @@ exports.group = group;
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
command_1.issueCommand('save-state', { name }, value);
const filePath = process.env['GITHUB_STATE'] || '';
if (filePath) {
return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
}
command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
}
exports.saveState = saveState;
/**
@ -5161,13 +5194,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.issueCommand = void 0;
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(__nccwpck_require__(7147));
const os = __importStar(__nccwpck_require__(2037));
const uuid_1 = __nccwpck_require__(8974);
const utils_1 = __nccwpck_require__(5278);
function issueCommand(command, message) {
function issueFileCommand(command, message) {
const filePath = process.env[`GITHUB_${command}`];
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
@ -5179,7 +5213,22 @@ function issueCommand(command, message) {
encoding: 'utf8'
});
}
exports.issueCommand = issueCommand;
exports.issueFileCommand = issueFileCommand;
function prepareKeyValueMessage(key, value) {
const delimiter = `ghadelimiter_${uuid_1.v4()}`;
const convertedValue = utils_1.toCommandValue(value);
// These should realistically never happen, but just in case someone finds a
// way to exploit uuid generation let's not allow keys or values that contain
// the delimiter.
if (key.includes(delimiter)) {
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
}
if (convertedValue.includes(delimiter)) {
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
}
return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
}
exports.prepareKeyValueMessage = prepareKeyValueMessage;
//# sourceMappingURL=file-command.js.map
/***/ }),
@ -103216,7 +103265,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.generate = exports.createAuthenticationSettings = exports.configureAuthentication = exports.SETTINGS_FILE = exports.M2_DIR = void 0;
exports.generate = exports.createAuthenticationSettings = exports.configureAuthentication = void 0;
const path = __importStar(__nccwpck_require__(1017));
const core = __importStar(__nccwpck_require__(2186));
const io = __importStar(__nccwpck_require__(7436));
@ -103226,14 +103275,12 @@ const xmlbuilder2_1 = __nccwpck_require__(151);
const constants = __importStar(__nccwpck_require__(9042));
const gpg = __importStar(__nccwpck_require__(3759));
const util_1 = __nccwpck_require__(2629);
exports.M2_DIR = '.m2';
exports.SETTINGS_FILE = 'settings.xml';
function configureAuthentication() {
return __awaiter(this, void 0, void 0, function* () {
const id = core.getInput(constants.INPUT_SERVER_ID);
const username = core.getInput(constants.INPUT_SERVER_USERNAME);
const password = core.getInput(constants.INPUT_SERVER_PASSWORD);
const settingsDirectory = core.getInput(constants.INPUT_SETTINGS_PATH) || path.join(os.homedir(), exports.M2_DIR);
const settingsDirectory = core.getInput(constants.INPUT_SETTINGS_PATH) || path.join(os.homedir(), constants.M2_DIR);
const overwriteSettings = util_1.getBooleanInput(constants.INPUT_OVERWRITE_SETTINGS, true);
const gpgPrivateKey = core.getInput(constants.INPUT_GPG_PRIVATE_KEY) || constants.INPUT_DEFAULT_GPG_PRIVATE_KEY;
const gpgPassphrase = core.getInput(constants.INPUT_GPG_PASSPHRASE) ||
@ -103252,7 +103299,7 @@ function configureAuthentication() {
exports.configureAuthentication = configureAuthentication;
function createAuthenticationSettings(id, username, password, settingsDirectory, overwriteSettings, gpgPassphrase = undefined) {
return __awaiter(this, void 0, void 0, function* () {
core.info(`Creating ${exports.SETTINGS_FILE} with server-id: ${id}`);
core.info(`Creating ${constants.MVN_SETTINGS_FILE} with server-id: ${id}`);
// when an alternate m2 location is specified use only that location (no .m2 directory)
// otherwise use the home/.m2/ path
yield io.mkdirP(settingsDirectory);
@ -103294,7 +103341,7 @@ function generate(id, username, password, gpgPassphrase) {
exports.generate = generate;
function write(directory, settings, overwriteSettings) {
return __awaiter(this, void 0, void 0, function* () {
const location = path.join(directory, exports.SETTINGS_FILE);
const location = path.join(directory, constants.MVN_SETTINGS_FILE);
const settingsExists = fs.existsSync(location);
if (settingsExists && overwriteSettings) {
core.info(`Overwriting existing file ${location}`);
@ -103380,7 +103427,8 @@ const supportedPackageManager = [
'**/*.gradle*',
'**/gradle-wrapper.properties',
'buildSrc/**/Versions.kt',
'buildSrc/**/Dependencies.kt'
'buildSrc/**/Dependencies.kt',
'gradle/*.versions.toml'
]
},
{
@ -103510,7 +103558,7 @@ function isProbablyGradleDaemonProblem(packageManager, error) {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.STATE_GPG_PRIVATE_KEY_FINGERPRINT = exports.INPUT_JOB_STATUS = exports.INPUT_CACHE = exports.INPUT_DEFAULT_GPG_PASSPHRASE = exports.INPUT_DEFAULT_GPG_PRIVATE_KEY = exports.INPUT_GPG_PASSPHRASE = exports.INPUT_GPG_PRIVATE_KEY = exports.INPUT_OVERWRITE_SETTINGS = exports.INPUT_SETTINGS_PATH = exports.INPUT_SERVER_PASSWORD = exports.INPUT_SERVER_USERNAME = exports.INPUT_SERVER_ID = exports.INPUT_CHECK_LATEST = exports.INPUT_JDK_FILE = exports.INPUT_DISTRIBUTION = exports.INPUT_JAVA_PACKAGE = exports.INPUT_ARCHITECTURE = exports.INPUT_JAVA_VERSION = exports.MACOS_JAVA_CONTENT_POSTFIX = void 0;
exports.INPUT_MVN_TOOLCHAIN_VENDOR = exports.INPUT_MVN_TOOLCHAIN_ID = exports.MVN_TOOLCHAINS_FILE = exports.MVN_SETTINGS_FILE = exports.M2_DIR = exports.STATE_GPG_PRIVATE_KEY_FINGERPRINT = exports.INPUT_JOB_STATUS = exports.INPUT_CACHE = exports.INPUT_DEFAULT_GPG_PASSPHRASE = exports.INPUT_DEFAULT_GPG_PRIVATE_KEY = exports.INPUT_GPG_PASSPHRASE = exports.INPUT_GPG_PRIVATE_KEY = exports.INPUT_OVERWRITE_SETTINGS = exports.INPUT_SETTINGS_PATH = exports.INPUT_SERVER_PASSWORD = exports.INPUT_SERVER_USERNAME = exports.INPUT_SERVER_ID = exports.INPUT_CHECK_LATEST = exports.INPUT_JDK_FILE = exports.INPUT_DISTRIBUTION = exports.INPUT_JAVA_PACKAGE = exports.INPUT_ARCHITECTURE = exports.INPUT_JAVA_VERSION = exports.MACOS_JAVA_CONTENT_POSTFIX = void 0;
exports.MACOS_JAVA_CONTENT_POSTFIX = 'Contents/Home';
exports.INPUT_JAVA_VERSION = 'java-version';
exports.INPUT_ARCHITECTURE = 'architecture';
@ -103530,6 +103578,11 @@ exports.INPUT_DEFAULT_GPG_PASSPHRASE = 'GPG_PASSPHRASE';
exports.INPUT_CACHE = 'cache';
exports.INPUT_JOB_STATUS = 'job-status';
exports.STATE_GPG_PRIVATE_KEY_FINGERPRINT = 'gpg-private-key-fingerprint';
exports.M2_DIR = '.m2';
exports.MVN_SETTINGS_FILE = 'settings.xml';
exports.MVN_TOOLCHAINS_FILE = 'toolchains.xml';
exports.INPUT_MVN_TOOLCHAIN_ID = 'mvn-toolchain-id';
exports.INPUT_MVN_TOOLCHAIN_VENDOR = 'mvn-toolchain-vendor';
/***/ }),
@ -103643,7 +103696,7 @@ class AdoptDistribution extends base_installer_1.JavaBase {
getAvailableVersions() {
return __awaiter(this, void 0, void 0, function* () {
const platform = this.getPlatformOption();
const arch = this.architecture;
const arch = this.distributionArchitecture();
const imageType = this.packageType;
const versionRange = encodeURI('[1.0,100.0]'); // retrieve all available versions
const releaseType = this.stable ? 'ga' : 'ea';
@ -103754,6 +103807,7 @@ const path_1 = __importDefault(__nccwpck_require__(1017));
const httpm = __importStar(__nccwpck_require__(9925));
const util_1 = __nccwpck_require__(2629);
const constants_1 = __nccwpck_require__(9042);
const os_1 = __importDefault(__nccwpck_require__(2037));
class JavaBase {
constructor(distribution, installerOptions) {
this.distribution = distribution;
@ -103762,7 +103816,7 @@ class JavaBase {
maxRetries: 3
});
({ version: this.version, stable: this.stable } = this.normalizeVersion(installerOptions.version));
this.architecture = installerOptions.architecture;
this.architecture = installerOptions.architecture || os_1.default.arch();
this.packageType = installerOptions.packageType;
this.checkLatest = installerOptions.checkLatest;
}
@ -103873,6 +103927,24 @@ class JavaBase {
core.setOutput('version', version);
core.exportVariable(`JAVA_HOME_${majorVersion}_${this.architecture.toUpperCase()}`, toolPath);
}
distributionArchitecture() {
// default mappings of config architectures to distribution architectures
// override if a distribution uses any different names; see liberica for an example
// node's os.arch() - which this defaults to - can return any of:
// 'arm', 'arm64', 'ia32', 'mips', 'mipsel', 'ppc', 'ppc64', 's390', 's390x', and 'x64'
// so we need to map these to java distribution architectures
// 'amd64' is included here too b/c it's a common alias for 'x64' people might use explicitly
switch (this.architecture) {
case 'amd64':
return 'x64';
case 'ia32':
return 'x86';
case 'arm64':
return 'aarch64';
default:
return this.architecture;
}
}
}
exports.JavaBase = JavaBase;
@ -103972,7 +104044,7 @@ class CorrettoDistribution extends base_installer_1.JavaBase {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
const platform = this.getPlatformOption();
const arch = this.architecture;
const arch = this.distributionArchitecture();
const imageType = this.packageType;
if (core.isDebug()) {
console.time('corretto-retrieve-available-versions');
@ -104151,7 +104223,7 @@ const tc = __importStar(__nccwpck_require__(7784));
const fs_1 = __importDefault(__nccwpck_require__(7147));
const path_1 = __importDefault(__nccwpck_require__(1017));
const supportedPlatform = `'linux', 'linux-musl', 'macos', 'solaris', 'windows'`;
const supportedArchitecture = `'x86', 'x64', 'armv7', 'aarch64', 'ppc64le'`;
const supportedArchitectures = `'x86', 'x64', 'armv7', 'aarch64', 'ppc64le'`;
class LibericaDistributions extends base_installer_1.JavaBase {
constructor(installerOptions) {
super('Liberica', installerOptions);
@ -104223,7 +104295,8 @@ class LibericaDistributions extends base_installer_1.JavaBase {
return bundleType;
}
getArchitectureOptions() {
switch (this.architecture) {
const arch = this.distributionArchitecture();
switch (arch) {
case 'x86':
return { bitness: '32', arch: 'x86' };
case 'x64':
@ -104235,7 +104308,7 @@ class LibericaDistributions extends base_installer_1.JavaBase {
case 'ppc64le':
return { bitness: '64', arch: 'ppc' };
default:
throw new Error(`Architecture '${this.architecture}' is not supported. Supported architectures: ${supportedArchitecture}`);
throw new Error(`Architecture '${this.architecture}' is not supported. Supported architectures: ${supportedArchitectures}`);
}
}
getPlatformOption(platform = process.platform) {
@ -104261,6 +104334,15 @@ class LibericaDistributions extends base_installer_1.JavaBase {
}
return mainVersion;
}
distributionArchitecture() {
let arch = super.distributionArchitecture();
switch (arch) {
case 'arm':
return 'armv7';
default:
return arch;
}
}
}
exports.LibericaDistributions = LibericaDistributions;
@ -104409,7 +104491,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.MicrosoftDistributions = void 0;
const base_installer_1 = __nccwpck_require__(9741);
const semver_1 = __importDefault(__nccwpck_require__(1383));
const util_1 = __nccwpck_require__(2629);
const core = __importStar(__nccwpck_require__(2186));
const tc = __importStar(__nccwpck_require__(7784));
@ -104434,7 +104515,8 @@ class MicrosoftDistributions extends base_installer_1.JavaBase {
}
findPackageForDownload(range) {
return __awaiter(this, void 0, void 0, function* () {
if (this.architecture !== 'x64' && this.architecture !== 'aarch64') {
const arch = this.distributionArchitecture();
if (arch !== 'x64' && arch !== 'aarch64') {
throw new Error(`Unsupported architecture: ${this.architecture}`);
}
if (!this.stable) {
@ -104443,70 +104525,51 @@ class MicrosoftDistributions extends base_installer_1.JavaBase {
if (this.packageType !== 'jdk') {
throw new Error('Microsoft Build of OpenJDK provides only the `jdk` package type');
}
const availableVersionsRaw = yield this.getAvailableVersions();
const opts = this.getPlatformOption();
const availableVersions = availableVersionsRaw.map(item => ({
url: `https://aka.ms/download-jdk/microsoft-jdk-${item.version.join('.')}-${opts.os}-${this.architecture}.${opts.archive}`,
version: this.convertVersionToSemver(item)
}));
const satisfiedVersion = availableVersions
.filter(item => util_1.isVersionSatisfies(range, item.version))
.sort((a, b) => -semver_1.default.compareBuild(a.version, b.version))[0];
if (!satisfiedVersion) {
const availableOptions = availableVersions.map(item => item.version).join(', ');
const availableOptionsMessage = availableOptions
? `\nAvailable versions: ${availableOptions}`
: '';
throw new Error(`Could not find satisfied version for SemVer ${range}. ${availableOptionsMessage}`);
const manifest = yield this.getAvailableVersions();
if (!manifest) {
throw new Error('Could not load manifest for Microsoft Build of OpenJDK');
}
return satisfiedVersion;
const foundRelease = yield tc.findFromManifest(range, true, manifest, arch);
if (!foundRelease) {
throw new Error(`Could not find satisfied version for SemVer ${range}. ${manifest
.map(item => item.version)
.join(', ')}`);
}
return { url: foundRelease.files[0].download_url, version: foundRelease.version };
});
}
getAvailableVersions() {
return __awaiter(this, void 0, void 0, function* () {
// TODO get these dynamically!
// We will need Microsoft to add an endpoint where we can query for versions.
const jdkVersions = [
{
version: [17, 0, 3]
},
{
version: [17, 0, 1, 12, 1]
},
{
version: [16, 0, 2, 7, 1]
},
{
version: [11, 0, 15]
const token = core.getInput('token');
const owner = 'actions';
const repository = 'setup-java';
const branch = 'main';
const filePath = 'src/distributions/microsoft/microsoft-openjdk-versions.json';
let releases = null;
const fileUrl = `https://api.github.com/repos/${owner}/${repository}/contents/${filePath}?ref=${branch}`;
const headers = {
authorization: token,
accept: 'application/vnd.github.VERSION.raw'
};
let response = null;
try {
response = yield this.http.getJson(fileUrl, headers);
if (!response.result) {
return null;
}
];
// M1 is only supported for Java 16 & 17
if (process.platform !== 'darwin' || this.architecture !== 'aarch64') {
jdkVersions.push({
version: [11, 0, 13, 8, 1]
});
}
return jdkVersions;
catch (err) {
core.debug(`Http request for microsoft-openjdk-versions.json failed with status code: ${response === null || response === void 0 ? void 0 : response.statusCode}`);
return null;
}
if (response.result) {
releases = response.result;
}
return releases;
});
}
getPlatformOption(platform = process.platform /* for testing */) {
switch (platform) {
case 'darwin':
return { archive: 'tar.gz', os: 'macos' };
case 'win32':
return { archive: 'zip', os: 'windows' };
case 'linux':
return { archive: 'tar.gz', os: 'linux' };
default:
throw new Error(`Platform '${platform}' is not supported. Supported platforms: 'darwin', 'linux', 'win32'`);
}
}
convertVersionToSemver(version) {
const major = version.version[0];
const minor = version.version[1];
const patch = version.version[2];
return `${major}.${minor}.${patch}`;
}
}
exports.MicrosoftDistributions = MicrosoftDistributions;
@ -104807,7 +104870,7 @@ class TemurinDistribution extends base_installer_1.JavaBase {
getAvailableVersions() {
return __awaiter(this, void 0, void 0, function* () {
const platform = this.getPlatformOption();
const arch = this.architecture;
const arch = this.distributionArchitecture();
const imageType = this.packageType;
const versionRange = encodeURI('[1.0,100.0]'); // retrieve all available versions
const releaseType = this.stable ? 'ga' : 'ea';
@ -105011,17 +105074,17 @@ class ZuluDistribution extends base_installer_1.JavaBase {
});
}
getArchitectureOptions() {
if (this.architecture == 'x64') {
return { arch: 'x86', hw_bitness: '64', abi: '' };
}
else if (this.architecture == 'x86') {
return { arch: 'x86', hw_bitness: '32', abi: '' };
}
else if (this.architecture == 'arm64') {
return { arch: 'arm', hw_bitness: '64', abi: '' };
}
else {
return { arch: this.architecture, hw_bitness: '', abi: '' };
const arch = this.distributionArchitecture();
switch (arch) {
case 'x64':
return { arch: 'x86', hw_bitness: '64', abi: '' };
case 'x86':
return { arch: 'x86', hw_bitness: '32', abi: '' };
case 'aarch64':
case 'arm64':
return { arch: 'arm', hw_bitness: '64', abi: '' };
default:
return { arch: arch, hw_bitness: '', abi: '' };
}
}
getPlatformOption() {
@ -105163,6 +105226,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
const core = __importStar(__nccwpck_require__(2186));
const auth = __importStar(__nccwpck_require__(3497));
const util_1 = __nccwpck_require__(2629);
const toolchains = __importStar(__nccwpck_require__(9322));
const constants = __importStar(__nccwpck_require__(9042));
const cache_1 = __nccwpck_require__(4810);
const path = __importStar(__nccwpck_require__(1017));
@ -105177,8 +105241,12 @@ function run() {
const jdkFile = core.getInput(constants.INPUT_JDK_FILE);
const cache = core.getInput(constants.INPUT_CACHE);
const checkLatest = util_1.getBooleanInput(constants.INPUT_CHECK_LATEST, false);
let toolchainIds = core.getMultilineInput(constants.INPUT_MVN_TOOLCHAIN_ID);
if (versions.length !== toolchainIds.length) {
toolchainIds = [];
}
core.startGroup('Installed distributions');
for (const version of versions) {
for (const [index, version] of versions.entries()) {
const installerOptions = {
architecture,
packageType,
@ -105190,6 +105258,7 @@ function run() {
throw new Error(`No supported distribution was found for input ${distributionName}`);
}
const result = yield distribution.setupJava();
yield toolchains.configureToolchains(version, distributionName, result.path, toolchainIds[index]);
core.info('');
core.info('Java configuration:');
core.info(` Distribution: ${distributionName}`);
@ -105213,6 +105282,166 @@ function run() {
run();
/***/ }),
/***/ 9322:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.generateToolchainDefinition = exports.createToolchainsSettings = exports.configureToolchains = void 0;
const fs = __importStar(__nccwpck_require__(7147));
const os = __importStar(__nccwpck_require__(2037));
const path = __importStar(__nccwpck_require__(1017));
const core = __importStar(__nccwpck_require__(2186));
const io = __importStar(__nccwpck_require__(7436));
const constants = __importStar(__nccwpck_require__(9042));
const util_1 = __nccwpck_require__(2629);
const xmlbuilder2_1 = __nccwpck_require__(151);
function configureToolchains(version, distributionName, jdkHome, toolchainId) {
return __awaiter(this, void 0, void 0, function* () {
const vendor = core.getInput(constants.INPUT_MVN_TOOLCHAIN_VENDOR) || distributionName;
const id = toolchainId || `${vendor}_${version}`;
const settingsDirectory = core.getInput(constants.INPUT_SETTINGS_PATH) || path.join(os.homedir(), constants.M2_DIR);
const overwriteSettings = util_1.getBooleanInput(constants.INPUT_OVERWRITE_SETTINGS, true);
yield createToolchainsSettings({
jdkInfo: {
version,
vendor,
id,
jdkHome
},
settingsDirectory,
overwriteSettings
});
});
}
exports.configureToolchains = configureToolchains;
function createToolchainsSettings({ jdkInfo, settingsDirectory, overwriteSettings }) {
return __awaiter(this, void 0, void 0, function* () {
core.info(`Creating ${constants.MVN_TOOLCHAINS_FILE} for JDK version ${jdkInfo.version} from ${jdkInfo.vendor}`);
// when an alternate m2 location is specified use only that location (no .m2 directory)
// otherwise use the home/.m2/ path
yield io.mkdirP(settingsDirectory);
const originalToolchains = yield readExistingToolchainsFile(settingsDirectory);
const updatedToolchains = generateToolchainDefinition(originalToolchains, jdkInfo.version, jdkInfo.vendor, jdkInfo.id, jdkInfo.jdkHome);
yield writeToolchainsFileToDisk(settingsDirectory, updatedToolchains, overwriteSettings);
});
}
exports.createToolchainsSettings = createToolchainsSettings;
// only exported for testing purposes
function generateToolchainDefinition(original, version, vendor, id, jdkHome) {
let xmlObj;
if (original === null || original === void 0 ? void 0 : original.length) {
xmlObj = xmlbuilder2_1.create(original)
.root()
.ele({
toolchain: {
type: 'jdk',
provides: {
version: `${version}`,
vendor: `${vendor}`,
id: `${id}`
},
configuration: {
jdkHome: `${jdkHome}`
}
}
});
}
else
xmlObj = xmlbuilder2_1.create({
toolchains: {
'@xmlns': 'https://maven.apache.org/TOOLCHAINS/1.1.0',
'@xmlns:xsi': 'https://www.w3.org/2001/XMLSchema-instance',
'@xsi:schemaLocation': 'https://maven.apache.org/TOOLCHAINS/1.1.0 https://maven.apache.org/xsd/toolchains-1.1.0.xsd',
toolchain: [
{
type: 'jdk',
provides: {
version: `${version}`,
vendor: `${vendor}`,
id: `${id}`
},
configuration: {
jdkHome: `${jdkHome}`
}
}
]
}
});
return xmlObj.end({
format: 'xml',
wellFormed: false,
headless: false,
prettyPrint: true,
width: 80
});
}
exports.generateToolchainDefinition = generateToolchainDefinition;
function readExistingToolchainsFile(directory) {
return __awaiter(this, void 0, void 0, function* () {
const location = path.join(directory, constants.MVN_TOOLCHAINS_FILE);
if (fs.existsSync(location)) {
return fs.readFileSync(location, {
encoding: 'utf-8',
flag: 'r'
});
}
return '';
});
}
function writeToolchainsFileToDisk(directory, settings, overwriteSettings) {
return __awaiter(this, void 0, void 0, function* () {
const location = path.join(directory, constants.MVN_TOOLCHAINS_FILE);
const settingsExists = fs.existsSync(location);
if (settingsExists && overwriteSettings) {
core.info(`Overwriting existing file ${location}`);
}
else if (!settingsExists) {
core.info(`Writing to ${location}`);
}
else {
core.info(`Skipping generation of ${location} because file already exists and overwriting is not enabled`);
return;
}
return fs.writeFileSync(location, settings, {
encoding: 'utf-8',
flag: 'w'
});
});
}
/***/ }),
/***/ 2629:

View file

@ -14,6 +14,7 @@
- [Publishing using Apache Maven](#Publishing-using-Apache-Maven)
- [Publishing using Gradle](#Publishing-using-Gradle)
- [Hosted Tool Cache](#Hosted-Tool-Cache)
- [Modifying Maven Toolchains](#Modifying-Maven-Toolchains)
See [action.yml](../action.yml) for more details on task inputs.
@ -350,3 +351,100 @@ GitHub Hosted Runners have a tool cache that comes with some Java versions pre-i
Currently, LTS versions of Adopt OpenJDK (`adopt`) are cached on the GitHub Hosted Runners.
The tools cache gets updated on a weekly basis. For information regarding locally cached versions of Java on GitHub hosted runners, check out [GitHub Actions Virtual Environments](https://github.com/actions/virtual-environments).
## Modifying Maven Toolchains
The `setup-java` action generates a basic [Maven Toolchains declaration](https://maven.apache.org/guides/mini/guide-using-toolchains.html) for specified Java versions by either creating a minimal toolchains file or extending an existing declaration with the additional JDKs.
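Under the hood, the declaration is produced by the `generateToolchainDefinition` helper in `src/toolchains.ts` (exported for testing; the new file appears later in this diff). A minimal test-style sketch of how a declaration is created and then extended; the `jdkHome` paths are hypothetical placeholders:
```typescript
// Test-style sketch using the helper exported from src/toolchains.ts.
// The jdkHome paths below are hypothetical placeholders.
import { generateToolchainDefinition } from './toolchains';

// First call: no existing toolchains.xml yet, so a minimal document is created.
const initial = generateToolchainDefinition('', '11', 'temurin', 'temurin_11', '/opt/jdk/11');

// Second call: the existing XML is passed back in and extended with another JDK.
const extended = generateToolchainDefinition(initial, '17', 'temurin', 'temurin_17', '/opt/jdk/17');

// `extended` now contains one <toolchain> entry per JDK, each carrying
// version, vendor, id and jdkHome.
console.log(extended);
```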
### Installing Multiple JDKs With Toolchains
Subsequent calls to `setup-java` with distinct distribution and version parameters will continue to extend the toolchains declaration and make all specified Java versions available.
```yaml
steps:
- uses: actions/setup-java@v3
with:
distribution: '<distribution>'
java-version: |
8
11
- uses: actions/setup-java@v3
with:
distribution: '<distribution>'
java-version: 15
```
The result is a Toolchain with entries for JDKs 8, 11 and 15. You can even combine this with custom JDKs of arbitrary versions:
```yaml
- run: |
download_url="https://example.com/java/jdk/6u45-b06/jdk-6u45-linux-x64.tar.gz"
wget -O $RUNNER_TEMP/java_package.tar.gz $download_url
- uses: actions/setup-java@v3
with:
distribution: 'jdkfile'
jdkFile: ${{ runner.temp }}/java_package.tar.gz
java-version: '1.6'
architecture: x64
```
This will generate a Toolchains entry with the following values: `version: 1.6`, `vendor: jdkfile`, `id: jdkfile_1.6`.
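The defaults follow from `configureToolchains` in `src/toolchains.ts` (added later in this diff). A minimal sketch of the derivation, with the literal values of the example above filled in for illustration:
```typescript
// Sketch of the default vendor/id derivation from src/toolchains.ts,
// with the values of the jdkFile example above (illustrative only).
const distributionName = 'jdkfile';  // the `distribution` input
const version = '1.6';               // the `java-version` input
const vendorInput = '';              // `mvn-toolchain-vendor` not set
const toolchainIdInput = '';         // `mvn-toolchain-id` not set

const vendor = vendorInput || distributionName;         // => 'jdkfile'
const id = toolchainIdInput || `${vendor}_${version}`;  // => 'jdkfile_1.6'
```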
### Modifying The Toolchain Vendor For JDKs
Each JDK provider receives a default `vendor` based on the `distribution` input value, but this can be overridden with the `mvn-toolchain-vendor` parameter as follows.
```yaml
- run: |
download_url="https://example.com/java/jdk/6u45-b06/jdk-6u45-linux-x64.tar.gz"
wget -O $RUNNER_TEMP/java_package.tar.gz $download_url
- uses: actions/setup-java@v3
with:
distribution: 'jdkfile'
jdkFile: ${{ runner.temp }}/java_package.tar.gz
java-version: '1.6'
architecture: x64
mvn-toolchain-vendor: 'Oracle'
```
This will generate a Toolchains entry with the following values: `version: 1.6`, `vendor: Oracle`, `id: Oracle_1.6`.
If you install multiple versions of Java at once via the multi-line `java-version` input, `mvn-toolchain-vendor` still accepts only a single value, which is applied to all installed JDKs; this works as expected when installing multiple versions of the same `distribution`.
```yaml
steps:
- uses: actions/setup-java@v3
with:
distribution: '<distribution>'
java-version: |
8
11
mvn-toolchain-vendor: Eclipse Temurin
```
### Modifying The Toolchain ID For JDKs
Each JDK provider receives a default `id` based on the combination of `distribution` and `java-version`, in the format `distribution_java-version` (e.g. `temurin_11`), but this can be overridden with the `mvn-toolchain-id` parameter as follows.
```yaml
steps:
- uses: actions/checkout@v3
- uses: actions/setup-java@v3
with:
distribution: 'temurin'
java-version: '11'
mvn-toolchain-id: 'some_other_id'
- run: java -cp java HelloWorldApp
```
If you install multiple versions of Java at once, you can use the same multi-line syntax as for `java-version`. Note that you must declare an ID for every Java version being installed, otherwise the `mvn-toolchain-id` input is skipped wholesale due to mapping ambiguities, as sketched after the example below.
```yaml
steps:
- uses: actions/setup-java@v3
with:
distribution: '<distribution>'
java-version: |
8
11
mvn-toolchain-id: |
something_else
something_other
```
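The pairing of versions and IDs mirrors the loop added to `src/setup-java.ts` in this diff; a simplified sketch:
```typescript
// Simplified sketch of the version/ID pairing added in src/setup-java.ts.
const versions = ['8', '11'];                              // from `java-version`
let toolchainIds = ['something_else', 'something_other'];  // from `mvn-toolchain-id`

// If the counts do not match, the IDs are dropped wholesale and the default
// `${vendor}_${version}` IDs are used instead.
if (versions.length !== toolchainIds.length) {
  toolchainIds = [];
}

for (const [index, version] of versions.entries()) {
  // toolchainIds[index] is undefined when the fallback applies, so
  // configureToolchains() derives the default ID instead.
  console.log(version, toolchainIds[index]);
}
```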

28
package-lock.json generated
View file

@ -9,8 +9,8 @@
"version": "3.4.1",
"license": "MIT",
"dependencies": {
"@actions/cache": "^3.0.0",
"@actions/core": "^1.9.0",
"@actions/cache": "^3.0.4",
"@actions/core": "^1.10.0",
"@actions/exec": "^1.0.4",
"@actions/glob": "^0.2.0",
"@actions/http-client": "^1.0.11",
@ -32,9 +32,9 @@
}
},
"node_modules/@actions/cache": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz",
"integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==",
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.4.tgz",
"integrity": "sha512-9RwVL8/ISJoYWFNH1wR/C26E+M3HDkGPWmbFJMMCKwTkjbNZJreMT4XaR/EB1bheIvN4PREQxEQQVJ18IPnf/Q==",
"dependencies": {
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.1",
@ -73,9 +73,9 @@
}
},
"node_modules/@actions/core": {
"version": "1.9.1",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.1.tgz",
"integrity": "sha512-5ad+U2YGrmmiw6du20AQW5XuWo7UKN2052FjSV7MX+Wfjf8sCqcsZe62NfgHys4QI4/Y+vQvLKYL8jWtA1ZBTA==",
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
"integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
"dependencies": {
"@actions/http-client": "^2.0.1",
"uuid": "^8.3.2"
@ -4790,9 +4790,9 @@
},
"dependencies": {
"@actions/cache": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz",
"integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==",
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.4.tgz",
"integrity": "sha512-9RwVL8/ISJoYWFNH1wR/C26E+M3HDkGPWmbFJMMCKwTkjbNZJreMT4XaR/EB1bheIvN4PREQxEQQVJ18IPnf/Q==",
"requires": {
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.1",
@ -4830,9 +4830,9 @@
}
},
"@actions/core": {
"version": "1.9.1",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.1.tgz",
"integrity": "sha512-5ad+U2YGrmmiw6du20AQW5XuWo7UKN2052FjSV7MX+Wfjf8sCqcsZe62NfgHys4QI4/Y+vQvLKYL8jWtA1ZBTA==",
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
"integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
"requires": {
"@actions/http-client": "^2.0.1",
"uuid": "^8.3.2"

View file

@ -24,8 +24,8 @@
"author": "GitHub",
"license": "MIT",
"dependencies": {
"@actions/cache": "^3.0.0",
"@actions/core": "^1.9.0",
"@actions/cache": "^3.0.4",
"@actions/core": "^1.10.0",
"@actions/exec": "^1.0.4",
"@actions/glob": "^0.2.0",
"@actions/http-client": "^1.0.11",

View file

@ -10,15 +10,12 @@ import * as constants from './constants';
import * as gpg from './gpg';
import { getBooleanInput } from './util';
export const M2_DIR = '.m2';
export const SETTINGS_FILE = 'settings.xml';
export async function configureAuthentication() {
const id = core.getInput(constants.INPUT_SERVER_ID);
const username = core.getInput(constants.INPUT_SERVER_USERNAME);
const password = core.getInput(constants.INPUT_SERVER_PASSWORD);
const settingsDirectory =
core.getInput(constants.INPUT_SETTINGS_PATH) || path.join(os.homedir(), M2_DIR);
core.getInput(constants.INPUT_SETTINGS_PATH) || path.join(os.homedir(), constants.M2_DIR);
const overwriteSettings = getBooleanInput(constants.INPUT_OVERWRITE_SETTINGS, true);
const gpgPrivateKey =
core.getInput(constants.INPUT_GPG_PRIVATE_KEY) || constants.INPUT_DEFAULT_GPG_PRIVATE_KEY;
@ -54,7 +51,7 @@ export async function createAuthenticationSettings(
overwriteSettings: boolean,
gpgPassphrase: string | undefined = undefined
) {
core.info(`Creating ${SETTINGS_FILE} with server-id: ${id}`);
core.info(`Creating ${constants.MVN_SETTINGS_FILE} with server-id: ${id}`);
// when an alternate m2 location is specified use only that location (no .m2 directory)
// otherwise use the home/.m2/ path
await io.mkdirP(settingsDirectory);
@ -106,7 +103,7 @@ export function generate(
}
async function write(directory: string, settings: string, overwriteSettings: boolean) {
const location = path.join(directory, SETTINGS_FILE);
const location = path.join(directory, constants.MVN_SETTINGS_FILE);
const settingsExists = fs.existsSync(location);
if (settingsExists && overwriteSettings) {
core.info(`Overwriting existing file ${location}`);

View file

@ -35,7 +35,8 @@ const supportedPackageManager: PackageManager[] = [
'**/*.gradle*',
'**/gradle-wrapper.properties',
'buildSrc/**/Versions.kt',
'buildSrc/**/Dependencies.kt'
'buildSrc/**/Dependencies.kt',
'gradle/*.versions.toml'
]
},
{

View file

@ -20,3 +20,9 @@ export const INPUT_CACHE = 'cache';
export const INPUT_JOB_STATUS = 'job-status';
export const STATE_GPG_PRIVATE_KEY_FINGERPRINT = 'gpg-private-key-fingerprint';
export const M2_DIR = '.m2';
export const MVN_SETTINGS_FILE = 'settings.xml';
export const MVN_TOOLCHAINS_FILE = 'toolchains.xml';
export const INPUT_MVN_TOOLCHAIN_ID = 'mvn-toolchain-id';
export const INPUT_MVN_TOOLCHAIN_VENDOR = 'mvn-toolchain-vendor';

View file

@ -88,7 +88,7 @@ export class AdoptDistribution extends JavaBase {
private async getAvailableVersions(): Promise<IAdoptAvailableVersions[]> {
const platform = this.getPlatformOption();
const arch = this.architecture;
const arch = this.distributionArchitecture();
const imageType = this.packageType;
const versionRange = encodeURI('[1.0,100.0]'); // retrieve all available versions
const releaseType = this.stable ? 'ga' : 'ea';

View file

@ -7,6 +7,7 @@ import * as httpm from '@actions/http-client';
import { getToolcachePath, isVersionSatisfies } from '../util';
import { JavaDownloadRelease, JavaInstallerOptions, JavaInstallerResults } from './base-models';
import { MACOS_JAVA_CONTENT_POSTFIX } from '../constants';
import os from 'os';
export abstract class JavaBase {
protected http: httpm.HttpClient;
@ -25,7 +26,7 @@ export abstract class JavaBase {
({ version: this.version, stable: this.stable } = this.normalizeVersion(
installerOptions.version
));
this.architecture = installerOptions.architecture;
this.architecture = installerOptions.architecture || os.arch();
this.packageType = installerOptions.packageType;
this.checkLatest = installerOptions.checkLatest;
}
@ -150,4 +151,24 @@ export abstract class JavaBase {
core.setOutput('version', version);
core.exportVariable(`JAVA_HOME_${majorVersion}_${this.architecture.toUpperCase()}`, toolPath);
}
protected distributionArchitecture(): string {
// default mappings of config architectures to distribution architectures
// override if a distribution uses any different names; see liberica for an example
// node's os.arch() - which this defaults to - can return any of:
// 'arm', 'arm64', 'ia32', 'mips', 'mipsel', 'ppc', 'ppc64', 's390', 's390x', and 'x64'
// so we need to map these to java distribution architectures
// 'amd64' is included here too b/c it's a common alias for 'x64' people might use explicitly
switch (this.architecture) {
case 'amd64':
return 'x64';
case 'ia32':
return 'x86';
case 'arm64':
return 'aarch64';
default:
return this.architecture;
}
}
}

View file

@ -68,7 +68,7 @@ export class CorrettoDistribution extends JavaBase {
private async getAvailableVersions(): Promise<ICorrettoAvailableVersions[]> {
const platform = this.getPlatformOption();
const arch = this.architecture;
const arch = this.distributionArchitecture();
const imageType = this.packageType;
if (core.isDebug()) {

View file

@ -10,7 +10,7 @@ import path from 'path';
const supportedPlatform = `'linux', 'linux-musl', 'macos', 'solaris', 'windows'`;
const supportedArchitecture = `'x86', 'x64', 'armv7', 'aarch64', 'ppc64le'`;
const supportedArchitectures = `'x86', 'x64', 'armv7', 'aarch64', 'ppc64le'`;
export class LibericaDistributions extends JavaBase {
constructor(installerOptions: JavaInstallerOptions) {
@ -112,7 +112,8 @@ export class LibericaDistributions extends JavaBase {
}
private getArchitectureOptions(): ArchitectureOptions {
switch (this.architecture) {
const arch = this.distributionArchitecture();
switch (arch) {
case 'x86':
return { bitness: '32', arch: 'x86' };
case 'x64':
@ -125,7 +126,7 @@ export class LibericaDistributions extends JavaBase {
return { bitness: '64', arch: 'ppc' };
default:
throw new Error(
`Architecture '${this.architecture}' is not supported. Supported architectures: ${supportedArchitecture}`
`Architecture '${this.architecture}' is not supported. Supported architectures: ${supportedArchitectures}`
);
}
}
@ -156,4 +157,14 @@ export class LibericaDistributions extends JavaBase {
}
return mainVersion;
}
protected distributionArchitecture(): string {
let arch = super.distributionArchitecture();
switch (arch) {
case 'arm':
return 'armv7';
default:
return arch;
}
}
}

View file

@ -1,12 +1,12 @@
import { JavaBase } from '../base-installer';
import { JavaDownloadRelease, JavaInstallerOptions, JavaInstallerResults } from '../base-models';
import semver from 'semver';
import { extractJdkFile, getDownloadArchiveExtension, isVersionSatisfies } from '../../util';
import { extractJdkFile, getDownloadArchiveExtension } from '../../util';
import * as core from '@actions/core';
import { MicrosoftVersion, PlatformOptions } from './models';
import * as tc from '@actions/tool-cache';
import { OutgoingHttpHeaders } from 'http';
import fs from 'fs';
import path from 'path';
import { ITypedResponse } from '@actions/http-client/interfaces';
export class MicrosoftDistributions extends JavaBase {
constructor(installerOptions: JavaInstallerOptions) {
@ -37,7 +37,8 @@ export class MicrosoftDistributions extends JavaBase {
}
protected async findPackageForDownload(range: string): Promise<JavaDownloadRelease> {
if (this.architecture !== 'x64' && this.architecture !== 'aarch64') {
const arch = this.distributionArchitecture();
if (arch !== 'x64' && arch !== 'aarch64') {
throw new Error(`Unsupported architecture: ${this.architecture}`);
}
@ -49,82 +50,60 @@ export class MicrosoftDistributions extends JavaBase {
throw new Error('Microsoft Build of OpenJDK provides only the `jdk` package type');
}
const availableVersionsRaw = await this.getAvailableVersions();
const manifest = await this.getAvailableVersions();
const opts = this.getPlatformOption();
const availableVersions = availableVersionsRaw.map(item => ({
url: `https://aka.ms/download-jdk/microsoft-jdk-${item.version.join('.')}-${opts.os}-${
this.architecture
}.${opts.archive}`,
version: this.convertVersionToSemver(item)
}));
if (!manifest) {
throw new Error('Could not load manifest for Microsoft Build of OpenJDK');
}
const satisfiedVersion = availableVersions
.filter(item => isVersionSatisfies(range, item.version))
.sort((a, b) => -semver.compareBuild(a.version, b.version))[0];
const foundRelease = await tc.findFromManifest(range, true, manifest, arch);
if (!satisfiedVersion) {
const availableOptions = availableVersions.map(item => item.version).join(', ');
const availableOptionsMessage = availableOptions
? `\nAvailable versions: ${availableOptions}`
: '';
if (!foundRelease) {
throw new Error(
`Could not find satisfied version for SemVer ${range}. ${availableOptionsMessage}`
`Could not find satisfied version for SemVer ${range}. ${manifest
.map(item => item.version)
.join(', ')}`
);
}
return satisfiedVersion;
return { url: foundRelease.files[0].download_url, version: foundRelease.version };
}
private async getAvailableVersions(): Promise<MicrosoftVersion[]> {
private async getAvailableVersions(): Promise<tc.IToolRelease[] | null> {
// TODO get these dynamically!
// We will need Microsoft to add an endpoint where we can query for versions.
const jdkVersions = [
{
version: [17, 0, 3]
},
{
version: [17, 0, 1, 12, 1]
},
{
version: [16, 0, 2, 7, 1]
},
{
version: [11, 0, 15]
const token = core.getInput('token');
const owner = 'actions';
const repository = 'setup-java';
const branch = 'main';
const filePath = 'src/distributions/microsoft/microsoft-openjdk-versions.json';
let releases: tc.IToolRelease[] | null = null;
const fileUrl = `https://api.github.com/repos/${owner}/${repository}/contents/${filePath}?ref=${branch}`;
const headers: OutgoingHttpHeaders = {
authorization: token,
accept: 'application/vnd.github.VERSION.raw'
};
let response: ITypedResponse<tc.IToolRelease[]> | null = null;
try {
response = await this.http.getJson<tc.IToolRelease[]>(fileUrl, headers);
if (!response.result) {
return null;
}
];
// M1 is only supported for Java 16 & 17
if (process.platform !== 'darwin' || this.architecture !== 'aarch64') {
jdkVersions.push({
version: [11, 0, 13, 8, 1]
});
} catch (err) {
core.debug(
`Http request for microsoft-openjdk-versions.json failed with status code: ${response?.statusCode}`
);
return null;
}
return jdkVersions;
}
private getPlatformOption(
platform: NodeJS.Platform = process.platform /* for testing */
): PlatformOptions {
switch (platform) {
case 'darwin':
return { archive: 'tar.gz', os: 'macos' };
case 'win32':
return { archive: 'zip', os: 'windows' };
case 'linux':
return { archive: 'tar.gz', os: 'linux' };
default:
throw new Error(
`Platform '${platform}' is not supported. Supported platforms: 'darwin', 'linux', 'win32'`
);
if (response.result) {
releases = response.result;
}
}
private convertVersionToSemver(version: MicrosoftVersion): string {
const major = version.version[0];
const minor = version.version[1];
const patch = version.version[2];
return `${major}.${minor}.${patch}`;
return releases;
}
}

View file

@ -0,0 +1,181 @@
[
{
"version": "17.0.3",
"stable": true,
"release_url": "https://aka.ms/download-jdk",
"files": [
{
"filename": "microsoft-jdk-17.0.3-macos-x64.tar.gz",
"arch": "x64",
"platform": "darwin",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-17.0.3-macos-x64.tar.gz"
},
{
"filename": "microsoft-jdk-17.0.3-linux-x64.tar.gz",
"arch": "x64",
"platform": "linux",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-17.0.3-linux-x64.tar.gz"
},
{
"filename": "microsoft-jdk-17.0.3-windows-x64.zip",
"arch": "x64",
"platform": "win32",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-17.0.3-windows-x64.zip"
},
{
"filename": "microsoft-jdk-17.0.3-macos-aarch64.tar.gz",
"arch": "aarch64",
"platform": "darwin",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-17.0.3-macos-aarch64.tar.gz"
},
{
"filename": "microsoft-jdk-17.0.3-linux-aarch64.tar.gz",
"arch": "aarch64",
"platform": "linux",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-17.0.3-linux-aarch64.tar.gz"
}
]
},
{
"version": "17.0.1+12.1",
"stable": true,
"release_url": "https://aka.ms/download-jdk",
"files": [
{
"filename": "microsoft-jdk-17.0.1.12.1-macos-x64.tar.gz",
"arch": "x64",
"platform": "darwin",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-17.0.1.12.1-macos-x64.tar.gz"
},
{
"filename": "microsoft-jdk-17.0.1.12.1-linux-x64.tar.gz",
"arch": "x64",
"platform": "linux",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-17.0.1.12.1-linux-x64.tar.gz"
},
{
"filename": "microsoft-jdk-17.0.1.12.1-windows-x64.zip",
"arch": "x64",
"platform": "win32",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-17.0.1.12.1-windows-x64.zip"
},
{
"filename": "microsoft-jdk-17.0.1.12.1-macos-aarch64.tar.gz",
"arch": "aarch64",
"platform": "darwin",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-17.0.1.12.1-macos-aarch64.tar.gz"
},
{
"filename": "microsoft-jdk-17.0.1.12.1-linux-aarch64.tar.gz",
"arch": "aarch64",
"platform": "linux",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-17.0.1.12.1-linux-aarch64.tar.gz"
}
]
},
{
"version": "16.0.2+7.1",
"stable": true,
"release_url": "https://aka.ms/download-jdk",
"files": [
{
"filename": "microsoft-jdk-16.0.2.7.1-macos-x64.tar.gz",
"arch": "x64",
"platform": "darwin",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-16.0.2.7.1-macos-x64.tar.gz"
},
{
"filename": "microsoft-jdk-16.0.2.7.1-linux-x64.tar.gz",
"arch": "x64",
"platform": "linux",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-16.0.2.7.1-linux-x64.tar.gz"
},
{
"filename": "microsoft-jdk-16.0.2.7.1-windows-x64.zip",
"arch": "x64",
"platform": "win32",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-16.0.2.7.1-windows-x64.zip"
},
{
"filename": "microsoft-jdk-16.0.2.7.1-macos-aarch64.tar.gz",
"arch": "aarch64",
"platform": "darwin",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-16.0.2.7.1-macos-aarch64.tar.gz"
},
{
"filename": "microsoft-jdk-16.0.2.7.1-linux-aarch64.tar.gz",
"arch": "aarch64",
"platform": "linux",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-16.0.2.7.1-linux-aarch64.tar.gz"
}
]
},
{
"version": "11.0.15",
"stable": true,
"release_url": "https://aka.ms/download-jdk",
"files": [
{
"filename": "microsoft-jdk-11.0.15-macos-x64.tar.gz",
"arch": "x64",
"platform": "darwin",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-11.0.15-macos-x64.tar.gz"
},
{
"filename": "microsoft-jdk-11.0.15-linux-x64.tar.gz",
"arch": "x64",
"platform": "linux",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-11.0.15-linux-x64.tar.gz"
},
{
"filename": "microsoft-jdk-11.0.15-windows-x64.zip",
"arch": "x64",
"platform": "win32",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-11.0.15-windows-x64.zip"
},
{
"filename": "microsoft-jdk-11.0.15-macos-aarch64.tar.gz",
"arch": "aarch64",
"platform": "darwin",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-11.0.15-macos-aarch64.tar.gz"
},
{
"filename": "microsoft-jdk-11.0.15-linux-aarch64.tar.gz",
"arch": "aarch64",
"platform": "linux",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-11.0.15-linux-aarch64.tar.gz"
}
]
},
{
"version": "11.0.13+8.1",
"stable": true,
"release_url": "https://aka.ms/download-jdk",
"files": [
{
"filename": "microsoft-jdk-11.0.13.8.1-macos-x64.tar.gz",
"arch": "x64",
"platform": "darwin",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-11.0.13.8.1-macos-x64.tar.gz"
},
{
"filename": "microsoft-jdk-11.0.13.8.1-linux-x64.tar.gz",
"arch": "x64",
"platform": "linux",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-11.0.13.8.1-linux-x64.tar.gz"
},
{
"filename": "microsoft-jdk-11.0.13.8.1-windows-x64.zip",
"arch": "x64",
"platform": "win32",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-11.0.13.8.1-windows-x64.zip"
},
{
"filename": "microsoft-jdk-11.0.13.8.1-linux-aarch64.tar.gz",
"arch": "aarch64",
"platform": "linux",
"download_url": "https://aka.ms/download-jdk/microsoft-jdk-11.0.13.8.1-linux-aarch64.tar.gz"
}
]
}
]

View file

@ -1,12 +1,2 @@
type OsVersions = 'linux' | 'macos' | 'windows';
type ArchiveType = 'tar.gz' | 'zip';
export interface PlatformOptions {
archive: ArchiveType;
os: OsVersions;
}
export interface MicrosoftVersion {
downloadUrl?: string;
version: Array<number>;
}

View file

@ -86,7 +86,7 @@ export class TemurinDistribution extends JavaBase {
private async getAvailableVersions(): Promise<ITemurinAvailableVersions[]> {
const platform = this.getPlatformOption();
const arch = this.architecture;
const arch = this.distributionArchitecture();
const imageType = this.packageType;
const versionRange = encodeURI('[1.0,100.0]'); // retrieve all available versions
const releaseType = this.stable ? 'ga' : 'ea';

View file

@ -131,14 +131,17 @@ export class ZuluDistribution extends JavaBase {
hw_bitness: string;
abi: string;
} {
if (this.architecture == 'x64') {
return { arch: 'x86', hw_bitness: '64', abi: '' };
} else if (this.architecture == 'x86') {
return { arch: 'x86', hw_bitness: '32', abi: '' };
} else if (this.architecture == 'arm64') {
return { arch: 'arm', hw_bitness: '64', abi: '' };
} else {
return { arch: this.architecture, hw_bitness: '', abi: '' };
const arch = this.distributionArchitecture();
switch (arch) {
case 'x64':
return { arch: 'x86', hw_bitness: '64', abi: '' };
case 'x86':
return { arch: 'x86', hw_bitness: '32', abi: '' };
case 'aarch64':
case 'arm64':
return { arch: 'arm', hw_bitness: '64', abi: '' };
default:
return { arch: arch, hw_bitness: '', abi: '' };
}
}

View file

@ -1,6 +1,7 @@
import * as core from '@actions/core';
import * as auth from './auth';
import { getBooleanInput, isCacheFeatureAvailable } from './util';
import * as toolchains from './toolchains';
import * as constants from './constants';
import { restore } from './cache';
import * as path from 'path';
@ -16,9 +17,14 @@ async function run() {
const jdkFile = core.getInput(constants.INPUT_JDK_FILE);
const cache = core.getInput(constants.INPUT_CACHE);
const checkLatest = getBooleanInput(constants.INPUT_CHECK_LATEST, false);
let toolchainIds = core.getMultilineInput(constants.INPUT_MVN_TOOLCHAIN_ID);
if (versions.length !== toolchainIds.length) {
toolchainIds = [];
}
core.startGroup('Installed distributions');
for (const version of versions) {
for (const [index, version] of versions.entries()) {
const installerOptions: JavaInstallerOptions = {
architecture,
packageType,
@ -32,6 +38,12 @@ async function run() {
}
const result = await distribution.setupJava();
await toolchains.configureToolchains(
version,
distributionName,
result.path,
toolchainIds[index]
);
core.info('');
core.info('Java configuration:');

158
src/toolchains.ts Normal file
View file

@ -0,0 +1,158 @@
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import * as core from '@actions/core';
import * as io from '@actions/io';
import * as constants from './constants';
import { getBooleanInput } from './util';
import { create as xmlCreate } from 'xmlbuilder2';
interface JdkInfo {
version: string;
vendor: string;
id: string;
jdkHome: string;
}
export async function configureToolchains(
version: string,
distributionName: string,
jdkHome: string,
toolchainId?: string
) {
const vendor = core.getInput(constants.INPUT_MVN_TOOLCHAIN_VENDOR) || distributionName;
const id = toolchainId || `${vendor}_${version}`;
const settingsDirectory =
core.getInput(constants.INPUT_SETTINGS_PATH) || path.join(os.homedir(), constants.M2_DIR);
const overwriteSettings = getBooleanInput(constants.INPUT_OVERWRITE_SETTINGS, true);
await createToolchainsSettings({
jdkInfo: {
version,
vendor,
id,
jdkHome
},
settingsDirectory,
overwriteSettings
});
}
export async function createToolchainsSettings({
jdkInfo,
settingsDirectory,
overwriteSettings
}: {
jdkInfo: JdkInfo;
settingsDirectory: string;
overwriteSettings: boolean;
}) {
core.info(
`Creating ${constants.MVN_TOOLCHAINS_FILE} for JDK version ${jdkInfo.version} from ${jdkInfo.vendor}`
);
// when an alternate m2 location is specified use only that location (no .m2 directory)
// otherwise use the home/.m2/ path
await io.mkdirP(settingsDirectory);
const originalToolchains = await readExistingToolchainsFile(settingsDirectory);
const updatedToolchains = generateToolchainDefinition(
originalToolchains,
jdkInfo.version,
jdkInfo.vendor,
jdkInfo.id,
jdkInfo.jdkHome
);
await writeToolchainsFileToDisk(settingsDirectory, updatedToolchains, overwriteSettings);
}
// only exported for testing purposes
export function generateToolchainDefinition(
original: string,
version: string,
vendor: string,
id: string,
jdkHome: string
) {
let xmlObj;
if (original?.length) {
xmlObj = xmlCreate(original)
.root()
.ele({
toolchain: {
type: 'jdk',
provides: {
version: `${version}`,
vendor: `${vendor}`,
id: `${id}`
},
configuration: {
jdkHome: `${jdkHome}`
}
}
});
} else
xmlObj = xmlCreate({
toolchains: {
'@xmlns': 'https://maven.apache.org/TOOLCHAINS/1.1.0',
'@xmlns:xsi': 'https://www.w3.org/2001/XMLSchema-instance',
'@xsi:schemaLocation':
'https://maven.apache.org/TOOLCHAINS/1.1.0 https://maven.apache.org/xsd/toolchains-1.1.0.xsd',
toolchain: [
{
type: 'jdk',
provides: {
version: `${version}`,
vendor: `${vendor}`,
id: `${id}`
},
configuration: {
jdkHome: `${jdkHome}`
}
}
]
}
});
return xmlObj.end({
format: 'xml',
wellFormed: false,
headless: false,
prettyPrint: true,
width: 80
});
}
async function readExistingToolchainsFile(directory: string) {
const location = path.join(directory, constants.MVN_TOOLCHAINS_FILE);
if (fs.existsSync(location)) {
return fs.readFileSync(location, {
encoding: 'utf-8',
flag: 'r'
});
}
return '';
}
async function writeToolchainsFileToDisk(
directory: string,
settings: string,
overwriteSettings: boolean
) {
const location = path.join(directory, constants.MVN_TOOLCHAINS_FILE);
const settingsExists = fs.existsSync(location);
if (settingsExists && overwriteSettings) {
core.info(`Overwriting existing file ${location}`);
} else if (!settingsExists) {
core.info(`Writing to ${location}`);
} else {
core.info(
`Skipping generation of ${location} because file already exists and overwriting is not enabled`
);
return;
}
return fs.writeFileSync(location, settings, {
encoding: 'utf-8',
flag: 'w'
});
}

View file

@ -6,6 +6,7 @@
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
// "allowJs": true, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
"resolveJsonModule": true,
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
// "declaration": true, /* Generates corresponding '.d.ts' file. */
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */