Compare commits

10 Commits
4a1fa615de ... 096b500552

| Author | SHA1 | Date |
|---|---|---|
| | 096b500552 | |
| | 59bb2dd7cd | |
| | 093dbebc2e | |
| | d79ea53307 | |
| | 0fa906a067 | |
| | 0b63af4c8c | |
| | 31d9a4bd37 | |
| | 154a05918b | |
| | 7a2b445a4b | |
| | eed20d30d5 | |

dist/index.js (vendored) · 206
@@ -8358,7 +8358,6 @@ const core = __importStar(__webpack_require__(470));
const exec = __importStar(__webpack_require__(986));
const fs = __importStar(__webpack_require__(747));
const github = __importStar(__webpack_require__(469));
const https = __importStar(__webpack_require__(211));
const io = __importStar(__webpack_require__(1));
const path = __importStar(__webpack_require__(622));
const refHelper = __importStar(__webpack_require__(227));
@@ -8371,63 +8370,49 @@ function downloadRepository(accessToken, owner, repo, ref, commit, repositoryPat
const runnerTemp = process.env['RUNNER_TEMP'];
assert.ok(runnerTemp, 'RUNNER_TEMP not defined');
const archivePath = path.join(runnerTemp, 'checkout.tar.gz');
// await fs.promises.writeFile(archivePath, raw)
// Get the archive URL using the REST API
yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
// Prepare the archive stream
core.debug(`Preparing the archive stream: ${archivePath}`);
yield io.rmRF(archivePath);
const fileStream = fs.createWriteStream(archivePath);
const fileStreamClosed = getFileClosedPromise(fileStream);
try {
// Get the archive URL using the GitHub REST API
core.info('Getting archive URL from GitHub REST API');
const octokit = new github.GitHub(accessToken);
const params = {
method: 'HEAD',
archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
owner: owner,
repo: repo,
ref: refHelper.getDownloadRef(ref, commit)
};
const response = yield octokit.repos.getArchiveLink(params);
console.log('GOT THE RESPONSE');
if (response.status != 302) {
throw new Error(`Unexpected response from GitHub API. Status: '${response.status}'`);
}
console.log('GETTING THE LOCATION');
const archiveUrl = response.headers['Location']; // Do not print the archive URL because it has an embedded token
assert.ok(archiveUrl, `Expected GitHub API response to contain 'Location' header`);
// Download the archive
core.info('Downloading the archive'); // Do not print the archive URL because it has an embedded token
yield downloadFile(archiveUrl, fileStream);
}
finally {
yield fileStreamClosed;
}
// return Buffer.from(response.data) // response.data is ArrayBuffer
// Ensure file does not exist
core.debug(`Ensuring archive file does not exist: ${archivePath}`);
yield io.rmRF(archivePath);
// Download the archive
let archiveData = yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
core.info('Downloading the archive using the REST API');
return yield downloadArchive(accessToken, owner, repo, ref, commit);
}));
// // Download the archive
// core.info('Downloading the archive') // Do not print the URL since it contains a token to download the archive
// await downloadFile(archiveUrl, archivePath)
// // console.log(`status=${response.status}`)
// // console.log(`headers=${JSON.stringify(response.headers)}`)
// // console.log(`data=${response.data}`)
// // console.log(`data=${JSON.stringify(response.data)}`)
// // for (const key of Object.keys(response.data)) {
// //   console.log(`data['${key}']=${response.data[key]}`)
// // }
// // Write archive to file
// const runnerTemp = process.env['RUNNER_TEMP'] as string
// assert.ok(runnerTemp, 'RUNNER_TEMP not defined')
// const archivePath = path.join(runnerTemp, 'checkout.tar.gz')
// await io.rmRF(archivePath)
// Write archive to disk
core.info('Writing archive to disk');
yield fs.promises.writeFile(archivePath, archiveData);
archiveData = Buffer.from(''); // Free memory
// // Get the archive URL using the REST API
// await retryHelper.execute(async () => {
//   // Prepare the archive stream
//   core.debug(`Preparing the archive stream: ${archivePath}`)
//   await io.rmRF(archivePath)
//   const fileStream = fs.createWriteStream(archivePath)
//   const fileStreamClosed = getFileClosedPromise(fileStream)
//   try {
//     // Get the archive URL
//     core.info('Getting archive URL')
//     const archiveUrl = await getArchiveUrl(
//       accessToken,
//       owner,
//       repo,
//       ref,
//       commit
//     )
//     // Download the archive
//     core.info('Downloading the archive') // Do not print the archive URL because it has an embedded token
//     await downloadFile(archiveUrl, fileStream)
//   } finally {
//     fileStream.end()
//     await fileStreamClosed
//   }
// })
// await fs.promises.writeFile(archivePath, raw)
// // await exec.exec(`ls -la "${archiveFile}"`, [], {
// //   cwd: repositoryPath
// // } as ExecOptions)
// Extract archive
const extractPath = path.join(runnerTemp, `checkout-archive${IS_WINDOWS ? '.zip' : '.tar.gz'}`);
const extractPath = path.join(runnerTemp, `checkout`);
yield io.rmRF(extractPath);
yield io.mkdirP(extractPath);
if (IS_WINDOWS) {
@@ -8457,42 +8442,93 @@ function downloadRepository(accessToken, owner, repo, ref, commit, repositoryPat
});
}
exports.downloadRepository = downloadRepository;
function downloadFile(url, fileStream) {
return new Promise((resolve, reject) => {
try {
https.get(url, (response) => {
if (response.statusCode != 200) {
reject(`Request failed with status '${response.statusCode}'`);
response.resume(); // Consume response data to free up memory
return;
}
response.on('data', chunk => {
fileStream.write(chunk);
});
response.on('end', () => {
resolve();
});
response.on('error', err => {
reject(err);
});
// response.pipe(fileStream)
});
}
catch (err) {
reject(err);
function downloadArchive(accessToken, owner, repo, ref, commit) {
return __awaiter(this, void 0, void 0, function* () {
const octokit = new github.GitHub(accessToken);
const params = {
owner: owner,
repo: repo,
archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
ref: refHelper.getDownloadRef(ref, commit)
};
const response = yield octokit.repos.getArchiveLink(params);
console.log('GOT THE RESPONSE');
console.log(`status=${response.status}`);
console.log(`headers=${JSON.stringify(response.headers)}`);
console.log(`data=${JSON.stringify(response.data)}`);
if (response.status != 200) {
throw new Error(`Unexpected response from GitHub API. Status: '${response.status}'`);
}
return Buffer.from(response.data); // response.data is ArrayBuffer
});
}
function getFileClosedPromise(stream) {
return new Promise((resolve, reject) => {
stream.on('error', err => {
reject(err);
});
stream.on('finish', () => {
resolve();
});
});
}
// async function getArchiveUrl(
//   accessToken: string,
//   owner: string,
//   repo: string,
//   ref: string,
//   commit: string
// ): Promise<string> {
//   const octokit = new github.GitHub(accessToken)
//   const params: RequestOptions & ReposGetArchiveLinkParams = {
//     method: 'HEAD',
//     owner: owner,
//     repo: repo,
//     archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
//     ref: refHelper.getDownloadRef(ref, commit)
//   }
//   const response = await octokit.repos.getArchiveLink(params)
//   console.log('GOT THE RESPONSE')
//   console.log(`status=${response.status}`)
//   console.log(`headers=${JSON.stringify(response.headers)}`)
//   console.log(`data=${JSON.stringify(response.data)}`)
//   if (response.status != 200) {
//     throw new Error(
//       `Unexpected response from GitHub API. Status: '${response.status}'`
//     )
//   }
//   console.log('GETTING THE LOCATION')
//   const archiveUrl = response.headers['Location'] // Do not print the archive URL because it has an embedded token
//   assert.ok(
//     archiveUrl,
//     `Expected GitHub API response to contain 'Location' header`
//   )
//   return archiveUrl
// }
// function downloadFile(url: string, fileStream: WriteStream): Promise<void> {
//   return new Promise((resolve, reject) => {
//     try {
//       https.get(url, (response: IncomingMessage) => {
//         if (response.statusCode != 200) {
//           reject(`Request failed with status '${response.statusCode}'`)
//           response.resume() // Consume response data to free up memory
//           return
//         }
//         response.on('data', chunk => {
//           fileStream.write(chunk)
//         })
//         response.on('end', () => {
//           resolve()
//         })
//         response.on('error', err => {
//           reject(err)
//         })
//       })
//     } catch (err) {
//       reject(err)
//     }
//   })
// }
// function getFileClosedPromise(stream: WriteStream): Promise<void> {
//   return new Promise((resolve, reject) => {
//     stream.on('error', err => {
//       reject(err)
//     })
//     stream.on('finish', () => {
//       resolve()
//     })
//   })
// }

/***/ }),
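The bundled change above replaces the old stream-from-`Location`-header path with a buffered download; the same logic appears in TypeScript in the source diff that follows. Below is a minimal sketch of the new helper, assuming `@actions/github` v2 (`new github.GitHub(token)`) and the `ReposGetArchiveLinkParams` typing from `@octokit/rest` as used in the diff; the `refHelper.getDownloadRef` call is replaced here by a hypothetical inline fallback.

```ts
import * as github from '@actions/github'
import {ReposGetArchiveLinkParams} from '@octokit/rest'

const IS_WINDOWS = process.platform === 'win32'

// Sketch of the buffered download introduced by this change.
// Parameter names mirror the diff; the ref fallback below stands in for
// refHelper.getDownloadRef(ref, commit), which is not reproduced here.
async function downloadArchive(
  accessToken: string,
  owner: string,
  repo: string,
  ref: string,
  commit: string
): Promise<Buffer> {
  const octokit = new github.GitHub(accessToken)
  const params: ReposGetArchiveLinkParams = {
    owner,
    repo,
    archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
    ref: commit || ref // hypothetical stand-in for refHelper.getDownloadRef
  }
  const response = await octokit.repos.getArchiveLink(params)
  if (response.status != 200) {
    throw new Error(
      `Unexpected response from GitHub API. Status: '${response.status}'`
    )
  }
  // response.data is an ArrayBuffer containing the tarball/zipball bytes
  return Buffer.from(response.data)
}
```

The client follows the archive redirect itself here, which is why the new code checks for a 200 and reads `response.data` instead of expecting a 302 with a `Location` header.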
@@ -11,8 +11,7 @@ import * as retryHelper from './retry-helper'
import * as toolCache from '@actions/tool-cache'
import {ExecOptions} from '@actions/exec/lib/interfaces'
import {IncomingMessage} from 'http'
import {ReposGetArchiveLinkParams} from '@octokit/rest'
import {RequestOptions} from 'https'
import {RequestOptions, ReposGetArchiveLinkParams} from '@octokit/rest'
import {WriteStream} from 'fs'

const IS_WINDOWS = process.platform === 'win32'
@@ -29,78 +28,52 @@ export async function downloadRepository(
const runnerTemp = process.env['RUNNER_TEMP'] as string
assert.ok(runnerTemp, 'RUNNER_TEMP not defined')
const archivePath = path.join(runnerTemp, 'checkout.tar.gz')
// await fs.promises.writeFile(archivePath, raw)

// Get the archive URL using the REST API
await retryHelper.execute(async () => {
// Prepare the archive stream
core.debug(`Preparing the archive stream: ${archivePath}`)
await io.rmRF(archivePath)
const fileStream = fs.createWriteStream(archivePath)
const fileStreamClosed = getFileClosedPromise(fileStream)
// Ensure file does not exist
core.debug(`Ensuring archive file does not exist: ${archivePath}`)
await io.rmRF(archivePath)

try {
// Get the archive URL using the GitHub REST API
core.info('Getting archive URL from GitHub REST API')
const octokit = new github.GitHub(accessToken)
const params: RequestOptions & ReposGetArchiveLinkParams = {
method: 'HEAD',
archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
owner: owner,
repo: repo,
ref: refHelper.getDownloadRef(ref, commit)
}
const response = await octokit.repos.getArchiveLink(params)
console.log('GOT THE RESPONSE')
if (response.status != 302) {
throw new Error(
`Unexpected response from GitHub API. Status: '${response.status}'`
)
}
console.log('GETTING THE LOCATION')
const archiveUrl = response.headers['Location'] // Do not print the archive URL because it has an embedded token
assert.ok(
archiveUrl,
`Expected GitHub API response to contain 'Location' header`
)

// Download the archive
core.info('Downloading the archive') // Do not print the archive URL because it has an embedded token
await downloadFile(archiveUrl, fileStream)
} finally {
await fileStreamClosed
}

// return Buffer.from(response.data) // response.data is ArrayBuffer
// Download the archive
let archiveData = await retryHelper.execute(async () => {
core.info('Downloading the archive using the REST API')
return await downloadArchive(accessToken, owner, repo, ref, commit)
})

// // Download the archive
// core.info('Downloading the archive') // Do not print the URL since it contains a token to download the archive
// await downloadFile(archiveUrl, archivePath)
// Write archive to disk
core.info('Writing archive to disk')
await fs.promises.writeFile(archivePath, archiveData)
archiveData = Buffer.from('') // Free memory

// // console.log(`status=${response.status}`)
// // console.log(`headers=${JSON.stringify(response.headers)}`)
// // console.log(`data=${response.data}`)
// // console.log(`data=${JSON.stringify(response.data)}`)
// // for (const key of Object.keys(response.data)) {
// //   console.log(`data['${key}']=${response.data[key]}`)
// // }
// // Get the archive URL using the REST API
// await retryHelper.execute(async () => {
//   // Prepare the archive stream
//   core.debug(`Preparing the archive stream: ${archivePath}`)
//   await io.rmRF(archivePath)
//   const fileStream = fs.createWriteStream(archivePath)
//   const fileStreamClosed = getFileClosedPromise(fileStream)

// // Write archive to file
// const runnerTemp = process.env['RUNNER_TEMP'] as string
// assert.ok(runnerTemp, 'RUNNER_TEMP not defined')
// const archivePath = path.join(runnerTemp, 'checkout.tar.gz')
// await io.rmRF(archivePath)
// await fs.promises.writeFile(archivePath, raw)
// // await exec.exec(`ls -la "${archiveFile}"`, [], {
// //   cwd: repositoryPath
// // } as ExecOptions)
//   try {
//     // Get the archive URL
//     core.info('Getting archive URL')
//     const archiveUrl = await getArchiveUrl(
//       accessToken,
//       owner,
//       repo,
//       ref,
//       commit
//     )

//     // Download the archive
//     core.info('Downloading the archive') // Do not print the archive URL because it has an embedded token
//     await downloadFile(archiveUrl, fileStream)
//   } finally {
//     fileStream.end()
//     await fileStreamClosed
//   }
// })

// Extract archive
const extractPath = path.join(
runnerTemp,
`checkout-archive${IS_WINDOWS ? '.zip' : '.tar.gz'}`
)
const extractPath = path.join(runnerTemp, `checkout`)
await io.rmRF(extractPath)
await io.mkdirP(extractPath)
if (IS_WINDOWS) {
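The hunk above wires the new helper into `downloadRepository`: the download is wrapped in `retryHelper.execute`, the resulting buffer is written under `RUNNER_TEMP`, the buffer is released, and the archive is extracted with `@actions/tool-cache`. Below is a condensed sketch of that flow under the same assumptions; the Windows extraction branch is not shown in this hunk, so the `extractZip` call is an assumption.

```ts
import * as assert from 'assert'
import * as fs from 'fs'
import * as path from 'path'
import * as io from '@actions/io'
import * as toolCache from '@actions/tool-cache'

const IS_WINDOWS = process.platform === 'win32'

// Condensed sketch of the new download-write-extract flow.
// `download` stands for () => downloadArchive(accessToken, owner, repo, ref, commit),
// which the diff wraps in retryHelper.execute for retries.
async function downloadAndExtract(download: () => Promise<Buffer>): Promise<string> {
  const runnerTemp = process.env['RUNNER_TEMP'] as string
  assert.ok(runnerTemp, 'RUNNER_TEMP not defined')

  // Ensure the archive file does not exist, then download
  const archivePath = path.join(runnerTemp, 'checkout.tar.gz')
  await io.rmRF(archivePath)
  let archiveData = await download()

  // Write archive to disk and free the in-memory copy
  await fs.promises.writeFile(archivePath, archiveData)
  archiveData = Buffer.from('')

  // Extract into a clean directory
  const extractPath = path.join(runnerTemp, 'checkout')
  await io.rmRF(extractPath)
  await io.mkdirP(extractPath)
  if (IS_WINDOWS) {
    await toolCache.extractZip(archivePath, extractPath) // assumption; not shown in the hunk
  } else {
    await toolCache.extractTar(archivePath, extractPath)
  }
  return extractPath
}
```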
@@ -108,9 +81,6 @@ export async function downloadRepository(
} else {
await toolCache.extractTar(archivePath, extractPath)
}
// await exec.exec(`tar -xzf "${archiveFile}"`, [], {
//   cwd: extractPath
// } as ExecOptions)

// Determine the real directory to copy (ignore extra dir at root of the archive)
const archiveFileNames = await fs.promises.readdir(extractPath)
@@ -134,40 +104,101 @@ export async function downloadRepository(
} as ExecOptions)
}

function downloadFile(url: string, fileStream: WriteStream): Promise<void> {
return new Promise((resolve, reject) => {
try {
https.get(url, (response: IncomingMessage) => {
if (response.statusCode != 200) {
reject(`Request failed with status '${response.statusCode}'`)
response.resume() // Consume response data to free up memory
return
}
async function downloadArchive(
accessToken: string,
owner: string,
repo: string,
ref: string,
commit: string
): Promise<Buffer> {
const octokit = new github.GitHub(accessToken)
const params: ReposGetArchiveLinkParams = {
owner: owner,
repo: repo,
archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
ref: refHelper.getDownloadRef(ref, commit)
}
const response = await octokit.repos.getArchiveLink(params)
console.log('GOT THE RESPONSE')
console.log(`status=${response.status}`)
console.log(`headers=${JSON.stringify(response.headers)}`)
console.log(`data=${JSON.stringify(response.data)}`)
if (response.status != 200) {
throw new Error(
`Unexpected response from GitHub API. Status: '${response.status}'`
)
}

response.on('data', chunk => {
fileStream.write(chunk)
})
response.on('end', () => {
resolve()
})
response.on('error', err => {
reject(err)
})
// response.pipe(fileStream)
})
} catch (err) {
reject(err)
}
})
return Buffer.from(response.data) // response.data is ArrayBuffer
}

function getFileClosedPromise(stream: WriteStream): Promise<void> {
return new Promise((resolve, reject) => {
stream.on('error', err => {
reject(err)
})
stream.on('finish', () => {
resolve()
})
})
}
// async function getArchiveUrl(
//   accessToken: string,
//   owner: string,
//   repo: string,
//   ref: string,
//   commit: string
// ): Promise<string> {
//   const octokit = new github.GitHub(accessToken)
//   const params: RequestOptions & ReposGetArchiveLinkParams = {
//     method: 'HEAD',
//     owner: owner,
//     repo: repo,
//     archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
//     ref: refHelper.getDownloadRef(ref, commit)
//   }
//   const response = await octokit.repos.getArchiveLink(params)
//   console.log('GOT THE RESPONSE')
//   console.log(`status=${response.status}`)
//   console.log(`headers=${JSON.stringify(response.headers)}`)
//   console.log(`data=${JSON.stringify(response.data)}`)
//   if (response.status != 200) {
//     throw new Error(
//       `Unexpected response from GitHub API. Status: '${response.status}'`
//     )
//   }
//   console.log('GETTING THE LOCATION')
//   const archiveUrl = response.headers['Location'] // Do not print the archive URL because it has an embedded token
//   assert.ok(
//     archiveUrl,
//     `Expected GitHub API response to contain 'Location' header`
//   )
//   return archiveUrl
// }

// function downloadFile(url: string, fileStream: WriteStream): Promise<void> {
//   return new Promise((resolve, reject) => {
//     try {
//       https.get(url, (response: IncomingMessage) => {
//         if (response.statusCode != 200) {
//           reject(`Request failed with status '${response.statusCode}'`)
//           response.resume() // Consume response data to free up memory
//           return
//         }

//         response.on('data', chunk => {
//           fileStream.write(chunk)
//         })
//         response.on('end', () => {
//           resolve()
//         })
//         response.on('error', err => {
//           reject(err)
//         })
//       })
//     } catch (err) {
//       reject(err)
//     }
//   })
// }

// function getFileClosedPromise(stream: WriteStream): Promise<void> {
//   return new Promise((resolve, reject) => {
//     stream.on('error', err => {
//       reject(err)
//     })
//     stream.on('finish', () => {
//       resolve()
//     })
//   })
// }
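For reference, the stream-based path that this change retires (left in the diff only as commented-out code) piped the archive URL from the `Location` header through `https.get` into a `WriteStream` and waited for the stream to finish. A minimal reconstruction of those two helpers, taken from the commented-out blocks above:

```ts
import * as https from 'https'
import {IncomingMessage} from 'http'
import {WriteStream} from 'fs'

// Stream the archive URL into an already-open file stream.
function downloadFile(url: string, fileStream: WriteStream): Promise<void> {
  return new Promise((resolve, reject) => {
    try {
      https.get(url, (response: IncomingMessage) => {
        if (response.statusCode != 200) {
          reject(`Request failed with status '${response.statusCode}'`)
          response.resume() // Consume response data to free up memory
          return
        }
        response.on('data', chunk => fileStream.write(chunk))
        response.on('end', () => resolve())
        response.on('error', err => reject(err))
      })
    } catch (err) {
      reject(err)
    }
  })
}

// Resolve once the file stream has flushed and closed, or reject on error.
function getFileClosedPromise(stream: WriteStream): Promise<void> {
  return new Promise((resolve, reject) => {
    stream.on('error', err => reject(err))
    stream.on('finish', () => resolve())
  })
}
```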