- import * as assert from 'assert'
- import * as core from '@actions/core'
- import * as fs from 'fs'
- import * as github from '@actions/github'
- import * as https from 'https'
- import * as io from '@actions/io'
- import * as path from 'path'
- import * as refHelper from './ref-helper'
- import * as retryHelper from './retry-helper'
- import * as toolCache from '@actions/tool-cache'
- import {IncomingMessage} from 'http'
- import {ReposGetArchiveLinkParams} from '@octokit/rest'
- import {RequestOptions} from 'https'
- import {WriteStream} from 'fs'
- const IS_WINDOWS = process.platform === 'win32'
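-
- /**
-  * Downloads a repository archive (zipball on Windows, tarball elsewhere) via
-  * the GitHub REST API and extracts its contents into repositoryPath.
-  */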
- export async function downloadRepository(
-   accessToken: string,
-   owner: string,
-   repo: string,
-   ref: string,
-   commit: string,
-   repositoryPath: string
- ): Promise<void> {
-   // Determine the archive path
-   const runnerTemp = process.env['RUNNER_TEMP'] as string
-   assert.ok(runnerTemp, 'RUNNER_TEMP not defined')
-   const archivePath = path.join(
-     runnerTemp,
-     IS_WINDOWS ? 'checkout.zip' : 'checkout.tar.gz'
-   )
-
-   // Download the archive, retrying on transient failures
-   await retryHelper.execute(async () => {
-     // Prepare the archive stream
-     core.debug(`Preparing the archive stream: ${archivePath}`)
-     await io.rmRF(archivePath)
-     const fileStream = fs.createWriteStream(archivePath)
-     const fileStreamClosed = getFileClosedPromise(fileStream)
-     try {
-       // Get the archive URL using the GitHub REST API
-       core.info('Getting archive URL from GitHub REST API')
-       const octokit = new github.GitHub(accessToken)
-       const params: RequestOptions & ReposGetArchiveLinkParams = {
-         method: 'HEAD',
-         archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
-         owner: owner,
-         repo: repo,
-         ref: refHelper.getDownloadRef(ref, commit)
-       }
-       const response = await octokit.repos.getArchiveLink(params)
-       if (response.status !== 302) {
-         throw new Error(
-           `Unexpected response from GitHub API. Status: '${response.status}'`
-         )
-       }
-
-       // Response header names are lower-cased by the HTTP client
-       const archiveUrl = response.headers['location'] // Do not print the archive URL because it has an embedded token
-       assert.ok(
-         archiveUrl,
-         `Expected GitHub API response to contain 'location' header`
-       )
-
-       // Download the archive
-       core.info('Downloading the archive') // Do not print the archive URL because it has an embedded token
-       await downloadFile(archiveUrl, fileStream)
-     } finally {
-       // Always end the stream so its 'finish' event fires and the await below
-       // cannot hang, even if an error occurred before the download completed
-       fileStream.end()
-       await fileStreamClosed
-     }
-   })
-
-   // Extract the archive
-   const extractPath = path.join(
-     runnerTemp,
-     `checkout-archive${IS_WINDOWS ? '.zip' : '.tar.gz'}`
-   )
-   await io.rmRF(extractPath)
-   await io.mkdirP(extractPath)
-   if (IS_WINDOWS) {
-     await toolCache.extractZip(archivePath, extractPath)
-   } else {
-     await toolCache.extractTar(archivePath, extractPath)
-   }
-
-   // Determine the real directory to copy (ignore extra dir at root of the archive)
-   const archiveFileNames = await fs.promises.readdir(extractPath)
-   assert.ok(
-     archiveFileNames.length === 1,
-     'Expected exactly one directory inside archive'
-   )
-   const extraDirectoryName = archiveFileNames[0]
-   core.info(`Resolved ${extraDirectoryName}`) // contains the short SHA
-   const tempRepositoryPath = path.join(extractPath, extraDirectoryName)
-
-   // Move the files to the repository path
-   for (const fileName of await fs.promises.readdir(tempRepositoryPath)) {
-     const sourcePath = path.join(tempRepositoryPath, fileName)
-     const targetPath = path.join(repositoryPath, fileName)
-     await io.mv(sourcePath, targetPath)
-   }
- }
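-
- /**
-  * Streams the archive at the given URL into the provided write stream.
-  * The URL is not logged because it contains an embedded token.
-  */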
- function downloadFile(url: string, fileStream: WriteStream): Promise<void> {
-   return new Promise((resolve, reject) => {
-     try {
-       const request = https.get(url, (response: IncomingMessage) => {
-         if (response.statusCode !== 200) {
-           reject(
-             new Error(`Request failed with status '${response.statusCode}'`)
-           )
-           response.resume() // Consume response data to free up memory
-           return
-         }
-
-         // Write the response to the file stream. The caller ends the stream
-         // and awaits its 'finish' event to ensure the file is flushed.
-         response.on('data', chunk => {
-           fileStream.write(chunk)
-         })
-         response.on('end', () => {
-           resolve()
-         })
-         response.on('error', err => {
-           reject(err)
-         })
-       })
-
-       // Surface request-level failures (e.g. DNS or connection errors)
-       request.on('error', err => {
-         reject(err)
-       })
-     } catch (err) {
-       reject(err)
-     }
-   })
- }
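-
- /**
-  * Returns a promise that resolves when the write stream finishes and rejects
-  * if the stream errors.
-  */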
- function getFileClosedPromise(stream: WriteStream): Promise<void> {
-   return new Promise((resolve, reject) => {
-     stream.on('error', err => {
-       reject(err)
-     })
-     stream.on('finish', () => {
-       resolve()
-     })
-   })
- }
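-
- // Hypothetical usage sketch (illustration only, not part of this module): a
- // caller such as the action entry point might invoke downloadRepository along
- // these lines; the 'token' and 'path' input names are assumptions here:
- //
- //   const accessToken = core.getInput('token')
- //   const repositoryPath = path.resolve(core.getInput('path') || '.')
- //   await downloadRepository(accessToken, owner, repo, ref, commit, repositoryPath)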