diff --git a/CMakeSettings.json b/CMakeSettings.json index 255a15b1a9..603b17abce 100644 --- a/CMakeSettings.json +++ b/CMakeSettings.json @@ -33,6 +33,87 @@ } ] }, + { + "name": "x64-Debug-NoArtifacts", + "generator": "Ninja", + "configurationType": "Debug", + "inheritEnvironments": [ "msvc_x64_x64" ], + "buildRoot": "${projectDir}\\out\\build\\${name}", + "installRoot": "${projectDir}\\out\\install\\${name}", + "cmakeCommandArgs": "", + "buildCommandArgs": "", + "ctestCommandArgs": "", + "variables": [ + { + "name": "VCPKG_ARTIFACTS_DEVELOPMENT", + "value": "False", + "type": "BOOL" + }, + { + "name": "VCPKG_BUILD_TLS12_DOWNLOADER", + "value": "True", + "type": "BOOL" + }, + { + "name": "VCPKG_BUILD_BENCHMARKING", + "value": "True", + "type": "BOOL" + }, + { + "name": "VCPKG_BUILD_FUZZING", + "value": "True", + "type": "BOOL" + } + ] + }, + { + "name": "x64-Debug-Official-2022-11-10", + "generator": "Ninja", + "configurationType": "Debug", + "inheritEnvironments": [ "msvc_x64_x64" ], + "buildRoot": "${projectDir}\\out\\build\\${name}", + "installRoot": "${projectDir}\\out\\install\\${name}", + "cmakeCommandArgs": "", + "buildCommandArgs": "", + "ctestCommandArgs": "", + "variables": [ + { + "name": "VCPKG_OFFICIAL_BUILD", + "value": "True", + "type": "BOOL" + }, + { + "name": "VCPKG_BASE_VERSION", + "value": "2022-11-10", + "type": "STRING" + }, + { + "name": "VCPKG_STANDALONE_BUNDLE_SHA", + "value": "edc1bad5689508953842d13b062a750791af57e0c6bb9d441d4545e81d99841dead2d33a5cb382b6cf50d2d32414ee617ada6e761c868fcbb28fa9bcb7bca6ba", + "type": "STRING" + }, + { + "name": "VCPKG_CE_SHA", + "value": "b677e4d66e711e623a2765499cc5b662544c1df07a95b495f31a195f6e525c00cef0111dbb008b544d987fdcd1140fd69877908b3c3e7771231eaaa2cb1939ac", + "type": "STRING" + }, + { + "name": "VCPKG_BUILD_TLS12_DOWNLOADER", + "value": "True", + "type": "BOOL" + }, + { + "name": "VCPKG_BUILD_BENCHMARKING", + "value": "True", + "type": "BOOL" + }, + { + "name": "VCPKG_BUILD_FUZZING", + "value": "True", + "type": "BOOL" + } + ] + }, { "name": "x64-Release", "generator": "Ninja", diff --git a/ce/ce/archivers/git.ts b/ce/ce/archivers/git.ts index e8f41be30d..296a7c572e 100644 --- a/ce/ce/archivers/git.ts +++ b/ce/ce/archivers/git.ts @@ -2,13 +2,11 @@ // Licensed under the MIT License. import { UnpackEvents } from '../interfaces/events'; -import { Credentials } from '../util/credentials'; import { execute } from '../util/exec-cmd'; import { isFilePath, Uri } from '../util/uri'; export interface CloneOptions { force?: boolean; - credentials?: Credentials; } /** @internal */ diff --git a/ce/ce/fs/acquire.ts b/ce/ce/fs/acquire.ts index 1074200a23..4149964251 100644 --- a/ce/ce/fs/acquire.ts +++ b/ce/ce/fs/acquire.ts @@ -2,48 +2,23 @@ // Licensed under the MIT License. 
 import { strict } from 'assert';
-import { pipeline as origPipeline } from 'stream';
-import { promisify } from 'util';
 import { i } from '../i18n';
 import { DownloadEvents } from '../interfaces/events';
 import { Session } from '../session';
-import { Credentials } from '../util/credentials';
 import { RemoteFileUnavailable } from '../util/exceptions';
-import { Algorithm, Hash } from '../util/hash';
+import { Hash } from '../util/hash';
 import { Uri } from '../util/uri';
-import { get, getStream, RemoteFile, resolveRedirect } from './https';
-import { ProgressTrackingStream } from './streams';
-
-const pipeline = promisify(origPipeline);
-
-const size32K = 1 << 15;
-const size64K = 1 << 16;
+import { vcpkgDownload } from '../vcpkg';
 
 export interface AcquireOptions extends Hash {
   /** force a redownload even if it's in cache */
   force?: boolean;
-  credentials?: Credentials;
 }
 
 export async function acquireArtifactFile(session: Session, uris: Array<Uri>, outputFilename: string, events: Partial<DownloadEvents>, options?: AcquireOptions) {
   await session.downloads.createDirectory();
-  const outputFile = session.downloads.join(outputFilename);
   session.channels.debug(`Acquire file '${outputFilename}' from [${uris.map(each => each.toString()).join(',')}]`);
-  if (options?.algorithm && options?.value) {
-    session.channels.debug(`We have a hash: ${options.algorithm}/${options.value}`);
-
-    // if we have hash data, check to see if the output file is good.
-    if (await outputFile.isFile()) {
-      session.channels.debug(`There is an output file already, verifying: ${outputFile.fsPath}`);
-
-      if (await outputFile.hashValid(events, options)) {
-        session.channels.debug(`Cached file matched hash: ${outputFile.fsPath}`);
-        return outputFile;
-      }
-    }
-  }
-
   // is the file present on a local filesystem?
   for (const uri of uris) {
     if (uri.isLocal) {
@@ -80,146 +55,51 @@ export async function acquireArtifactFile(session: Session, uris: Array, ou
 
 /** */
 async function https(session: Session, uris: Array<Uri>, outputFilename: string, events: Partial<DownloadEvents>, options?: AcquireOptions) {
   session.channels.debug(`Attempting to download file '${outputFilename}' from [${uris.map(each => each.toString()).join(',')}]`);
-
-  let resumeAtOffset = 0;
-  await session.downloads.createDirectory();
+  const hashAlgorithm = options?.algorithm;
   const outputFile = session.downloads.join(outputFilename);
-
   if (options?.force) {
     session.channels.debug(`Acquire '${outputFilename}': force specified, forcing download`);
     // is force specified; delete the current file
     await outputFile.delete();
-  }
-
-  // start this peeking at the target uris.
-  session.channels.debug(`Acquire '${outputFilename}': checking remote connections`);
-  events.downloadStart?.(uris, outputFile.fsPath);
-  const locations = new RemoteFile(uris, { credentials: options?.credentials });
-  let url: Uri | undefined;
-
-  // is there a file in the cache
-  if (await outputFile.exists()) {
-    session.channels.debug(`Acquire '${outputFilename}': local file exists`);
-    if (options?.algorithm) {
-      // does it match a hash that we have?
-      if (await outputFile.hashValid(events, options)) {
-        session.channels.debug(`Acquire '${outputFilename}': local file hash matches metdata`);
-        // yes it does. let's just return done.
-        return outputFile;
-      }
+  } else if (hashAlgorithm) {
+    // does it match a hash that we have?
+    if (await outputFile.hashValid(events, options)) {
+      session.channels.debug(`Acquire '${outputFilename}': local file hash matches metadata`);
+      // yes it does. let's just return done.
+      return outputFile;
     }
-    // it doesn't match a known hash.
-    const contentLength = await locations.contentLength;
-    session.channels.debug(`Acquire '${outputFilename}': remote connection info is back`);
-    const onDiskSize = await outputFile.size();
-    if (!await locations.availableLocation) {
-      if (locations.failures.all(each => each.code === 404)) {
-        let msg = i`Unable to download file`;
-        if (options?.credentials) {
-          msg += (i` - It could be that your authentication credentials are not correct`);
-        }
-
-        session.channels.error(msg);
-        throw new RemoteFileUnavailable(uris);
-      }
-    }
-    // first, make sure that there is a remote that is accessible.
-    strict.ok(!!await locations.availableLocation, `Requested file ${outputFilename} has no accessible locations ${uris.map(each => each.toString()).join(',')}`);
-
-    url = await locations.resumableLocation;
-    // ok, does it support resume?
-    if (url) {
-      // yes, let's check what the size is expected to be.
-
-      if (!options?.algorithm) {
-
-        if (contentLength === onDiskSize) {
-          session.channels.debug(`Acquire '${outputFilename}': on disk file matches length of remote file`);
-          const algorithm = (await locations.algorithm);
-          const value = await locations.hash;
-          session.channels.debug(`Acquire '${outputFilename}': remote alg/hash: '${algorithm}'/'${value}`);
-          if (algorithm && value && await outputFile.hashValid(events, { algorithm, value, ...options })) {
-            session.channels.debug(`Acquire '${outputFilename}': on disk file hash matches the server hash`);
-            // so *we* don't have the hash, but ... if the server has a hash, we could see if what we have is what they have?
-            // it does match what the server has.
-            // I call this an win.
-            return outputFile;
-          }
-
-          // we don't have a hash, or what we have doesn't match.
-          // maybe we will get a match below (or resume)
-        }
-      }
-
-      if (onDiskSize > size64K) {
-        // it's bigger than 64k. Good. otherwise, we're just wasting time.
-
-        // so, how big is the remote
-        if (contentLength >= onDiskSize) {
-          session.channels.debug(`Acquire '${outputFilename}': local file length is less than or equal to remote file length`);
-          // looks like there could be more remotely than we have.
-          // lets compare the first 32k and the last 32k of what we have
-          // against what they have and see if they match.
-          const top = (await get(url, { start: 0, end: size32K - 1, credentials: options?.credentials })).rawBody;
-          const bottom = (await get(url, { start: onDiskSize - size32K, end: onDiskSize - 1, credentials: options?.credentials })).rawBody;
-
-          const onDiskTop = await outputFile.readBlock(0, size32K - 1);
-          const onDiskBottom = await outputFile.readBlock(onDiskSize - size32K, onDiskSize - 1);
-
-          if (top.compare(onDiskTop) === 0 && bottom.compare(onDiskBottom) === 0) {
-            session.channels.debug(`Acquire '${outputFilename}': first/last blocks are equal`);
-            // the start and end of what we have does match what they have.
-            // is this file the same size?
-            if (contentLength === onDiskSize) {
-              // same file size, front and back match, let's accept this. begrudgingly
-              session.channels.debug(`Acquire '${outputFilename}': file size is identical. keeping this one`);
-              return outputFile;
-            }
-            // looks like we can continue from here.
-            session.channels.debug(`Acquire '${outputFilename}': ok to resume`);
-            resumeAtOffset = onDiskSize;
-          }
-        }
-      }
-    }
-  }
-
-  if (resumeAtOffset === 0) {
-    // clearly we mean to not resume. clean any existing file.
-    session.channels.debug(`Acquire '${outputFilename}': not resuming file, full download`);
+    // invalid hash, deleting file
+    session.channels.debug(`Acquire '${outputFilename}': local file hash mismatch, redownloading`);
     await outputFile.delete();
+  } else if (await outputFile.exists()) {
+    session.channels.debug(`Acquire '${outputFilename}': skipped due to existing file, no hash known`);
+    session.channels.warning(i`Assuming '${outputFilename}' is correct; supply a hash in the artifact metadata to suppress this message.`);
+    return outputFile;
   }
-  url = url || await locations.availableLocation;
-  strict.ok(!!url, `Requested file ${outputFilename} has no accessible locations ${uris.map(each => each.toString()).join(',')}`);
-  session.channels.debug(`Acquire '${outputFilename}': initiating download`);
-  const length = await locations.contentLength;
-
-  const inputStream = getStream(url, { start: resumeAtOffset, end: length > 0 ? length : undefined, credentials: options?.credentials });
-  let progressStream;
-  if (length > 0) {
-    progressStream = new ProgressTrackingStream(resumeAtOffset, length);
-    progressStream.on('progress', (filePercentage) => events.downloadProgress?.(url!, outputFile.fsPath, filePercentage));
+  session.channels.debug(`Acquire '${outputFilename}': checking remote connections`);
+  events.downloadStart?.(uris, outputFile.fsPath);
+  let sha512 = undefined;
+  if (hashAlgorithm == 'sha512') {
+    sha512 = options?.value;
   }
-  const outputStream = await outputFile.writeStream({ append: true });
-  // whoooosh. write out the file
-  if (progressStream) {
-    await pipeline(inputStream, progressStream, outputStream);
-  } else {
-    await pipeline(inputStream, outputStream);
-  }
+  await vcpkgDownload(session, outputFile.fsPath, sha512, uris, events);
   events.downloadComplete?.();
   // we've downloaded the file, let's see if it matches the hash we have.
-  if (options?.algorithm) {
+  if (hashAlgorithm == 'sha512') {
+    // vcpkg took care of it already
+    session.channels.debug(`Acquire '${outputFilename}': vcpkg checked SHA512`);
+  } else if (hashAlgorithm) {
    session.channels.debug(`Acquire '${outputFilename}': checking downloaded file hash`);
    // does it match the hash that we have?
    if (!await outputFile.hashValid(events, options)) {
      await outputFile.delete();
      throw new Error(i`Downloaded file '${outputFile.fsPath}' did not have the correct hash (${options.algorithm}: ${options.value}) `);
    }
+    session.channels.debug(`Acquire '${outputFilename}': downloaded file hash matches specified hash`);
  }
@@ -233,10 +113,7 @@ export async function resolveNugetUrl(session: Session, pkg: string) {
   // let's resolve the redirect first, since nuget servers don't like us getting HEAD data on the targets via a redirect.
   // even if this wasn't the case, this is lower cost now rather than later.
- const url = await resolveRedirect(session.fileSystem.parseUri(`https://www.nuget.org/api/v2/package/${name}/${version}`)); - - session.channels.debug(`Resolving nuget package for '${pkg}' to '${url}'`); - return url; + return session.fileSystem.parseUri(`https://www.nuget.org/api/v2/package/${name}/${version}`); } export async function acquireNugetFile(session: Session, pkg: string, outputFilename: string, events: Partial, options?: AcquireOptions): Promise { diff --git a/ce/ce/fs/http-filesystem.ts b/ce/ce/fs/http-filesystem.ts index 477934fdab..8b0c38a7e1 100644 --- a/ce/ce/fs/http-filesystem.ts +++ b/ce/ce/fs/http-filesystem.ts @@ -4,7 +4,6 @@ import { Readable, Writable } from 'stream'; import { Uri } from '../util/uri'; import { FileStat, FileSystem, FileType, ReadHandle } from './filesystem'; -import { get, getStream, head } from './https'; /** * HTTPS Filesystem @@ -13,15 +12,7 @@ import { get, getStream, head } from './https'; export class HttpsFileSystem extends FileSystem { async stat(uri: Uri): Promise { - const result = await head(uri); - - return { - type: FileType.File, - mtime: Date.parse(result.headers.date || ''), - ctime: Date.parse(result.headers.date || ''), - size: Number.parseInt(result.headers['content-length'] || '0'), - mode: 0o555 // https is read only but always 'executable' - }; + throw new Error('Method not implemented'); } readDirectory(uri: Uri): Promise> { throw new Error('Method not implemented'); @@ -30,7 +21,7 @@ export class HttpsFileSystem extends FileSystem { throw new Error('Method not implemented'); } async readFile(uri: Uri): Promise { - return (await get(uri)).rawBody; + throw new Error('Method not implemented'); } writeFile(uri: Uri, content: Uint8Array): Promise { throw new Error('Method not implemented'); @@ -48,46 +39,13 @@ export class HttpsFileSystem extends FileSystem { throw new Error('Method not implemented'); } async readStream(uri: Uri, options?: { start?: number, end?: number }): Promise { - return getStream(uri, options); + throw new Error('Method not implemented'); } writeStream(uri: Uri): Promise { throw new Error('Method not implemented'); } async openFile(uri: Uri): Promise { - return new HttpsReadHandle(uri); - } -} - - -class HttpsReadHandle extends ReadHandle { - position = 0; - constructor(private target: Uri) { - super(); - } - - async read(buffer: TBuffer, offset = 0, length = buffer.byteLength, position: number | null = null): Promise<{ bytesRead: number; buffer: TBuffer; }> { - if (position !== null) { - this.position = position; - } - - const r = getStream(this.target, { start: this.position, end: this.position + length }); - let bytesRead = 0; - - for await (const chunk of r) { - const c = chunk; - c.copy(buffer, offset); - bytesRead += c.length; - offset += c.length; - } - return { bytesRead, buffer }; - } - - async size(): Promise { - return this.target.size(); - } - - async close() { - //return this.handle.close(); + throw new Error('Method not implemented'); } } diff --git a/ce/ce/fs/https.ts b/ce/ce/fs/https.ts deleted file mode 100644 index 697b64ee5e..0000000000 --- a/ce/ce/fs/https.ts +++ /dev/null @@ -1,225 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. 
- -import { default as got, Headers, HTTPError, Response } from 'got'; -import { Credentials } from '../util/credentials'; -import { anyWhere } from '../util/promise'; -import { Uri } from '../util/uri'; - -/** - * Resolves an HTTPS GET redirect by doing the GET, grabbing the redirects and then cancelling the rest of the request - * @param location the URL to get the final location of - */ -export async function resolveRedirect(location: Uri) { - let finalUrl = location; - - const stream = got.get(location.toUrl(), { timeout: 15000, isStream: true }); - - // when the response comes thru, we can grab the headers & stuff from it - stream.on('response', (response: Response) => { - finalUrl = location.fileSystem.parseUri(response.redirectUrls.last || finalUrl.toString()); - }); - - // we have to get at least some data for the response event to trigger. - for await (const chunk of stream) { - // but we don't need any of it :D - break; - } - stream.destroy(); - return finalUrl; -} - -/** - * Does an HTTPS HEAD request, and on a 404, tries to do an HTTPS GET and see if we get a redirect, and harvest the headers from that. - * @param location the target URL - * @param headers any headers to put in the request. - */ -export async function head(location: Uri, headers: Headers = {}, credentials?: Credentials): Promise> { - try { - setCredentials(headers, location, credentials); - // on a successful HEAD request, do nothing different - return await got.head(location.toUrl(), { timeout: 15000, headers }); - } catch (E) { - // O_o - // - // So, it turns out that nuget servers (maybe others too?) don't do redirects on HEAD requests, - // and instead issue a 404. - // let's retry the request as a GET, and dump it after the first chunk. - // typically, a HEAD request should see a 300-400msec response time - // and yes, this does stretch that out to 500-700msec, but whatcha gonna do? - if (E instanceof HTTPError && E.response.statusCode === 404) { - try { - const syntheticResponse = >{}; - const stream = got.get(location.toUrl(), { timeout: 15000, headers, isStream: true }); - - // when the response comes thru, we can grab the headers & stuff from it - stream.on('response', (response: Response) => { - syntheticResponse.headers = response.headers; - syntheticResponse.statusCode = response.statusCode; - syntheticResponse.redirectUrls = response.redirectUrls; - }); - - // we have to get at least some data for the response event to trigger. - for await (const chunk of stream) { - // but we don't need any of it :D - break; - } - stream.destroy(); - return syntheticResponse; - } - catch { - // whatever, it didn't work. let the rethrow happen. - } - } - throw E; - } -} - -/** HTTPS Get request, returns a buffer */ -export function get(location: Uri, options?: { start?: number, end?: number, headers?: Headers, credentials?: Credentials }) { - let headers: Headers | undefined = undefined; - headers = setRange(headers, options?.start, options?.end); - headers = setCredentials(headers, location, options?.credentials); - - return got.get(location.toUrl(), { headers }); -} - -function setRange(headers: Headers | undefined, start?: number, end?: number) { - if (start !== undefined || end !== undefined) { - headers = headers || {}; - headers['range'] = `bytes=${start !== undefined ? start : ''}-${end !== undefined ? 
end : ''}`; - } - return headers; -} - - -function setCredentials(headers: Headers | undefined, target: Uri, credentials?: Credentials) { - if (credentials) { - // todo: if we have to add some credential headers, we'd do it here. - // we've removed github auth support until we actually need such a thing - } - return headers; -} - -/** HTTPS Get request, returns a stream - * @internal -*/ -export function getStream(location: Uri, options?: { start?: number, end?: number, headers?: Headers, credentials?: Credentials }) { - let headers: Headers | undefined = options?.headers; - headers = setRange(headers, options?.start, undefined); - headers = setCredentials(headers, location, options?.credentials); - - return got.get(location.toUrl(), { isStream: true, retry: 3, headers }); -} - -export interface Info { - failed?: boolean; - location: Uri; - resumeable: boolean; - contentLength: number; - hash?: string; - algorithm?: string; -} - -function digest(headers: Headers) { - let hash = hashAlgorithm(headers['digest'], 'sha-256'); - - // any of the sha* hashes.. - if (hash) { - return { hash, algorithm: 'sha256' }; - } - hash = hashAlgorithm(headers['digest'], 'sha-384'); - if (hash) { - return { hash, algorithm: 'sha384' }; - } - hash = hashAlgorithm(headers['digest'], 'sha-512'); - if (hash) { - return { hash, algorithm: 'sha512' }; - } - - // nothing we know about. - return { hash: undefined, algorithm: undefined }; -} - -/** - * RemoteFile is a class that represents a single remote file, but mapped to multiple mirrored URLs - * on creation, it kicks off HEAD requests to each URL so that we can get hash/digest, length, resumability etc - * - * the properties are Promises<> to the results, where it grabs data from the first returning valid query without - * blocking elsewhere. - * -*/ -export class RemoteFile { - info: Array>; - constructor(protected locations: Array, options?: { credentials?: Credentials }) { - this.info = locations.map(location => { - return head(location, setCredentials({ - 'want-digest': 'sha-256;q=1, sha-512;q=0.9', - 'accept-encoding': 'identity;q=0', // we need to know the content length without gzip encoding, - }, location, options?.credentials)).then(data => { - if (data.statusCode === 200) { - const { hash, algorithm } = digest(data.headers); - return { - location, - resumeable: data.headers['accept-ranges'] === 'bytes', - contentLength: Number.parseInt(data.headers['content-length']!) || -1, // -1 means we were not told. - hash, - algorithm, - }; - } - this.failures.push({ - code: data.statusCode, - reason: `A non-ok status code was returned: ${data.statusMessage}` - }); - throw new Error(`A non-ok status code was returned: ${data.statusCode}`); - }, err => { - this.failures.push({ - code: err?.response?.statusCode, - reason: `A non-ok status code was returned: ${err?.response?.statusMessage}` - }); - throw err; - }); - }); - - - // lazy properties (which do not throw on errors.) 
- this.availableLocation = Promise.any(this.info).then(success => success.location, fail => undefined); - this.resumable = anyWhere(this.info, each => each.resumeable).then(success => true, fail => false); - this.resumableLocation = anyWhere(this.info, each => each.resumeable).then(success => success.location, fail => undefined); - this.contentLength = anyWhere(this.info, each => !!each.contentLength).then(success => success.contentLength, fail => -2); - this.hash = anyWhere(this.info, each => !!each.hash).then(success => success.hash, fail => undefined); - this.algorithm = anyWhere(this.info, each => !!each.algorithm).then(success => success.algorithm, fail => undefined); - } - - resumable: Promise; - contentLength: Promise; - hash: Promise; - algorithm: Promise; - availableLocation: Promise; - resumableLocation: Promise; - failures = new Array<{ code: number, reason: string }>(); -} - -/** - * Digest/hash in headers are base64 encoded strings. - * @param data the base64 encoded string - */ -function decode(data?: string): string | undefined { - return data ? Buffer.from(data, 'base64').toString('hex').toLowerCase() : undefined; -} - -/** - * Get the hash alg/hash from the digest. - * @param digest the digest header - * @param algorithm the algorithm we're trying to match - */ -function hashAlgorithm(digest: string | Array | undefined, algorithm: 'sha-256' | 'sha-384' | 'sha-512'): string | undefined { - for (const each of (digest ? Array.isArray(digest) ? digest : [digest] : [])) { - if (each.startsWith(algorithm)) { - return decode(each.substr(8)); - } - } - - // nothing. - return undefined; -} diff --git a/ce/ce/installers/espidf.ts b/ce/ce/installers/espidf.ts index 578337d1e3..c557fa50c8 100644 --- a/ce/ce/installers/espidf.ts +++ b/ce/ce/installers/espidf.ts @@ -8,7 +8,7 @@ import { UnpackEvents } from '../interfaces/events'; import { Session } from '../session'; import { execute } from '../util/exec-cmd'; import { Uri } from '../util/uri'; -import { Vcpkg } from '../vcpkg'; +import { vcpkgFetch } from '../vcpkg'; export async function installEspIdf(session: Session, events: Partial, targetLocation: Uri) { // check for some file that espressif installs to see if it's installed. @@ -17,8 +17,7 @@ export async function installEspIdf(session: Session, events: Partial, options: Partial): Promise { - const vcpkg = new Vcpkg(session); - - const gitPath = await vcpkg.fetch('git'); + const gitPath = await vcpkgFetch(session, 'git'); if (!gitPath) { throw new Error(i`Git is not installed`); diff --git a/ce/ce/package.json b/ce/ce/package.json index e6e38c44f6..eafce8b250 100644 --- a/ce/ce/package.json +++ b/ce/ce/package.json @@ -60,7 +60,6 @@ "yaml": "2.0.0-10", "semver": "7.3.5", "tar-stream": "~2.3.0", - "got": "11.8.5", "sorted-btree": "1.6.0", "sed-lite": "0.8.4", "unbzip2-stream": "1.4.3", diff --git a/ce/ce/util/credentials.ts b/ce/ce/util/credentials.ts deleted file mode 100644 index eb16a7eb52..0000000000 --- a/ce/ce/util/credentials.ts +++ /dev/null @@ -1,6 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. 
-
-export interface Credentials {
-  githubToken?: string
-}
diff --git a/ce/ce/vcpkg.ts b/ce/ce/vcpkg.ts
index 0fc0fe7aa8..0b45a76dad 100644
--- a/ce/ce/vcpkg.ts
+++ b/ce/ce/vcpkg.ts
@@ -3,41 +3,74 @@
 import { spawn } from 'child_process';
 import { i } from './i18n';
+import { DownloadEvents } from './interfaces/events';
 import { Session } from './session';
+import { Uri } from './util/uri';
 
-/** @internal */
-export class Vcpkg {
-  constructor(private readonly session: Session) { }
+function streamVcpkg(vcpkgCommand: string | undefined, args: Array<string>, listener: (chunk: any) => void): Promise<void> {
+  return new Promise((accept, reject) => {
+    if (!vcpkgCommand) {
+      reject(i`VCPKG_COMMAND was not set`);
+      return;
+    }
 
-  fetch(fetchKey: string): Promise<string> {
-    return this.runVcpkg(['fetch', fetchKey, '--x-stderr-status']).then((output) => {
-      return output.trimEnd();
-    }, (error) => {
-      if (fetchKey === 'git') {
-        this.session.channels.warning('failed to fetch git, falling back to attempting to use git from the PATH');
-        return Promise.resolve('git');
-      }
-
-      return Promise.reject(error);
+    const subproc = spawn(vcpkgCommand, args, { stdio: ['ignore', 'pipe', 'pipe'] });
+    subproc.stdout.on('data', listener);
+    subproc.stderr.pipe(process.stdout);
+    subproc.on('error', (err) => { reject(err); });
+    subproc.on('close', (code: number, signal) => {
+      if (code === 0) { accept(); }
+      reject(i`Running vcpkg internally returned a nonzero exit code: ${code}`);
     });
+  });
+}
+
+async function runVcpkg(vcpkgCommand: string | undefined, args: Array<string>): Promise<string> {
+  let result = '';
+  await streamVcpkg(vcpkgCommand, args, (chunk) => { result += chunk; });
+  return result.trimEnd();
+}
+
+export function vcpkgFetch(session: Session, fetchKey: string): Promise<string> {
+  return runVcpkg(session.vcpkgCommand, ['fetch', fetchKey, '--x-stderr-status']).then((output) => {
+    return output;
+  }, (error) => {
+    if (fetchKey === 'git') {
+      session.channels.warning('failed to fetch git, falling back to attempting to use git from the PATH');
+      return Promise.resolve('git');
+    }
+
+    return Promise.reject(error);
+  });
+}
+
+export async function vcpkgDownload(session: Session, destination: string, sha512: string | undefined, uris: Array<Uri>, events: Partial<DownloadEvents>) : Promise<void> {
+  const args = ['x-download', destination, '--z-machine-readable-progress'];
+  if (sha512) {
+    args.push(`--sha512=${sha512}`);
+  } else {
+    args.push('--skip-sha512');
  }
 
-  private runVcpkg(args: Array<string>): Promise<string> {
-    return new Promise((accept, reject) => {
-      if (!this.session.vcpkgCommand) {
-        reject(i`VCPKG_COMMAND was not set`);
-        return;
-      }
-
-      const subproc = spawn(this.session.vcpkgCommand, args, { stdio: ['ignore', 'pipe', 'pipe'] });
-      let result = '';
-      subproc.stdout.on('data', (chunk) => { result += chunk; });
-      subproc.stderr.pipe(process.stdout);
-      subproc.on('error', (err) => { reject(err); });
-      subproc.on('close', (code: number, signal) => {
-        if (code === 0) { accept(result); }
-        reject(i`Running vcpkg internally returned a nonzero exit code: ${code}`);
+  for (const uri of uris) {
+    events.downloadProgress?.(uri, destination, 0);
+    const uriArgs = [...args, `--url=${uri.toString()}`];
+    try {
+      await streamVcpkg(session.vcpkgCommand, uriArgs, (chunk) => {
+        const match = /(\d+)(\.\d+)?%\s*$/.exec(chunk);
+        if (!match) { return; }
+        const number = parseFloat(match[1]);
+        // throwing out 100s avoids displaying temporarily full progress bars resulting from redirects getting resolved
+        if (number && number < 100) {
+          events.downloadProgress?.(uri, destination,
number); + } }); - }); + + return; + } catch { + session.channels.warning(i`failed to download from ${uri.toString()}`); + } } + + throw new Error(i`failed to download ${destination} from any source`); } diff --git a/ce/common/config/rush/pnpm-lock.yaml b/ce/common/config/rush/pnpm-lock.yaml index a43262cca0..e51b587223 100644 --- a/ce/common/config/rush/pnpm-lock.yaml +++ b/ce/common/config/rush/pnpm-lock.yaml @@ -21,7 +21,6 @@ specifiers: eslint: 8.8.0 eslint-plugin-notice: 0.9.10 fs-constants: ^1.0.0 - got: 11.8.5 inherits: ^2.0.3 marked: 4.0.12 marked-terminal: 5.1.1 @@ -62,7 +61,6 @@ dependencies: eslint: 8.8.0 eslint-plugin-notice: 0.9.10_eslint@8.8.0 fs-constants: 1.0.0 - got: 11.8.5 inherits: 2.0.4 marked: 4.0.12 marked-terminal: 5.1.1_marked@4.0.12 @@ -94,7 +92,7 @@ packages: /@azure/cognitiveservices-translatortext/1.0.1: resolution: {integrity: sha1-iIRCw+KXZ33jaIf7MlEAonFS0CU=} dependencies: - '@azure/ms-rest-js': 2.6.2 + '@azure/ms-rest-js': 2.6.4 tslib: 1.14.1 transitivePeerDependencies: - encoding @@ -112,14 +110,14 @@ packages: resolution: {integrity: sha1-jJCzFGiuyjFGsGxxRLOG/Ugn9kw=} dependencies: '@azure/core-auth': 1.4.0 - '@azure/ms-rest-js': 2.6.2 + '@azure/ms-rest-js': 2.6.4 tslib: 1.14.1 transitivePeerDependencies: - encoding dev: false - /@azure/ms-rest-js/2.6.2: - resolution: {integrity: sha1-GFqdZD6lXGlhNK92pcYCbJTiYhc=} + /@azure/ms-rest-js/2.6.4: + resolution: {integrity: sha1-sKD4mEFDRHGt91fQnn456Oz81lA=} dependencies: '@azure/core-auth': 1.4.0 abort-controller: 3.0.0 @@ -157,7 +155,7 @@ packages: debug: 4.3.4 espree: 9.4.1 globals: 13.18.0 - ignore: 5.2.0 + ignore: 5.2.1 import-fresh: 3.3.0 js-yaml: 4.1.0 minimatch: 3.1.2 @@ -202,23 +200,11 @@ packages: fastq: 1.13.0 dev: false - /@sindresorhus/is/4.6.0: - resolution: {integrity: sha1-PHycRuZ4/u/nouW7YJ09vWZf+z8=} - engines: {node: '>=10'} - dev: false - /@snyk/nuget-semver/1.3.0: resolution: {integrity: sha1-p071NA3K2jvEs6AZt5UHON9RGZw=} engines: {node: '>=6'} dev: false - /@szmarczak/http-timer/4.0.6: - resolution: {integrity: sha1-tKkUu2LnwnLU5Zif5EQPgSqx2Ac=} - engines: {node: '>=10'} - dependencies: - defer-to-connect: 2.0.1 - dev: false - /@ts-morph/common/0.7.5: resolution: {integrity: sha1-2BYDq9S4bQCZ1pI5y7zfmQpd+yU=} dependencies: @@ -230,35 +216,16 @@ packages: typescript: 4.1.6 dev: false - /@types/cacheable-request/6.0.3: - resolution: {integrity: sha1-pDCzJgRmyntcpb/XNWk7Nuep0YM=} - dependencies: - '@types/http-cache-semantics': 4.0.1 - '@types/keyv': 3.1.4 - '@types/node': 17.0.15 - '@types/responselike': 1.0.0 - dev: false - /@types/cli-progress/3.11.0: resolution: {integrity: sha1-7HnfmbJnV8PRxxcK+EIuD8le734=} dependencies: '@types/node': 17.0.15 dev: false - /@types/http-cache-semantics/4.0.1: - resolution: {integrity: sha1-Dqe2FJaQK5WJDcTDoRa2DLja6BI=} - dev: false - /@types/json-schema/7.0.11: resolution: {integrity: sha1-1CG2xSejA398hEM/0sQingFoY9M=} dev: false - /@types/keyv/3.1.4: - resolution: {integrity: sha1-PM2xxnUbDH5SMAvNrNW8v4+qdbY=} - dependencies: - '@types/node': 17.0.15 - dev: false - /@types/marked-terminal/3.1.3: resolution: {integrity: sha1-Qf798j1siJXCC6V7TiH6fjxp76w=} dependencies: @@ -286,12 +253,6 @@ packages: resolution: {integrity: sha1-l3eSgsCcCVdxIKIWLnHYOAADWQo=} dev: false - /@types/responselike/1.0.0: - resolution: {integrity: sha1-JR9P59FU0rrRJavhtCmyOv0mLik=} - dependencies: - '@types/node': 17.0.15 - dev: false - /@types/semver/7.3.9: resolution: {integrity: sha1-FSxsIKdojDC5Z+wYQdMazlaYY/w=} dev: false @@ -320,7 +281,7 @@ packages: debug: 4.3.4 eslint: 8.8.0 
functional-red-black-tree: 1.0.1 - ignore: 5.2.0 + ignore: 5.2.1 regexpp: 3.2.0 semver: 7.3.5 tsutils: 3.21.0_typescript@4.5.5 @@ -570,24 +531,6 @@ packages: ieee754: 1.2.1 dev: false - /cacheable-lookup/5.0.4: - resolution: {integrity: sha1-WmuGWyxENXvj1evCpGewMnGacAU=} - engines: {node: '>=10.6.0'} - dev: false - - /cacheable-request/7.0.2: - resolution: {integrity: sha1-6g0LiJNkolhUdXMByhKy2nf5HSc=} - engines: {node: '>=8'} - dependencies: - clone-response: 1.0.3 - get-stream: 5.2.0 - http-cache-semantics: 4.1.0 - keyv: 4.5.2 - lowercase-keys: 2.0.0 - normalize-url: 6.1.0 - responselike: 2.0.1 - dev: false - /call-bind/1.0.2: resolution: {integrity: sha1-sdTonmiBGcPJqQOtMKuy9qkZvjw=} dependencies: @@ -682,12 +625,6 @@ packages: wrap-ansi: 7.0.0 dev: false - /clone-response/1.0.3: - resolution: {integrity: sha1-ryAyqkeBY5nPXwodDbkC9ReruMM=} - dependencies: - mimic-response: 1.0.1 - dev: false - /code-block-writer/10.1.1: resolution: {integrity: sha1-rVaE7Uv7KweDyLExKBroTuZApC8=} dev: false @@ -763,13 +700,6 @@ packages: engines: {node: '>=10'} dev: false - /decompress-response/6.0.0: - resolution: {integrity: sha1-yjh2Et234QS9FthaqwDV7PCcZvw=} - engines: {node: '>=10'} - dependencies: - mimic-response: 3.1.0 - dev: false - /deep-equal/1.1.1: resolution: {integrity: sha1-tcmMlCzv+vfLBR4k4UNKJaLmB2o=} dependencies: @@ -785,11 +715,6 @@ packages: resolution: {integrity: sha1-pvLc5hL63S7x9Rm3NVHxfoUZmDE=} dev: false - /defer-to-connect/2.0.1: - resolution: {integrity: sha1-gBa9tBQ+RjK3ejRJxiNid95SBYc=} - engines: {node: '>=10'} - dev: false - /define-properties/1.1.4: resolution: {integrity: sha1-CxTXvX++svNXLDp+2oDqXVf7BbE=} engines: {node: '>= 0.4'} @@ -977,7 +902,7 @@ packages: functional-red-black-tree: 1.0.1 glob-parent: 6.0.2 globals: 13.18.0 - ignore: 5.2.0 + ignore: 5.2.1 import-fresh: 3.3.0 imurmurhash: 0.1.4 is-glob: 4.0.3 @@ -1184,13 +1109,6 @@ packages: has-symbols: 1.0.3 dev: false - /get-stream/5.2.0: - resolution: {integrity: sha1-SWaheV7lrOZecGxLe+txJX1uItM=} - engines: {node: '>=8'} - dependencies: - pump: 3.0.0 - dev: false - /get-symbol-description/1.0.0: resolution: {integrity: sha1-f9uByQAQH71WTdXxowr1qtweWNY=} engines: {node: '>= 0.4'} @@ -1249,28 +1167,11 @@ packages: array-union: 2.1.0 dir-glob: 3.0.1 fast-glob: 3.2.12 - ignore: 5.2.0 + ignore: 5.2.1 merge2: 1.4.1 slash: 3.0.0 dev: false - /got/11.8.5: - resolution: {integrity: sha1-znfQRRNt5W6PAkvruC6jSbxzAEY=} - engines: {node: '>=10.19.0'} - dependencies: - '@sindresorhus/is': 4.6.0 - '@szmarczak/http-timer': 4.0.6 - '@types/cacheable-request': 6.0.3 - '@types/responselike': 1.0.0 - cacheable-lookup: 5.0.4 - cacheable-request: 7.0.2 - decompress-response: 6.0.0 - http2-wrapper: 1.0.3 - lowercase-keys: 2.0.0 - p-cancelable: 2.1.1 - responselike: 2.0.1 - dev: false - /growl/1.10.5: resolution: {integrity: sha1-8nNdwig2dPpnR4sQGBBZNVw2nl4=} engines: {node: '>=4.x'} @@ -1320,24 +1221,12 @@ packages: hasBin: true dev: false - /http-cache-semantics/4.1.0: - resolution: {integrity: sha1-SekcXL82yblLz81xwj1SSex045A=} - dev: false - - /http2-wrapper/1.0.3: - resolution: {integrity: sha1-uPVeDB8l1OvQizsMLAeflZCACz0=} - engines: {node: '>=10.19.0'} - dependencies: - quick-lru: 5.1.1 - resolve-alpn: 1.2.1 - dev: false - /ieee754/1.2.1: resolution: {integrity: sha1-jrehCmP/8l0VpXsAFYbRd9Gw01I=} dev: false - /ignore/5.2.0: - resolution: {integrity: sha1-bTusj6f+DUXZ+b57rC/CeVd+NFo=} + /ignore/5.2.1: + resolution: {integrity: sha1-wrH3bLmZ7eFQLzoiapMQ/f6I1Gw=} engines: {node: '>= 4'} dev: false @@ -1552,10 +1441,6 @@ packages: 
argparse: 2.0.1 dev: false - /json-buffer/3.0.1: - resolution: {integrity: sha1-kziAKjDTtmBfvgYT4JQAjKjAWhM=} - dev: false - /json-schema-traverse/0.4.1: resolution: {integrity: sha1-afaofZUTq4u4/mO9sJecRI5oRmA=} dev: false @@ -1564,12 +1449,6 @@ packages: resolution: {integrity: sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=} dev: false - /keyv/4.5.2: - resolution: {integrity: sha1-DjEM5zv3hR7HAvLq9G7E44BczlY=} - dependencies: - json-buffer: 3.0.1 - dev: false - /levn/0.4.1: resolution: {integrity: sha1-rkViwAdHO5MqYgDUAyaN0v/8at4=} engines: {node: '>= 0.8.0'} @@ -1601,11 +1480,6 @@ packages: is-unicode-supported: 0.1.0 dev: false - /lowercase-keys/2.0.0: - resolution: {integrity: sha1-JgPni3tLAAbLyi+8yKMgJVislHk=} - engines: {node: '>=8'} - dev: false - /lru-cache/6.0.0: resolution: {integrity: sha1-bW/mVw69lqr5D8rR2vo7JWbbOpQ=} engines: {node: '>=10'} @@ -1663,16 +1537,6 @@ packages: mime-db: 1.52.0 dev: false - /mimic-response/1.0.1: - resolution: {integrity: sha1-SSNTiHju9CBjy4o+OweYeBSHqxs=} - engines: {node: '>=4'} - dev: false - - /mimic-response/3.1.0: - resolution: {integrity: sha1-LR1Zr5wbEpgVrMwsRqAipc4fo8k=} - engines: {node: '>=10'} - dev: false - /minimatch/3.1.2: resolution: {integrity: sha1-Gc0ZS/0+Qo8EmnCBfAONiatL41s=} dependencies: @@ -1779,11 +1643,6 @@ packages: engines: {node: '>=0.10.0'} dev: false - /normalize-url/6.1.0: - resolution: {integrity: sha1-QNCIW1Nd7/4/MUe+yHfQX+TFZoo=} - engines: {node: '>=10'} - dev: false - /object-inspect/1.12.2: resolution: {integrity: sha1-wGQfJjlFMvKKuNeWq5VOQ8AJqOo=} dev: false @@ -1829,11 +1688,6 @@ packages: word-wrap: 1.2.3 dev: false - /p-cancelable/2.1.1: - resolution: {integrity: sha1-qrf71BZYL6MqPbSYWcEiSHxe0s8=} - engines: {node: '>=8'} - dev: false - /p-limit/3.1.0: resolution: {integrity: sha1-4drMvnjQ0TiMoYxk/qOOPlfjcGs=} engines: {node: '>=10'} @@ -1893,13 +1747,6 @@ packages: resolution: {integrity: sha1-0N8qE38AeUVl/K87LADNCfjVpac=} dev: false - /pump/3.0.0: - resolution: {integrity: sha1-tKIRaBW94vTh6mAjVOjHVWUQemQ=} - dependencies: - end-of-stream: 1.4.4 - once: 1.4.0 - dev: false - /punycode/2.1.1: resolution: {integrity: sha1-tYsBCsQMIsVldhbI0sLALHv0eew=} engines: {node: '>=6'} @@ -1909,11 +1756,6 @@ packages: resolution: {integrity: sha1-SSkii7xyTfrEPg77BYyve2z7YkM=} dev: false - /quick-lru/5.1.1: - resolution: {integrity: sha1-NmST5rPkKjpoheLpnRj4D7eoyTI=} - engines: {node: '>=10'} - dev: false - /randombytes/2.1.0: resolution: {integrity: sha1-32+ENy8CcNxlzfYpE0mrekc9Tyo=} dependencies: @@ -1959,10 +1801,6 @@ packages: engines: {node: '>=0.10.0'} dev: false - /resolve-alpn/1.2.1: - resolution: {integrity: sha1-t629rDVGqq7CC0Xn2CZZJwcnJvk=} - dev: false - /resolve-from/4.0.0: resolution: {integrity: sha1-SrzYUq0y3Xuqv+m0DgCjbbXzkuY=} engines: {node: '>=4'} @@ -1977,12 +1815,6 @@ packages: supports-preserve-symlinks-flag: 1.0.0 dev: false - /responselike/2.0.1: - resolution: {integrity: sha1-mgvI/cJS8/scymiwFlkQWboUIrw=} - dependencies: - lowercase-keys: 2.0.0 - dev: false - /resumer/0.0.0: resolution: {integrity: sha1-8ej0YeQGS6Oegq883CqMiT0HZ1k=} dependencies: @@ -2499,7 +2331,7 @@ packages: dev: false file:projects/vcpkg-ce.tgz: - resolution: {integrity: sha512-HFo6CZKU2zxaLgIwg3beQerWgcek66UjHUz9/COWhczg/IFkDc80pAVT6HXEbHvlJps2bUVZzwzK84e6V+csTQ==, tarball: file:projects/vcpkg-ce.tgz} + resolution: {integrity: sha512-3XJ7b/ScbG0/B0JLlKK0DVNa0kmSdPbHKFNeAf0Ct7H3nuYPCXoSNOLTOvLulXY3jwsQOwDEeHokZoSARv7IjA==, tarball: file:projects/vcpkg-ce.tgz} name: '@rush-temp/vcpkg-ce' version: 0.0.0 dependencies: @@ -2518,7 +2350,6 @@ 
packages: ee-ts: 2.0.0-rc.6_typescript@4.5.5 eslint: 8.8.0 eslint-plugin-notice: 0.9.10_eslint@8.8.0 - got: 11.8.5 marked: 4.0.12 marked-terminal: 5.1.1_marked@4.0.12 sed-lite: 0.8.4 diff --git a/ce/test/core/acquire-tests.ts b/ce/test/core/acquire-tests.ts deleted file mode 100644 index 2e5b005bba..0000000000 --- a/ce/test/core/acquire-tests.ts +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -import { acquireArtifactFile, resolveNugetUrl } from '@microsoft/vcpkg-ce/dist/fs/acquire'; -import { strict } from 'assert'; -import { SuiteLocal } from './SuiteLocal'; - -describe('Acquire', () => { - const local = new SuiteLocal(); - const fs = local.fs; - - after(local.after.bind(local)); - - it('try some downloads', async () => { - - const remoteFile = fs.parseUri('https://raw.githubusercontent.com/microsoft/vscode/main/README.md'); - - let acq = acquireArtifactFile(local.session, [remoteFile], 'readme.md', {}); - - const outputFile = await acq; - - strict.ok(await outputFile.exists(), 'File should exist!'); - const size = await outputFile.size(); - // let's try some resume scenarios - - // chopped file, very small. - // let's chop the file in half - const fullFile = await outputFile.readFile(); - const halfFile = fullFile.slice(0, fullFile.length / 2); - - await outputFile.delete(); - await outputFile.writeFile(halfFile); - - local.session.channels.debug('==== chopped the file in half, redownload'); - - acq = acquireArtifactFile(local.session, [remoteFile], 'readme.md', {}); - await acq; - const newsize = await outputFile.size(); - strict.equal(newsize, size, 'the file should be the right size at the end'); - - }); - - - it('larger file', async () => { - const remoteFile = fs.parseUri('https://user-images.githubusercontent.com/1487073/58344409-70473b80-7e0a-11e9-8570-b2efc6f8fa44.png'); - - let acq = acquireArtifactFile(local.session, [remoteFile], 'xyz.png', {}); - - const outputFile = await acq; - - const fullSize = await outputFile.size(); - - strict.ok(await outputFile.exists(), 'File should exist!'); - strict.ok(fullSize > 1 << 16, 'Should be at least 64k'); - - const size = await outputFile.size(); - - - // try getting the same file again (so, should hit the cache.) - local.session.channels.debug('==== get the same large file again. should hit cache'); - await acquireArtifactFile(local.session, [remoteFile], 'xyz.png', {}); - - local.session.channels.debug('==== was that ok?'); - - // chopped file, big. - // let's chop the file in half - const fullFile = await outputFile.readFile(); - const halfFile = fullFile.slice(0, fullFile.length / 2); - - await outputFile.delete(); - await outputFile.writeFile(halfFile); - - local.session.channels.debug('==== chopped the large file in half, should resume'); - acq = acquireArtifactFile(local.session, [remoteFile], 'xyz.png', {}); - - await acq; - const newsize = await outputFile.size(); - strict.equal(newsize, size, 'the file should be the right size at the end'); - - const newfull = (await outputFile.readFile()); - strict.equal(newfull.compare(fullFile), 0, 'files should be identical'); - }); - - /** - * The NuGet gallery servers don't do redirects on HEAD requests, and to work around it we have to issue a second GET - * for each HEAD, after the HEAD fails, which increases the overhead of getting the target file (or verifying that we have it.) - * - * I've made the test call resolve redirects up front, which did reduce the cost, so... it's about as fast as I can make it. 
- * (~400msec for the whole test, which ain't terrible.) - * - * The same thing can be accomplished by the all-encompassing nuget() call, but the test suffers if I use that directly, since we're - * calling for the same package multple times. 🤷 - */ - it('Download a nuget file', async () => { - const url = await resolveNugetUrl(local.session, 'zlib-msvc14-x64/1.2.11.7795'); - - local.session.channels.debug('==== Downloading nuget package'); - - const acq = acquireArtifactFile(local.session, [url], 'zlib-msvc.zip', {}); - // or const acq = nuget(local.session, 'zlib-msvc14-x64/1.2.11.7795', 'zlib-msvc.zip'); - - const outputFile = await acq; - local.session.channels.debug('==== done downloading'); - const fullSize = await outputFile.size(); - - strict.ok(await outputFile.exists(), 'File should exist!'); - strict.ok(fullSize > 1 << 16, 'Should be at least 64k'); - - const size = await outputFile.size(); - local.session.channels.debug(`==== Size: ${size}`); - - // what happens if we try again? We should hit our local cache - await acquireArtifactFile(local.session, [url], 'zlib-msvc.zip', {}); - - }); - -}); diff --git a/ce/test/core/http-filesystem-tests.ts b/ce/test/core/http-filesystem-tests.ts deleted file mode 100644 index bbb84d9499..0000000000 --- a/ce/test/core/http-filesystem-tests.ts +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -import { FileType } from '@microsoft/vcpkg-ce/dist/fs/filesystem'; -import { HttpsFileSystem } from '@microsoft/vcpkg-ce/dist/fs/http-filesystem'; -import { fail, strict } from 'assert'; -import { SuiteLocal } from './SuiteLocal'; - - -describe('HttpFileSystemTests', () => { - const local = new SuiteLocal(); - - after(local.after.bind(local)); - const fs = new HttpsFileSystem(local.session); - - it('stat a file', async () => { - - const uri = fs.parseUri('https://aka.ms/vcpkg-ce.version'); - const s = await fs.stat(uri); - strict.equal(s.type, FileType.File, 'Should be a file'); - strict.ok(s.size < 40, 'should be less than 40 bytes'); - strict.ok(s.size > 20, 'should be more than 20 bytes'); - - }); - - it('stat a non existant file', async () => { - try { - const uri = fs.parseUri('https://file.not.found/blabla'); - const s = await fs.stat(uri); - } catch { - return; - } - fail('Should have thrown'); - }); - - it('read a stream', async () => { - const uri = fs.parseUri('https://aka.ms/vcpkg-ce.version'); - - let text = ''; - - for await (const chunk of await fs.readStream(uri)) { - text += chunk.toString('utf8'); - } - strict.ok(text.length > 5, 'should have some text'); - strict.ok(text.length < 20, 'shouldnt have too much text'); - }); -}); diff --git a/ce/test/core/regression-tests.ts b/ce/test/core/regression-tests.ts index 342f40ccf1..177c6dd7e8 100644 --- a/ce/test/core/regression-tests.ts +++ b/ce/test/core/regression-tests.ts @@ -1,14 +1,8 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
-import { Session } from '@microsoft/vcpkg-ce/dist/session'; import { SuiteLocal } from './SuiteLocal'; -async function testRegistry(session: Session, sha: string) { - const uri = `https://github.com/microsoft/vcpkg-ce-catalog/archive/${sha}.zip`; - await session.registryDatabase.loadRegistry(session, session.fileSystem.parseUri(uri)); -} - describe('Regressions', () => { const local = new SuiteLocal(); @@ -18,10 +12,12 @@ describe('Regressions', () => { // regression discovered in https://github.com/microsoft/vcpkg-ce-catalog/pull/33 it('Loads 2ffbc04d6856a1d03c5de0ab94404f90636f7855 registry', async () => { - await testRegistry(local.session, '2ffbc04d6856a1d03c5de0ab94404f90636f7855'); + await local.session.registryDatabase.loadRegistry(local.session, + local.resourcesFolderUri.join('vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855')); }); it('Loads d471612be63b2fb506ab5f47122da460f5aa4d30 registry', async () => { - await testRegistry(local.session, 'd471612be63b2fb506ab5f47122da460f5aa4d30'); + await local.session.registryDatabase.loadRegistry(local.session, + local.resourcesFolderUri.join('vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30')); }); }); diff --git a/ce/test/core/sample-tests.ts b/ce/test/core/sample-tests.ts index 87697f97f3..a536222cad 100644 --- a/ce/test/core/sample-tests.ts +++ b/ce/test/core/sample-tests.ts @@ -2,27 +2,8 @@ // Licensed under the MIT License. import { sanitizePath } from '@microsoft/vcpkg-ce/dist/artifacts/artifact'; -import { notStrictEqual, strict } from 'assert'; +import { strict } from 'assert'; import { describe, it } from 'mocha'; -import { pipeline as origPipeline } from 'stream'; -import { promisify } from 'util'; - -const pipeline = promisify(origPipeline); - -// sample test using decorators. -describe('SomeTests', () => { - it('Try This Sample Test', () => { - notStrictEqual(5, 4, 'numbers should not be equal'); - }); -}); - -// sample test that uses describe/it -describe('sample test', () => { - it('does not make mistakes', () => { - notStrictEqual('A', 'B', 'letters should not be equal'); - }); -}); - describe('sanitization of paths', () => { it('makes nice clean paths', () => { @@ -37,7 +18,5 @@ describe('sanitization of paths', () => { strict.equal(sanitizePath('..\\.config'), '.config'); strict.equal(sanitizePath('/bar'), 'bar'); strict.equal(sanitizePath('\\this\\is\\a//test/of//a\\path//..'), 'this/is/a/test/of/a/path'); - }); }); - diff --git a/ce/test/core/stream-tests.ts b/ce/test/core/stream-tests.ts index ac018ad972..ca83b9fc17 100644 --- a/ce/test/core/stream-tests.ts +++ b/ce/test/core/stream-tests.ts @@ -7,8 +7,6 @@ import { SuiteLocal } from './SuiteLocal'; describe('StreamTests', () => { const local = new SuiteLocal(); - const fs = local.fs; - after(local.after.bind(local)); it('event emitter works', async () => { diff --git a/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/compilers/arm/gcc/gcc-2020.10.0.json b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/compilers/arm/gcc/gcc-2020.10.0.json new file mode 100644 index 0000000000..6dbbe5839f --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/compilers/arm/gcc/gcc-2020.10.0.json @@ -0,0 +1,59 @@ +{ + "info": { + "id": "compilers/arm/gcc", + "version": "2020.10.0", + "description": "The GNU Arm Embedded Toolchain is a ready-to-use, open-source suite of tools for C, C++ and assembly programming. 
The GNU Arm Embedded Toolchain targets the 32-bit Arm Cortex-A, Arm Cortex-M, and Arm Cortex-R processor families. The GNU Arm Embedded Toolchain includes the GNU Compiler (GCC) and is available free of charge directly from Arm for embedded software development on Windows, Linux, and Mac OS X operating systems.", + "summary": "GCC compiler for ARM CPUs." + }, + "contacts": { + "Garrett Serack": { + "email": "garretts@microsoft.com", + "role": "publisher" + } + }, + "demands": { + "windows": { + "install": { + "unzip": "https://developer.arm.com/-/media/Files/downloads/gnu-rm/10-2020q4/gcc-arm-none-eabi-10-2020-q4-major-win32.zip", + "sha256": "90057B8737B888C53CA5AEE332F1F73C401D6D3873124D2C2906DF4347EBEF9E", + "strip": 1 + }, + "exports": { + "paths": { + "PATH": "bin" + } + } + }, + "linux and arm64": { + "install": { + "untar": "https://developer.arm.com/-/media/Files/downloads/gnu-rm/10-2020q4/gcc-arm-none-eabi-10-2020-q4-major-aarch64-linux.tar.bz2", + "sha256": "343D8C812934FE5A904C73583A91EDD812B1AC20636EB52DE04135BB0F5CF36A", + "strip": 1 + } + }, + "linux and x64": { + "install": { + "untar": "https://developer.arm.com/-/media/Files/downloads/gnu-rm/10-2020q4/gcc-arm-none-eabi-10-2020-q4-major-x86_64-linux.tar.bz2", + "sha256": "21134CAA478BBF5352E239FBC6E2DA3038F8D2207E089EFC96C3B55F1EDCD618", + "strip": 1 + } + }, + "osx and x64": { + "install": { + "untar": "https://developer.arm.com/-/media/Files/downloads/gnu-rm/10-2020q4/gcc-arm-none-eabi-10-2020-q4-major-mac.tar.bz2", + "sha256": "BED12DE3565D4EB02E7B58BE945376EACA79A8AE3EBB785EC7344E7E2DB0BDC0", + "strip": 1 + } + }, + "not windows": { + "exports": { + "paths": { + "PATH": "bin" + } + } + }, + "linux": { + "warning": "Ensure libncurses5 is installed with your system package manager before running arm-none-eabi-gdb." 
+ } + } +} \ No newline at end of file diff --git a/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/index.yaml b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/index.yaml new file mode 100644 index 0000000000..be59ad5689 --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/index.yaml @@ -0,0 +1,130 @@ +# MANIFEST-INDEX +items: + [ + compilers/arm/gcc/gcc-2020.10.0.json, tools/arduino/arduino-ide-1.18.15.json, tools/arduino/arduino-cli-0.18.3.json, tools/compuphase/termite-3.4.0.json, tools/kitware/cmake-3.20.1.json, tools/microsoft/openocd-0.11.0-ms1.json, tools/microsoft/openocd-0.11.0.json, tools/ninja-build/ninja-1.10.2.json, tools/raspberrypi/pico-sdk-1.3.0.json + ] +indexes: + IdentityKey/info.id: + keys: + compilers/arm/gcc: [ 0 ] + raspberrypi/pico-sdk: [ 8 ] + tools/arduino/arduino-cli: [ 2 ] + tools/arduino/arduino-ide: [ 1 ] + tools/compuphase/termite: [ 3 ] + tools/kitware/cmake: [ 4 ] + tools/microsoft/openocd: [ 5, 6 ] + tools/ninja-build/ninja: [ 7 ] + words: + arduino: [ 1, 2 ] + arduino-cli: [ 2 ] + arduino-ide: [ 1 ] + arduino/arduino: [ 1, 2 ] + arduino/arduino-cli: [ 2 ] + arduino/arduino-ide: [ 1 ] + arm: [ 0 ] + arm/gcc: [ 0 ] + build: [ 7 ] + build/ninja: [ 7 ] + cli: [ 2 ] + cmake: [ 4 ] + compilers: [ 0 ] + compilers/arm: [ 0 ] + compilers/arm/gcc: [ 0 ] + compuphase: [ 3 ] + compuphase/termite: [ 3 ] + gcc: [ 0 ] + ide: [ 1 ] + kitware: [ 4 ] + kitware/cmake: [ 4 ] + microsoft: [ 5, 6 ] + microsoft/openocd: [ 5, 6 ] + ninja: [ 7 ] + ninja-build: [ 7 ] + ninja-build/ninja: [ 7 ] + openocd: [ 5, 6 ] + pico: [ 8 ] + pico-sdk: [ 8 ] + raspberrypi: [ 8 ] + raspberrypi/pico: [ 8 ] + raspberrypi/pico-sdk: [ 8 ] + sdk: [ 8 ] + termite: [ 3 ] + tools: [ 1, 2, 3, 4, 5, 6, 7 ] + tools/arduino: [ 1, 2 ] + tools/arduino/arduino: [ 1, 2 ] + tools/arduino/arduino-cli: [ 2 ] + tools/arduino/arduino-ide: [ 1 ] + tools/compuphase: [ 3 ] + tools/compuphase/termite: [ 3 ] + tools/kitware: [ 4 ] + tools/kitware/cmake: [ 4 ] + tools/microsoft: [ 5, 6 ] + tools/microsoft/openocd: [ 5, 6 ] + tools/ninja: [ 7 ] + tools/ninja-build: [ 7 ] + tools/ninja-build/ninja: [ 7 ] + SemverKey/info.version: + keys: + 0.11.0-ms1: [ 5 ] + 0.11.0: [ 6 ] + 0.18.3: [ 2 ] + 1.3.0: [ 8 ] + 1.10.2: [ 7 ] + 1.18.15: [ 1 ] + 3.4.0: [ 3 ] + 3.20.1: [ 4 ] + 2020.10.0: [ 0 ] + StringKey/info.summary: + keys: + Arduino IDE: [ 1, 2 ] + Free and open on-chip debugging: [ 5, 6 ] + GCC compiler for ARM CPUs.: [ 0 ] + Kitware's cmake tool: [ 4 ] + Ninja is a small build system with a focus on speed.: [ 7 ] + Termite is an easy to use and easy to configure RS232 terminal.: [ 3 ] + The Raspberry Pi Pico SDK: [ 8 ] + words: + ARM: [ 0 ] + Arduino: [ 1, 2 ] + CPUs: [ 0 ] + CPUs.: [ 0 ] + Free: [ 5, 6 ] + GCC: [ 0 ] + IDE: [ 1, 2 ] + Kitware: [ 4 ] + Kitware's: [ 4 ] + Ninja: [ 7 ] + Pi: [ 8 ] + Pico: [ 8 ] + RS232: [ 3 ] + Raspberry: [ 8 ] + SDK: [ 8 ] + Termite: [ 3 ] + The: [ 8 ] + a: [ 7 ] + an: [ 3 ] + and: [ 3, 5, 6 ] + build: [ 7 ] + chip: [ 5, 6 ] + cmake: [ 4 ] + compiler: [ 0 ] + configure: [ 3 ] + debugging: [ 5, 6 ] + easy: [ 3 ] + focus: [ 7 ] + for: [ 0 ] + is: [ 3, 7 ] + on: [ 5, 6, 7 ] + on-chip: [ 5, 6 ] + open: [ 5, 6 ] + s: [ 4 ] + small: [ 7 ] + speed: [ 7 ] + speed.: [ 7 ] + system: [ 7 ] + terminal: [ 3 ] + terminal.: [ 3 ] + to: [ 3 ] + tool: [ 4 ] + use: [ 3 ] + with: [ 7 ] diff --git a/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/arduino/arduino-cli-0.18.3.json 
b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/arduino/arduino-cli-0.18.3.json new file mode 100644 index 0000000000..95d46bf029 --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/arduino/arduino-cli-0.18.3.json @@ -0,0 +1,72 @@ +{ + "info": { + "id": "tools/arduino/arduino-cli", + "version": "0.18.3", + "description": "The open-source Arduino Software (IDE) makes it easy to write code and upload it to the board. This software can be used with any Arduino board.", + "summary": "Arduino IDE" + }, + "contacts": { + "Marc Goodner": { + "email": "mgoodner@microsoft.com", + "role": "publisher" + }, + "Arduino": { + "role": "originator" + } + }, + "demands": { + "windows and x64": { + "install": { + "unzip": "https://github.com/arduino/arduino-cli/releases/download/0.18.3/arduino-cli_0.18.3_Windows_64bit.zip", + "sha256": "b92ae2923edab07e7d39ac8fdc54500bf5198868522d7acfa5090de970cf9603" + } + }, + "windows and x86": { + "install": { + "unzip": "https://github.com/arduino/arduino-cli/releases/download/0.18.3/arduino-cli_0.18.3_Windows_32bit.zip", + "sha256": "b8fa3f2c614557decf6ebe6688bc635b9260a220305be139d2368c437d4c3cfa" + } + }, + "windows": { + "exports": { + "tools": { + "arduino-cli": "arduino-cli.exe" + }, + "paths": { + "PATH": "." + } + } + }, + "linux and x86": { + "install": { + "untar": "https://github.com/arduino/arduino-cli/releases/download/0.18.3/arduino-cli_0.18.3_Linux_32bit.tar.gz", + "sha256": "fe238a22579905866ed9e6582a0d6078060f29a9de3dbbb47b3931ef9a5f1f08", + "strip": 1 + } + }, + "linux and x64": { + "install": { + "untar": "https://github.com/arduino/arduino-cli/releases/download/0.18.3/arduino-cli_0.18.3_Linux_64bit.tar.gz", + "sha256": "80fb4547fb869086769dade348040864ae77b30d13cf6786d384bebccf4eb7eb", + "strip": 1 + } + }, + "linux and arm64": { + "install": { + "untar": "https://github.com/arduino/arduino-cli/releases/download/0.18.3/arduino-cli_0.18.3_Linux_ARM64.tar.gz", + "sha256": "13eb5ab0edb9a8f20768e7e0e5b967140f0fac7f84ef4f78c0dae0c8f13cdb73", + "strip": 1 + } + }, + "not windows": { + "exports": { + "tools": { + "arduino-cli": "arduino-cli" + }, + "paths": { + "PATH": "." + } + } + } + } +} \ No newline at end of file diff --git a/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/arduino/arduino-ide-1.18.15.json b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/arduino/arduino-ide-1.18.15.json new file mode 100644 index 0000000000..d5bd75a898 --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/arduino/arduino-ide-1.18.15.json @@ -0,0 +1,65 @@ +{ + "info": { + "id": "tools/arduino/arduino-ide", + "version": "1.18.15", + "description": "The open-source Arduino Software (IDE) makes it easy to write code and upload it to the board. This software can be used with any Arduino board.", + "summary": "Arduino IDE" + }, + "contacts": { + "Marc Goodner": { + "email": "mgoodner@microsoft.com", + "role": "publisher" + }, + "Arduino": { + "role": "originator" + } + }, + "demands": { + "windows": { + "install": { + "unzip": "https://downloads.arduino.cc/arduino-1.8.15-windows.zip", + "sha256": "C53E7D291EDEBCDB58FFA34AEB53C5B777D814CEA8030297F06116ED0598D139", + "strip": 1 + }, + "exports": { + "tools": { + "arduino": "arduino.exe" + }, + "paths": { + "PATH": "." 
+ } + } + }, + "linux and x86": { + "install": { + "untar": "https://downloads.arduino.cc/arduino-1.8.15-linux32.tar.xz", + "sha512": "02a10831c7125144ac6f701528f9d176a1a7ac0df6d9391d31d6758ae8f3dea3f8b8390320c7e7d3efb9ed45fb79527caa798ad354bc8a857c2f9c42f4612a8f", + "strip": 2 + } + }, + "linux and x64": { + "install": { + "untar": "https://downloads.arduino.cc/arduino-1.8.15-linux64.tar.xz", + "sha512": "ae84a8f62cbd3ecf5400a357ac5ebd04cbc80b31a2fbc80f280850465f7460ad3a02b32830021ef980b72c60d52eb65a4fc551988c91d01b25e8d646596175f8", + "strip": 2 + } + }, + "linux and arm64": { + "install": { + "untar": "https://downloads.arduino.cc/arduino-1.8.15-linuxaarch64.tar.xz", + "sha512": "22b4e5f3a79723bb09d85107facfe7d367d8a1aa347447e935481823192bd2390bfe0e5e694a5e2ee6addb59ec269a72eb829f8f791fb8641000179884bcfff2", + "strip": 2 + } + }, + "not windows": { + "exports": { + "tools": { + "arduino": "arduino" + }, + "paths": { + "PATH": "." + } + } + } + } +} diff --git a/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/compuphase/termite-3.4.0.json b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/compuphase/termite-3.4.0.json new file mode 100644 index 0000000000..691231b80c --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/compuphase/termite-3.4.0.json @@ -0,0 +1,34 @@ +{ + "info": { + "id": "tools/compuphase/termite", + "version": "3.4.0", + "description": "Termite is an easy to use and easy to configure RS232 terminal. It uses an interface similar to that of \"messenger\" or \"chat\" programs, with a large window that contains all received data and an edit line for typing in strings to transmit.", + "summary": "Termite is an easy to use and easy to configure RS232 terminal." + }, + "contacts": { + "Alan Leung": { + "email": "alleu@microsoft.com", + "role": "publisher" + }, + "CompuPhase": { + "email": "info@compuphase.com", + "role": "originator" + } + }, + "demands": { + "windows": { + "install": { + "unzip": "https://www.compuphase.com/software/termite-3.4.zip", + "sha256": "e72eddaabe1375dc9422d20b359206d242bf0745f47f60ce37d21e9dd905ba51" + }, + "exports": { + "tools": { + "termite": "bin/Termite.exe" + }, + "paths": { + "PATH": "bin" + } + } + } + } +} \ No newline at end of file diff --git a/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/kitware/cmake-3.20.1.json b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/kitware/cmake-3.20.1.json new file mode 100644 index 0000000000..7387c6fa31 --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/kitware/cmake-3.20.1.json @@ -0,0 +1,79 @@ +{ + "info": { + "id": "tools/kitware/cmake", + "version": "3.20.1", + "description": "CMake is an open-source, cross-platform family of tools designed to build, test and package software. CMake is used to control the software compilation process using simple platform and compiler independent configuration files, and generate native makefiles and workspaces that can be used in the compiler environment of your choice. 
The suite of CMake tools were created by Kitware in response to the need for a powerful, cross-platform build environment for open-source projects such as ITK and VTK.", + "summary": "Kitware's cmake tool" + }, + "contacts": { + "Garrett Serack": { + "email": "garretts@microsoft.com", + "role": "publisher" + }, + "Kitware": { + "email": "kitware@kitware.com", + "role": "originator" + } + }, + "demands": { + "windows and x64": { + "install": { + "unzip": "https://github.com/Kitware/CMake/releases/download/v3.20.0/cmake-3.20.0-windows-x86_64.zip", + "sha256": "056378cb599353479c3a8aa2654454b8a3eaa3c8c0872928ba7e09c3ec50774c", + "strip": 1 + } + }, + "windows and x86": { + "install": { + "unzip": "https://github.com/Kitware/CMake/releases/download/v3.20.1/cmake-3.20.1-windows-i386.zip", + "sha256": "650026534e66dabe0ed6be3422e86fabce5fa86d43927171ea8b8dfd0877fc9d", + "strip": 1 + } + }, + "windows": { + "exports": { + "tools": { + "cmake": "bin/cmake.exe", + "cmake_gui": "bin/cmake-gui.exe", + "ctest": "bin/ctest.exe" + }, + "paths": { + "PATH": "bin" + } + } + }, + "osx": { + "install": { + "untar": "https://github.com/Kitware/CMake/releases/download/v3.20.1/cmake-3.20.1-macos-universal.tar.gz", + "sha256": "44143d47fdcc7fc3042576c6a8b661e3b65a18143666f74d6e8d93ca3ab5cd95", + "strip": 3 + } + }, + "linux and x64": { + "install": { + "untar": "https://github.com/Kitware/CMake/releases/download/v3.20.1/cmake-3.20.1-linux-x86_64.tar.gz", + "sha256": "B8C141BD7A6D335600AB0A8A35E75AF79F95B837F736456B5532F4D717F20A09", + "strip": 1 + } + }, + "linux and arm64": { + "install": { + "untar": "https://github.com/Kitware/CMake/releases/download/v3.20.1/cmake-3.20.1-linux-aarch64.tar.gz", + "sha256": "2761a222c14a15b9bdf1bdb4a17c10806757b7ed3bc26a84523f042ec212b76c", + "strip": 1 + } + }, + "not windows": { + "exports": { + "tools": { + "cmake": "bin/cmake", + "cmake_gui": "bin/cmake-gui", + "ctest": "bin/ctest" + }, + "paths": { + "PATH": "bin" + } + } + } + } +} \ No newline at end of file diff --git a/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/microsoft/openocd-0.11.0-ms1.json b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/microsoft/openocd-0.11.0-ms1.json new file mode 100644 index 0000000000..67e28f1956 --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/microsoft/openocd-0.11.0-ms1.json @@ -0,0 +1,52 @@ +{ + "info": { + "id": "tools/microsoft/openocd", + "version": "0.11.0-ms1", + "description": "OpenOCD provides on-chip programming and debugging support with a layered architecture of JTAG interface and TAP support including: (X)SVF playback to facilitate automated boundary scan and FPGA/CPLD programming; debug target support (e.g. ARM, MIPS): single-stepping, breakpoints/watchpoints, gprof profiling, etc; flash chip drivers (e.g. CFI, NAND, internal flash); embedded TCL interpreter for easy scripting. Several network interfaces are available for interacting with OpenOCD: telnet, TCL, and GDB. The GDB server enables OpenOCD to function as a \"remote target\" for source-level debugging of embedded systems using the GNU GDB program (and the others who talk GDB protocol, e.g. IDA Pro). 
This build of OpenOCD includes additional vendor extensions from Azure Sphere, Raspberry Pi, and STMicroelectronics, plus improved RTOS support.", + "summary": "Free and open on-chip debugging" + }, + "contacts": { + "Ben McMorran": { + "email": "bemcmorr@microsoft.com", + "role": [ + "publisher", + "originator" + ] + }, + "OpenOCD (upstream)": { + "email": "openocd-user@lists.sourceforge.net", + "role": "other" + } + }, + "demands": { + "windows and x64": { + "install": { + "untar": "https://github.com/microsoft/openocd/releases/download/ms-v0.11.0-ms1/openocd-ms-v0.11.0-ms1-i686-w64-mingw32.tar.gz", + "sha256": "dabe82ecc1aa1b1aa6d28216ee74d5702b9147fc74796990e14a7fa5644744a1" + }, + "exports": { + "tools": { + "openocd": "bin/openocd.exe" + }, + "paths": { + "PATH": "bin" + } + } + }, + "linux and x64": { + "install": { + "untar": "https://github.com/microsoft/openocd/releases/download/ms-v0.11.0-ms1/openocd-ms-v0.11.0-ms1-linux.tar.gz", + "sha256": "e70a1405f5ffeb87d9487b49fe40171fe896fbd7d01a51b12cffdfb6d2b0501b", + "strip": 1 + }, + "exports": { + "tools": { + "openocd": "bin/openocd" + }, + "paths": { + "PATH": "bin" + } + } + } + } +} \ No newline at end of file diff --git a/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/microsoft/openocd-0.11.0.json b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/microsoft/openocd-0.11.0.json new file mode 100644 index 0000000000..6a3defb0f9 --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/microsoft/openocd-0.11.0.json @@ -0,0 +1,52 @@ +{ + "info": { + "id": "tools/microsoft/openocd", + "version": "0.11.0", + "description": "OpenOCD provides on-chip programming and debugging support with a layered architecture of JTAG interface and TAP support including: (X)SVF playback to facilitate automated boundary scan and FPGA/CPLD programming; debug target support (e.g. ARM, MIPS): single-stepping, breakpoints/watchpoints, gprof profiling, etc; flash chip drivers (e.g. CFI, NAND, internal flash); embedded TCL interpreter for easy scripting. Several network interfaces are available for interacting with OpenOCD: telnet, TCL, and GDB. The GDB server enables OpenOCD to function as a \"remote target\" for source-level debugging of embedded systems using the GNU GDB program (and the others who talk GDB protocol, e.g. IDA Pro). 
This build of OpenOCD includes additional vendor extensions from Azure Sphere, Raspberry Pi, and STMicroelectronics.", + "summary": "Free and open on-chip debugging" + }, + "contacts": { + "Ben McMorran": { + "email": "bemcmorr@microsoft.com", + "role": [ + "publisher", + "originator" + ] + }, + "OpenOCD (upstream)": { + "email": "openocd-user@lists.sourceforge.net", + "role": "other" + } + }, + "demands": { + "windows and x64": { + "install": { + "untar": "https://github.com/microsoft/openocd/releases/download/ms-v0.11.0/openocd-ms-v0.11.0-i686-w64-mingw32.tar.gz", + "sha256": "f15c1b604f5f138a2731511143dcbbd565fa4dfed8f392abc599acea65177523" + }, + "exports": { + "tools": { + "openocd": "bin/openocd.exe" + }, + "paths": { + "PATH": "./bin" + } + } + }, + "linux and x64": { + "install": { + "untar": "https://github.com/microsoft/openocd/releases/download/ms-v0.11.0/openocd-ms-v0.11.0-linux.tar.gz", + "sha256": "bfa359756d0cad2d3a2fa72a8416d369960732dd25f262397b66048db7a9c570", + "strip": 1 + }, + "exports": { + "tools": { + "openocd": "./bin/openocd" + }, + "paths": { + "PATH": "bin" + } + } + } + } +} \ No newline at end of file diff --git a/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/ninja-build/ninja-1.10.2.json b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/ninja-build/ninja-1.10.2.json new file mode 100644 index 0000000000..3ec55b5d5b --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/ninja-build/ninja-1.10.2.json @@ -0,0 +1,56 @@ +{ + "info": { + "id": "tools/ninja-build/ninja", + "version": "1.10.2", + "description": "Ninja is a small build system with a focus on speed. It differs from other build systems in two major respects, it is designed to have its input files generated by a higher-level build system, and it is designed to run builds as fast as possible.", + "summary": "Ninja is a small build system with a focus on speed." + }, + "contacts": { + "Marc Goodner": { + "email": "mgoodner@microsoft.com", + "role": "publisher" + }, + "ninja-build": { + "email": "ninja-build@googlegroups.com", + "role": "originator" + } + }, + "demands": { + "windows": { + "install": { + "unzip": "https://github.com/ninja-build/ninja/releases/download/v1.10.2/ninja-win.zip", + "sha256": "bbde850d247d2737c5764c927d1071cbb1f1957dcabda4a130fa8547c12c695f" + }, + "exports": { + "tools": { + "ninja": "ninja.exe" + }, + "paths": { + "PATH": "." + } + } + }, + "osx": { + "install": { + "unzip": "https://github.com/ninja-build/ninja/releases/download/v1.10.2/ninja-mac.zip", + "sha256": "6fa359f491fac7e5185273c6421a000eea6a2f0febf0ac03ac900bd4d80ed2a5" + } + }, + "linux": { + "install": { + "unzip": "https://github.com/ninja-build/ninja/releases/download/v1.10.2/ninja-linux.zip", + "sha256": "763464859c7ef2ea3a0a10f4df40d2025d3bb9438fcb1228404640410c0ec22d" + } + }, + "not windows": { + "exports": { + "tools": { + "ninja": "ninja" + }, + "paths": { + "PATH": "." 
+ } + } + } + } +} \ No newline at end of file diff --git a/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/raspberrypi/pico-sdk-1.3.0.json b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/raspberrypi/pico-sdk-1.3.0.json new file mode 100644 index 0000000000..48b485c6db --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-2ffbc04d6856a1d03c5de0ab94404f90636f7855/tools/raspberrypi/pico-sdk-1.3.0.json @@ -0,0 +1,31 @@ +{ + "info": { + "id": "raspberrypi/pico-sdk", + "version": "1.3.0", + "description": "The Raspberry Pi Pico SDK provides the headers, libraries and build system necessary to write programs for the RP2040-based devices such as the Raspberry Pi Pico in C, C++ or assembly language.", + "summary": "The Raspberry Pi Pico SDK" + }, + "contacts": { + "Marc Goodner": { + "email": "mgoodner@microsoft.com", + "role": "publisher" + } + }, + "requires": { + "compilers/arm/gcc": "2020.10.0", + "tools/kitware/cmake": "3.20.1" + }, + "install": { + "git": "https://github.com/raspberrypi/pico-sdk/", + "commit": "1.3.0", + "options": [ + "recurse", + "full" + ] + }, + "exports": { + "paths": { + "PICO_SDK_PATH": "./" + } + } +} \ No newline at end of file diff --git a/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/compilers/arm/gcc/gcc-2020.10.0.json b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/compilers/arm/gcc/gcc-2020.10.0.json new file mode 100644 index 0000000000..ad16e3ccfa --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/compilers/arm/gcc/gcc-2020.10.0.json @@ -0,0 +1,57 @@ +{ + "id": "compilers/arm/gcc", + "version": "2020.10.0", + "description": "The GNU Arm Embedded Toolchain is a ready-to-use, open-source suite of tools for C, C++ and assembly programming. The GNU Arm Embedded Toolchain targets the 32-bit Arm Cortex-A, Arm Cortex-M, and Arm Cortex-R processor families. 
The GNU Arm Embedded Toolchain includes the GNU Compiler (GCC) and is available free of charge directly from Arm for embedded software development on Windows, Linux, and Mac OS X operating systems.", + "summary": "GCC compiler for ARM CPUs.", + "contacts": { + "Garrett Serack": { + "email": "garretts@microsoft.com", + "role": "publisher" + } + }, + "demands": { + "windows": { + "install": { + "unzip": "https://developer.arm.com/-/media/Files/downloads/gnu-rm/10-2020q4/gcc-arm-none-eabi-10-2020-q4-major-win32.zip", + "sha256": "90057B8737B888C53CA5AEE332F1F73C401D6D3873124D2C2906DF4347EBEF9E", + "strip": 1 + }, + "exports": { + "paths": { + "PATH": "bin" + } + } + }, + "linux and arm64": { + "install": { + "untar": "https://developer.arm.com/-/media/Files/downloads/gnu-rm/10-2020q4/gcc-arm-none-eabi-10-2020-q4-major-aarch64-linux.tar.bz2", + "sha256": "343D8C812934FE5A904C73583A91EDD812B1AC20636EB52DE04135BB0F5CF36A", + "strip": 1 + } + }, + "linux and x64": { + "install": { + "untar": "https://developer.arm.com/-/media/Files/downloads/gnu-rm/10-2020q4/gcc-arm-none-eabi-10-2020-q4-major-x86_64-linux.tar.bz2", + "sha256": "21134CAA478BBF5352E239FBC6E2DA3038F8D2207E089EFC96C3B55F1EDCD618", + "strip": 1 + } + }, + "osx and x64": { + "install": { + "untar": "https://developer.arm.com/-/media/Files/downloads/gnu-rm/10-2020q4/gcc-arm-none-eabi-10-2020-q4-major-mac.tar.bz2", + "sha256": "BED12DE3565D4EB02E7B58BE945376EACA79A8AE3EBB785EC7344E7E2DB0BDC0", + "strip": 1 + } + }, + "not windows": { + "exports": { + "paths": { + "PATH": "bin" + } + } + }, + "linux": { + "warning": "Ensure libncurses5 is installed with your system package manager before running arm-none-eabi-gdb." + } + } +} \ No newline at end of file diff --git a/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/index.yaml b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/index.yaml new file mode 100644 index 0000000000..96a1188eb8 --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/index.yaml @@ -0,0 +1,130 @@ +# MANIFEST-INDEX +items: + [ + compilers/arm/gcc/gcc-2020.10.0.json, tools/arduino/arduino-cli-0.18.3.json, tools/arduino/arduino-ide-1.18.15.json, tools/compuphase/termite-3.4.0.json, tools/kitware/cmake-3.20.1.json, tools/microsoft/openocd-0.11.0-ms1.json, tools/microsoft/openocd-0.11.0.json, tools/ninja-build/ninja-1.10.2.json, tools/raspberrypi/pico-sdk-1.3.0.json + ] +indexes: + IdentityKey/id: + keys: + compilers/arm/gcc: [ 0 ] + raspberrypi/pico-sdk: [ 8 ] + tools/arduino/arduino-cli: [ 1 ] + tools/arduino/arduino-ide: [ 2 ] + tools/compuphase/termite: [ 3 ] + tools/kitware/cmake: [ 4 ] + tools/microsoft/openocd: [ 5, 6 ] + tools/ninja-build/ninja: [ 7 ] + words: + arduino: [ 1, 2 ] + arduino-cli: [ 1 ] + arduino-ide: [ 2 ] + arduino/arduino: [ 1, 2 ] + arduino/arduino-cli: [ 1 ] + arduino/arduino-ide: [ 2 ] + arm: [ 0 ] + arm/gcc: [ 0 ] + build: [ 7 ] + build/ninja: [ 7 ] + cli: [ 1 ] + cmake: [ 4 ] + compilers: [ 0 ] + compilers/arm: [ 0 ] + compilers/arm/gcc: [ 0 ] + compuphase: [ 3 ] + compuphase/termite: [ 3 ] + gcc: [ 0 ] + ide: [ 2 ] + kitware: [ 4 ] + kitware/cmake: [ 4 ] + microsoft: [ 5, 6 ] + microsoft/openocd: [ 5, 6 ] + ninja: [ 7 ] + ninja-build: [ 7 ] + ninja-build/ninja: [ 7 ] + openocd: [ 5, 6 ] + pico: [ 8 ] + pico-sdk: [ 8 ] + raspberrypi: [ 8 ] + raspberrypi/pico: [ 8 ] + raspberrypi/pico-sdk: [ 8 ] + sdk: [ 8 ] + termite: [ 3 ] + tools: [ 1, 2, 3, 4, 5, 6, 7 ] + tools/arduino: [ 1, 2 ] + 
tools/arduino/arduino: [ 1, 2 ] + tools/arduino/arduino-cli: [ 1 ] + tools/arduino/arduino-ide: [ 2 ] + tools/compuphase: [ 3 ] + tools/compuphase/termite: [ 3 ] + tools/kitware: [ 4 ] + tools/kitware/cmake: [ 4 ] + tools/microsoft: [ 5, 6 ] + tools/microsoft/openocd: [ 5, 6 ] + tools/ninja: [ 7 ] + tools/ninja-build: [ 7 ] + tools/ninja-build/ninja: [ 7 ] + SemverKey/version: + keys: + 0.11.0-ms1: [ 5 ] + 0.11.0: [ 6 ] + 0.18.3: [ 1 ] + 1.3.0: [ 8 ] + 1.10.2: [ 7 ] + 1.18.15: [ 2 ] + 3.4.0: [ 3 ] + 3.20.1: [ 4 ] + 2020.10.0: [ 0 ] + StringKey/summary: + keys: + Arduino IDE: [ 1, 2 ] + Free and open on-chip debugging: [ 5, 6 ] + GCC compiler for ARM CPUs.: [ 0 ] + Kitware's cmake tool: [ 4 ] + Ninja is a small build system with a focus on speed.: [ 7 ] + Termite is an easy to use and easy to configure RS232 terminal.: [ 3 ] + The Raspberry Pi Pico SDK: [ 8 ] + words: + ARM: [ 0 ] + Arduino: [ 1, 2 ] + CPUs: [ 0 ] + CPUs.: [ 0 ] + Free: [ 5, 6 ] + GCC: [ 0 ] + IDE: [ 1, 2 ] + Kitware: [ 4 ] + Kitware's: [ 4 ] + Ninja: [ 7 ] + Pi: [ 8 ] + Pico: [ 8 ] + RS232: [ 3 ] + Raspberry: [ 8 ] + SDK: [ 8 ] + Termite: [ 3 ] + The: [ 8 ] + a: [ 7 ] + an: [ 3 ] + and: [ 3, 5, 6 ] + build: [ 7 ] + chip: [ 5, 6 ] + cmake: [ 4 ] + compiler: [ 0 ] + configure: [ 3 ] + debugging: [ 5, 6 ] + easy: [ 3 ] + focus: [ 7 ] + for: [ 0 ] + is: [ 3, 7 ] + on: [ 5, 6, 7 ] + on-chip: [ 5, 6 ] + open: [ 5, 6 ] + s: [ 4 ] + small: [ 7 ] + speed: [ 7 ] + speed.: [ 7 ] + system: [ 7 ] + terminal: [ 3 ] + terminal.: [ 3 ] + to: [ 3 ] + tool: [ 4 ] + use: [ 3 ] + with: [ 7 ] diff --git a/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/arduino/arduino-cli-0.18.3.json b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/arduino/arduino-cli-0.18.3.json new file mode 100644 index 0000000000..af21d15b7e --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/arduino/arduino-cli-0.18.3.json @@ -0,0 +1,70 @@ +{ + "id": "tools/arduino/arduino-cli", + "version": "0.18.3", + "description": "The open-source Arduino Software (IDE) makes it easy to write code and upload it to the board. This software can be used with any Arduino board.", + "summary": "Arduino IDE", + "contacts": { + "Marc Goodner": { + "email": "mgoodner@microsoft.com", + "role": "publisher" + }, + "Arduino": { + "role": "originator" + } + }, + "demands": { + "windows and x64": { + "install": { + "unzip": "https://github.com/arduino/arduino-cli/releases/download/0.18.3/arduino-cli_0.18.3_Windows_64bit.zip", + "sha256": "b92ae2923edab07e7d39ac8fdc54500bf5198868522d7acfa5090de970cf9603" + } + }, + "windows and x86": { + "install": { + "unzip": "https://github.com/arduino/arduino-cli/releases/download/0.18.3/arduino-cli_0.18.3_Windows_32bit.zip", + "sha256": "b8fa3f2c614557decf6ebe6688bc635b9260a220305be139d2368c437d4c3cfa" + } + }, + "windows": { + "exports": { + "tools": { + "arduino-cli": "arduino-cli.exe" + }, + "paths": { + "PATH": "." 
+ } + } + }, + "linux and x86": { + "install": { + "untar": "https://github.com/arduino/arduino-cli/releases/download/0.18.3/arduino-cli_0.18.3_Linux_32bit.tar.gz", + "sha256": "fe238a22579905866ed9e6582a0d6078060f29a9de3dbbb47b3931ef9a5f1f08", + "strip": 1 + } + }, + "linux and x64": { + "install": { + "untar": "https://github.com/arduino/arduino-cli/releases/download/0.18.3/arduino-cli_0.18.3_Linux_64bit.tar.gz", + "sha256": "80fb4547fb869086769dade348040864ae77b30d13cf6786d384bebccf4eb7eb", + "strip": 1 + } + }, + "linux and arm64": { + "install": { + "untar": "https://github.com/arduino/arduino-cli/releases/download/0.18.3/arduino-cli_0.18.3_Linux_ARM64.tar.gz", + "sha256": "13eb5ab0edb9a8f20768e7e0e5b967140f0fac7f84ef4f78c0dae0c8f13cdb73", + "strip": 1 + } + }, + "not windows": { + "exports": { + "tools": { + "arduino-cli": "arduino-cli" + }, + "paths": { + "PATH": "." + } + } + } + } +} \ No newline at end of file diff --git a/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/arduino/arduino-ide-1.18.15.json b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/arduino/arduino-ide-1.18.15.json new file mode 100644 index 0000000000..cea20f42f0 --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/arduino/arduino-ide-1.18.15.json @@ -0,0 +1,63 @@ +{ + "id": "tools/arduino/arduino-ide", + "version": "1.18.15", + "description": "The open-source Arduino Software (IDE) makes it easy to write code and upload it to the board. This software can be used with any Arduino board.", + "summary": "Arduino IDE", + "contacts": { + "Marc Goodner": { + "email": "mgoodner@microsoft.com", + "role": "publisher" + }, + "Arduino": { + "role": "originator" + } + }, + "demands": { + "windows": { + "install": { + "unzip": "https://downloads.arduino.cc/arduino-1.8.15-windows.zip", + "sha256": "C53E7D291EDEBCDB58FFA34AEB53C5B777D814CEA8030297F06116ED0598D139", + "strip": 1 + }, + "exports": { + "tools": { + "arduino": "arduino.exe" + }, + "paths": { + "PATH": "." + } + } + }, + "linux and x86": { + "install": { + "untar": "https://downloads.arduino.cc/arduino-1.8.15-linux32.tar.xz", + "sha512": "02a10831c7125144ac6f701528f9d176a1a7ac0df6d9391d31d6758ae8f3dea3f8b8390320c7e7d3efb9ed45fb79527caa798ad354bc8a857c2f9c42f4612a8f", + "strip": 2 + } + }, + "linux and x64": { + "install": { + "untar": "https://downloads.arduino.cc/arduino-1.8.15-linux64.tar.xz", + "sha512": "ae84a8f62cbd3ecf5400a357ac5ebd04cbc80b31a2fbc80f280850465f7460ad3a02b32830021ef980b72c60d52eb65a4fc551988c91d01b25e8d646596175f8", + "strip": 2 + } + }, + "linux and arm64": { + "install": { + "untar": "https://downloads.arduino.cc/arduino-1.8.15-linuxaarch64.tar.xz", + "sha512": "22b4e5f3a79723bb09d85107facfe7d367d8a1aa347447e935481823192bd2390bfe0e5e694a5e2ee6addb59ec269a72eb829f8f791fb8641000179884bcfff2", + "strip": 2 + } + }, + "not windows": { + "exports": { + "tools": { + "arduino": "arduino" + }, + "paths": { + "PATH": "." 
+ } + } + } + } +} diff --git a/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/compuphase/termite-3.4.0.json b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/compuphase/termite-3.4.0.json new file mode 100644 index 0000000000..dbb23fa768 --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/compuphase/termite-3.4.0.json @@ -0,0 +1,32 @@ +{ + "id": "tools/compuphase/termite", + "version": "3.4.0", + "description": "Termite is an easy to use and easy to configure RS232 terminal. It uses an interface similar to that of \"messenger\" or \"chat\" programs, with a large window that contains all received data and an edit line for typing in strings to transmit.", + "summary": "Termite is an easy to use and easy to configure RS232 terminal.", + "contacts": { + "Alan Leung": { + "email": "alleu@microsoft.com", + "role": "publisher" + }, + "CompuPhase": { + "email": "info@compuphase.com", + "role": "originator" + } + }, + "demands": { + "windows": { + "install": { + "unzip": "https://www.compuphase.com/software/termite-3.4.zip", + "sha256": "e72eddaabe1375dc9422d20b359206d242bf0745f47f60ce37d21e9dd905ba51" + }, + "exports": { + "tools": { + "termite": "bin/Termite.exe" + }, + "paths": { + "PATH": "bin" + } + } + } + } +} \ No newline at end of file diff --git a/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/kitware/cmake-3.20.1.json b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/kitware/cmake-3.20.1.json new file mode 100644 index 0000000000..72ae6fc2ce --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/kitware/cmake-3.20.1.json @@ -0,0 +1,77 @@ +{ + "id": "tools/kitware/cmake", + "version": "3.20.1", + "description": "CMake is an open-source, cross-platform family of tools designed to build, test and package software. CMake is used to control the software compilation process using simple platform and compiler independent configuration files, and generate native makefiles and workspaces that can be used in the compiler environment of your choice. 
The suite of CMake tools were created by Kitware in response to the need for a powerful, cross-platform build environment for open-source projects such as ITK and VTK.", + "summary": "Kitware's cmake tool", + "contacts": { + "Garrett Serack": { + "email": "garretts@microsoft.com", + "role": "publisher" + }, + "Kitware": { + "email": "kitware@kitware.com", + "role": "originator" + } + }, + "demands": { + "windows and x64": { + "install": { + "unzip": "https://github.com/Kitware/CMake/releases/download/v3.20.0/cmake-3.20.0-windows-x86_64.zip", + "sha256": "056378cb599353479c3a8aa2654454b8a3eaa3c8c0872928ba7e09c3ec50774c", + "strip": 1 + } + }, + "windows and x86": { + "install": { + "unzip": "https://github.com/Kitware/CMake/releases/download/v3.20.1/cmake-3.20.1-windows-i386.zip", + "sha256": "650026534e66dabe0ed6be3422e86fabce5fa86d43927171ea8b8dfd0877fc9d", + "strip": 1 + } + }, + "windows": { + "exports": { + "tools": { + "cmake": "bin/cmake.exe", + "cmake_gui": "bin/cmake-gui.exe", + "ctest": "bin/ctest.exe" + }, + "paths": { + "PATH": "bin" + } + } + }, + "osx": { + "install": { + "untar": "https://github.com/Kitware/CMake/releases/download/v3.20.1/cmake-3.20.1-macos-universal.tar.gz", + "sha256": "44143d47fdcc7fc3042576c6a8b661e3b65a18143666f74d6e8d93ca3ab5cd95", + "strip": 3 + } + }, + "linux and x64": { + "install": { + "untar": "https://github.com/Kitware/CMake/releases/download/v3.20.1/cmake-3.20.1-linux-x86_64.tar.gz", + "sha256": "B8C141BD7A6D335600AB0A8A35E75AF79F95B837F736456B5532F4D717F20A09", + "strip": 1 + } + }, + "linux and arm64": { + "install": { + "untar": "https://github.com/Kitware/CMake/releases/download/v3.20.1/cmake-3.20.1-linux-aarch64.tar.gz", + "sha256": "2761a222c14a15b9bdf1bdb4a17c10806757b7ed3bc26a84523f042ec212b76c", + "strip": 1 + } + }, + "not windows": { + "exports": { + "tools": { + "cmake": "bin/cmake", + "cmake_gui": "bin/cmake-gui", + "ctest": "bin/ctest" + }, + "paths": { + "PATH": "bin" + } + } + } + } +} \ No newline at end of file diff --git a/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/microsoft/openocd-0.11.0-ms1.json b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/microsoft/openocd-0.11.0-ms1.json new file mode 100644 index 0000000000..f6d49b9260 --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/microsoft/openocd-0.11.0-ms1.json @@ -0,0 +1,50 @@ +{ + "id": "tools/microsoft/openocd", + "version": "0.11.0-ms1", + "description": "OpenOCD provides on-chip programming and debugging support with a layered architecture of JTAG interface and TAP support including: (X)SVF playback to facilitate automated boundary scan and FPGA/CPLD programming; debug target support (e.g. ARM, MIPS): single-stepping, breakpoints/watchpoints, gprof profiling, etc; flash chip drivers (e.g. CFI, NAND, internal flash); embedded TCL interpreter for easy scripting. Several network interfaces are available for interacting with OpenOCD: telnet, TCL, and GDB. The GDB server enables OpenOCD to function as a \"remote target\" for source-level debugging of embedded systems using the GNU GDB program (and the others who talk GDB protocol, e.g. IDA Pro). 
This build of OpenOCD includes additional vendor extensions from Azure Sphere, Raspberry Pi, and STMicroelectronics, plus improved RTOS support.", + "summary": "Free and open on-chip debugging", + "contacts": { + "Ben McMorran": { + "email": "bemcmorr@microsoft.com", + "role": [ + "publisher", + "originator" + ] + }, + "OpenOCD (upstream)": { + "email": "openocd-user@lists.sourceforge.net", + "role": "other" + } + }, + "demands": { + "windows and x64": { + "install": { + "untar": "https://github.com/microsoft/openocd/releases/download/ms-v0.11.0-ms1/openocd-ms-v0.11.0-ms1-i686-w64-mingw32.tar.gz", + "sha256": "dabe82ecc1aa1b1aa6d28216ee74d5702b9147fc74796990e14a7fa5644744a1" + }, + "exports": { + "tools": { + "openocd": "bin/openocd.exe" + }, + "paths": { + "PATH": "bin" + } + } + }, + "linux and x64": { + "install": { + "untar": "https://github.com/microsoft/openocd/releases/download/ms-v0.11.0-ms1/openocd-ms-v0.11.0-ms1-linux.tar.gz", + "sha256": "e70a1405f5ffeb87d9487b49fe40171fe896fbd7d01a51b12cffdfb6d2b0501b", + "strip": 1 + }, + "exports": { + "tools": { + "openocd": "bin/openocd" + }, + "paths": { + "PATH": "bin" + } + } + } + } +} \ No newline at end of file diff --git a/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/microsoft/openocd-0.11.0.json b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/microsoft/openocd-0.11.0.json new file mode 100644 index 0000000000..55b7f3c4e9 --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/microsoft/openocd-0.11.0.json @@ -0,0 +1,50 @@ +{ + "id": "tools/microsoft/openocd", + "version": "0.11.0", + "description": "OpenOCD provides on-chip programming and debugging support with a layered architecture of JTAG interface and TAP support including: (X)SVF playback to facilitate automated boundary scan and FPGA/CPLD programming; debug target support (e.g. ARM, MIPS): single-stepping, breakpoints/watchpoints, gprof profiling, etc; flash chip drivers (e.g. CFI, NAND, internal flash); embedded TCL interpreter for easy scripting. Several network interfaces are available for interacting with OpenOCD: telnet, TCL, and GDB. The GDB server enables OpenOCD to function as a \"remote target\" for source-level debugging of embedded systems using the GNU GDB program (and the others who talk GDB protocol, e.g. IDA Pro). 
This build of OpenOCD includes additional vendor extensions from Azure Sphere, Raspberry Pi, and STMicroelectronics.", + "summary": "Free and open on-chip debugging", + "contacts": { + "Ben McMorran": { + "email": "bemcmorr@microsoft.com", + "role": [ + "publisher", + "originator" + ] + }, + "OpenOCD (upstream)": { + "email": "openocd-user@lists.sourceforge.net", + "role": "other" + } + }, + "demands": { + "windows and x64": { + "install": { + "untar": "https://github.com/microsoft/openocd/releases/download/ms-v0.11.0/openocd-ms-v0.11.0-i686-w64-mingw32.tar.gz", + "sha256": "f15c1b604f5f138a2731511143dcbbd565fa4dfed8f392abc599acea65177523" + }, + "exports": { + "tools": { + "openocd": "bin/openocd.exe" + }, + "paths": { + "PATH": "./bin" + } + } + }, + "linux and x64": { + "install": { + "untar": "https://github.com/microsoft/openocd/releases/download/ms-v0.11.0/openocd-ms-v0.11.0-linux.tar.gz", + "sha256": "bfa359756d0cad2d3a2fa72a8416d369960732dd25f262397b66048db7a9c570", + "strip": 1 + }, + "exports": { + "tools": { + "openocd": "./bin/openocd" + }, + "paths": { + "PATH": "bin" + } + } + } + } +} \ No newline at end of file diff --git a/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/ninja-build/ninja-1.10.2.json b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/ninja-build/ninja-1.10.2.json new file mode 100644 index 0000000000..120191b62a --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/ninja-build/ninja-1.10.2.json @@ -0,0 +1,54 @@ +{ + "id": "tools/ninja-build/ninja", + "version": "1.10.2", + "description": "Ninja is a small build system with a focus on speed. It differs from other build systems in two major respects, it is designed to have its input files generated by a higher-level build system, and it is designed to run builds as fast as possible.", + "summary": "Ninja is a small build system with a focus on speed.", + "contacts": { + "Marc Goodner": { + "email": "mgoodner@microsoft.com", + "role": "publisher" + }, + "ninja-build": { + "email": "ninja-build@googlegroups.com", + "role": "originator" + } + }, + "demands": { + "windows": { + "install": { + "unzip": "https://github.com/ninja-build/ninja/releases/download/v1.10.2/ninja-win.zip", + "sha256": "bbde850d247d2737c5764c927d1071cbb1f1957dcabda4a130fa8547c12c695f" + }, + "exports": { + "tools": { + "ninja": "ninja.exe" + }, + "paths": { + "PATH": "." + } + } + }, + "osx": { + "install": { + "unzip": "https://github.com/ninja-build/ninja/releases/download/v1.10.2/ninja-mac.zip", + "sha256": "6fa359f491fac7e5185273c6421a000eea6a2f0febf0ac03ac900bd4d80ed2a5" + } + }, + "linux": { + "install": { + "unzip": "https://github.com/ninja-build/ninja/releases/download/v1.10.2/ninja-linux.zip", + "sha256": "763464859c7ef2ea3a0a10f4df40d2025d3bb9438fcb1228404640410c0ec22d" + } + }, + "not windows": { + "exports": { + "tools": { + "ninja": "ninja" + }, + "paths": { + "PATH": "." 
+ } + } + } + } +} \ No newline at end of file diff --git a/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/raspberrypi/pico-sdk-1.3.0.json b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/raspberrypi/pico-sdk-1.3.0.json new file mode 100644 index 0000000000..7cec7ff597 --- /dev/null +++ b/ce/test/resources/vcpkg-ce-catalog-d471612be63b2fb506ab5f47122da460f5aa4d30/tools/raspberrypi/pico-sdk-1.3.0.json @@ -0,0 +1,29 @@ +{ + "id": "raspberrypi/pico-sdk", + "version": "1.3.0", + "description": "The Raspberry Pi Pico SDK provides the headers, libraries and build system necessary to write programs for the RP2040-based devices such as the Raspberry Pi Pico in C, C++ or assembly language.", + "summary": "The Raspberry Pi Pico SDK", + "contacts": { + "Marc Goodner": { + "email": "mgoodner@microsoft.com", + "role": "publisher" + } + }, + "requires": { + "compilers/arm/gcc": "2020.10.0", + "tools/kitware/cmake": "3.20.1" + }, + "install": { + "git": "https://github.com/raspberrypi/pico-sdk/", + "commit": "1.3.0", + "options": [ + "recurse", + "full" + ] + }, + "exports": { + "paths": { + "PICO_SDK_PATH": "./" + } + } +} \ No newline at end of file diff --git a/include/vcpkg/base/downloads.h b/include/vcpkg/base/downloads.h index 89360efa18..37fbbba010 100644 --- a/include/vcpkg/base/downloads.h +++ b/include/vcpkg/base/downloads.h @@ -1,6 +1,7 @@ #pragma once #include +#include #include #include @@ -12,18 +13,15 @@ namespace vcpkg { - namespace details + struct SplitURIView { - struct SplitURIView - { - StringView scheme; - Optional authority; - StringView path_query_fragment; - }; + StringView scheme; + Optional authority; + StringView path_query_fragment; + }; - // e.g. {"https","//example.org", "/index.html"} - ExpectedS split_uri_view(StringView uri); - } + // e.g. 
{"https","//example.org", "/index.html"} + ExpectedL split_uri_view(StringView uri); void verify_downloaded_file_hash(const Filesystem& fs, StringView sanitized_url, @@ -35,9 +33,12 @@ namespace vcpkg std::vector download_files(Filesystem& fs, View> url_pairs, View headers); - ExpectedS put_file(const Filesystem&, StringView url, View headers, const Path& file); + ExpectedL put_file(const Filesystem&, + StringView url, + const std::vector& secrets, + View headers, + const Path& file); std::vector url_heads(View urls, View headers, View secrets); - std::string replace_secrets(std::string input, View secrets); struct DownloadManagerConfig { @@ -57,30 +58,44 @@ namespace vcpkg explicit DownloadManager(const DownloadManagerConfig& config) : m_config(config) { } explicit DownloadManager(DownloadManagerConfig&& config) : m_config(std::move(config)) { } - void download_file(Filesystem& fs, - const std::string& url, - const Path& download_path, - const Optional& sha512) const - { - this->download_file(fs, url, {}, download_path, sha512); - } - void download_file(Filesystem& fs, const std::string& url, View headers, const Path& download_path, - const Optional& sha512) const; + const Optional& sha512, + MessageSink& progress_sink) const; // Returns url that was successfully downloaded from std::string download_file(Filesystem& fs, View urls, View headers, const Path& download_path, - const Optional& sha512) const; + const Optional& sha512, + MessageSink& progress_sink) const; - ExpectedS put_file_to_mirror(const Filesystem& fs, const Path& file_to_put, StringView sha512) const; + ExpectedL put_file_to_mirror(const Filesystem& fs, const Path& file_to_put, StringView sha512) const; private: DownloadManagerConfig m_config; }; + + Optional try_parse_curl_max5_size(StringView sv); + + struct CurlProgressData + { + unsigned int total_percent; + unsigned long long total_size; + unsigned int recieved_percent; + unsigned long long recieved_size; + unsigned int transfer_percent; + unsigned long long transfer_size; + unsigned long long average_download_speed; // bytes per second + unsigned long long average_upload_speed; // bytes per second + // ElapsedTime total_time; + // ElapsedTime time_spent; + // ElapsedTime time_left; + unsigned long long current_speed; + }; + + Optional try_parse_curl_progress_data(StringView curl_progress_line); } diff --git a/include/vcpkg/base/fwd/downloads.h b/include/vcpkg/base/fwd/downloads.h index 7b1890b8f6..1f7a59ca3a 100644 --- a/include/vcpkg/base/fwd/downloads.h +++ b/include/vcpkg/base/fwd/downloads.h @@ -2,6 +2,8 @@ namespace vcpkg { + struct SplitURIView; struct DownloadManager; struct DownloadManagerConfig; + struct CurlProgressData; } diff --git a/include/vcpkg/base/messages.h b/include/vcpkg/base/messages.h index 311934c653..a62f5d13c7 100644 --- a/include/vcpkg/base/messages.h +++ b/include/vcpkg/base/messages.h @@ -291,6 +291,7 @@ namespace vcpkg::msg DECLARE_MSG_ARG(old_value, ""); DECLARE_MSG_ARG(new_value, ""); + DECLARE_MSG_ARG(action_index, "340"); DECLARE_MSG_ARG(actual_version, "1.3.8"); DECLARE_MSG_ARG(arch, "x64"); DECLARE_MSG_ARG(base_url, "azblob://"); @@ -300,18 +301,26 @@ namespace vcpkg::msg DECLARE_MSG_ARG(column, "42"); DECLARE_MSG_ARG(command_line, "vcpkg install zlib"); DECLARE_MSG_ARG(command_name, "install"); + DECLARE_MSG_ARG(commit_sha, "7cfad47ae9f68b183983090afd6337cd60fd4949"); DECLARE_MSG_ARG(count, "42"); DECLARE_MSG_ARG(elapsed, "3.532 min"); + DECLARE_MSG_ARG(env_var, "VCPKG_DEFAULT_TRIPLET"); DECLARE_MSG_ARG(error_msg, "File Not 
Found"); DECLARE_MSG_ARG(exit_code, "127"); DECLARE_MSG_ARG(expected_version, "1.3.8"); + DECLARE_MSG_ARG(extension, ".exe"); + DECLARE_MSG_ARG(feature, "avisynthplus"); DECLARE_MSG_ARG(new_scheme, "version"); DECLARE_MSG_ARG(old_scheme, "version-string"); DECLARE_MSG_ARG(option, "editable"); DECLARE_MSG_ARG(package_name, "zlib"); DECLARE_MSG_ARG(path, "/foo/bar"); DECLARE_MSG_ARG(row, "42"); + DECLARE_MSG_ARG(sha, + "eb32643dd2164c72b8a660ef52f1e701bb368324ae461e12d70d6a9aefc0c9573387ee2ed3828037ed62bb3e8f566416a2" + "d3b3827a3928f0bff7c29f7662293e"); DECLARE_MSG_ARG(spec, "zlib:x64-windows"); + DECLARE_MSG_ARG(supports_expression, "windows & !static"); DECLARE_MSG_ARG(system_api, "CreateProcessW"); DECLARE_MSG_ARG(system_name, "Darwin"); DECLARE_MSG_ARG(tool_name, "aria2"); @@ -320,14 +329,6 @@ namespace vcpkg::msg DECLARE_MSG_ARG(vcpkg_line_info, "/a/b/foo.cpp(13)"); DECLARE_MSG_ARG(vendor, "Azure"); DECLARE_MSG_ARG(version, "1.3.8"); - DECLARE_MSG_ARG(action_index, "340"); - DECLARE_MSG_ARG(env_var, "VCPKG_DEFAULT_TRIPLET"); - DECLARE_MSG_ARG(extension, ".exe"); - DECLARE_MSG_ARG(supports_expression, "windows & !static"); - DECLARE_MSG_ARG(feature, "avisynthplus"); - DECLARE_MSG_ARG(commit_sha, - "a18442042722dd48e20714ec034a12fcc0576c9af7be5188586970e2edf47529825bdc99af366b1d5891630c8dbf6f63bf" - "a9f012e77ab3d3ed80d1a118e3b2be"); #undef DECLARE_MSG_ARG @@ -991,7 +992,18 @@ namespace vcpkg DECLARE_MESSAGE(CreatingNugetPackage, (), "", "Creating NuGet package..."); DECLARE_MESSAGE(CreatingZipArchive, (), "", "Creating zip archive..."); DECLARE_MESSAGE(CreationFailed, (msg::path), "", "Creating {path} failed."); - DECLARE_MESSAGE(CurlFailedToExecute, (msg::exit_code), "", "curl failed to execute with exit code {exit_code}."); + DECLARE_MESSAGE(CurlFailedToExecute, + (msg::exit_code), + "curl is the name of a program, see curl.se", + "curl failed to execute with exit code {exit_code}."); + DECLARE_MESSAGE(CurlFailedToPut, + (msg::exit_code, msg::url), + "curl is the name of a program, see curl.se", + "curl failed to put file to {url} with exit code {exit_code}."); + DECLARE_MESSAGE(CurlFailedToPutHttp, + (msg::exit_code, msg::url, msg::value), + "curl is the name of a program, see curl.se. 
{value} is an HTTP status code", + "curl failed to put file to {url} with exit code {exit_code} and http code {value}."); DECLARE_MESSAGE(CurlReportedUnexpectedResults, (msg::command_line, msg::actual), "{command_line} is the command line to call curl.exe, {actual} is the console output " @@ -1005,7 +1017,7 @@ namespace vcpkg "=== end curl output ==="); DECLARE_MESSAGE(CurlReturnedUnexpectedResponseCodes, (msg::actual, msg::expected), - "{actual} and {expected} are integers", + "{actual} and {expected} are integers, curl is the name of a program, see curl.se", "curl returned a different number of response codes than were expected for the request ({actual} " "vs expected {expected})."); DECLARE_MESSAGE(CurrentCommitBaseline, @@ -1035,6 +1047,26 @@ namespace vcpkg "", "A downloadable copy of this tool is available and can be used by unsetting {env_var}."); DECLARE_MESSAGE(DownloadedSources, (msg::spec), "", "Downloaded sources for {spec}"); + DECLARE_MESSAGE(DownloadFailedCurl, + (msg::url, msg::exit_code), + "", + "{url}: curl failed to download with exit code {exit_code}"); + DECLARE_MESSAGE(DownloadFailedHashMismatch, + (msg::url, msg::path, msg::expected, msg::actual), + "{expected} and {actual} are SHA512 hashes in hex format.", + "File does not have the expected hash:\n" + "url: {url}\n" + "File: {path}\n" + "Expected hash: {expected}\n" + "Actual hash: {actual}"); + DECLARE_MESSAGE(DownloadFailedRetrying, + (msg::value), + "{value} is a number of milliseconds", + "Download failed -- retrying after {value}ms"); + DECLARE_MESSAGE(DownloadFailedStatusCode, + (msg::url, msg::value), + "{value} is an HTTP status code", + "{url}: failed: status code {value}"); DECLARE_MESSAGE(DownloadingPortableToolVersionX, (msg::tool_name, msg::version), "", @@ -1044,6 +1076,11 @@ namespace vcpkg (msg::tool_name, msg::url, msg::path), "", "Downloading {tool_name}...\n{url}->{path}"); + DECLARE_MESSAGE(DownloadingUrl, (msg::url), "", "Downloading {url}"); + DECLARE_MESSAGE(DownloadWinHttpError, + (msg::system_api, msg::exit_code, msg::url), + "", + "{url}: {system_api} failed with exit code {exit_code}"); DECLARE_MESSAGE(DownloadingVcpkgCeBundle, (msg::version), "", "Downloading vcpkg-ce bundle {version}..."); DECLARE_MESSAGE(DownloadingVcpkgCeBundleLatest, (), @@ -1875,6 +1912,8 @@ namespace vcpkg DECLARE_MESSAGE(NoLocalizationForMessages, (), "", "No localized messages for the following: "); DECLARE_MESSAGE(NoOutdatedPackages, (), "", "There are no outdated packages."); DECLARE_MESSAGE(NoRegistryForPort, (msg::package_name), "", "no registry configured for port {package_name}"); + DECLARE_MESSAGE(NoUrlsAndHashSpecified, (msg::sha), "", "No urls specified to download SHA: {sha}"); + DECLARE_MESSAGE(NoUrlsAndNoHashSpecified, (), "", "No urls specified and no hash specified."); DECLARE_MESSAGE(NugetPackageFileSucceededButCreationFailed, (msg::path), "", @@ -2138,6 +2177,7 @@ namespace vcpkg "{value} may be either a 'vendor' like 'Azure' or 'NuGet', or a file path like C:\\example or /usr/example", "Restored {count} package(s) from {value} in {elapsed}. Use --debug to see more details."); DECLARE_MESSAGE(ResultsHeader, (), "Displayed before a list of installation results.", "RESULTS"); + DECLARE_MESSAGE(SecretBanner, (), "", "*** SECRET ***"); DECLARE_MESSAGE(SerializedBinParagraphHeader, (), "", "\nSerialized Binary Paragraph"); DECLARE_MESSAGE(SettingEnvVar, (msg::env_var, msg::url), @@ -2443,6 +2483,10 @@ namespace vcpkg "vcpkg has crashed. 
Please create an issue at https://github.com/microsoft/vcpkg containing a brief summary of " "what you were trying to do and the following information."); DECLARE_MESSAGE(VcpkgInvalidCommand, (msg::command_name), "", "invalid command: {command_name}"); + DECLARE_MESSAGE(InvalidUri, + (msg::value), + "{value} is the URI we attempted to parse.", + "unable to parse uri: {value}"); DECLARE_MESSAGE(VcpkgInVsPrompt, (msg::value, msg::triplet), "'{value}' is a VS prompt", diff --git a/include/vcpkg/base/strings.h b/include/vcpkg/base/strings.h index 99e87805ec..b84876156c 100644 --- a/include/vcpkg/base/strings.h +++ b/include/vcpkg/base/strings.h @@ -235,10 +235,16 @@ namespace vcpkg::Strings template<> Optional strto(StringView); template<> + Optional strto(StringView); + template<> Optional strto(StringView); template<> + Optional strto(StringView); + template<> Optional strto(StringView); template<> + Optional strto(StringView); + template<> Optional strto(StringView); const char* search(StringView haystack, StringView needle); diff --git a/locales/messages.json b/locales/messages.json index 3aa005fbf5..80108fb925 100644 --- a/locales/messages.json +++ b/locales/messages.json @@ -90,9 +90,9 @@ "AvailableHelpTopics": "Available help topics:", "BaselineConflict": "Specifying vcpkg-configuration.default-registry in a manifest file conflicts with built-in baseline.\nPlease remove one of these conflicting settings.", "BaselineFileNoDefaultField": "The baseline file at commit {commit_sha} was invalid (no \"default\" field).", - "_BaselineFileNoDefaultField.comment": "An example of {commit_sha} is a18442042722dd48e20714ec034a12fcc0576c9af7be5188586970e2edf47529825bdc99af366b1d5891630c8dbf6f63bfa9f012e77ab3d3ed80d1a118e3b2be.", + "_BaselineFileNoDefaultField.comment": "An example of {commit_sha} is 7cfad47ae9f68b183983090afd6337cd60fd4949.", "BaselineMissingDefault": "The baseline.json from commit `\"{commit_sha}\"` in the repo {url} was invalid (did not contain a \"default\" field).", - "_BaselineMissingDefault.comment": "An example of {commit_sha} is a18442042722dd48e20714ec034a12fcc0576c9af7be5188586970e2edf47529825bdc99af366b1d5891630c8dbf6f63bfa9f012e77ab3d3ed80d1a118e3b2be. An example of {url} is https://github.com/microsoft/vcpkg.", + "_BaselineMissingDefault.comment": "An example of {commit_sha} is 7cfad47ae9f68b183983090afd6337cd60fd4949. An example of {url} is https://github.com/microsoft/vcpkg.", "BinarySourcesArg": "Add sources for binary caching. See 'vcpkg help binarycaching'.", "BinaryWithInvalidArchitecture": "{path}\n Expected: {expected}, but was {actual}", "_BinaryWithInvalidArchitecture.comment": "{expected} and {actual} are architectures An example of {path} is /foo/bar.", @@ -281,13 +281,13 @@ "CouldNotDeduceNugetIdAndVersion": "Could not deduce nuget id and version from filename: {path}", "_CouldNotDeduceNugetIdAndVersion.comment": "An example of {path} is /foo/bar.", "CouldNotFindBaseline": "Could not find explicitly specified baseline `\"{commit_sha}\"` in baseline file {path}", - "_CouldNotFindBaseline.comment": "An example of {commit_sha} is a18442042722dd48e20714ec034a12fcc0576c9af7be5188586970e2edf47529825bdc99af366b1d5891630c8dbf6f63bfa9f012e77ab3d3ed80d1a118e3b2be. An example of {path} is /foo/bar.", + "_CouldNotFindBaseline.comment": "An example of {commit_sha} is 7cfad47ae9f68b183983090afd6337cd60fd4949. 
An example of {path} is /foo/bar.", "CouldNotFindBaselineForRepo": "Couldn't find baseline `\"{commit_sha}\"` for repo {package_name}", - "_CouldNotFindBaselineForRepo.comment": "An example of {commit_sha} is a18442042722dd48e20714ec034a12fcc0576c9af7be5188586970e2edf47529825bdc99af366b1d5891630c8dbf6f63bfa9f012e77ab3d3ed80d1a118e3b2be. An example of {package_name} is zlib.", + "_CouldNotFindBaselineForRepo.comment": "An example of {commit_sha} is 7cfad47ae9f68b183983090afd6337cd60fd4949. An example of {package_name} is zlib.", "CouldNotFindBaselineInCommit": "Couldn't find baseline in commit `\"{commit_sha}\"` from repo {package_name}:", - "_CouldNotFindBaselineInCommit.comment": "An example of {commit_sha} is a18442042722dd48e20714ec034a12fcc0576c9af7be5188586970e2edf47529825bdc99af366b1d5891630c8dbf6f63bfa9f012e77ab3d3ed80d1a118e3b2be. An example of {package_name} is zlib.", + "_CouldNotFindBaselineInCommit.comment": "An example of {commit_sha} is 7cfad47ae9f68b183983090afd6337cd60fd4949. An example of {package_name} is zlib.", "CouldNotFindGitTreeAtCommit": "could not find the git tree for `versions` in repo {package_name} at commit {commit_sha}", - "_CouldNotFindGitTreeAtCommit.comment": "An example of {package_name} is zlib. An example of {commit_sha} is a18442042722dd48e20714ec034a12fcc0576c9af7be5188586970e2edf47529825bdc99af366b1d5891630c8dbf6f63bfa9f012e77ab3d3ed80d1a118e3b2be.", + "_CouldNotFindGitTreeAtCommit.comment": "An example of {package_name} is zlib. An example of {commit_sha} is 7cfad47ae9f68b183983090afd6337cd60fd4949.", "CouldNotFindToolVersion": "Could not find in {path}", "_CouldNotFindToolVersion.comment": "An example of {version} is 1.3.8. An example of {path} is /foo/bar.", "CreateFailureLogsDir": "Creating failure logs output directory {path}.", @@ -300,11 +300,15 @@ "CreationFailed": "Creating {path} failed.", "_CreationFailed.comment": "An example of {path} is /foo/bar.", "CurlFailedToExecute": "curl failed to execute with exit code {exit_code}.", - "_CurlFailedToExecute.comment": "An example of {exit_code} is 127.", + "_CurlFailedToExecute.comment": "curl is the name of a program, see curl.se An example of {exit_code} is 127.", + "CurlFailedToPut": "curl failed to put file to {url} with exit code {exit_code}.", + "_CurlFailedToPut.comment": "curl is the name of a program, see curl.se An example of {exit_code} is 127. An example of {url} is https://github.com/microsoft/vcpkg.", + "CurlFailedToPutHttp": "curl failed to put file to {url} with exit code {exit_code} and http code {value}.", + "_CurlFailedToPutHttp.comment": "curl is the name of a program, see curl.se. {value} is an HTTP status code An example of {exit_code} is 127. An example of {url} is https://github.com/microsoft/vcpkg.", "CurlReportedUnexpectedResults": "curl has reported unexpected results to vcpkg and vcpkg cannot continue.\nPlease review the following text for sensitive information and open an issue on the Microsoft/vcpkg GitHub to help fix this problem!\ncmd: {command_line}\n=== curl output ===\n{actual}\n=== end curl output ===", "_CurlReportedUnexpectedResults.comment": "{command_line} is the command line to call curl.exe, {actual} is the console output of curl.exe locale-invariant download results. 
An example of {command_line} is vcpkg install zlib.", "CurlReturnedUnexpectedResponseCodes": "curl returned a different number of response codes than were expected for the request ({actual} vs expected {expected}).", - "_CurlReturnedUnexpectedResponseCodes.comment": "{actual} and {expected} are integers", + "_CurlReturnedUnexpectedResponseCodes.comment": "{actual} and {expected} are integers, curl is the name of a program, see curl.se", "CurrentCommitBaseline": "You can use the current commit as a baseline, which is:\n\t\"builtin-baseline\": \"{value}\"", "_CurrentCommitBaseline.comment": "{value} is a 40 hexadecimal character commit sha", "DateTableHeader": "Date", @@ -322,14 +326,26 @@ "DocumentedFieldsSuggestUpdate": "If these are documented fields that should be recognized try updating the vcpkg tool.", "DownloadAvailable": "A downloadable copy of this tool is available and can be used by unsetting {env_var}.", "_DownloadAvailable.comment": "An example of {env_var} is VCPKG_DEFAULT_TRIPLET.", + "DownloadFailedCurl": "{url}: curl failed to download with exit code {exit_code}", + "_DownloadFailedCurl.comment": "An example of {url} is https://github.com/microsoft/vcpkg. An example of {exit_code} is 127.", + "DownloadFailedHashMismatch": "File does not have the expected hash:\nurl: {url}\nFile: {path}\nExpected hash: {expected}\nActual hash: {actual}", + "_DownloadFailedHashMismatch.comment": "{expected} and {actual} are SHA512 hashes in hex format. An example of {url} is https://github.com/microsoft/vcpkg. An example of {path} is /foo/bar.", + "DownloadFailedRetrying": "Download failed -- retrying after {value}ms", + "_DownloadFailedRetrying.comment": "{value} is a number of milliseconds", + "DownloadFailedStatusCode": "{url}: failed: status code {value}", + "_DownloadFailedStatusCode.comment": "{value} is an HTTP status code An example of {url} is https://github.com/microsoft/vcpkg.", "DownloadRootsDir": "Specify the downloads root directory.\n(default: {env_var})", "_DownloadRootsDir.comment": "An example of {env_var} is VCPKG_DEFAULT_TRIPLET.", + "DownloadWinHttpError": "{url}: {system_api} failed with exit code {exit_code}", + "_DownloadWinHttpError.comment": "An example of {system_api} is CreateProcessW. An example of {exit_code} is 127. An example of {url} is https://github.com/microsoft/vcpkg.", "DownloadedSources": "Downloaded sources for {spec}", "_DownloadedSources.comment": "An example of {spec} is zlib:x64-windows.", "DownloadingPortableToolVersionX": "A suitable version of {tool_name} was not found (required v{version}) Downloading portable {tool_name} {version}...", "_DownloadingPortableToolVersionX.comment": "An example of {tool_name} is aria2. An example of {version} is 1.3.8.", "DownloadingTool": "Downloading {tool_name}...\n{url}->{path}", "_DownloadingTool.comment": "An example of {tool_name} is aria2. An example of {url} is https://github.com/microsoft/vcpkg. 
An example of {path} is /foo/bar.", + "DownloadingUrl": "Downloading {url}", + "_DownloadingUrl.comment": "An example of {url} is https://github.com/microsoft/vcpkg.", "DownloadingVcpkgCeBundle": "Downloading vcpkg-ce bundle {version}...", "_DownloadingVcpkgCeBundle.comment": "An example of {version} is 1.3.8.", "DownloadingVcpkgCeBundleLatest": "Downloading latest vcpkg-ce bundle...", @@ -694,7 +710,7 @@ "InvalidCommandArgSort": "Value of --sort must be one of 'lexicographical', 'topological', 'reverse'.", "InvalidCommentStyle": "vcpkg does not support c-style comments, however most objects allow $-prefixed fields to be used as comments.", "InvalidCommitId": "Invalid commit id: {commit_sha}", - "_InvalidCommitId.comment": "An example of {commit_sha} is a18442042722dd48e20714ec034a12fcc0576c9af7be5188586970e2edf47529825bdc99af366b1d5891630c8dbf6f63bfa9f012e77ab3d3ed80d1a118e3b2be.", + "_InvalidCommitId.comment": "An example of {commit_sha} is 7cfad47ae9f68b183983090afd6337cd60fd4949.", "InvalidFileType": "failed: {path} cannot handle file type", "_InvalidFileType.comment": "An example of {path} is /foo/bar.", "InvalidFilename": "Filename cannot contain invalid chars {value}, but was {path}", @@ -715,6 +731,8 @@ "InvalidString": "Invalid utf8 passed to Value::string(std::string)", "InvalidTriplet": "Invalid triplet: {triplet}", "_InvalidTriplet.comment": "An example of {triplet} is x64-windows.", + "InvalidUri": "unable to parse uri: {value}", + "_InvalidUri.comment": "{value} is the URI we attempted to parse.", "IrregularFile": "path was not a regular file: {path}", "_IrregularFile.comment": "An example of {path} is /foo/bar.", "JsonErrorMustBeAnObject": "Expected \"{path}\" to be an object.", @@ -813,6 +831,9 @@ "NoOutdatedPackages": "There are no outdated packages.", "NoRegistryForPort": "no registry configured for port {package_name}", "_NoRegistryForPort.comment": "An example of {package_name} is zlib.", + "NoUrlsAndHashSpecified": "No urls specified to download SHA: {sha}", + "_NoUrlsAndHashSpecified.comment": "An example of {sha} is eb32643dd2164c72b8a660ef52f1e701bb368324ae461e12d70d6a9aefc0c9573387ee2ed3828037ed62bb3e8f566416a2d3b3827a3928f0bff7c29f7662293e.", + "NoUrlsAndNoHashSpecified": "No urls specified and no hash specified.", "NoteMessage": "note: ", "NugetPackageFileSucceededButCreationFailed": "NuGet package creation succeeded, but no .nupkg was produced. Expected: \"{path}\"", "_NugetPackageFileSucceededButCreationFailed.comment": "An example of {path} is /foo/bar.", @@ -954,6 +975,7 @@ "_RestoredPackagesFromVendor.comment": "{value} may be either a 'vendor' like 'Azure' or 'NuGet', or a file path like C:\\example or /usr/example An example of {count} is 42. 
An example of {elapsed} is 3.532 min.", "ResultsHeader": "RESULTS", "_ResultsHeader.comment": "Displayed before a list of installation results.", + "SecretBanner": "*** SECRET ***", "SeeURL": "See {url} for more information.", "_SeeURL.comment": "An example of {url} is https://github.com/microsoft/vcpkg.", "SerializedBinParagraphHeader": "\nSerialized Binary Paragraph", diff --git a/src/vcpkg-test/downloads.cpp b/src/vcpkg-test/downloads.cpp index 0ecb2b4861..02c9be3f69 100644 --- a/src/vcpkg-test/downloads.cpp +++ b/src/vcpkg-test/downloads.cpp @@ -5,56 +5,162 @@ using namespace vcpkg; -TEST_CASE ("details::split_uri_view", "[downloads]") +TEST_CASE ("split_uri_view", "[downloads]") { { - auto x = details::split_uri_view("https://github.com/Microsoft/vcpkg"); + auto x = split_uri_view("https://github.com/Microsoft/vcpkg"); REQUIRE(x.has_value()); REQUIRE(x.get()->scheme == "https"); REQUIRE(x.get()->authority.value_or("") == "//github.com"); REQUIRE(x.get()->path_query_fragment == "/Microsoft/vcpkg"); } { - auto x = details::split_uri_view(""); + auto x = split_uri_view(""); REQUIRE(!x.has_value()); } { - auto x = details::split_uri_view("hello"); + auto x = split_uri_view("hello"); REQUIRE(!x.has_value()); } { - auto x = details::split_uri_view("file:"); + auto x = split_uri_view("file:"); REQUIRE(x.has_value()); REQUIRE(x.get()->scheme == "file"); REQUIRE(!x.get()->authority.has_value()); REQUIRE(x.get()->path_query_fragment == ""); } { - auto x = details::split_uri_view("file:path"); + auto x = split_uri_view("file:path"); REQUIRE(x.has_value()); REQUIRE(x.get()->scheme == "file"); REQUIRE(!x.get()->authority.has_value()); REQUIRE(x.get()->path_query_fragment == "path"); } { - auto x = details::split_uri_view("file:/path"); + auto x = split_uri_view("file:/path"); REQUIRE(x.has_value()); REQUIRE(x.get()->scheme == "file"); REQUIRE(!x.get()->authority.has_value()); REQUIRE(x.get()->path_query_fragment == "/path"); } { - auto x = details::split_uri_view("file://user:pw@host"); + auto x = split_uri_view("file://user:pw@host"); REQUIRE(x.has_value()); REQUIRE(x.get()->scheme == "file"); REQUIRE(x.get()->authority.value_or({}) == "//user:pw@host"); REQUIRE(x.get()->path_query_fragment == ""); } { - auto x = details::split_uri_view("ftp://host:port/"); + auto x = split_uri_view("ftp://host:port/"); REQUIRE(x.has_value()); REQUIRE(x.get()->scheme == "ftp"); REQUIRE(x.get()->authority.value_or({}) == "//host:port"); REQUIRE(x.get()->path_query_fragment == "/"); } } + +TEST_CASE ("try_parse_curl_max5_size", "[downloads]") +{ + REQUIRE(!try_parse_curl_max5_size("").has_value()); + REQUIRE(!try_parse_curl_max5_size("hi").has_value()); + REQUIRE(try_parse_curl_max5_size("0").value_or_exit(VCPKG_LINE_INFO) == 0ull); + REQUIRE(try_parse_curl_max5_size("1").value_or_exit(VCPKG_LINE_INFO) == 1ull); + REQUIRE(try_parse_curl_max5_size("10").value_or_exit(VCPKG_LINE_INFO) == 10ull); + REQUIRE(!try_parse_curl_max5_size("10 ").has_value()); // no unknown suffixes + REQUIRE(try_parse_curl_max5_size("100").value_or_exit(VCPKG_LINE_INFO) == 100ull); + REQUIRE(try_parse_curl_max5_size("100").value_or_exit(VCPKG_LINE_INFO) == 100ull); + REQUIRE(try_parse_curl_max5_size("1000").value_or_exit(VCPKG_LINE_INFO) == 1000ull); + REQUIRE(!try_parse_curl_max5_size("1000.").has_value()); // dot needs 1 or 2 digits + REQUIRE(!try_parse_curl_max5_size("1000.k").has_value()); + // fails in parsing the number: + REQUIRE(!try_parse_curl_max5_size("18446744073709551616").has_value()); + + // suffixes are 1024'd + 
REQUIRE(try_parse_curl_max5_size("1k").value_or_exit(VCPKG_LINE_INFO) == (1ull << 10)); + REQUIRE(try_parse_curl_max5_size("1M").value_or_exit(VCPKG_LINE_INFO) == (1ull << 20)); + REQUIRE(try_parse_curl_max5_size("1G").value_or_exit(VCPKG_LINE_INFO) == (1ull << 30)); + REQUIRE(try_parse_curl_max5_size("1T").value_or_exit(VCPKG_LINE_INFO) == (1ull << 40)); + REQUIRE(try_parse_curl_max5_size("1P").value_or_exit(VCPKG_LINE_INFO) == (1ull << 50)); + REQUIRE(!try_parse_curl_max5_size("1a").has_value()); + + // 1.3*1024 == 1'331.2 + REQUIRE(try_parse_curl_max5_size("1.3k").value_or_exit(VCPKG_LINE_INFO) == 1331ull); + // 1.33*1024 == 1'361.92 + REQUIRE(try_parse_curl_max5_size("1.33k").value_or_exit(VCPKG_LINE_INFO) == 1361ull); + + // 1.3*1024*1024 == 1'363'148.8 + REQUIRE(try_parse_curl_max5_size("1.3M").value_or_exit(VCPKG_LINE_INFO) == 1363148ull); + // 1.33*1024*1024 == 1'394'606.08 + REQUIRE(try_parse_curl_max5_size("1.33M").value_or_exit(VCPKG_LINE_INFO) == 1394606ull); + + // 1.3*1024*1024*1024 == 1'395'864'371.2 + REQUIRE(try_parse_curl_max5_size("1.3G").value_or_exit(VCPKG_LINE_INFO) == 1395864371ull); + // 1.33*1024*1024*1024 == 1'428'076'625.92 + REQUIRE(try_parse_curl_max5_size("1.33G").value_or_exit(VCPKG_LINE_INFO) == 1428076625ull); + + // 1.3*1024*1024*1024*1024 == 1'429'365'116'108.8 + REQUIRE(try_parse_curl_max5_size("1.3T").value_or_exit(VCPKG_LINE_INFO) == 1429365116108ull); + // 1.33*1024*1024*1024*1024 == 1'462'350'464'942.08 + REQUIRE(try_parse_curl_max5_size("1.33T").value_or_exit(VCPKG_LINE_INFO) == 1462350464942ull); + + // 1.3*1024*1024*1024*1024*1024 == 1'463'669'878'895'411.2 + REQUIRE(try_parse_curl_max5_size("1.3P").value_or_exit(VCPKG_LINE_INFO) == 1463669878895411ull); + // 1.33*1024*1024*1024*1024*1024 == 1'497'446'876'100'689.92 + REQUIRE(try_parse_curl_max5_size("1.33P").value_or_exit(VCPKG_LINE_INFO) == 1497446876100689ull); +} + +TEST_CASE ("try_parse_curl_progress_data", "[downloads]") +{ + // % Total % Received % Xferd Average Speed Time Time Time Current + // Dload Upload Total Spent Left Speed + // + // 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 + // 100 242 100 242 0 0 298 0 --:--:-- --:--:-- --:--:-- 298 + // 100 242 100 242 0 0 297 0 --:--:-- --:--:-- --:--:-- 297 + // + // 0 0 0 0 0 0 0 0 --:--:-- 0:00:01 --:--:-- 0 + // 0 190M 0 511k 0 0 199k 0 0:16:19 0:00:02 0:16:17 548k + // 0 190M 0 1423k 0 0 410k 0 0:07:55 0:00:03 0:07:52 776k + // 1 190M 1 2159k 0 0 468k 0 0:06:56 0:00:04 0:06:52 726k + // 1 190M 1 2767k 0 0 499k 0 0:06:30 0:00:05 0:06:25 709k + // 1 190M 1 3327k 0 0 507k 0 0:06:24 0:00:06 0:06:18 676k + // 2 190M 2 3935k 0 0 519k 0 0:06:15 0:00:07 0:06:08 683k + + REQUIRE( + !try_parse_curl_progress_data(" % Total % Received % Xferd Average Speed Time Time Time Current") + .has_value()); + + REQUIRE( + !try_parse_curl_progress_data(" Dload Upload Total Spent Left Speed") + .has_value()); + + { + const auto out = try_parse_curl_progress_data( + " 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0") + .value_or_exit(VCPKG_LINE_INFO); + REQUIRE(out.total_percent == 0); + REQUIRE(out.total_size == 0); + REQUIRE(out.recieved_percent == 0); + REQUIRE(out.recieved_size == 0); + REQUIRE(out.transfer_percent == 0); + REQUIRE(out.transfer_size == 0); + REQUIRE(out.average_upload_speed == 0); + REQUIRE(out.average_download_speed == 0); + REQUIRE(out.current_speed == 0); + } + + { + const auto out = try_parse_curl_progress_data( + " 2 190M 2 3935k 0 0 519k 0 0:06:15 0:00:07 0:06:08 683k") + .value_or_exit(VCPKG_LINE_INFO); + REQUIRE(out.total_percent == 2); 
+ REQUIRE(out.total_size == 190 * 1024 * 1024); + REQUIRE(out.recieved_percent == 2); + REQUIRE(out.recieved_size == 3935 * 1024); + REQUIRE(out.transfer_percent == 0); + REQUIRE(out.transfer_size == 0); + REQUIRE(out.average_upload_speed == 0); + REQUIRE(out.average_download_speed == 519 * 1024); + REQUIRE(out.current_speed == 683 * 1024); + } +} diff --git a/src/vcpkg/base/downloads.cpp b/src/vcpkg/base/downloads.cpp index ae55589853..3ccaf88869 100644 --- a/src/vcpkg/base/downloads.cpp +++ b/src/vcpkg/base/downloads.cpp @@ -3,6 +3,7 @@ #include #include #include +#include #include #include #include @@ -11,100 +12,234 @@ namespace vcpkg { + static std::string replace_secrets(std::string input, View secrets) + { + const auto replacement = msg::format(msgSecretBanner); + for (const auto& secret : secrets) + { + Strings::inplace_replace_all(input, secret, replacement); + } + + return input; + } + #if defined(_WIN32) - struct WinHttpHandleDeleter + struct WinHttpHandle { - void operator()(HINTERNET h) const { WinHttpCloseHandle(h); } + HINTERNET h; + + WinHttpHandle() : h(0) { } + explicit WinHttpHandle(HINTERNET h_) : h(h_) { } + WinHttpHandle(const WinHttpHandle&) = delete; + WinHttpHandle(WinHttpHandle&& other) : h(other.h) { other.h = 0; } + WinHttpHandle& operator=(const WinHttpHandle&) = delete; + WinHttpHandle& operator=(WinHttpHandle&& other) + { + auto cpy = std::move(other); + std::swap(h, cpy.h); + return *this; + } + + ~WinHttpHandle() + { + if (h) + { + WinHttpCloseHandle(h); + } + } }; + static LocalizedString format_winhttp_last_error_message(StringLiteral api_name, StringView url, DWORD last_error) + { + return msg::format_error( + msgDownloadWinHttpError, msg::system_api = api_name, msg::exit_code = last_error, msg::url = url); + } + + static LocalizedString format_winhttp_last_error_message(StringLiteral api_name, StringView url) + { + return format_winhttp_last_error_message(api_name, url, GetLastError()); + } + + static void maybe_emit_winhttp_progress(const Optional& maybe_content_length, + std::chrono::steady_clock::time_point& last_write, + unsigned long long total_downloaded_size, + MessageSink& progress_sink) + { + if (const auto content_length = maybe_content_length.get()) + { + const auto now = std::chrono::steady_clock::now(); + if ((now - last_write) >= std::chrono::milliseconds(100)) + { + const double percent = + (static_cast(total_downloaded_size) / static_cast(*content_length)) * 100; + progress_sink.print(Color::none, fmt::format("{:.2f}%\n", percent)); + last_write = now; + } + } + } + struct WinHttpRequest { - static ExpectedS make(HINTERNET hConnect, + static ExpectedL make(HINTERNET hConnect, StringView url_path, + StringView sanitized_url, bool https, const wchar_t* method = L"GET") { WinHttpRequest ret; + ret.m_sanitized_url.assign(sanitized_url.data(), sanitized_url.size()); // Create an HTTP request handle. - auto h = WinHttpOpenRequest(hConnect, - method, - Strings::to_utf16(url_path).c_str(), - nullptr, - WINHTTP_NO_REFERER, - WINHTTP_DEFAULT_ACCEPT_TYPES, - https ? WINHTTP_FLAG_SECURE : 0); - if (!h) return Strings::concat("WinHttpOpenRequest() failed: ", GetLastError()); - ret.m_hRequest.reset(h); + { + auto h = WinHttpOpenRequest(hConnect, + method, + Strings::to_utf16(url_path).c_str(), + nullptr, + WINHTTP_NO_REFERER, + WINHTTP_DEFAULT_ACCEPT_TYPES, + https ? WINHTTP_FLAG_SECURE : 0); + if (!h) + { + return format_winhttp_last_error_message("WinHttpOpenRequest", sanitized_url); + } + + ret.m_hRequest = WinHttpHandle{h}; + } // Send a request. 
auto bResults = WinHttpSendRequest( - ret.m_hRequest.get(), WINHTTP_NO_ADDITIONAL_HEADERS, 0, WINHTTP_NO_REQUEST_DATA, 0, 0, 0); + ret.m_hRequest.h, WINHTTP_NO_ADDITIONAL_HEADERS, 0, WINHTTP_NO_REQUEST_DATA, 0, 0, 0); - if (!bResults) return Strings::concat("WinHttpSendRequest() failed: ", GetLastError()); + if (!bResults) + { + return format_winhttp_last_error_message("WinHttpSendRequest", sanitized_url); + } // End the request. - bResults = WinHttpReceiveResponse(ret.m_hRequest.get(), NULL); - if (!bResults) return Strings::concat("WinHttpReceiveResponse() failed: ", GetLastError()); + bResults = WinHttpReceiveResponse(ret.m_hRequest.h, NULL); + if (!bResults) + { + return format_winhttp_last_error_message("WinHttpReceiveResponse", sanitized_url); + } return ret; } - ExpectedS query_status() const + ExpectedL query_status() const { - DWORD dwStatusCode = 0; - DWORD dwSize = sizeof(dwStatusCode); + DWORD status_code; + DWORD size = sizeof(status_code); + + auto succeeded = WinHttpQueryHeaders(m_hRequest.h, + WINHTTP_QUERY_STATUS_CODE | WINHTTP_QUERY_FLAG_NUMBER, + WINHTTP_HEADER_NAME_BY_INDEX, + &status_code, + &size, + WINHTTP_NO_HEADER_INDEX); + if (succeeded) + { + return status_code; + } - auto bResults = WinHttpQueryHeaders(m_hRequest.get(), - WINHTTP_QUERY_STATUS_CODE | WINHTTP_QUERY_FLAG_NUMBER, - WINHTTP_HEADER_NAME_BY_INDEX, - &dwStatusCode, - &dwSize, - WINHTTP_NO_HEADER_INDEX); - if (!bResults) return Strings::concat("WinHttpQueryHeaders() failed: ", GetLastError()); - return dwStatusCode; + return format_winhttp_last_error_message("WinHttpQueryHeaders", m_sanitized_url); } - template - ExpectedS forall_data(F f) + ExpectedL> query_content_length() const { - std::vector buf; + static constexpr DWORD buff_characters = 21; // 18446744073709551615 + wchar_t buff[buff_characters]; + DWORD size = sizeof(buff); + auto succeeded = WinHttpQueryHeaders(m_hRequest.h, + WINHTTP_QUERY_CONTENT_LENGTH, + WINHTTP_HEADER_NAME_BY_INDEX, + buff, + &size, + WINHTTP_NO_HEADER_INDEX); + if (succeeded) + { + return Strings::strto(Strings::to_utf8(buff, size >> 1)); + } - size_t total_downloaded_size = 0; - DWORD dwSize = 0; - do + const DWORD last_error = GetLastError(); + if (last_error == ERROR_WINHTTP_HEADER_NOT_FOUND) { - DWORD downloaded_size = 0; - auto bResults = WinHttpQueryDataAvailable(m_hRequest.get(), &dwSize); - if (!bResults) return Strings::concat("WinHttpQueryDataAvailable() failed: ", GetLastError()); + return Optional{nullopt}; + } - if (buf.size() < dwSize) buf.resize(static_cast(dwSize) * 2); + return format_winhttp_last_error_message("WinHttpQueryHeaders", m_sanitized_url, last_error); + } - bResults = WinHttpReadData(m_hRequest.get(), (LPVOID)buf.data(), dwSize, &downloaded_size); - if (!bResults) return Strings::concat("WinHttpReadData() failed: ", GetLastError()); - f(Span(buf.data(), downloaded_size)); + ExpectedL write_response_body(WriteFilePointer& file, MessageSink& progress_sink) + { + static constexpr DWORD buff_size = 65535; + std::unique_ptr buff{new char[buff_size]}; + Optional maybe_content_length; + auto last_write = std::chrono::steady_clock::now(); + + { + auto maybe_maybe_content_length = query_content_length(); + if (const auto p = maybe_maybe_content_length.get()) + { + maybe_content_length = *p; + } + else + { + return std::move(maybe_maybe_content_length).error(); + } + } - total_downloaded_size += downloaded_size; - } while (dwSize > 0); - return 1; + unsigned long long total_downloaded_size = 0; + for (;;) + { + DWORD this_read; + if 
(!WinHttpReadData(m_hRequest.h, buff.get(), buff_size, &this_read)) + { + return format_winhttp_last_error_message("WinHttpReadData", m_sanitized_url); + } + + if (this_read == 0) + { + return Unit{}; + } + + do + { + const auto this_write = static_cast(file.write(buff.get(), 1, this_read)); + if (this_write == 0) + { + return format_winhttp_last_error_message("fwrite", m_sanitized_url); + } + + maybe_emit_winhttp_progress(maybe_content_length, last_write, total_downloaded_size, progress_sink); + this_read -= this_write; + total_downloaded_size += this_write; + } while (this_read > 0); + } } - std::unique_ptr m_hRequest; + WinHttpHandle m_hRequest; + std::string m_sanitized_url; }; struct WinHttpSession { - static ExpectedS make() + static ExpectedL make(StringView sanitized_url) { - auto h = WinHttpOpen( - L"vcpkg/1.0", WINHTTP_ACCESS_TYPE_NO_PROXY, WINHTTP_NO_PROXY_NAME, WINHTTP_NO_PROXY_BYPASS, 0); - if (!h) return Strings::concat("WinHttpOpen() failed: ", GetLastError()); WinHttpSession ret; - ret.m_hSession.reset(h); + { + auto h = WinHttpOpen( + L"vcpkg/1.0", WINHTTP_ACCESS_TYPE_NO_PROXY, WINHTTP_NO_PROXY_NAME, WINHTTP_NO_PROXY_BYPASS, 0); + if (!h) + { + return format_winhttp_last_error_message("WinHttpOpen", sanitized_url); + } + + ret.m_hSession = WinHttpHandle{h}; + } // Increase default timeouts to help connections behind proxies // WinHttpSetTimeouts(HINTERNET hInternet, int nResolveTimeout, int nConnectTimeout, int nSendTimeout, int // nReceiveTimeout); - WinHttpSetTimeouts(h, 0, 120000, 120000, 120000); + WinHttpSetTimeouts(ret.m_hSession.h, 0, 120000, 120000, 120000); // If the environment variable HTTPS_PROXY is set // use that variable as proxy. This situation might exist when user is in a company network @@ -118,7 +253,7 @@ namespace vcpkg proxy.lpszProxy = env_proxy_settings.data(); proxy.lpszProxyBypass = nullptr; - WinHttpSetOption(ret.m_hSession.get(), WINHTTP_OPTION_PROXY, &proxy, sizeof(proxy)); + WinHttpSetOption(ret.m_hSession.h, WINHTTP_OPTION_PROXY, &proxy, sizeof(proxy)); } // IE Proxy fallback, this works on Windows 10 else @@ -132,7 +267,7 @@ namespace vcpkg proxy.dwAccessType = WINHTTP_ACCESS_TYPE_NAMED_PROXY; proxy.lpszProxy = ieProxy.get()->server.data(); proxy.lpszProxyBypass = ieProxy.get()->bypass.data(); - WinHttpSetOption(ret.m_hSession.get(), WINHTTP_OPTION_PROXY, &proxy, sizeof(proxy)); + WinHttpSetOption(ret.m_hSession.h, WINHTTP_OPTION_PROXY, &proxy, sizeof(proxy)); } } @@ -140,72 +275,61 @@ namespace vcpkg DWORD secure_protocols(WINHTTP_FLAG_SECURE_PROTOCOL_TLS1 | WINHTTP_FLAG_SECURE_PROTOCOL_TLS1_1 | WINHTTP_FLAG_SECURE_PROTOCOL_TLS1_2); WinHttpSetOption( - ret.m_hSession.get(), WINHTTP_OPTION_SECURE_PROTOCOLS, &secure_protocols, sizeof(secure_protocols)); + ret.m_hSession.h, WINHTTP_OPTION_SECURE_PROTOCOLS, &secure_protocols, sizeof(secure_protocols)); // Many open source mirrors such as https://download.gnome.org/ will redirect to http mirrors. // `curl.exe -L` does follow https -> http redirection. // Additionally, vcpkg hash checks the resulting archive. 
DWORD redirect_policy(WINHTTP_OPTION_REDIRECT_POLICY_ALWAYS); WinHttpSetOption( - ret.m_hSession.get(), WINHTTP_OPTION_REDIRECT_POLICY, &redirect_policy, sizeof(redirect_policy)); + ret.m_hSession.h, WINHTTP_OPTION_REDIRECT_POLICY, &redirect_policy, sizeof(redirect_policy)); return ret; } - std::unique_ptr m_hSession; + WinHttpHandle m_hSession; }; struct WinHttpConnection { - static ExpectedS make(HINTERNET hSession, StringView hostname, INTERNET_PORT port) + static ExpectedL make(HINTERNET hSession, + StringView hostname, + INTERNET_PORT port, + StringView sanitized_url) { // Specify an HTTP server. auto h = WinHttpConnect(hSession, Strings::to_utf16(hostname).c_str(), port, 0); - if (!h) return Strings::concat("WinHttpConnect() failed: ", GetLastError()); - WinHttpConnection ret; - ret.m_hConnect.reset(h); - return ret; + if (!h) + { + return format_winhttp_last_error_message("WinHttpConnect", sanitized_url); + } + + return WinHttpConnection{WinHttpHandle{h}}; } - std::unique_ptr m_hConnect; + WinHttpHandle m_hConnect; }; #endif - ExpectedS details::split_uri_view(StringView uri) + ExpectedL split_uri_view(StringView uri) { auto sep = std::find(uri.begin(), uri.end(), ':'); - if (sep == uri.end()) return Strings::concat("Error: unable to parse uri: '", uri, "'"); + if (sep == uri.end()) return msg::format_error(msgInvalidUri, msg::value = uri); StringView scheme(uri.begin(), sep); if (Strings::starts_with({sep + 1, uri.end()}, "//")) { auto path_start = std::find(sep + 3, uri.end(), '/'); - return details::SplitURIView{scheme, StringView{sep + 1, path_start}, {path_start, uri.end()}}; + return SplitURIView{scheme, StringView{sep + 1, path_start}, {path_start, uri.end()}}; } // no authority - return details::SplitURIView{scheme, {}, {sep + 1, uri.end()}}; - } - - static std::string format_hash_mismatch(StringView url, - const Path& downloaded_path, - StringView expected, - StringView actual) - { - return Strings::format("File does not have the expected hash:\n" - " url : [ %s ]\n" - " File path : [ %s ]\n" - " Expected hash : [ %s ]\n" - " Actual hash : [ %s ]\n", - url, - downloaded_path, - expected, - actual); + return SplitURIView{scheme, {}, {sep + 1, uri.end()}}; } - static Optional try_verify_downloaded_file_hash(const Filesystem& fs, - StringView sanitized_url, - const Path& downloaded_path, - StringView sha512) + static ExpectedL try_verify_downloaded_file_hash(const Filesystem& fs, + StringView sanitized_url, + const Path& downloaded_path, + StringView sha512) { std::string actual_hash = vcpkg::Hash::get_file_hash(fs, downloaded_path, Hash::Algorithm::Sha512).value_or_exit(VCPKG_LINE_INFO); @@ -223,9 +347,14 @@ namespace vcpkg if (!Strings::case_insensitive_ascii_equals(sha512, actual_hash)) { - return format_hash_mismatch(sanitized_url, downloaded_path, sha512, actual_hash); + return msg::format_error(msgDownloadFailedHashMismatch, + msg::url = sanitized_url, + msg::path = downloaded_path, + msg::expected = sha512, + msg::actual = actual_hash); } - return nullopt; + + return Unit{}; } void verify_downloaded_file_hash(const Filesystem& fs, @@ -233,25 +362,21 @@ namespace vcpkg const Path& downloaded_path, StringView sha512) { - auto maybe_error = try_verify_downloaded_file_hash(fs, url, downloaded_path, sha512); - if (auto err = maybe_error.get()) - { - Checks::exit_with_message(VCPKG_LINE_INFO, *err); - } + try_verify_downloaded_file_hash(fs, url, downloaded_path, sha512).value_or_exit(VCPKG_LINE_INFO); } static bool check_downloaded_file_hash(Filesystem& fs, const Optional& 
hash, StringView sanitized_url, const Path& download_part_path, - std::string& errors) + std::vector& errors) { if (auto p = hash.get()) { - auto maybe_error = try_verify_downloaded_file_hash(fs, sanitized_url, download_part_path, *p); - if (auto err = maybe_error.get()) + auto maybe_success = try_verify_downloaded_file_hash(fs, sanitized_url, download_part_path, *p); + if (!maybe_success.has_value()) { - Strings::append(errors, *err, '\n'); + errors.push_back(std::move(maybe_success).error()); return false; } } @@ -272,7 +397,7 @@ namespace vcpkg .string_arg("--head") .string_arg("--location") .string_arg("-w") - .string_arg(Strings::concat(guid_marker, " %{http_code}\\n")); + .string_arg(guid_marker.to_string() + " %{http_code}\\n"); for (auto&& header : headers) { cmd.string_arg("-H").string_arg(header); @@ -323,17 +448,6 @@ namespace vcpkg return ret; } - std::string replace_secrets(std::string input, View secrets) - { - static constexpr StringLiteral replacement{"*** SECRET ***"}; - for (const auto& secret : secrets) - { - Strings::inplace_replace_all(input, secret, replacement); - } - - return input; - } - static void download_files_inner(Filesystem&, View> url_pairs, View headers, @@ -349,7 +463,7 @@ namespace vcpkg .string_arg("--create-dirs") .string_arg("--location") .string_arg("-w") - .string_arg(Strings::concat(guid_marker, " %{http_code}\\n")); + .string_arg(guid_marker.to_string() + " %{http_code}\\n"); for (StringView header : headers) { cmd.string_arg("-H").string_arg(header); @@ -408,7 +522,11 @@ namespace vcpkg return ret; } - ExpectedS put_file(const Filesystem&, StringView url, View headers, const Path& file) + ExpectedL put_file(const Filesystem&, + StringView url, + const std::vector& secrets, + View headers, + const Path& file) { static constexpr StringLiteral guid_marker = "9a1db05f-a65d-419b-aa72-037fb4d0672e"; @@ -428,11 +546,12 @@ namespace vcpkg } Debug::print(res->output, '\n'); - return Strings::concat( - "Error: curl failed to put file to ", url, " with exit code: ", res->exit_code, '\n'); + return msg::format_error(msgCurlFailedToPut, + msg::exit_code = res->exit_code, + msg::url = replace_secrets(url.to_string(), secrets)); } - return Strings::concat("Error: launching curl failed: ", maybe_res.error()); + return std::move(maybe_res).error(); } Command cmd; @@ -441,27 +560,23 @@ namespace vcpkg { cmd.string_arg("-H").string_arg(header); } - cmd.string_arg("-w").string_arg(Strings::concat("\\n", guid_marker, "%{http_code}")); + cmd.string_arg("-w").string_arg("\\n" + guid_marker.to_string() + "%{http_code}"); cmd.string_arg(url); cmd.string_arg("-T").string_arg(file); int code = 0; auto res = cmd_execute_and_stream_lines(cmd, [&code](StringView line) { - if (Strings::starts_with(line, guid_marker)) - { - code = std::strtol(line.data() + guid_marker.size(), nullptr, 10); - } - }).map_error([](LocalizedString&& ls) { return ls.extract_data(); }); + if (Strings::starts_with(line, guid_marker)) + { + code = std::strtol(line.data() + guid_marker.size(), nullptr, 10); + } + }); + if (auto pres = res.get()) { if (*pres != 0 || (code >= 100 && code < 200) || code >= 300) { - res = Strings::concat("Error: curl failed to put file to ", - url, - " with exit code '", - *pres, - "' and http code '", - code, - "'\n"); + return msg::format_error( + msgCurlFailedToPutHttp, msg::exit_code = *pres, msg::url = url, msg::value = code); } } @@ -469,92 +584,121 @@ namespace vcpkg } #if defined(_WIN32) - namespace + enum class WinHttpTrialResult { - /// - /// Download a file using 
WinHTTP -- only supports HTTP and HTTPS - /// - static bool download_winhttp(Filesystem& fs, - const Path& download_path_part_path, - details::SplitURIView split_uri, - const std::string& url, - const std::vector& secrets, - std::string& errors) + failed, + succeeded, + retry + }; + + static WinHttpTrialResult download_winhttp_trial(Filesystem& fs, + WinHttpSession& s, + const Path& download_path_part_path, + SplitURIView split_uri, + StringView hostname, + INTERNET_PORT port, + StringView sanitized_url, + std::vector& errors, + MessageSink& progress_sink) + { + auto maybe_conn = WinHttpConnection::make(s.m_hSession.h, hostname, port, sanitized_url); + const auto conn = maybe_conn.get(); + if (!conn) { - // `download_winhttp` does not support user or port syntax in authorities - auto hostname = split_uri.authority.value_or_exit(VCPKG_LINE_INFO).substr(2); - INTERNET_PORT port; - if (split_uri.scheme == "https") - { - port = INTERNET_DEFAULT_HTTPS_PORT; - } - else if (split_uri.scheme == "http") - { - port = INTERNET_DEFAULT_HTTP_PORT; - } - else - { - Checks::unreachable(VCPKG_LINE_INFO); - } + errors.push_back(std::move(maybe_conn).error()); + return WinHttpTrialResult::retry; + } - // Make sure the directories are present, otherwise fopen_s fails - const auto dir = download_path_part_path.parent_path(); - fs.create_directories(dir, VCPKG_LINE_INFO); + auto maybe_req = WinHttpRequest::make( + conn->m_hConnect.h, split_uri.path_query_fragment, sanitized_url, split_uri.scheme == "https"); + const auto req = maybe_req.get(); + if (!req) + { + errors.push_back(std::move(maybe_req).error()); + return WinHttpTrialResult::retry; + } - const auto sanitized_url = replace_secrets(url, secrets); - msg::write_unlocalized_text_to_stdout(Color::none, fmt::format("Downloading {}\n", sanitized_url)); - static auto s = WinHttpSession::make().value_or_exit(VCPKG_LINE_INFO); - for (size_t trials = 0; trials < 4; ++trials) - { - if (trials > 0) - { - // 1s, 2s, 4s - msg::write_unlocalized_text_to_stdout( - Color::none, fmt::format("Download failed -- retrying after {}ms.", 500 << trials)); - std::this_thread::sleep_for(std::chrono::milliseconds(500 << trials)); - } - auto conn = WinHttpConnection::make(s.m_hSession.get(), hostname, port); - if (!conn) - { - Strings::append(errors, sanitized_url, ": ", conn.error(), '\n'); - continue; - } - auto req = WinHttpRequest::make( - conn.get()->m_hConnect.get(), split_uri.path_query_fragment, split_uri.scheme == "https"); - if (!req) - { - Strings::append(errors, sanitized_url, ": ", req.error(), '\n'); - continue; - } + auto maybe_status = req->query_status(); + const auto status = maybe_status.get(); + if (!status) + { + errors.push_back(std::move(maybe_status).error()); + return WinHttpTrialResult::retry; + } - auto maybe_status = req.get()->query_status(); - if (auto status = maybe_status.get()) - { - if (*status < 200 || *status >= 300) - { - Strings::append(errors, sanitized_url, ": failed: status code ", *status, '\n'); - return false; - } - } - else - { - Strings::append(errors, sanitized_url, ": ", maybe_status.error(), '\n'); - continue; - } + if (*status < 200 || *status >= 300) + { + errors.push_back( + msg::format_error(msgDownloadFailedStatusCode, msg::url = sanitized_url, msg::value = *status)); + return WinHttpTrialResult::failed; + } - const auto f = fs.open_for_write(download_path_part_path, VCPKG_LINE_INFO); + auto f = fs.open_for_write(download_path_part_path, VCPKG_LINE_INFO); + auto maybe_write = req->write_response_body(f, progress_sink); + 
const auto write = maybe_write.get(); + if (!write) + { + errors.push_back(std::move(maybe_write).error()); + return WinHttpTrialResult::retry; + } - auto forall_data = - req.get()->forall_data([&f](Span span) { f.write(span.data(), 1, span.size()); }); - if (!forall_data) - { - Strings::append(errors, sanitized_url, ": ", forall_data.error(), '\n'); - continue; - } - return true; + return WinHttpTrialResult::succeeded; + } + + /// + /// Download a file using WinHTTP -- only supports HTTP and HTTPS + /// + static bool download_winhttp(Filesystem& fs, + const Path& download_path_part_path, + SplitURIView split_uri, + const std::string& url, + const std::vector& secrets, + std::vector& errors, + MessageSink& progress_sink) + { + // `download_winhttp` does not support user or port syntax in authorities + auto hostname = split_uri.authority.value_or_exit(VCPKG_LINE_INFO).substr(2); + INTERNET_PORT port; + if (split_uri.scheme == "https") + { + port = INTERNET_DEFAULT_HTTPS_PORT; + } + else if (split_uri.scheme == "http") + { + port = INTERNET_DEFAULT_HTTP_PORT; + } + else + { + Checks::unreachable(VCPKG_LINE_INFO); + } + + // Make sure the directories are present, otherwise fopen_s fails + const auto dir = download_path_part_path.parent_path(); + fs.create_directories(dir, VCPKG_LINE_INFO); + + const auto sanitized_url = replace_secrets(url, secrets); + msg::println(msgDownloadingUrl, msg::url = sanitized_url); + static auto s = WinHttpSession::make(sanitized_url).value_or_exit(VCPKG_LINE_INFO); + for (size_t trials = 0; trials < 4; ++trials) + { + if (trials > 0) + { + // 1s, 2s, 4s + const auto trialMs = 500 << trials; + msg::println_warning(msgDownloadFailedRetrying, msg::value = trialMs); + std::this_thread::sleep_for(std::chrono::milliseconds(trialMs)); + } + + switch (download_winhttp_trial( + fs, s, download_path_part_path, split_uri, hostname, port, sanitized_url, errors, progress_sink)) + { + case WinHttpTrialResult::failed: return false; + case WinHttpTrialResult::succeeded: return true; + case WinHttpTrialResult::retry: break; } - return false; } + + return false; } #endif @@ -564,7 +708,8 @@ namespace vcpkg const Path& download_path, const Optional& sha512, const std::vector& secrets, - std::string& errors) + std::vector& errors, + MessageSink& progress_sink) { auto download_path_part_path = download_path; download_path_part_path += "."; @@ -578,14 +723,14 @@ namespace vcpkg #if defined(_WIN32) if (headers.size() == 0) { - auto split_uri = details::split_uri_view(url).value_or_exit(VCPKG_LINE_INFO); + auto split_uri = split_uri_view(url).value_or_exit(VCPKG_LINE_INFO); auto authority = split_uri.authority.value_or_exit(VCPKG_LINE_INFO).substr(2); if (split_uri.scheme == "https" || split_uri.scheme == "http") { // This check causes complex URLs (non-default port, embedded basic auth) to be passed down to curl.exe if (Strings::find_first_of(authority, ":@") == authority.end()) { - if (download_winhttp(fs, download_path_part_path, split_uri, url, secrets, errors)) + if (download_winhttp(fs, download_path_part_path, split_uri, url, secrets, errors, progress_sink)) { if (check_downloaded_file_hash(fs, sha512, url, download_path_part_path, errors)) { @@ -610,13 +755,33 @@ namespace vcpkg { cmd.string_arg("-H").string_arg(header); } - const auto maybe_out = cmd_execute_and_capture_output(cmd); + + std::string non_progress_lines; + const auto maybe_exit_code = cmd_execute_and_stream_lines( + cmd, + [&](StringView line) { + const auto maybe_parsed = try_parse_curl_progress_data(line); + if 
(const auto parsed = maybe_parsed.get()) + { + progress_sink.print(Color::none, fmt::format("{}%\n", parsed->total_percent)); + } + else + { + non_progress_lines.append(line.data(), line.size()); + non_progress_lines.push_back('\n'); + } + }, + default_working_directory, + default_environment, + Encoding::Utf8); + const auto sanitized_url = replace_secrets(url, secrets); - if (const auto out = maybe_out.get()) + if (const auto exit_code = maybe_exit_code.get()) { - if (out->exit_code != 0) + if (*exit_code != 0) { - Strings::append(errors, sanitized_url, ": ", out->output, '\n'); + errors.push_back( + msg::format_error(msgDownloadFailedCurl, msg::url = sanitized_url, msg::exit_code = *exit_code)); return false; } @@ -628,24 +793,29 @@ namespace vcpkg } else { - Strings::append(errors, sanitized_url, ": ", maybe_out.error(), '\n'); + errors.push_back(std::move(maybe_exit_code).error()); } return false; } - static Optional try_download_files(vcpkg::Filesystem& fs, - View urls, - View headers, - const Path& download_path, - const Optional& sha512, - const std::vector& secrets, - std::string& errors) + static Optional try_download_file(vcpkg::Filesystem& fs, + View urls, + View headers, + const Path& download_path, + const Optional& sha512, + const std::vector& secrets, + std::vector& errors, + MessageSink& progress_sink) { for (auto&& url : urls) { - if (try_download_file(fs, url, headers, download_path, sha512, secrets, errors)) return url; + if (try_download_file(fs, url, headers, download_path, sha512, secrets, errors, progress_sink)) + { + return url; + } } + return nullopt; } @@ -659,36 +829,45 @@ namespace vcpkg const std::string& url, View headers, const Path& download_path, - const Optional& sha512) const + const Optional& sha512, + MessageSink& progress_sink) const { - this->download_file(fs, View(&url, 1), headers, download_path, sha512); + this->download_file(fs, View(&url, 1), headers, download_path, sha512, progress_sink); } std::string DownloadManager::download_file(Filesystem& fs, View urls, View headers, const Path& download_path, - const Optional& sha512) const + const Optional& sha512, + MessageSink& progress_sink) const { - std::string errors; + std::vector errors; if (urls.size() == 0) { if (auto hash = sha512.get()) { - Strings::append(errors, "Error: No urls specified to download SHA: ", *hash); + errors.push_back(msg::format_error(msgNoUrlsAndHashSpecified, msg::sha = *hash)); } else { - Strings::append(errors, "Error: No urls specified and no hash specified."); + errors.push_back(msg::format_error(msgNoUrlsAndNoHashSpecified)); } } + if (auto hash = sha512.get()) { if (auto read_template = m_config.m_read_url_template.get()) { auto read_url = Strings::replace_all(*read_template, "", *hash); - if (try_download_file( - fs, read_url, m_config.m_read_headers, download_path, sha512, m_config.m_secrets, errors)) + if (try_download_file(fs, + read_url, + m_config.m_read_headers, + download_path, + sha512, + m_config.m_secrets, + errors, + progress_sink)) { return read_url; } @@ -728,21 +907,19 @@ namespace vcpkg if (maybe_res) { - auto maybe_error = + auto maybe_success = try_verify_downloaded_file_hash(fs, "", download_path_part_path, *hash); - if (auto err = maybe_error.get()) - { - Strings::append(errors, *err); - } - else + if (maybe_success) { fs.rename(download_path_part_path, download_path, VCPKG_LINE_INFO); return urls[0]; } + + errors.push_back(std::move(maybe_success).error()); } else { - Strings::append(errors, maybe_res.error(), '\n'); + 
errors.push_back(std::move(maybe_res).error()); } } } @@ -752,8 +929,8 @@ namespace vcpkg { if (urls.size() != 0) { - auto maybe_url = - try_download_files(fs, urls, headers, download_path, sha512, m_config.m_secrets, errors); + auto maybe_url = try_download_file( + fs, urls, headers, download_path, sha512, m_config.m_secrets, errors, progress_sink); if (auto url = maybe_url.get()) { if (auto hash = sha512.get()) @@ -762,27 +939,162 @@ namespace vcpkg if (!maybe_push) { msg::println_warning(msgFailedToStoreBackToMirror); - msg::write_unlocalized_text_to_stdout(Color::warning, maybe_push.error()); + msg::println(maybe_push.error()); } } + return *url; } } } msg::println_error(msgFailedToDownloadFromMirrorSet); - msg::println_error(LocalizedString::from_raw(errors)); + for (LocalizedString& error : errors) + { + msg::println(error); + } + Checks::exit_fail(VCPKG_LINE_INFO); } - ExpectedS DownloadManager::put_file_to_mirror(const Filesystem& fs, + ExpectedL DownloadManager::put_file_to_mirror(const Filesystem& fs, const Path& file_to_put, StringView sha512) const { auto maybe_mirror_url = Strings::replace_all(m_config.m_write_url_template.value_or(""), "", sha512); if (!maybe_mirror_url.empty()) { - return put_file(fs, maybe_mirror_url, m_config.m_write_headers, file_to_put); + return put_file(fs, maybe_mirror_url, m_config.m_secrets, m_config.m_write_headers, file_to_put); } return 0; } + + Optional try_parse_curl_max5_size(StringView sv) + { + // \d+(\.\d{1, 2})?[kMGTP]? + std::size_t idx = 0; + while (idx < sv.size() && ParserBase::is_ascii_digit(sv[idx])) + { + ++idx; + } + + if (idx == 0) + { + return nullopt; + } + + unsigned long long accumulator; + { + const auto maybe_first_digits = Strings::strto(sv.substr(0, idx)); + if (auto p = maybe_first_digits.get()) + { + accumulator = *p; + } + else + { + return nullopt; + } + } + + unsigned long long after_digits = 0; + if (idx < sv.size() && sv[idx] == '.') + { + ++idx; + if (idx >= sv.size() || !ParserBase::is_ascii_digit(sv[idx])) + { + return nullopt; + } + + after_digits = (sv[idx] - '0') * 10u; + ++idx; + if (idx < sv.size() && ParserBase::is_ascii_digit(sv[idx])) + { + after_digits += sv[idx] - '0'; + ++idx; + } + } + + if (idx == sv.size()) + { + return accumulator; + } + + if (idx + 1 != sv.size()) + { + return nullopt; + } + + switch (sv[idx]) + { + case 'k': return (accumulator << 10) + (after_digits << 10) / 100; + case 'M': return (accumulator << 20) + (after_digits << 20) / 100; + case 'G': return (accumulator << 30) + (after_digits << 30) / 100; + case 'T': return (accumulator << 40) + (after_digits << 40) / 100; + case 'P': return (accumulator << 50) + (after_digits << 50) / 100; + default: return nullopt; + } + } + + static bool parse_curl_uint_impl(unsigned int& target, const char*& first, const char* const last) + { + first = std::find_if_not(first, last, ParserBase::is_whitespace); + const auto start = first; + first = std::find_if(first, last, ParserBase::is_whitespace); + const auto maybe_parsed = Strings::strto(StringView{start, first}); + if (const auto parsed = maybe_parsed.get()) + { + target = *parsed; + return false; + } + + return true; + } + + static bool parse_curl_max5_impl(unsigned long long& target, const char*& first, const char* const last) + { + first = std::find_if_not(first, last, ParserBase::is_whitespace); + const auto start = first; + first = std::find_if(first, last, ParserBase::is_whitespace); + const auto maybe_parsed = try_parse_curl_max5_size(StringView{start, first}); + if (const auto parsed = 
maybe_parsed.get()) + { + target = *parsed; + return false; + } + + return true; + } + + static bool skip_curl_time_impl(const char*& first, const char* const last) + { + first = std::find_if_not(first, last, ParserBase::is_whitespace); + first = std::find_if(first, last, ParserBase::is_whitespace); + return false; + } + + Optional try_parse_curl_progress_data(StringView curl_progress_line) + { + // Curl's maintainer Daniel Stenberg clarified that this output is semi-contractual + // here: https://twitter.com/bagder/status/1600615752725307400 + // % Total % Received % Xferd Average Speed Time Time Time Current + // Dload Upload Total Spent Left Speed + // https://github.com/curl/curl/blob/5ccddf64398c1186deb5769dac086d738e150e09/lib/progress.c#L546 + CurlProgressData result; + auto first = curl_progress_line.begin(); + const auto last = curl_progress_line.end(); + if (parse_curl_uint_impl(result.total_percent, first, last) || + parse_curl_max5_impl(result.total_size, first, last) || + parse_curl_uint_impl(result.recieved_percent, first, last) || + parse_curl_max5_impl(result.recieved_size, first, last) || + parse_curl_uint_impl(result.transfer_percent, first, last) || + parse_curl_max5_impl(result.transfer_size, first, last) || + parse_curl_max5_impl(result.average_download_speed, first, last) || + parse_curl_max5_impl(result.average_upload_speed, first, last) || skip_curl_time_impl(first, last) || + skip_curl_time_impl(first, last) || skip_curl_time_impl(first, last) || + parse_curl_max5_impl(result.current_speed, first, last)) + { + return nullopt; + } + + return result; + } } diff --git a/src/vcpkg/base/messages.cpp b/src/vcpkg/base/messages.cpp index 99ffed09f4..c786dcd20e 100644 --- a/src/vcpkg/base/messages.cpp +++ b/src/vcpkg/base/messages.cpp @@ -524,6 +524,8 @@ namespace vcpkg REGISTER_MESSAGE(CreatingZipArchive); REGISTER_MESSAGE(CreationFailed); REGISTER_MESSAGE(CurlFailedToExecute); + REGISTER_MESSAGE(CurlFailedToPut); + REGISTER_MESSAGE(CurlFailedToPutHttp); REGISTER_MESSAGE(CurlReportedUnexpectedResults); REGISTER_MESSAGE(CurlReturnedUnexpectedResponseCodes); REGISTER_MESSAGE(CurrentCommitBaseline); @@ -537,8 +539,14 @@ namespace vcpkg REGISTER_MESSAGE(DocumentedFieldsSuggestUpdate); REGISTER_MESSAGE(DownloadAvailable); REGISTER_MESSAGE(DownloadedSources); + REGISTER_MESSAGE(DownloadFailedCurl); + REGISTER_MESSAGE(DownloadFailedHashMismatch); + REGISTER_MESSAGE(DownloadFailedRetrying); + REGISTER_MESSAGE(DownloadFailedStatusCode); REGISTER_MESSAGE(DownloadingPortableToolVersionX); REGISTER_MESSAGE(DownloadingTool); + REGISTER_MESSAGE(DownloadingUrl); + REGISTER_MESSAGE(DownloadWinHttpError); REGISTER_MESSAGE(DownloadingVcpkgCeBundle); REGISTER_MESSAGE(DownloadingVcpkgCeBundleLatest); REGISTER_MESSAGE(DownloadingVcpkgStandaloneBundle); @@ -825,6 +833,8 @@ namespace vcpkg REGISTER_MESSAGE(NoLocalizationForMessages); REGISTER_MESSAGE(NoOutdatedPackages); REGISTER_MESSAGE(NoRegistryForPort); + REGISTER_MESSAGE(NoUrlsAndHashSpecified); + REGISTER_MESSAGE(NoUrlsAndNoHashSpecified); REGISTER_MESSAGE(NugetPackageFileSucceededButCreationFailed); REGISTER_MESSAGE(OptionMustBeInteger); REGISTER_MESSAGE(OptionRequired); @@ -875,6 +885,7 @@ namespace vcpkg REGISTER_MESSAGE(RestoredPackage); REGISTER_MESSAGE(RestoredPackagesFromVendor); REGISTER_MESSAGE(ResultsHeader); + REGISTER_MESSAGE(SecretBanner); REGISTER_MESSAGE(SerializedBinParagraphHeader); REGISTER_MESSAGE(SettingEnvVar); REGISTER_MESSAGE(ShallowRepositoryDetected); @@ -976,6 +987,7 @@ namespace vcpkg 
REGISTER_MESSAGE(VcpkgHasCrashed); REGISTER_MESSAGE(VcpkgInvalidCommand); REGISTER_MESSAGE(InvalidCommentStyle); + REGISTER_MESSAGE(InvalidUri); REGISTER_MESSAGE(VcpkgInVsPrompt); REGISTER_MESSAGE(VcpkgRootRequired); REGISTER_MESSAGE(VcpkgRootsDir); diff --git a/src/vcpkg/base/strings.cpp b/src/vcpkg/base/strings.cpp index d5fbb3cf9c..a1c96872fa 100644 --- a/src/vcpkg/base/strings.cpp +++ b/src/vcpkg/base/strings.cpp @@ -466,6 +466,20 @@ Optional Strings::strto(StringView sv) return nullopt; } +template<> +Optional Strings::strto(StringView sv) +{ + auto opt = strto(sv); + if (auto p = opt.get()) + { + if (*p <= UINT_MAX) + { + return static_cast(*p); + } + } + return nullopt; +} + template<> Optional Strings::strto(StringView sv) { @@ -493,6 +507,33 @@ Optional Strings::strto(StringView sv) return res; } +template<> +Optional Strings::strto(StringView sv) +{ + // disallow initial whitespace + if (sv.empty() || ParserBase::is_whitespace(sv[0])) + { + return nullopt; + } + + auto with_nul_terminator = sv.to_string(); + + errno = 0; + char* endptr = nullptr; + long res = strtoul(with_nul_terminator.c_str(), &endptr, 10); + if (endptr != with_nul_terminator.data() + with_nul_terminator.size()) + { + // contains invalid characters + return nullopt; + } + else if (errno == ERANGE) + { + return nullopt; + } + + return res; +} + template<> Optional Strings::strto(StringView sv) { @@ -520,6 +561,33 @@ Optional Strings::strto(StringView sv) return res; } +template<> +Optional Strings::strto(StringView sv) +{ + // disallow initial whitespace + if (sv.empty() || ParserBase::is_whitespace(sv[0])) + { + return nullopt; + } + + auto with_nul_terminator = sv.to_string(); + + errno = 0; + char* endptr = nullptr; + long long res = strtoull(with_nul_terminator.c_str(), &endptr, 10); + if (endptr != with_nul_terminator.data() + with_nul_terminator.size()) + { + // contains invalid characters + return nullopt; + } + else if (errno == ERANGE) + { + return nullopt; + } + + return res; +} + template<> Optional Strings::strto(StringView sv) { diff --git a/src/vcpkg/binarycaching.cpp b/src/vcpkg/binarycaching.cpp index 1fe8866e66..18795e3a15 100644 --- a/src/vcpkg/binarycaching.cpp +++ b/src/vcpkg/binarycaching.cpp @@ -325,15 +325,14 @@ namespace for (auto&& put_url_template : m_put_url_templates) { auto url = put_url_template.instantiate_variables(action); - auto maybe_success = put_file(fs, url, put_url_template.headers_for_put, tmp_archive_path); + auto maybe_success = put_file(fs, url, m_secrets, put_url_template.headers_for_put, tmp_archive_path); if (maybe_success) { http_remotes_pushed++; continue; } - msg::println(Color::warning, - LocalizedString::from_raw(replace_secrets(std::move(maybe_success).error(), m_secrets))); + msg::println(Color::warning, maybe_success.error()); } if (!m_put_url_templates.empty()) diff --git a/src/vcpkg/commands.xdownload.cpp b/src/vcpkg/commands.xdownload.cpp index 64c8356f25..077aa22a80 100644 --- a/src/vcpkg/commands.xdownload.cpp +++ b/src/vcpkg/commands.xdownload.cpp @@ -16,11 +16,12 @@ namespace vcpkg::Commands::X_Download static constexpr StringLiteral OPTION_SHA512 = "sha512"; static constexpr StringLiteral OPTION_URL = "url"; static constexpr StringLiteral OPTION_HEADER = "header"; + static constexpr StringLiteral OPTION_MACHINE_PROGRESS = "z-machine-readable-progress"; static constexpr CommandSwitch FETCH_SWITCHES[] = { {OPTION_STORE, []() { return msg::format(msgCmdXDownloadOptStore); }}, {OPTION_SKIP_SHA512, []() { return msg::format(msgCmdXDownloadOptSkipSha); }}, 
- }; + {OPTION_MACHINE_PROGRESS, nullptr}}; static constexpr CommandSetting FETCH_SETTINGS[] = { {OPTION_SHA512, []() { return msg::format(msgCmdXDownloadOptSha); }}, }; @@ -133,7 +134,13 @@ namespace vcpkg::Commands::X_Download urls = it_urls->second; } - download_manager.download_file(fs, urls, headers, file, sha); + download_manager.download_file(fs, + urls, + headers, + file, + sha, + Util::Sets::contains(parsed.switches, OPTION_MACHINE_PROGRESS) ? stdout_sink + : null_sink); Checks::exit_success(VCPKG_LINE_INFO); } } diff --git a/src/vcpkg/commands.zbootstrap-standalone.cpp b/src/vcpkg/commands.zbootstrap-standalone.cpp index fca9103c3d..9a9ccd79ca 100644 --- a/src/vcpkg/commands.zbootstrap-standalone.cpp +++ b/src/vcpkg/commands.zbootstrap-standalone.cpp @@ -38,12 +38,12 @@ namespace vcpkg::Commands "https://github.com/microsoft/vcpkg-tool/releases/download/" VCPKG_BASE_VERSION_AS_STRING "/vcpkg-standalone-bundle.tar.gz"; download_manager.download_file( - fs, bundle_uri, bundle_tarball, std::string(MACRO_TO_STRING(VCPKG_STANDALONE_BUNDLE_SHA))); + fs, bundle_uri, {}, bundle_tarball, MACRO_TO_STRING(VCPKG_STANDALONE_BUNDLE_SHA), null_sink); #else // ^^^ VCPKG_STANDALONE_BUNDLE_SHA / !VCPKG_STANDALONE_BUNDLE_SHA vvv msg::println(Color::warning, msgDownloadingVcpkgStandaloneBundleLatest); const auto bundle_uri = "https://github.com/microsoft/vcpkg-tool/releases/latest/download/vcpkg-standalone-bundle.tar.gz"; - download_manager.download_file(fs, bundle_uri, bundle_tarball, nullopt); + download_manager.download_file(fs, bundle_uri, {}, bundle_tarball, nullopt, null_sink); #endif // ^^^ !VCPKG_STANDALONE_BUNDLE_SHA extract_tar(find_system_tar(fs).value_or_exit(VCPKG_LINE_INFO), bundle_tarball, vcpkg_root); diff --git a/src/vcpkg/configure-environment.cpp b/src/vcpkg/configure-environment.cpp index e9976220c2..d0a599031f 100644 --- a/src/vcpkg/configure-environment.cpp +++ b/src/vcpkg/configure-environment.cpp @@ -125,7 +125,7 @@ namespace vcpkg "https://github.com/microsoft/vcpkg-tool/releases/download/" VCPKG_BASE_VERSION_AS_STRING "/vcpkg-ce.tgz"; const auto ce_tarball = paths.downloads / "vcpkg-ce-" VCPKG_BASE_VERSION_AS_STRING ".tgz"; - download_manager.download_file(fs, ce_uri, ce_tarball, VCPKG_CE_SHA_AS_STRING); + download_manager.download_file(fs, ce_uri, {}, ce_tarball, VCPKG_CE_SHA_AS_STRING, null_sink); extract_ce_tarball(paths, ce_tarball, node_path, node_modules); fs.write_contents(ce_sha_path, VCPKG_CE_SHA_AS_STRING, VCPKG_LINE_INFO); } @@ -143,7 +143,7 @@ namespace vcpkg msg::println(Color::warning, msgDownloadingVcpkgCeBundleLatest); const auto ce_uri = "https://github.com/microsoft/vcpkg-tool/releases/latest/download/vcpkg-ce.tgz"; const auto ce_tarball = paths.downloads / "vcpkg-ce-latest.tgz"; - download_manager.download_file(fs, ce_uri, ce_tarball, nullopt); + download_manager.download_file(fs, ce_uri, {}, ce_tarball, nullopt, null_sink); extract_ce_tarball(paths, ce_tarball, node_path, node_modules); #endif // ^^^ !VCPKG_CE_SHA diff --git a/src/vcpkg/tools.cpp b/src/vcpkg/tools.cpp index 9bb2f1d7d6..587d915976 100644 --- a/src/vcpkg/tools.cpp +++ b/src/vcpkg/tools.cpp @@ -677,7 +677,7 @@ namespace vcpkg msg::url = tool_data.url, msg::path = download_path); - downloader->download_file(fs, tool_data.url, download_path, tool_data.sha512); + downloader->download_file(fs, tool_data.url, {}, download_path, tool_data.sha512, null_sink); } else {