
tests: update many test traces, support .json.gz (#16007)
connorjclark authored May 30, 2024
1 parent c977fe8 commit b2fd47f
Showing 23 changed files with 54,967 additions and 52,658 deletions.
103 changes: 75 additions & 28 deletions core/lib/asset-saver.js
@@ -8,6 +8,7 @@
 import fs from 'fs';
 import path from 'path';
 import stream from 'stream';
 import url from 'url';
+import {createGzip, gunzipSync} from 'zlib';
 
 import log from 'lighthouse-logger';
@@ -33,6 +34,46 @@ const stepDirectoryRegex = /^step(\d+)$/;
  * @property {LH.DevtoolsLog} [devtoolsLog]
  */
 
+/**
+ * @param {import('stream').PipelineSource<any>} contents
+ * @param {string} path
+ * @param {boolean} gzip
+ */
+async function writeJson(contents, path, gzip) {
+  const writeStream = fs.createWriteStream(gzip ? path + '.gz' : path);
+  if (gzip) {
+    await stream.promises.pipeline(contents, createGzip(), writeStream);
+  } else {
+    await stream.promises.pipeline(contents, writeStream);
+  }
+}
+
+/**
+ * Prefers reading a gzipped file (.gz) if present.
+ * @param {string} filename
+ * @param {(this: any, key: string, value: any) => any=} reviver
+ */
+function readJson(filename, reviver) {
+  if (fs.existsSync(filename + '.gz')) {
+    filename = filename + '.gz';
+  }
+
+  if (!filename.endsWith('.json.gz')) {
+    return JSON.parse(fs.readFileSync(filename, 'utf8'), reviver);
+  }
+
+  const buffer = gunzipSync(fs.readFileSync(filename));
+  return JSON.parse(buffer.toString('utf8'), reviver);
+}
+
+/**
+ * @param {string} filename
+ * @param {string} suffix
+ * @returns
+ */
+function endsWithSuffix(filename, suffix) {
+  return filename.endsWith(suffix) || filename.endsWith(suffix + '.gz');
+}
+
 /**
  * Load artifacts object from files located within basePath
@@ -48,16 +89,15 @@ function loadArtifacts(basePath) {
   }
 
   // load artifacts.json using a reviver to deserialize any LighthouseErrors in artifacts.
-  const artifactsStr = fs.readFileSync(path.join(basePath, artifactsFilename), 'utf8');
   /** @type {LH.Artifacts} */
-  const artifacts = JSON.parse(artifactsStr, LighthouseError.parseReviver);
+  const artifacts = readJson(path.join(basePath, artifactsFilename), LighthouseError.parseReviver);
 
   const filenames = fs.readdirSync(basePath);
 
-  filenames.filter(f => f.endsWith(devtoolsLogSuffix)).forEach(filename => {
+  filenames.filter(f => endsWithSuffix(f, devtoolsLogSuffix)).forEach(filename => {
     if (!artifacts.devtoolsLogs) artifacts.devtoolsLogs = {};
-    const prefix = filename.replace(devtoolsLogSuffix, '');
-    const devtoolsLog = JSON.parse(fs.readFileSync(path.join(basePath, filename), 'utf8'));
+    const prefix = filename.replace(devtoolsLogSuffix + '.gz', '').replace(devtoolsLogSuffix, '');
+    const devtoolsLog = readJson(path.join(basePath, filename));
     artifacts.devtoolsLogs[prefix] = devtoolsLog;
     if (prefix === defaultPrefix) {
       artifacts.DevtoolsLog = devtoolsLog;
@@ -67,11 +107,10 @@ function loadArtifacts(basePath) {
     }
   });
 
-  filenames.filter(f => f.endsWith(traceSuffix)).forEach(filename => {
+  filenames.filter(f => endsWithSuffix(f, traceSuffix)).forEach(filename => {
     if (!artifacts.traces) artifacts.traces = {};
-    const file = fs.readFileSync(path.join(basePath, filename), {encoding: 'utf-8'});
-    const trace = JSON.parse(file);
-    const prefix = filename.replace(traceSuffix, '');
+    const trace = readJson(path.join(basePath, filename));
+    const prefix = filename.replace(traceSuffix + '.gz', '').replace(traceSuffix, '');
     artifacts.traces[prefix] = Array.isArray(trace) ? {traceEvents: trace} : trace;
     if (prefix === defaultPrefix) {
       artifacts.Trace = artifacts.traces[prefix];
@@ -200,21 +239,25 @@ async function saveFlowArtifacts(flowArtifacts, basePath) {
 
 /**
  * Save artifacts object mostly to single file located at basePath/artifacts.json.
- * Also save the traces & devtoolsLogs to their own files
+ * Also save the traces & devtoolsLogs to their own files, with optional compression.
  * @param {LH.Artifacts} artifacts
  * @param {string} basePath
+ * @param {{gzip?: boolean}} options
  * @return {Promise<void>}
  */
-async function saveArtifacts(artifacts, basePath) {
+async function saveArtifacts(artifacts, basePath, options = {}) {
   const status = {msg: 'Saving artifacts', id: 'lh:assetSaver:saveArtifacts'};
   log.time(status);
   fs.mkdirSync(basePath, {recursive: true});
 
   // Delete any previous artifacts in this directory.
   const filenames = fs.readdirSync(basePath);
   for (const filename of filenames) {
-    if (filename.endsWith(traceSuffix) || filename.endsWith(devtoolsLogSuffix) ||
-        filename === artifactsFilename) {
+    const isPreviousFile =
+      filename.endsWith(traceSuffix) || filename.endsWith(devtoolsLogSuffix) ||
+      filename.endsWith(traceSuffix + '.gz') || filename.endsWith(devtoolsLogSuffix + '.gz') ||
+      filename === artifactsFilename || filename === artifactsFilename + '.gz';
+    if (isPreviousFile) {
       fs.unlinkSync(`${basePath}/${filename}`);
     }
   }
@@ -234,24 +277,30 @@ async function saveArtifacts(artifacts, basePath) {
   } = artifacts;
 
   if (Trace) {
-    await saveTrace(Trace, `${basePath}/${defaultPrefix}${traceSuffix}`);
+    await saveTrace(Trace, `${basePath}/${defaultPrefix}${traceSuffix}`, options);
   }
 
   if (TraceError) {
-    await saveTrace(TraceError, `${basePath}/${errorPrefix}${traceSuffix}`);
+    await saveTrace(TraceError, `${basePath}/${errorPrefix}${traceSuffix}`, options);
   }
 
   if (DevtoolsLog) {
-    await saveDevtoolsLog(DevtoolsLog, `${basePath}/${defaultPrefix}${devtoolsLogSuffix}`);
+    await saveDevtoolsLog(
+      DevtoolsLog, `${basePath}/${defaultPrefix}${devtoolsLogSuffix}`, options);
   }
 
   if (DevtoolsLogError) {
-    await saveDevtoolsLog(DevtoolsLogError, `${basePath}/${errorPrefix}${devtoolsLogSuffix}`);
+    await saveDevtoolsLog(
+      DevtoolsLogError, `${basePath}/${errorPrefix}${devtoolsLogSuffix}`, options);
   }
 
   // save everything else, using a replacer to serialize LighthouseErrors in the artifacts.
-  const restArtifactsString = JSON.stringify(restArtifacts, stringifyReplacer, 2) + '\n';
-  fs.writeFileSync(`${basePath}/${artifactsFilename}`, restArtifactsString, 'utf8');
+  const restArtifactsString = JSON.stringify(restArtifacts, stringifyReplacer, 2);
+  await writeJson(function* () {
+    yield restArtifactsString;
+    yield '\n';
+  }, `${basePath}/${artifactsFilename}`, !!options.gzip);
 
   log.log('Artifacts saved to disk in folder:', basePath);
   log.timeEnd(status);
 }
@@ -371,28 +420,26 @@ function* traceJsonGenerator(traceData) {
  * Save a trace as JSON by streaming to disk at traceFilename.
  * @param {LH.Trace} traceData
  * @param {string} traceFilename
+ * @param {{gzip?: boolean}=} options
  * @return {Promise<void>}
  */
-async function saveTrace(traceData, traceFilename) {
+function saveTrace(traceData, traceFilename, options = {}) {
   const traceIter = traceJsonGenerator(traceData);
-  const writeStream = fs.createWriteStream(traceFilename);
-
-  return stream.promises.pipeline(traceIter, writeStream);
+  return writeJson(traceIter, traceFilename, !!options.gzip);
 }
 
 /**
  * Save a devtoolsLog as JSON by streaming to disk at devtoolLogFilename.
  * @param {LH.DevtoolsLog} devtoolsLog
  * @param {string} devtoolLogFilename
+ * @param {{gzip?: boolean}=} options
  * @return {Promise<void>}
  */
-function saveDevtoolsLog(devtoolsLog, devtoolLogFilename) {
-  const writeStream = fs.createWriteStream(devtoolLogFilename);
-
-  return stream.promises.pipeline(function* () {
+function saveDevtoolsLog(devtoolsLog, devtoolLogFilename, options = {}) {
+  return writeJson(function* () {
     yield* arrayOfObjectsJsonGenerator(devtoolsLog);
     yield '\n';
-  }, writeStream);
+  }, devtoolLogFilename, !!options.gzip);
 }
 
 /**
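
A minimal sketch of how the new option round-trips, assuming saveArtifacts and loadArtifacts are both exported from core/lib/asset-saver.js as diffed above (the artifacts value and output directory are illustrative, not from this commit):

import {saveArtifacts, loadArtifacts} from './core/lib/asset-saver.js';

/** @param {LH.Artifacts} artifacts Artifacts from a prior Lighthouse run. */
async function roundTrip(artifacts) {
  const basePath = '/tmp/lh-artifacts'; // illustrative output directory

  // With {gzip: true}, each file is piped through zlib's createGzip() and
  // written with a .gz suffix (artifacts.json.gz, <prefix>.trace.json.gz, ...).
  await saveArtifacts(artifacts, basePath, {gzip: true});

  // Loading is unchanged for callers: readJson() prefers a .gz sibling when
  // one exists and gunzips it before JSON.parse.
  return loadArtifacts(basePath);
}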
56 changes: 5 additions & 51 deletions core/test/audits/byte-efficiency/duplicated-javascript-test.js
@@ -4,16 +4,10 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
+import {LH_ROOT} from '../../../../shared/root.js';
 import DuplicatedJavascript from '../../../audits/byte-efficiency/duplicated-javascript.js';
-import {
-  loadSourceMapFixture,
-  createScript,
-  getURLArtifactFromDevtoolsLog,
-  readJson,
-} from '../../test-utils.js';
-
-const trace = readJson('../../fixtures/traces/lcp-m78.json', import.meta);
-const devtoolsLog = readJson('../../fixtures/traces/lcp-m78.devtools.log.json', import.meta);
+import {loadArtifacts} from '../../../lib/asset-saver.js';
+import {loadSourceMapFixture, createScript} from '../../test-utils.js';
 
 describe('DuplicatedJavascript computed artifact', () => {
   it('works (simple)', async () => {
@@ -319,54 +313,14 @@ describe('DuplicatedJavascript computed artifact', () => {
   });
 
   it('.audit', async () => {
-    // Use a real trace fixture, but the bundle stuff.
-    // Note: this mixing of data from different sources makes the exact results
-    // of this audit pretty meaningless. The important part of this test is that
-    // `wastedBytesByUrl` is functioning.
-    const bundleData1 = loadSourceMapFixture('coursehero-bundle-1');
-    const bundleData2 = loadSourceMapFixture('coursehero-bundle-2');
-    const artifacts = {
-      URL: getURLArtifactFromDevtoolsLog(devtoolsLog),
-      GatherContext: {gatherMode: 'navigation'},
-      devtoolsLogs: {
-        [DuplicatedJavascript.DEFAULT_PASS]: devtoolsLog,
-      },
-      traces: {
-        [DuplicatedJavascript.DEFAULT_PASS]: trace,
-      },
-      SourceMaps: [
-        {
-          scriptId: '1',
-          scriptUrl: 'https://www.paulirish.com/javascripts/firebase-performance.js',
-          map: bundleData1.map,
-        },
-        {
-          scriptId: '2',
-          scriptUrl: 'https://www.paulirish.com/javascripts/firebase-app.js',
-          map: bundleData2.map,
-        },
-      ],
-      Scripts: [
-        {
-          scriptId: '1',
-          url: 'https://www.paulirish.com/javascripts/firebase-performance.js',
-          content: bundleData1.content,
-        },
-        {
-          scriptId: '2',
-          url: 'https://www.paulirish.com/javascripts/firebase-app.js',
-          content: bundleData2.content,
-        },
-      ].map(createScript),
-    };
-
+    const artifacts = await loadArtifacts(`${LH_ROOT}/core/test/fixtures/artifacts/cnn`);
     const ultraSlowThrottling = {rttMs: 150, throughputKbps: 100, cpuSlowdownMultiplier: 8};
     const settings = {throttlingMethod: 'simulate', throttling: ultraSlowThrottling};
     const context = {settings, computedCache: new Map()};
     const results = await DuplicatedJavascript.audit(artifacts, context);
 
     // Without the `wastedBytesByUrl` this would be zero because the items don't define a url.
-    expect(results.details.overallSavingsMs).toBe(1370);
+    expect(results.details.overallSavingsMs).toBe(160);
   });
 
   it('_getNodeModuleName', () => {
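
Updating a checked-in fixture to the compressed form the loader now prefers is a one-off conversion. A sketch, with a hypothetical fixture path (gzipSync comes from the same zlib module the loader uses):

import fs from 'fs';
import {gzipSync} from 'zlib';

// Hypothetical fixture path; any *.json artifact file converts the same way.
const src = 'core/test/fixtures/artifacts/cnn/defaultPass.trace.json';

// Write the .gz sibling that readJson() now prefers...
fs.writeFileSync(src + '.gz', gzipSync(fs.readFileSync(src)));
// ...then drop the uncompressed original so only the .json.gz copy is kept.
fs.unlinkSync(src);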
core/test/audits/byte-efficiency/render-blocking-resources-test.js
@@ -33,8 +33,8 @@ describe('Render blocking resources audit', () => {
     const computedCache = new Map();
     const result = await RenderBlockingResourcesAudit.audit(artifacts, {settings, computedCache});
     assert.equal(result.score, 0);
-    assert.equal(result.numericValue, 304);
-    assert.deepStrictEqual(result.metricSavings, {FCP: 304, LCP: 0});
+    assert.equal(result.numericValue, 300);
+    assert.deepStrictEqual(result.metricSavings, {FCP: 300, LCP: 0});
   });
 
   it('evaluates correct wastedMs when LCP is text', async () => {
@@ -59,7 +59,7 @@ describe('Render blocking resources audit', () => {
     const settings = {throttlingMethod: 'simulate', throttling: mobileSlow4G};
     const computedCache = new Map();
     const result = await RenderBlockingResourcesAudit.audit(artifacts, {settings, computedCache});
-    assert.deepStrictEqual(result.metricSavings, {FCP: 304, LCP: 304});
+    assert.deepStrictEqual(result.metricSavings, {FCP: 300, LCP: 300});
   });
 
   it('evaluates amp page correctly', async () => {
@@ -86,14 +86,14 @@ describe('Render blocking resources audit', () => {
     expect(result.details.items).toEqual([
       {
         totalBytes: 389629,
-        url: 'http://localhost:57822/style.css',
+        url: 'http://localhost:50049/style.css',
         // This value would be higher if we didn't have a special case for AMP stylesheets
-        wastedMs: 1489,
+        wastedMs: 1496,
       },
       {
         totalBytes: 291,
-        url: 'http://localhost:57822/script.js',
-        wastedMs: 311,
+        url: 'http://localhost:50049/script.js',
+        wastedMs: 304,
       },
     ]);
     expect(result.metricSavings).toEqual({FCP: 0, LCP: 0});
42 changes: 21 additions & 21 deletions core/test/audits/long-tasks-test.js
@@ -295,16 +295,16 @@ describe('Long tasks audit', () => {
       details: {
         items: [{
           url: expect.stringContaining('https://'),
-          startTime: expect.toBeApproximately(2150.4, 1),
-          duration: expect.toBeApproximately(247.6, 1),
+          startTime: expect.toBeApproximately(2686.9, 1),
+          duration: expect.toBeApproximately(89.2, 1),
         }, {
           url: expect.stringContaining('https://'),
-          startTime: expect.toBeApproximately(1957.1, 1),
-          duration: expect.toBeApproximately(104.6, 1),
+          startTime: expect.toBeApproximately(2236.7, 1),
+          duration: expect.toBeApproximately(71.1, 1),
         }],
       },
     });
-    expect(result.metricSavings.TBT).toBeApproximately(171.95);
+    expect(result.metricSavings.TBT).toBeApproximately(60.29);
 
     const debugData = result.details.debugData;
     expect(debugData).toStrictEqual({
@@ -315,24 +315,24 @@ describe('Long tasks audit', () => {
       ],
       tasks: [{
         urlIndex: 0,
-        startTime: 2150.4,
-        duration: 247.6,
-        garbageCollection: 2.7,
-        other: 13.7,
-        paintCompositeRender: 0.6,
-        parseHTML: 0.5,
-        scriptEvaluation: 212.9,
-        scriptParseCompile: 4,
-        styleLayout: 13.2,
+        startTime: 2686.9,
+        duration: 89.2,
+        garbageCollection: 6.7,
+        other: 0.2,
+        parseHTML: 0.1,
+        scriptEvaluation: 81.3,
+        scriptParseCompile: 0.3,
+        styleLayout: 0.5,
       }, {
         urlIndex: 1,
-        startTime: 1957.1,
-        duration: 104.6,
-        other: 0.6,
-        parseHTML: 0.2,
-        scriptEvaluation: 96.8,
-        scriptParseCompile: 5.7,
-        styleLayout: 1.2,
+        startTime: 2236.7,
+        duration: 71.1,
+        garbageCollection: 1.4,
+        other: 1.1,
+        parseHTML: 0.1,
+        scriptEvaluation: 62.7,
+        scriptParseCompile: 5.1,
+        styleLayout: 0.8,
       }],
     });
   });