
core: remove dependency on devtools-frontend NetworkRequest #5451

Merged
26 commits merged on Jun 20, 2018
Commits (26)

3848575  remove network record dependency (patrickhulce, Jun 8, 2018)
58e7c51  fix byte-efficiency-audit (patrickhulce, Jun 8, 2018)
f35c56c  SUPER WIP (patrickhulce, Jun 8, 2018)
ade9def  fix tests (patrickhulce, Jun 8, 2018)
05194ea  fix tests (patrickhulce, Jun 8, 2018)
dd11851  golden LHR diffs (patrickhulce, Jun 8, 2018)
5988b11  remove commented code (patrickhulce, Jun 11, 2018)
1c16232  more tests (patrickhulce, Jun 11, 2018)
aecb428  more feedback (patrickhulce, Jun 13, 2018)
432b989  dump more of WebInspector (patrickhulce, Jun 13, 2018)
0324bc0  fix resource category (patrickhulce, Jun 13, 2018)
c5db54d  Merge branch 'master' into standardize_network_req_api (patrickhulce, Jun 13, 2018)
5836b4c  merge conflicts (patrickhulce, Jun 13, 2018)
cee1f0e  cleanup ts-nocheck files (patrickhulce, Jun 13, 2018)
4212ab6  fix mixed content (patrickhulce, Jun 13, 2018)
7634cf8  typo (patrickhulce, Jun 14, 2018)
21fe6df  feedback (patrickhulce, Jun 14, 2018)
1f0ac70  Merge branch 'master' into standardize_network_req_api (patrickhulce, Jun 15, 2018)
915449f  remove unused portions of ResourceType (patrickhulce, Jun 18, 2018)
95551ee  Merge branch 'master' into standardize_network_req_api (patrickhulce, Jun 18, 2018)
92e8abf  match two subtle DT behaviors (patrickhulce, Jun 18, 2018)
b831cbb  more DT behavior matching (patrickhulce, Jun 19, 2018)
911a268  Merge branch 'master' into standardize_network_req_api (patrickhulce, Jun 19, 2018)
d11d2e7  update lantern expectations (patrickhulce, Jun 19, 2018)
a2015af  feedback (patrickhulce, Jun 19, 2018)
e5bea12  more feedback (patrickhulce, Jun 19, 2018)

10 changes: 5 additions & 5 deletions lighthouse-core/audits/byte-efficiency/byte-efficiency-audit.js
@@ -82,11 +82,11 @@ class UnusedBytes extends Audit {
return Math.round(totalBytes * compressionRatio);
} else if (networkRecord._resourceType && networkRecord._resourceType._name === resourceType) {
// This was a regular standalone asset, just use the transfer size.
- return networkRecord._transferSize || 0;
+ return networkRecord.transferSize || 0;
} else {
// This was an asset that was inlined in a different resource type (e.g. HTML document).
// Use the compression ratio of the resource to estimate the total transferred bytes.
- const transferSize = networkRecord._transferSize || 0;
+ const transferSize = networkRecord.transferSize || 0;
const resourceSize = networkRecord._resourceSize;
const compressionRatio = resourceSize !== undefined ? (transferSize / resourceSize) : 1;
return Math.round(totalBytes * compressionRatio);
@@ -151,12 +151,12 @@ class UnusedBytes extends Audit {
const networkNode = /** @type {NetworkNode} */ (node);
const result = resultsByUrl.get(networkNode.record.url);
if (!result) return;

const original = networkNode.record.transferSize;
// cloning NetworkRequest objects is difficult, so just stash the original transfer size
originalTransferSizes.set(networkNode.record.requestId, original);

const wastedBytes = result.wastedBytes;
- networkNode.record._transferSize = Math.max(original - wastedBytes, 0);
+ networkNode.record.transferSize = Math.max(original - wastedBytes, 0);
});

const simulationAfterChanges = simulator.simulate(graph, {label: afterLabel});
@@ -167,7 +167,7 @@
const networkNode = /** @type {NetworkNode} */ (node);
const originalTransferSize = originalTransferSizes.get(networkNode.record.requestId);
if (originalTransferSize === undefined) return;
- networkNode.record._transferSize = originalTransferSize;
+ networkNode.record.transferSize = originalTransferSize;
});

const savingsOnOverallLoad = simulationBeforeChanges.timeInMs - simulationAfterChanges.timeInMs;
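
For context on the hunks above: NetworkRequest objects are hard to clone, so the audit temporarily mutates the now-public transferSize property around the second simulation and then puts the original values back. A minimal sketch of that stash-and-restore pattern, assuming graph, simulator, and resultsByUrl from the surrounding method (the 'network' node-type check is illustrative):

const originalTransferSizes = new Map();

graph.traverse(node => {
  if (node.type !== 'network') return;
  const result = resultsByUrl.get(node.record.url);
  if (!result) return;
  // Stash the original value keyed by requestId so it can be restored below.
  originalTransferSizes.set(node.record.requestId, node.record.transferSize);
  node.record.transferSize = Math.max(node.record.transferSize - result.wastedBytes, 0);
});

const simulationAfterChanges = simulator.simulate(graph, {label: 'optimized'});

// Restore the real transfer sizes so later consumers of the records are unaffected.
graph.traverse(node => {
  if (node.type !== 'network') return;
  const original = originalTransferSizes.get(node.record.requestId);
  if (original !== undefined) node.record.transferSize = original;
});
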
@@ -158,17 +158,17 @@ class RenderBlockingResources extends Audit {
if (canDeferRequest && isStylesheet) {
// We'll inline the used bytes of the stylesheet and assume the rest can be deferred
const wastedBytes = wastedCssBytesByUrl.get(networkNode.record.url) || 0;
- totalChildNetworkBytes += (networkNode.record._transferSize || 0) - wastedBytes;
+ totalChildNetworkBytes += (networkNode.record.transferSize || 0) - wastedBytes;
}
return !canDeferRequest;
}));

// Add the inlined bytes to the HTML response
- const originalTransferSize = minimalFCPGraph.record._transferSize;
+ const originalTransferSize = minimalFCPGraph.record.transferSize;
const safeTransferSize = originalTransferSize || 0;
- minimalFCPGraph.record._transferSize = safeTransferSize + totalChildNetworkBytes;
+ minimalFCPGraph.record.transferSize = safeTransferSize + totalChildNetworkBytes;
const estimateAfterInline = simulator.simulate(minimalFCPGraph).timeInMs;
- minimalFCPGraph.record._transferSize = originalTransferSize;
+ minimalFCPGraph.record.transferSize = originalTransferSize;
return Math.round(Math.max(originalEstimate - estimateAfterInline, 0));
}

@@ -185,7 +185,7 @@ class CacheHeaders extends Audit {
if (cacheHitProbability > IGNORE_THRESHOLD_IN_PERCENT) continue;

const url = URL.elideDataURI(record._url);
- const totalBytes = record._transferSize || 0;
+ const totalBytes = record.transferSize || 0;
const wastedBytes = (1 - cacheHitProbability) * totalBytes;

totalWastedBytes += wastedBytes;
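
To make the waste estimate above concrete, a small worked example with hypothetical numbers: a 50 KB response whose cache-hit probability is 0.25 (low enough that the record is not skipped by the IGNORE_THRESHOLD_IN_PERCENT check) wastes roughly 37.5 KB on repeat visits.

const totalBytes = 50 * 1024;                               // record.transferSize || 0
const cacheHitProbability = 0.25;                           // hypothetical value from the audit's heuristics
const wastedBytes = (1 - cacheHitProbability) * totalBytes; // 38400 bytes, about 37.5 KB
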
25 changes: 6 additions & 19 deletions lighthouse-core/audits/mixed-content.js
@@ -3,7 +3,6 @@
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
- // @ts-nocheck
'use strict';

const Audit = require('./audit');
@@ -32,18 +31,6 @@ class MixedContent extends Audit {
};
}

- /**
-  * Checks whether the resource was securely loaded.
-  * We special-case data: URLs, as they inherit the security state of their
-  * referring document url, and so are trivially "upgradeable" for mixed-content purposes.
-  *
-  * @param {{scheme: string, protocol: string, securityState: function}} record
-  * @return {boolean}
-  */
- static isSecureRecord(record) {
-   return record.securityState() === 'secure' || record.protocol === 'data';
- }

/**
* Upgrades a URL to use HTTPS.
*
@@ -72,10 +59,10 @@
/**
* Simplifies a URL string for display.
*
- * @param {string} url
+ * @param {string=} url
* @return {string}
*/
- static displayURL(url) {
+ static displayURL(url = '') {
const displayOptions = {
numPathParts: 4,
preserveQuery: false,
@@ -100,15 +87,15 @@

return Promise.all(computedArtifacts).then(([defaultRecords, upgradedRecords]) => {
const insecureRecords = defaultRecords.filter(
- record => !MixedContent.isSecureRecord(record));
+ record => !record.isSecure);
const secureRecords = defaultRecords.filter(
- record => MixedContent.isSecureRecord(record));
+ record => record.isSecure);

const upgradePassHosts = new Set();
const upgradePassSecureHosts = new Set();
upgradedRecords.forEach(record => {
upgradePassHosts.add(new URL(record.url).hostname);
- if (MixedContent.isSecureRecord(record) && record.finished && !record.failed) {
+ if (record.isSecure && record.finished && !record.failed) {
upgradePassSecureHosts.add(new URL(record.url).hostname);
}
});
@@ -127,7 +114,7 @@
const resource = {
host: new URL(record.url).hostname,
fullUrl: record.url,
- referrerDocUrl: this.displayURL(record._documentURL),
+ referrerDocUrl: this.displayURL(record.documentURL),
};
// Exclude any records that aren't on an upgradeable secure host
if (!upgradePassSecureHosts.has(resource.host)) continue;
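
The removed isSecureRecord helper is replaced by an isSecure member on Lighthouse's own network record. A hypothetical sketch of what such a getter could look like; the scheme list and class shape are assumptions for illustration, not the PR's exact implementation:

// data: URLs stay "secure" because they inherit the security state of the
// referring document (the same special case the old helper made).
const SECURE_SCHEMES = ['https', 'wss', 'data', 'blob', 'chrome', 'chrome-extension'];

class NetworkRequest {
  /** @return {boolean} */
  get isSecure() {
    return SECURE_SCHEMES.includes(this.parsedURL.scheme);
  }
}
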
7 changes: 3 additions & 4 deletions lighthouse-core/audits/time-to-first-byte.js
@@ -6,7 +6,6 @@
'use strict';

const Audit = require('./audit');
- const Util = require('../report/html/renderer/util');

const TTFB_THRESHOLD = 600;

@@ -29,8 +28,7 @@
*/
static caclulateTTFB(record) {
const timing = record._timing;

- return timing.receiveHeadersEnd - timing.sendEnd;
+ return timing ? timing.receiveHeadersEnd - timing.sendEnd : 0;
}

/**
@@ -42,6 +40,7 @@

return artifacts.requestNetworkRecords(devtoolsLogs)
.then((networkRecords) => {
+ /** @type {LH.Audit.DisplayValue} */
let displayValue = '';

const finalUrl = artifacts.URL.finalUrl;
@@ -53,7 +52,7 @@
const passed = ttfb < TTFB_THRESHOLD;

if (!passed) {
- displayValue = `Root document took ${Util.formatMilliseconds(ttfb, 1)} `;
+ displayValue = ['Root document took %10d', ttfb];
}

/** @type {LH.Result.Audit.OpportunityDetails} */
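
A quick worked example of caclulateTTFB with hypothetical timing values (milliseconds relative to the request start), showing the effect of the new timing guard:

const record = {_timing: {sendEnd: 15, receiveHeadersEnd: 485}};
const timing = record._timing;
const ttfb = timing ? timing.receiveHeadersEnd - timing.sendEnd : 0; // 470 ms
const passed = ttfb < 600; // true: under TTFB_THRESHOLD, so no displayValue is set

// A record with no timing data (record._timing === undefined) now yields 0
// instead of throwing on property access.
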
8 changes: 6 additions & 2 deletions lighthouse-core/audits/uses-rel-preconnect.js
@@ -38,7 +38,7 @@ class UsesRelPreconnectAudit extends Audit {
* @return {boolean}
*/
static hasValidTiming(record) {
- return record._timing && record._timing.connectEnd > 0 && record._timing.connectStart > 0;
+ return !!record._timing && record._timing.connectEnd > 0 && record._timing.connectStart > 0;
}

/**
@@ -48,6 +48,7 @@
*/
static hasAlreadyConnectedToOrigin(record) {
return (
+ !!record._timing &&
record._timing.dnsEnd - record._timing.dnsStart === 0 &&
record._timing.connectEnd - record._timing.connectStart === 0
);
@@ -90,7 +91,7 @@
// filter out all resources where timing info was invalid
!UsesRelPreconnectAudit.hasValidTiming(record) ||
// filter out all resources that are loaded by the document
- record.initiatorRequest() === mainResource ||
+ record._initiator.url === mainResource.url ||
Member commented:
these aren't totally equivalent, fwiw. but the difference may not matter. And I suppose devtools was making this assumption previously. :p

Collaborator (author) replied:
yeah I was kinda shocked that the whole initiatorRequest logic is literally just first one with matching URL and manager 😮

// filter out urls that do not have an origin (data, ...)
!record.parsedURL || !record.parsedURL.securityOrigin() ||
// filter out all resources that have the same origin
Expand Down Expand Up @@ -118,6 +119,9 @@ class UsesRelPreconnectAudit extends Audit {
return (record.startTime < firstRecord.startTime) ? record: firstRecord;
});

+ // Skip the origin if we don't have timing information
+ if (!firstRecordOfOrigin._timing) return;

const securityOrigin = firstRecordOfOrigin.parsedURL.securityOrigin();

// Approximate the connection time with the duration of TCP (+potentially SSL) handshake
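
On the review thread above: DevTools' initiatorRequest() effectively resolved to the first request with a matching initiator URL (within the same network manager), so the PR compares URLs directly instead. A rough sketch of the two predicates being swapped; the old helper's behaviour is paraphrased from the discussion, not quoted:

// Old (devtools-frontend): resolve the initiator to a request object, then
// compare object identity with the main resource.
const loadedByDocumentOld = record.initiatorRequest() === mainResource;

// New (Lighthouse-owned record): compare the initiator URL with the main resource URL.
// Not strictly equivalent when several requests share a URL, which is the subtlety
// the reviewer flags, but DevTools was already making the same assumption.
const loadedByDocumentNew = record._initiator.url === mainResource.url;
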
2 changes: 1 addition & 1 deletion lighthouse-core/config/mixed-content-config.js
@@ -11,7 +11,7 @@ module.exports = {
// (2) Re-load page but attempt to upgrade each request to HTTPS.
passes: [{
passName: 'defaultPass',
- gatherers: ['url'],
+ gatherers: [],
}, {
passName: 'mixedContentPass',
gatherers: ['mixed-content'],
2 changes: 1 addition & 1 deletion lighthouse-core/gather/driver.js
@@ -586,7 +586,7 @@ class Driver {
* @private
*/
_beginNetworkStatusMonitoring(startingUrl) {
- this._networkStatusMonitor = new NetworkRecorder([]);
+ this._networkStatusMonitor = new NetworkRecorder();

// Update startingUrl if it's ever redirected.
this._monitoredUrl = startingUrl;
@@ -91,7 +91,7 @@ class OptimizedImages extends Gatherer {
const isSameOrigin = URL.originsMatch(pageUrl, record._url);
const isBase64DataUri = /^data:.{2,40}base64\s*,/.test(record._url);

- const actualResourceSize = Math.min(record._resourceSize || 0, record._transferSize || 0);
+ const actualResourceSize = Math.min(record._resourceSize || 0, record.transferSize || 0);
if (isOptimizableImage && actualResourceSize > MINIMUM_IMAGE_SIZE) {
prev.push({
isSameOrigin,
@@ -115,10 +115,10 @@ class TagsBlockingFirstPaint extends Gatherer {
// Include 404 scripts/links generated by the parser because they are likely blocking.
if (isHtml || isParserScriptOrStyle || (isFailedRequest && isParserGenerated)) {
prev[record._url] = {
- isLinkPreload: record.isLinkPreload,
- transferSize: record._transferSize,
- startTime: record._startTime,
- endTime: record._endTime,
+ isLinkPreload: !!record._isLinkPreload,
+ transferSize: record.transferSize,
+ startTime: record.startTime,
+ endTime: record.endTime,
};
}

@@ -133,7 +133,7 @@
static findBlockingTags(driver, networkRecords) {
const scriptSrc = `(${collectTagsThatBlockFirstPaint.toString()}())`;
const firstRequestEndTime = networkRecords.reduce(
- (min, record) => Math.min(min, record._endTime),
+ (min, record) => Math.min(min, record.endTime),
Infinity
);
return driver.evaluateAsync(scriptSrc).then(tags => {
2 changes: 1 addition & 1 deletion lighthouse-core/lib/lantern-trace-saver.js
@@ -147,7 +147,7 @@ function convertNodeTimingsToTrace(nodeTimings) {
...requestData,
statusCode: record.statusCode,
mimeType: record._mimeType,
- encodedDataLength: record._transferSize,
+ encodedDataLength: record.transferSize,
fromCache: record._fromDiskCache,
fromServiceWorker: record._fetchedViaServiceWorker,
};