Remove support for crawling via the browser.

This feature was only experimental, never advertised via the CLI, and barely tested. It is better to remove it now to simplify the code; we can bring it back if the need arises later.
This commit is contained in:
Tobias Lidskog 2016-09-27 20:59:48 +02:00
parent 2aa15d904b
commit e80e218145
4 changed files with 0 additions and 69 deletions

View File

@ -32,14 +32,6 @@ function parseUserScripts(scripts) {
{});
}
// Append the bundled crawler scripts to the given browser-script collection.
// Both `scripts` and the parsed directory contents may be promises, so the
// concatenation is deferred with bluebird's Promise.join.
function addCrawlerScripts(scripts) {
  const scriptDir = path.resolve(__dirname, 'scripts', 'crawler');
  const parsedCrawlerScripts = browserScripts.parseScriptDirectory(scriptDir);
  return Promise.join(scripts, parsedCrawlerScripts, (baseScripts, extraScripts) =>
    baseScripts.concat(extraScripts));
}
function addCoachScripts(scripts) {
return Promise.join(scripts, coach.getDomAdvice(),
(scripts, advice) => {
@ -74,10 +66,6 @@ module.exports = {
(scriptsByCategory, userScripts) => merge(scriptsByCategory, userScripts));
}
if (btOptions.crawl) {
scriptsByCategory = addCrawlerScripts(scriptsByCategory);
}
if (btOptions.coach) {
scriptsByCategory = addCoachScripts(scriptsByCategory);
}

View File

@ -3,7 +3,6 @@
const browsertime = require('browsertime'),
Promise = require('bluebird'),
path = require('path'),
urlParser = require('url'),
messageMaker = require('../../support/messageMaker'),
filterRegistry = require('../../support/filterRegistry'),
aggregator = require('./aggregator'),
@ -14,22 +13,10 @@ const browsertime = require('browsertime'),
analyzer = require('./analyzer'),
isNotEmpty = require('../../support/util').isNotEmpty;
// Strip the fragment identifier (#hash) from a URL so that links differing
// only by in-page anchor are treated as the same crawl target.
function removeHash(url) {
  const parsed = urlParser.parse(url);
  return urlParser.format(Object.assign(parsed, { hash: null }));
}
// Extract the hostname (without port) from a URL, used to keep the crawl
// on the originally requested domain.
function parseDomain(url) {
  const { hostname } = urlParser.parse(url);
  return hostname;
}
const make = messageMaker('browsertime').make;
const visitedUrls = new Set();
const maxDepth = 1;
const DEFAULT_METRICS_PAGE_SUMMARY = [
'statistics.timings.pageTimings',
'statistics.timings.rumSpeedIndex',
@ -77,33 +64,6 @@ module.exports = {
filterRegistry.registerFilterForType(DEFAULT_METRICS_SUMMARY, 'browsertime.summary');
},
processMessage(message, queue) {
// Queue follow-up `url` messages for every same-domain link collected by the
// injected crawler script, until `maxDepth` is reached. De-duplicates via the
// module-level `visitedUrls` set and ignores fragment-only differences.
// NOTE(review): closes over `message` and `queue` from processMessage.
function processCrawlOutput(url, group, results) {
  const originalDomain = parseDomain(url);
  const currentDepth = message.data.depth || 0;
  if (currentDepth >= maxDepth) {
    return;
  }
  const nextDepth = currentDepth + 1;
  const links = results.browserScripts[0].crawler.links;
  for (const rawLink of links) {
    const link = removeHash(rawLink);
    if (visitedUrls.has(link) || parseDomain(link) !== originalDomain) {
      continue;
    }
    visitedUrls.add(link);
    queue.postMessage(make('url', {
      depth: nextDepth,
      referrer: url
    }, {
      url: link,
      group
    }));
  }
}
function processCoachOutput(url, group, results) {
return Promise.resolve(results.browserScripts)
.each((run, runIndex) => {
@ -150,9 +110,6 @@ module.exports = {
return analyzer.analyzeUrl(url, this.options)
.tap((results) => {
log.trace('Result from Browsertime for %s with %:2j', url, results);
if (this.options.browsertime.crawl) {
processCrawlOutput(url, group, results);
}
})
.tap((results) => {
if (this.options.browsertime.coach) {

View File

@ -1,7 +0,0 @@
root: true
env:
browser: true
extends:
"eslint:recommended"

View File

@ -1,7 +0,0 @@
// Browser-injected crawler script: collect the href of every link on the
// page and de-duplicate, keeping the LAST occurrence of each href (matches
// the original lastIndexOf-based filter). Runs in the page, so ES5 only.
(function() {
  var hrefs = Array.prototype.slice.call(document.links).map(function(anchor) {
    return anchor.href;
  });
  return hrefs.filter(function(href, index) {
    return hrefs.lastIndexOf(href) === index;
  });
})();