diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9f7b5f41e..a79b82306 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,15 +1,17 @@
 # CHANGELOG - sitespeed.io
 
 version 3.11.3 - 2016-01-13
+------------------------
 ### Fixed
 * Fixes bug for collecting summary metrics for WPT, introduced in 3.11.2
 
 version 3.11.2 - 2016-01-13
+------------------------
 ### Fixed
 * Browser name in WebPageTest can have spaces and that wasn't handled so when the metrics is sent to Graphite, it fails. #798
 
 ### Changed
-* Bumbed 3rd party dependencies: winston, request, phantomjs, moment, fs-extra, browsertime, cross-spawn-async, async
+* Bumped 3rd party dependencies: winston, request, phantomjs, moment, fs-extra, browsertime, cross-spawn-async, async
 
 version 3.11.1 - 2015-10-27
 ------------------------
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index d62ed2a03..a59eceac0 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,5 +1,5 @@
 # How to contribute
-Sitespeed.io continues to evolve thanks to people who contributes, so please please help out. Check your [list](HELP.md) of what you can do to help us.
+Sitespeed.io continues to evolve thanks to people who contribute, so please help out. Check our [list](HELP.md) of what you can do to help us.
 
 ## Making changes
 If you want help out, that's great! Check the issue list and see if there's something there you want to do and drop me a note so I know and I can help you get into the project.
diff --git a/lib/analyze/screenshots.js b/lib/analyze/screenshots.js
index fece343a9..d05d0a9a0 100644
--- a/lib/analyze/screenshots.js
+++ b/lib/analyze/screenshots.js
@@ -11,6 +11,7 @@ var path = require('path'),
   phantomPath = require('phantomjs').path,
   slimerPath = require('slimerjs').path,
   util = require('../util/util'),
+  urlParser = require('url'),
   inspect = require('util').inspect,
   fs = require('fs'),
   winston = require('winston'),
@@ -98,6 +99,12 @@ function screenshot(args, asyncDoneCallback) {
   } else {
     childArgs.push('false');
   }
+
+  if (config.proxy) {
+    var pUrl = urlParser.parse(config.proxy);
+    childArgs.push('--proxy=' + pUrl.hostname + ':' + pUrl.port)
+  }
+
   childArgs.push(config.waitScript);
   return childArgs;
 }
diff --git a/lib/analyze/webpagetest.js b/lib/analyze/webpagetest.js
index 781e2e759..b175e1b84 100644
--- a/lib/analyze/webpagetest.js
+++ b/lib/analyze/webpagetest.js
@@ -209,7 +209,7 @@ function analyzeUrl(args, asyncDoneCallback) {
         fs.writeFile(jsonPath, JSON.stringify(data), cb);
       }
     },
-    function(err5, results) {
+    function(err5) {
       if (err5) {
         asyncDoneCallback(undefined, err5);
       } else {
diff --git a/lib/analyzeMultipleSites.js b/lib/analyzeMultipleSites.js
index 0d8d712ec..6419c6068 100644
--- a/lib/analyzeMultipleSites.js
+++ b/lib/analyzeMultipleSites.js
@@ -9,7 +9,6 @@ var path = require('path'),
   moment = require('moment'),
   fs = require('fs-extra'),
   async = require('async'),
-  EOL = require('os').EOL,
   urlParser = require('url'),
   winston = require('winston'),
   SitesHTMLRenderer = require('./sitesHTMLRenderer'),
diff --git a/lib/analyzeOneSite.js b/lib/analyzeOneSite.js
index 53b4b8aac..02360d31b 100644
--- a/lib/analyzeOneSite.js
+++ b/lib/analyzeOneSite.js
@@ -36,10 +36,10 @@ AnalyzeOneSite.prototype.run = function(callback) {
 
   /**
   This is the main flow of the application and this is what we do:
-  1. Fetch the URL:s that will be analyzed, either we crawl a site using
-  a start url or we read the URL:s from a file.
-  2. Finetune the URL:s = do other thing that's needed, store them to disk etc.
-  3. Let the analyser take a go at the URL:s, the analyzer
+  1. Fetch the URL(s) that will be analyzed, either we crawl a site using
+  a start url or we read the URL(s) from a file.
+  2. Finetune the URL(s) = do other thing that's needed, store them to disk etc.
+  3. Let the analyser take a go at the URL(s), the analyzer
   got a lot to do, lets check the analyzer.js file
   4. The analyze is finished, lets create output
   **/
@@ -82,8 +82,7 @@ AnalyzeOneSite.prototype.run = function(callback) {
 };
 
 AnalyzeOneSite.prototype._fetchUrls = function(callback) {
-  var self = this;
-  // if we have an url configured, start crawling, else read the URL:s
+  // if we have an url configured, start crawling, else read the URL(s)
   // from file
   if (this.config.urls) {
     this._getMultipleURLs(callback);
@@ -126,7 +125,7 @@ AnalyzeOneSite.prototype._verifyURL = function(callback) {
     };
   }
 
-  request(options, function(error, response, body) {
+  request(options, function(error, response) {
     // request follow redirects and we want to end up
     if (!error && response.statusCode === 200) {
       callback(null, [self.config.url], []);
@@ -210,8 +209,6 @@ AnalyzeOneSite.prototype._getPostTasks = function(result) {
 };
 
 AnalyzeOneSite.prototype._getResult = function(downloadErrors, analysisErrors) {
-  var self = this;
-
   this.log.log('info', 'Done analyzing urls');
 
   // fetch all the data we need, and then generate the output
diff --git a/lib/cli.js b/lib/cli.js
index 46fa49b1d..5ad741017 100644
--- a/lib/cli.js
+++ b/lib/cli.js
@@ -448,7 +448,7 @@ var cli = nomnom.help(
   },
   processJson: {
     metavar: '',
-    help: 'Pass the path to a result JSON that will be processed again. Use this to recongigure what to show in the HTML.',
+    help: 'Pass the path to a result JSON that will be processed again. Use this to reconfigure what to show in the HTML.',
     hidden: true,
     transform: function(path) {
      return fileHelper.getFileAsJSON(path);
@@ -480,14 +480,14 @@ var cli = nomnom.help(
 }).parse();
 
 if ((!cli.url) && (!cli.file) && (!cli.sites) && (!cli.configFile)) {
-  console.log('You must specify either a URL to test, a file with URL:s or a config file');
+  console.log('You must specify either a URL to test, a file with URL(s) or a config file');
   console.log(nomnom.getUsage());
   process.exit(1);
 }
 
 if (cli.file) {
   cli.urls = fileHelper.getFileAsArray(cli.file);
-  // are all URL:s valid?
+  // are all URL(s) valid?
   var valid = true;
   cli.urls.forEach(function(url) {
     if (!validUrl.isWebUri(url)) {
diff --git a/lib/collectors/domains.js b/lib/collectors/domains.js
index 847892053..b8892d29c 100644
--- a/lib/collectors/domains.js
+++ b/lib/collectors/domains.js
@@ -7,7 +7,6 @@
 'use strict';
 var util = require('../util/util'),
   RequestTiming = require('../requestTiming'),
-  Stats = require('fast-stats').Stats,
   winston = require('winston');
 
 var domains = {};
diff --git a/lib/config.js b/lib/config.js
index ae3f7dda8..32d4e4f2f 100644
--- a/lib/config.js
+++ b/lib/config.js
@@ -16,7 +16,7 @@ exports.setupDefaultValues = function(config) {
 
   // to be able to run sitespeed.io you need a array of urls, a URL or a file
   if ((!config.url) && (!config.urls) && (!config.sites) && (!config.configFile)) {
-    throw new Error('You must specify either a URL to test, a array with URL:s or a configuration file');
+    throw new Error('You must specify either a URL to test, an array with URL(s) or a configuration file');
   }
 
   // if we have default values not set in the config
diff --git a/lib/crawler/crawler.js b/lib/crawler/crawler.js
index da0d59092..a8d73ae53 100644
--- a/lib/crawler/crawler.js
+++ b/lib/crawler/crawler.js
@@ -82,7 +82,7 @@ module.exports.crawl = function(url, config, callback) {
     log.error('Error from the crawl: %s', s);
   });
 
-  crawl.on('close', function(code) {
+  crawl.on('close', function() {
     // the crawler always return code ok today, hmm
     var okUrls = [];
     var errorUrls = {};
diff --git a/lib/graphite/graphiteCollector.js b/lib/graphite/graphiteCollector.js
index 9357ffa1e..0287fa016 100644
--- a/lib/graphite/graphiteCollector.js
+++ b/lib/graphite/graphiteCollector.js
@@ -6,8 +6,7 @@
  */
 'use strict';
 var util = require('../util/util'),
-  winston = require('winston'),
-  net = require('net');
+  winston = require('winston')
 
 var navigationTimingNames = ['navigationStart',
   'unloadEventStart',
@@ -323,7 +322,7 @@ GraphiteCollector.prototype._getDomainStats = function(domains, hostname) {
   domains.forEach(function(domain) {
     timings.forEach(function(timing) {
       values.forEach(function(value) {
-        // TODO we should use the protovol also in the key right
+        // TODO we should use the protocol also in the key right
         stats += self.namespace + '.summary.' + hostname + '.domains.timings.' +
           domain.domain.split('.').join('_') + '.' +
           timing + '.' +
diff --git a/lib/graphite/graphiteSender.js b/lib/graphite/graphiteSender.js
index a1e9f8c1c..d84d5fc74 100644
--- a/lib/graphite/graphiteSender.js
+++ b/lib/graphite/graphiteSender.js
@@ -5,8 +5,7 @@
  * Released under the Apache 2.0 License
  */
 'use strict';
-var util = require('../util/util'),
-  winston = require('winston'),
+var winston = require('winston'),
   net = require('net');
 
 function GraphiteSender(host, port, config) {
diff --git a/lib/postTasks/postResult.js b/lib/postTasks/postResult.js
index 43614af36..da966580e 100644
--- a/lib/postTasks/postResult.js
+++ b/lib/postTasks/postResult.js
@@ -6,9 +6,7 @@
  */
 'use strict';
 
-var path = require('path'),
-  winston = require('winston'),
-  fs = require('fs-extra'),
+var winston = require('winston'),
   request = require('request');
 
 exports.task = function(result, config, cb) {
diff --git a/lib/postTasks/renderSummaryHTML.js b/lib/postTasks/renderSummaryHTML.js
index 2943221ce..ee60d034a 100644
--- a/lib/postTasks/renderSummaryHTML.js
+++ b/lib/postTasks/renderSummaryHTML.js
@@ -51,7 +51,7 @@ exports.task = function(result, config, cb) {
         render('detailed-site-summary', detailedData, config.run.absResultDir, callback);
       }
     },
-    function(err, results) {
+    function(err) {
       if (err) {
         log.error('Could not write summary files ' + err);
       }
diff --git a/lib/requestTiming.js b/lib/requestTiming.js
index 47464bb99..4d6821630 100644
--- a/lib/requestTiming.js
+++ b/lib/requestTiming.js
@@ -8,7 +8,7 @@
 var Stats = require('fast-stats').Stats;
 
 /**
- * Create a a new request timing.
+ * Create a new request timing.
  *
  * @param {Integer} time - the time this part took in ms.
  * @param {Integer} url - the url to the asset that took this time.
diff --git a/lib/siteHTMLRenderer.js b/lib/siteHTMLRenderer.js
index 4a94b81d0..1914d107f 100644
--- a/lib/siteHTMLRenderer.js
+++ b/lib/siteHTMLRenderer.js
@@ -9,7 +9,6 @@ var ySlowUtil = require('./util/yslowUtil'),
   util = require('./util/util'),
   simplehar = require('simplehar.sitespeed.io'),
   path = require('path'),
-  fs = require('fs-extra'),
   winston = require('winston'),
   inspect = require('util').inspect,
   render = require('./util/htmlRenderer');
diff --git a/lib/sitespeed.js b/lib/sitespeed.js
index 96d82cb2e..ffc857e3d 100644
--- a/lib/sitespeed.js
+++ b/lib/sitespeed.js
@@ -65,7 +65,7 @@ Sitespeed.prototype.run = function(config, finishedCb) {
      }
    ],
 
-    function(err, results) {
+    function(err) {
      if (err) {
        return finishedCb(err);
      }
diff --git a/lib/tests/testRenderer.js b/lib/tests/testRenderer.js
index 34ecdb690..c7d1bdaa0 100644
--- a/lib/tests/testRenderer.js
+++ b/lib/tests/testRenderer.js
@@ -349,7 +349,7 @@ TestRenderer.prototype.render = function(cb) {
        }
      }
    },
-  function(err, results) {
+  function(err) {
    if (err) {
      self.log.log('error', 'Error rendering budget ' + err);
      cb(err);
diff --git a/lib/util/util.js b/lib/util/util.js
index ea8df3b76..de2190d5e 100644
--- a/lib/util/util.js
+++ b/lib/util/util.js
@@ -11,6 +11,7 @@ var crypto = require('crypto'),
   path = require('path'),
   async = require('async'),
   phantomjsPath = require('phantomjs').path,
+  slimerPath = require('slimerjs').path,
   childProcess = require('child_process'),
   fileHelper = require('./fileHelpers'),
   winston = require('winston'),
@@ -167,7 +168,7 @@ module.exports = {
      }
    }
 
-    // add a small md5-sum, taking care of URL:s with request parameters
+    // add a small md5-sum, taking care of URL(s) with request parameters
    if (urlComponents.query) {
      name = name + crypto.createHash('md5').update(u).digest('hex').substr(0, 5);
    }
@@ -364,7 +365,13 @@ module.exports = {
 
    async.parallel([
        function(callback) {
-          childProcess.execFile(config.phantomjsPath || phantomjsPath, ['--version'], {
+          var path = '';
+          if (config.headless === 'slimerjs') {
+            path = config.slimerPath || slimerPath
+          } else {
+            path = config.phantomjsPath || phantomjsPath
+          }
+          childProcess.execFile(path, ['--version'], {
            timeout: 120000
          }, function(err, stdout) {
            if (err) {
@@ -393,12 +400,13 @@ module.exports = {
      }
 
      var osVersion = os.platform() + ' ' + os.release();
-      var phantomjsVersion = results[0];
+      var headlessVersion = results[0];
      var javaVersion = results[1];
+      var headlessName = config.headless === 'slimerjs' ? 'SlimerJS' : 'PhantomJS';
      log.info(
-        'OS: \'%s\', Node.js: \'%s\', sitespeed.io: \'%s\', PhantomJS: \'%s\', java: \'%s\', browsertime: \'%s\'',
-        osVersion, process.version, sitespeedVersion, phantomjsVersion, javaVersion, browserTimeVersion);
+        'OS: \'%s\', Node.js: \'%s\', sitespeed.io: \'%s\', %s: \'%s\', java: \'%s\', browsertime: \'%s\'',
+        osVersion, process.version, sitespeedVersion, headlessName, headlessVersion, javaVersion, browserTimeVersion);
      return cb();
    });
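
Reviewer note (not part of the diff): a minimal sketch of what the new proxy handling added to lib/analyze/screenshots.js does with config.proxy, using Node's built-in url module. The helper name buildProxyArg and the example proxy URL are hypothetical, for illustration only.

```js
var urlParser = require('url');

function buildProxyArg(proxy) {
  // url.parse('http://myproxy.example.com:8080') yields
  // { hostname: 'myproxy.example.com', port: '8080', ... }
  var pUrl = urlParser.parse(proxy);
  return '--proxy=' + pUrl.hostname + ':' + pUrl.port;
}

// The resulting string is pushed as an extra argument to the
// PhantomJS/SlimerJS child process, e.g.:
// buildProxyArg('http://myproxy.example.com:8080') -> '--proxy=myproxy.example.com:8080'
console.log(buildProxyArg('http://myproxy.example.com:8080'));
```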