new structure to keep the flow understandable, part 1
commit aeba3b38c3
parent 9ee402ad30
@@ -10,4 +10,6 @@ require('whereis')('java', function searched(err) {
   }
 });
 
-r.run();
+r.run(function() {
+  console.log("Finished callback");
+});

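For illustration, a minimal sketch of calling the new callback-style run(); the require path and the standalone invocation are assumptions for this sketch, the bin script in the hunk above is the real caller:

    // Sketch only: run() now accepts an optional completion callback; the runner
    // defaults it to a no-op, so calling r.run() with no arguments keeps working.
    var Runner = require('../lib/runner');   // assumed path, matching lib/runner.js below

    var r = new Runner();
    r.run(function() {
      console.log('Finished callback');
    });
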
@@ -20,10 +20,10 @@ module.exports = Analyzer;
 function Analyzer() {
 }
 
-Analyzer.prototype.analyze = function(urls, collector, urlAnalysedCallback, completionCallback) {
+Analyzer.prototype.analyze = function(urls, collector, downloadErrors, analysisErrors, urlAnalysedCallback, completionCallback) {
   var self = this;
   if (urls.length === 0) {
-    completionCallback();
+    completionCallback(downloadErrors, analysisErrors);
   }
 
   var tasks = [

@@ -93,6 +93,7 @@ Analyzer.prototype.analyze = function(urls, collector, urlAnalysedCallback, comp
       collector.collectPageData(pageData);
       urlAnalysedCallback(err, url, pageData);
     });
-    completionCallback(undefined);
+
+    completionCallback(null,downloadErrors, analysisErrors);
   });
 };

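For illustration, a sketch of how a caller is expected to use the widened analyze() signature, with the error maps created up front and handed back through the completion callback; the require paths and the single test URL are assumptions:

    // Sketch only: downloadErrors comes from the crawl/fetch step, analysisErrors is
    // filled per url; analyze() passes both back as (err, downloadErrors, analysisErrors).
    var Analyzer = require('./analyzer');    // assumed paths
    var Collector = require('./collector');

    var downloadErrors = {};                 // urls that failed to download earlier
    var analysisErrors = {};

    new Analyzer().analyze(['http://example.com/'], new Collector(),
      downloadErrors, analysisErrors,
      function(err, url, pageData) {         // per-url callback
        if (err) analysisErrors[url] = err;
      },
      function(err, downloadErrors, analysisErrors) {  // completion callback
        console.log('Done, analysis errors: ' + Object.keys(analysisErrors).length);
      });
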
@@ -176,7 +176,6 @@ HTMLRenderer.prototype.renderPages = function (pages, cb) {
 };
 
 HTMLRenderer.prototype.renderAssets = function (assets, cb) {
-
   var sorted = assets.sort(function(asset, asset2) {
     return asset2.count - asset.count;
   });

@@ -205,6 +204,8 @@ function renderHtmlToFile(template, renderData, cb, fileName, optionalPath) {
   fs.outputFile(file, result, function(err) {
     if (err)
+      log.log('error', "Couldn't write the file " + file + ' err:' + err);
+    else
       log.log('info', "Wrote file " + fileName);
     cb();
   });
 }

lib/runner.js (216 changed lines)

@@ -20,74 +20,16 @@ var crawler = require('./crawler'),
 module.exports = Runner;
 
 function Runner() {
-  var self = this;
   this.analyzer = new Analyzer();
   this.collector = new Collector();
   this.htmlRenderer = new HTMLRenderer();
   this.junitRenderer = new JUnitRenderer(this.collector);
   this.graphite = new Graphite(config.graphiteHost, config.graphitePort, config
     .graphiteNamespace, this.collector);
-  this.downloadErrors = {};
-  this.analysisErrors = {};
 }
 
 
-Runner.prototype.analysisComplete = function(err) {
-  log.log('info', 'Done analyzing urls');
-
-  var aggregates = this.collector.createAggregates();
-  var assets = this.collector.createCollections().assets;
-  var pages = this.collector.createCollections().pages;
-
-  var htmlRenderer = this.htmlRenderer;
-  var graphite = this.graphite;
-  var junitRenderer = this.junitRenderer;
-  var downloadErrors = this.downloadErrors;
-  var analysisErrors = this.analysisErrors;
-  // function a = this.a;
-  async.parallel({
-    renderSummary: function(cb){
-      htmlRenderer.renderSummary(aggregates,cb);
-    },
-    renderAssets: function(cb){
-      htmlRenderer.renderAssets(assets,cb);
-    },
-    renderPages: function(cb){
-      htmlRenderer.renderPages(pages,cb);
-    },
-    renderRules: function(cb){
-      // TODO the rules needs to be generated after ...
-      htmlRenderer.renderRules(cb);
-    },
-    renderErrors: function(cb){
-      htmlRenderer.renderErrors(downloadErrors, analysisErrors,cb);
-    },
-    renderScreenshots: function(cb){
-      if (config.screenshot) {
-        htmlRenderer.renderScreenshots(pages,cp);
-      } else cb();
-    },
-    sendToGraphite: function(cb){
-      if (config.graphiteHost)
-        graphite.sendPageData(aggregates, pages,cb);
-      else cb();
-    },
-    renderJUnit: function(cb){
-      if (config.junit)
-        junitRenderer.renderAfterFullAnalyse(cb);
-      else cb();
-    }
-  },
-  function(err, results) {
-    if (!err)
-      log.log('info', "Wrote results to " + config.run.absResultDir);
-    // call the callback!
-  });
-};
-
-
 Runner.prototype.run = function(finshedCb) {
-  finshedCb = finshedCb || function () {};
+  finshedCb = finshedCb || function() {};
   // setup the directories needed
   fs.mkdirsSync(path.join(config.run.absResultDir, config.dataDir));

@@ -97,34 +39,35 @@ Runner.prototype.run = function(finshedCb) {
     if (err) throw err;
   });
 
   console.time("sitespeed.io");
-  if (config.url) {
-    log.log('info', "Will crawl from start point " + config.url +
-      " with crawl depth " + config.deep);
-    this.crawl(config.urlObject);
-  } else {
-    log.log('info', "Will fetch urls from the file " + config.file);
-    this.readFromFile(config.file);
-  }
-};
-
-Runner.prototype.readFromFile = function(file) {
-  var urls = fs.readFileSync(file).toString().split("\n");
-  urls = urls.filter(function(l) {
-    return l.length > 0;
-  });
-  analyzeUrls(urls);
-};
-
-Runner.prototype.crawl = function(url) {
   var analyzer = this.analyzer;
   var junitRenderer = this.junitRenderer;
   var htmlRenderer = this.htmlRenderer;
   var collector = this.collector;
   var self = this;
-  crawler.crawl(url, function(okUrls, errorUrls) {
-    self.handleResult(okUrls, errorUrls);
 
+  async.waterfall([
+
+    function(cb) {
+      fetchUrls(crawler, cb);
+    },
+    function(okUrls, errorUrls, cb) {
+      fineTuneUrls(okUrls, errorUrls, cb);
+    },
+    function(urls, downloadErrors, cb) {
+      analyze(analyzer, urls, collector, downloadErrors, junitRenderer, htmlRenderer, self, cb);
+    },
+    function(downloadErrors, analysisErrors, cb) {
+      self.analysisComplete(downloadErrors, analysisErrors, cb);
+    }
+  ], function(err, result) {
+    if (err)
+      log.log('error', err);
+    finshedCb();
   });
 };
 
-Runner.prototype.handleResult = function(okUrls, errorUrls) {
-  var downloadErrors = this.downloadErrors;
+function fineTuneUrls(okUrls, errorUrls, callback) {
+  var downloadErrors = {};
   Object.keys(errorUrls).forEach(function(url) {
     log.log('error', "Failed to download " + url);
     downloadErrors[url] = errorUrls[url];

@@ -136,14 +79,47 @@ Runner.prototype.handleResult = function(okUrls, errorUrls) {
     okUrls.length = config.maxPagesToTest;
   }
   if (okUrls.length === 0) {
-    log.log('info', "Didn't get any URLs from the crawl");
-    return;
+    log.log('info', "Didn't get any URLs");
+    throw Error('No URL:s to analyze');
   }
 
   saveUrls(okUrls);
+  callback(null, okUrls, downloadErrors);
+}
+
+function fetchUrls(crawler, callback) {
+  if (config.url) {
+    log.log('info', "Will crawl from start point " + config.url +
+      " with crawl depth " + config.deep);
+    crawler.crawl(config.url, function(okUrls, errorUrls) {
+      callback(null, okUrls, errorUrls);
+    });
+  } else {
+    var urls = fs.readFileSync(config.file).toString().split("\n");
+    urls = urls.filter(function(l) {
+      return l.length > 0;
+    });
+    callback(null, urls, {});
+  }
+}
+
+function analyze(analyzer, urls, collector, downloadErrors, junitRenderer, htmlRenderer, self, callback) {
+  var analysisErrors = {};
+  log.log('info', "Will analyze " + urls.length + " pages");
+  analyzer.analyze(urls, collector, downloadErrors, analysisErrors, function(err, url, pageData) {
+
+    if (err) {
+      log.log('error', 'Could not analyze ' + url + ' (' + JSON.stringify(err) +
+        ')');
+      analysisErrors[url] = err;
+      return;
+    }
+
+    if (config.junit)
+      junitRenderer.renderForEachPage(url, pageData);
+    htmlRenderer.renderPage(url, pageData, function() {});
+  }, callback);
+}
 
-  this.analyzeUrls(okUrls);
-};
 
 function saveUrls(urls) {
   fs.writeFile(path.join(config.run.absResultDir, 'data', 'urls.txt'), urls.join(

@@ -154,25 +130,53 @@ function saveUrls(urls) {
   });
 }
 
-Runner.prototype.analyzeUrls = function(urls) {
-  console.log("Will analyze " + urls.length + " pages");
-  var junitRenderer = this.junitRenderer;
+Runner.prototype.analysisComplete = function(downloadErrors, analysisErrors, callBack) {
+  log.log('info', 'Done analyzing urls');
+  var aggregates = this.collector.createAggregates();
+  var assets = this.collector.createCollections().assets;
+  var pages = this.collector.createCollections().pages;
+
   var htmlRenderer = this.htmlRenderer;
-  var analysisErrors = this.analysisErrors;
-  var self = this;
-  this.analyzer.analyze(urls, this.collector, function(err, url, pageData) {
+  var graphite = this.graphite;
+  var junitRenderer = this.junitRenderer;
 
-    if (err) {
-      log.log('error', 'Could not analyze ' + url + ' (' + JSON.stringify(err) +
-        ')');
-      analysisErrors[url] = err;
-      return;
+  // function a = this.a;
+  async.parallel({
+    renderSummary: function(cb) {
+      htmlRenderer.renderSummary(aggregates, cb);
+    },
+    renderAssets: function(cb) {
+      htmlRenderer.renderAssets(assets, cb);
+    },
+    renderPages: function(cb) {
+      htmlRenderer.renderPages(pages, cb);
+    },
+    renderRules: function(cb) {
+      // TODO the rules needs to be generated after ...
+      htmlRenderer.renderRules(cb);
+    },
+    renderErrors: function(cb) {
+      htmlRenderer.renderErrors(downloadErrors, analysisErrors, cb);
+    },
+    renderScreenshots: function(cb) {
+      if (config.screenshot) {
+        htmlRenderer.renderScreenshots(pages, cp);
+      } else cb();
+    },
+    sendToGraphite: function(cb) {
+      if (config.graphiteHost)
+        graphite.sendPageData(aggregates, pages, cb);
+      else cb();
+    },
+    renderJUnit: function(cb) {
+      if (config.junit)
+        junitRenderer.renderAfterFullAnalyse(cb);
+      else cb();
     }
-
-    if (config.junit)
-      junitRenderer.renderForEachPage(url, pageData);
-    htmlRenderer.renderPage(url, pageData, function(){});
-  },function(err) {
-    self.analysisComplete(err);
-  });
+  },
+  function(err, results) {
+    if (!err)
+      log.log('info', "Wrote results to " + config.run.absResultDir);
+    callBack();
+  });
 };
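
For illustration, the control flow this refactor aims for, reduced to a self-contained async.waterfall sketch: each step hands its results (including the error maps) to the next, and the final callback receives either an error or the accumulated maps. The function names and placeholder results here are illustrative stand-ins for fetchUrls, fineTuneUrls, analyze and analysisComplete, not the real implementations:

    // Sketch only: how results and error maps thread through async.waterfall.
    var async = require('async');

    async.waterfall([
      function fetch(cb) {
        cb(null, ['http://example.com/'], {});   // okUrls, errorUrls
      },
      function fineTune(okUrls, errorUrls, cb) {
        cb(null, okUrls, errorUrls);             // urls, downloadErrors
      },
      function analyze(urls, downloadErrors, cb) {
        cb(null, downloadErrors, {});            // downloadErrors, analysisErrors
      },
      function complete(downloadErrors, analysisErrors, cb) {
        console.log('download errors:', Object.keys(downloadErrors).length,
          'analysis errors:', Object.keys(analysisErrors).length);
        cb();
      }
    ], function(err) {
      if (err) console.error(err);
      console.log('Finished callback');
    });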