cleanup part 2

This commit is contained in:
soulgalore 2014-07-07 12:34:53 +02:00
parent aeba3b38c3
commit 054e15661b
1 changed file with 22 additions and 14 deletions

View File

@ -43,10 +43,18 @@ Runner.prototype.run = function(finshedCb) {
var junitRenderer = this.junitRenderer;
var htmlRenderer = this.htmlRenderer;
var collector = this.collector;
var self = this;
var graphite = this.graphite;
/**
This is the main flow of the application and this is what we do:
1. Fetch the URLs that will be analyzed: either we crawl a site using
a start URL, or we read the URLs from a file.
2. Fine-tune the URLs — do whatever else is needed, store them to disk, etc.
3. Let the analyzer take a go at the URLs; the analyzer
has a lot to do — check the analyzer.js file.
4. The analysis is finished — let's create the output.
**/
async.waterfall([
function(cb) {
fetchUrls(crawler, cb);
},
@ -54,10 +62,10 @@ Runner.prototype.run = function(finshedCb) {
fineTuneUrls(okUrls, errorUrls, cb);
},
function(urls, downloadErrors, cb) {
analyze(analyzer, urls, collector, downloadErrors, junitRenderer, htmlRenderer, self, cb);
analyze(analyzer, urls, collector, downloadErrors, junitRenderer, htmlRenderer, cb);
},
function(downloadErrors, analysisErrors, cb) {
self.analysisComplete(downloadErrors, analysisErrors, cb);
createOutput(collector, htmlRenderer, graphite, junitRenderer, downloadErrors, analysisErrors, cb);
}
], function(err, result) {
if (err)
@ -102,7 +110,7 @@ function fetchUrls(crawler, callback) {
}
}
function analyze(analyzer, urls, collector, downloadErrors, junitRenderer, htmlRenderer, self, callback) {
function analyze(analyzer, urls, collector, downloadErrors, junitRenderer, htmlRenderer, callback) {
var analysisErrors = {};
log.log('info', "Will analyze " + urls.length + " pages");
analyzer.analyze(urls, collector, downloadErrors, analysisErrors, function(err, url, pageData) {
@ -130,17 +138,17 @@ function saveUrls(urls) {
});
}
Runner.prototype.analysisComplete = function(downloadErrors, analysisErrors, callBack) {
function createOutput(collector, htmlRenderer, graphite, junitRenderer, downloadErrors, analysisErrors, callBack) {
log.log('info', 'Done analyzing urls');
var aggregates = this.collector.createAggregates();
var assets = this.collector.createCollections().assets;
var pages = this.collector.createCollections().pages;
var htmlRenderer = this.htmlRenderer;
var graphite = this.graphite;
var junitRenderer = this.junitRenderer;
// fetch all the data we need, and then generate the output
var aggregates = collector.createAggregates();
var assets = collector.createCollections().assets;
var pages = collector.createCollections().pages;
// function a = this.a;
/* We have a lot of things to do, so let's generate all results
in parallel and then be notified when we are finished
*/
async.parallel({
renderSummary: function(cb) {
htmlRenderer.renderSummary(aggregates, cb);
@ -179,4 +187,4 @@ Runner.prototype.analysisComplete = function(downloadErrors, analysisErrors, cal
log.log('info', "Wrote results to " + config.run.absResultDir);
callBack();
});
};
}