I would like someone to review this code and tell me whether it can be done better, or if the code is elegant and simple enough for the task at hand.
I ran it through Code Climate and got a 4/4 score, and my test coverage is 96%, yet I would like a professional opinion about it.
'use strict'; var cheerio = require('cheerio'); var FeedParser = require('feedparser'); var http = require('http'); var PromisePolyfill = require('promise'); var urlEncode = require('urlencode'); /** * Extracts a valid url from the RSS Feed Item, taking into account exception urls * @param {string} content - the item to be analyzed * @returns {string} - the valid url inside the item */ function getValidURL(content) { var $ = cheerio.load(content), responseUrl; $('a').each(function(i, e) { if ( $(e).attr('href').indexOf('reddit.com') === -1 && $(e).attr('href').indexOf('imgur.com') === -1 ) { responseUrl = $(e).attr('href'); } }); return responseUrl; } /** * Parses a RSS stream into an Object * @param {string} rssUrl - RSS url to fetch the stream * @returns {Object} - the Object with meta and item information */ exports.parseRss = function(rssUrl) { var responseObject = { title: '', link: '', image: '', items: [] }; return new PromisePolyfill(function(resolve, reject) { http.get(rssUrl, function(resGet) { resGet.pipe(new FeedParser({})) .on('error', function(error) { reject(error.message); }) .on('meta', function(meta) { responseObject.title = meta.title; responseObject.link = meta.link; }) .on('readable', function() { var stream = this, item, validUrl = ''; while ((item = stream.read())) { validUrl = getValidURL(item.description); item.title2 = urlEncode(item.title); if (validUrl) { var ep = { 'title': item.title, 'title2': item.title2, 'mediaUrl': item.link, 'newUrl': validUrl, 'date': item.date }; responseObject.items.push(ep); } } }) .on('end', function() { resolve(responseObject); }); }) .on('error', function(error) { reject(error.message); }); }); };
And here is its test file:
'use strict';

var chai = require('chai');
var chaiAsPromised = require('chai-as-promised');
var sinon = require('sinon');
var http = require('http');
var fs = require('fs');
var PassThrough = require('stream').PassThrough;
var rssParser = require('../../modules/rssParser');

var response;
var responsePT;

chai.should();
chai.use(chaiAsPromised);

describe('rssParser Module', function() {
  beforeEach(function(done) {
    response = new PassThrough();
    responsePT = new PassThrough();
    sinon.stub(http, 'get');
    done();
  });

  afterEach(function(done) {
    http.get.restore();
    done();
  });

  /**
   * Pushes `payload` through the fake response stream and wires the
   * stubbed http.get so the module under test receives it.
   * @param {string} payload - the raw body to serve as the HTTP response
   */
  function stubResponseWith(payload) {
    response.write(payload);
    response.end();
    http.get.callsArgWith(1, response)
      .returns(responsePT);
  }

  it('should return valid object when a valid url is passed', function(done) {
    stubResponseWith(fs.readFileSync('test/fixtures/rssResponse1.xml', 'utf8'));

    rssParser.parseRss('http://www.reddit.com/r/science/.rss')
      .done(function(responseObject) {
        responseObject.should.be.an('object');
        responseObject.items.length.should.equal(3);
        done();
      });
  });

  it('should return empty items when a valid url is not found', function(done) {
    stubResponseWith(fs.readFileSync('test/fixtures/notFound.xml', 'utf8'));

    rssParser.parseRss('http://www.reddit.com/r/urlnotfound/.rss')
      .done(function(responseObject) {
        responseObject.should.be.an('object');
        responseObject.items.length.should.equal(0);
        done();
      });
  });

  it('should throw an error with an empty response', function(done) {
    stubResponseWith('');

    rssParser.parseRss('http://www.reddit.com/r/emptyresponse/.rss')
      .then(function() {
        // Not returning a valid responseObject, should catch the error
      }, function(error) {
        error.should.equal('Not a feed');
        done();
      });
  });
});