removed courier tests

Spencer Alger 2014-08-01 12:24:35 -07:00
parent a16c0abec1
commit ac2c11def0
7 changed files with 0 additions and 598 deletions

View file

@@ -1,24 +0,0 @@
define(function (require) {
  var _ = require('lodash');
  require('angular-mocks');

  return function extendCourierSuite() {
    inject(function (es, courier) {
      describe('#abort', function () {
        it('calls abort on the current request if it exists', function () {
          courier
            .createSource('search')
            .on('results', _.noop);

          courier.abort();
          expect(es.abortCalled).to.eql(0);

          courier.fetch('search');
          courier.abort();
          expect(es.abortCalled).to.eql(1);
        });
      });
    });
  };
});

View file

@@ -1,103 +0,0 @@
define(function (require) {
  var calculateIndices = require('components/courier/calculate_indices');
  var moment = require('moment');

  return function extendCourierSuite() {
    describe('#calculateIndices', function () {
      describe('error checking', function () {
        it('should throw an error if start is > end', function () {
          expect(function () { calculateIndices(moment().add('day', 1), moment()); }).to.throwError();
        });
        it('should throw an error if interval is not [ hour, day, week, year ]', function () {
          expect(function () { calculateIndices(moment().subtract('day', 1), moment(), 'century'); }).to.throwError();
        });
        it('should throw an error if pattern is not set', function () {
          expect(function () { calculateIndices(moment().subtract('day', 1), moment(), 'hour'); }).to.throwError();
        });
      });

      describe('hourly interval', function () {
        beforeEach(function () {
          var date = '2014-01-15 04:30:10';
          this.start = moment.utc(date).subtract('hours', 4);
          this.end = moment.utc(date);
          this.interval = 'hour';
          this.pattern = '[logstash-]YYYY.MM.DD.HH';
          this.fixture = [
            'logstash-2014.01.15.01',
            'logstash-2014.01.15.02',
            'logstash-2014.01.15.03',
            'logstash-2014.01.15.04'
          ];
        });
        it('should return a set of hourly indices', function () {
          expect(calculateIndices(this.start, this.end, this.interval, this.pattern))
            .to.eql(this.fixture);
        });
      });

      describe('daily interval', function () {
        beforeEach(function () {
          var date = '2014-01-15 04:30:10';
          this.start = moment.utc(date).subtract('days', 4);
          this.end = moment.utc(date);
          this.interval = 'day';
          this.pattern = '[logstash-]YYYY.MM.DD';
          this.fixture = [
            'logstash-2014.01.12',
            'logstash-2014.01.13',
            'logstash-2014.01.14',
            'logstash-2014.01.15'
          ];
        });
        it('should return a set of daily indices', function () {
          expect(calculateIndices(this.start, this.end, this.interval, this.pattern))
            .to.eql(this.fixture);
        });
      });

      describe('weekly interval', function () {
        beforeEach(function () {
          var date = '2014-01-15 04:30:10';
          this.start = moment.utc(date).subtract('week', 4);
          this.end = moment.utc(date);
          this.interval = 'week';
          this.pattern = '[logstash-]YYYY.MM.DD';
          this.fixture = [
            'logstash-2013.12.25',
            'logstash-2014.01.01',
            'logstash-2014.01.08',
            'logstash-2014.01.15'
          ];
        });
        it('should return a set of weekly indices', function () {
          expect(calculateIndices(this.start, this.end, this.interval, this.pattern))
            .to.eql(this.fixture);
        });
      });

      describe('yearly interval', function () {
        beforeEach(function () {
          var date = '2014-01-15 04:30:10';
          this.start = moment.utc(date).subtract('years', 4);
          this.end = moment.utc(date);
          this.interval = 'year';
          this.pattern = '[logstash-]YYYY.MM.DD';
          this.fixture = [
            'logstash-2011.01.15',
            'logstash-2012.01.15',
            'logstash-2013.01.15',
            'logstash-2014.01.15'
          ];
        });
        it('should return a set of yearly indices', function () {
          expect(calculateIndices(this.start, this.end, this.interval, this.pattern))
            .to.eql(this.fixture);
        });
      });
    });
  };
});

View file

@@ -1,35 +0,0 @@
define(function (require) {
  var SearchSource = require('components/courier/data_source/search_source');
  var DocSource = require('components/courier/data_source/doc_source');

  return function extendCourierSuite() {
    inject(function (courier) {
      describe('#createSource', function () {
        it('creates an empty search DataSource object', function () {
          var source = courier.createSource();
          expect(source._state).to.eql({});
        });
        it('optionally accepts a type for the DataSource', function () {
          expect(courier.createSource()).to.be.a(SearchSource);
          expect(courier.createSource('search')).to.be.a(SearchSource);
          expect(courier.createSource('doc')).to.be.a(DocSource);
          expect(function () {
            courier.createSource('invalid type');
          }).to.throwError(TypeError);
        });
        it('optionally accepts a json object/string that will populate the DataSource object with settings', function () {
          var savedState = JSON.stringify({
            _type: 'doc',
            index: 'logstash-[YYYY-MM-DD]',
            type: 'nginx',
            id: '1'
          });
          var source = courier.createSource('doc', savedState);
          expect(source + '').to.eql(savedState);
        });
      });
    });
  };
});

View file

@@ -1,83 +0,0 @@
define(function (require) {
  require('components/courier/courier');

  return function extendCourierSuite() {
    inject(function ($injector) {
      var courier = $injector.get('courier');
      var DocSource = $injector.get('CouriersDocSource');

      describe('DataSource class', function () {
        describe('::new', function () {
          it('optionally accepts a json object/string that will populate the DataSource object with settings', function () {
            var initialState = {
              _type: 'doc',
              index: 'logstash-[YYYY-MM-DD]',
              type: 'nginx',
              id: '1'
            };
            expect((new DocSource(courier, initialState)).toJSON()).to.eql(initialState);

            var savedState = JSON.stringify(initialState);
            expect(String(new DocSource(courier, savedState))).to.eql(savedState);
          });
        });

        describe('events', function () {
          describe('results', function () {
            it('emits when a new result is available');
            it('emits null when an error occurs');
          });
        });

        describe('chainable and synch API', function () {
          describe('#query', function () {
            it('sets the query of a DataSource');
          });
          describe('#filters', function () {
            it('converts the query to a filtered_query and sets the filters in that query');
          });
          describe('#sort', function () {
            it('adds a sort to the DataSource');
          });
          describe('#highlight', function () {
            it('sets the highlight fields for a DataSource');
          });
          describe('#aggs', function () {
            it('sets the aggs for the DataSource');
          });
          describe('#from', function () {
            it('sets the from property of the DataSource');
          });
          describe('#size', function () {
            it('sets the size property of the DataSource');
          });
          describe('#inherits', function () {
            it('sets the parent of a DataSource, meaning it will absorb its filters/aggregations/etc.');
          });
          describe('#toJSON', function () {
            it('serializes the own properties of this DataSource to a JSON string');
          });
        });

        describe('async API', function () {
          describe('#fetch', function () {
            it('initiates a fetch at the Courier');
          });
          describe('#fields', function () {
            it('fetches the fields available for the given query, including the types possible for each field');
            it('returns types as an array, possibly containing multiple types or multi-index queries');
          });
        });
      });
    });
  };
});

View file

@@ -1,225 +0,0 @@
define(function (require) {
  var elasticsearch = require('bower_components/elasticsearch/elasticsearch');
  var sinon = require('test_utils/auto_release_sinon');
  var Mapper = require('components/courier/mapper');
  var fieldMapping = require('fixtures/field_mapping');
  var fieldMappingWithDupes = require('fixtures/mapping_with_dupes');
  var nextTick = require('utils/next_tick');

  require('angular-mocks');

  return function extendCourierSuite() {
    inject(function (es, courier) {
      describe('Mapper', function () {
        var source, mapper;

        beforeEach(function () {
          source = courier.createSource('search')
            .index('valid')
            .size(5);
          mapper = new Mapper(courier);

          // Stub out a mini mapping response.
          sinon.stub(es.indices, 'getFieldMapping', function (params, callback) {
            if (params.index === 'valid') {
              nextTick(callback, undefined, fieldMapping);
            } else if (params.index === 'dupes') {
              nextTick(callback, undefined, fieldMappingWithDupes);
            } else {
              nextTick(callback, new Error('Error: Not Found'), undefined);
            }
          });

          sinon.stub(es, 'getSource', function (params, callback) {
            if (params.id === 'valid') {
              nextTick(callback, undefined, {'baz': {'type': 'long'}, 'foo.bar': {'type': 'string'}});
            } else {
              nextTick(callback, new Error('Error: Not Found'), undefined);
            }
          });

          sinon.stub(es, 'delete', function (params, callback) {
            nextTick(callback, undefined, true);
          });
        });

        it('provides a constructor for the Mapper class', function (done) {
          var mapper = new Mapper(courier);
          expect(mapper).to.be.a(Mapper);
          done();
        });

        it('has getFieldsFromMapping function that returns a mapping', function (done) {
          mapper.getFieldsFromMapping(source, function (err, mapping) {
            expect(es.indices.getFieldMapping.called).to.be(true);
            expect(mapping['foo.bar'].type).to.be('string');
            done();
          });
        });

        it('has getFieldsFromCache that returns an error for uncached indices', function (done) {
          source = courier.createSource('search')
            .index('invalid')
            .size(5);

          mapper.getFieldsFromCache(source, function (err, mapping) {
            expect(es.getSource.called).to.be(true);
            expect(err.message).to.be('Error: Not Found');
            done();
          });
        });

        it('has getFieldsFromCache that returns a mapping', function (done) {
          mapper.getFieldsFromCache(source, function (err, mapping) {
            expect(es.getSource.called).to.be(true);
            expect(mapping['foo.bar'].type).to.be('string');
            done();
          });
        });

        it('has a getFieldsFromObject function', function (done) {
          expect(mapper.getFieldsFromObject).to.be.a('function');
          done();
        });

        it('has a getFields that returns a mapping from cache', function (done) {
          mapper.getFields(source, function (err, mapping) {
            expect(es.getSource.called).to.be(true);
            expect(es.indices.getFieldMapping.called).to.be(false);
            expect(mapping['foo.bar'].type).to.be('string');
            done();
          });
        });

        it('can get fields from a cached object if they have been retrieved before', function (done) {
          sinon.spy(mapper, 'getFieldsFromObject');

          mapper.getFields(source, function (err, mapping) {
            mapper.getFields(source, function (err, mapping) {
              expect(mapping['foo.bar'].type).to.be('string');
              expect(mapper.getFieldsFromObject.calledOnce);
              done();
            });
          });
        });

        it('gets fields from the mapping if not already cached', function (done) {
          sinon.stub(mapper, 'getFieldsFromCache', function (source, callback) {
            callback({error: 'Stubbed cache get failure'});
          });

          sinon.stub(es, 'index', function (params, callback) {
            nextTick(callback, null, {});
          });

          sinon.spy(mapper, 'getFieldsFromMapping');

          mapper.getFields(source, function (err, mapping) {
            expect(mapping['foo.bar'].type).to.be('string');
            expect(mapper.getFieldsFromMapping.calledOnce);
            done();
          });
        });

        it('throws an error if it is unable to cache to Elasticsearch', function (done) {
          sinon.stub(mapper, 'getFieldsFromCache', function (source, callback) {
            callback({error: 'Stubbed failure'});
          });

          sinon.stub(es, 'index', function (params, callback) {
            callback({error: 'Stubbed cache write failure'});
          });

          // TODO: Correctly test thrown errors.
          sinon.stub(courier, '_error', function () { return; });

          mapper.getFields(source, function (err, mapping) {
            expect(courier._error.calledOnce);
          });

          done();
        });

        it('has getFields that throws an error for invalid indices', function (done) {
          source = courier.createSource('search')
            .index('invalid')
            .size(5);

          sinon.stub(es, 'index', function (params, callback) {
            nextTick(callback, undefined, {});
          });

          mapper.getFields(source, function (err, mapping) {
            expect(err).to.be.ok();
            done();
          });
        });

        it('has a clearCache that calls es.delete', function (done) {
          mapper.clearCache(source, function () {
            expect(es.delete.called).to.be(true);
            done();
          });
        });

        it('has a clearCache that clears the object cache', function (done) {
          mapper.getFields(source, function (err, mapping) {
            expect(mapper.getFieldsFromObject(source)).to.be.a(Object);
            mapper.clearCache(source, function () {
              expect(mapper.getFieldsFromObject(source)).to.be(false);
              done();
            });
          });
        });

        it('has a getFieldMapping that returns the mapping for a field', function (done) {
          mapper.getFieldMapping(source, 'foo.bar', function (err, field) {
            expect(field).to.be.a(Object);
            done();
          });
        });

        it('has a getFieldMapping that returns the mapping for a field', function (done) {
          mapper.getFieldMapping(source, 'foo.bar', function (err, field) {
            expect(field.type).to.be('string');
            done();
          });
        });

        it('has a getFieldsMapping that returns the mapping for multiple fields', function (done) {
          mapper.getFieldsMapping(source, ['foo.bar', 'baz'], function (err, mapping) {
            expect(mapping['foo.bar'].type).to.be('string');
            expect(mapping.baz.type).to.be('long');
            done();
          });
        });

        it('has a getFieldsFromMapping that throws an error if a field is defined differently in 2 indices', function (done) {
          source = courier.createSource('search').index('dupes');

          // TODO: Correctly test thrown errors.
          sinon.stub(courier, '_error', function () { return; });

          mapper.getFieldsFromMapping(source, function (err, mapping) {
            expect(courier._error.calledOnce);
            done();
          });
        });

        it('has an ignoreFields that sets the type of a field to "ignore"', function (done) {
          mapper.getFields(source, function (err, mapping) {
            mapper.getFieldMapping(source, 'foo.bar', function (err, field) {
              expect(field.type).to.be('string');
              mapper.ignoreFields(source, 'foo.bar', function (err, mapping) {
                expect(mapping['foo.bar'].type).to.be('ignore');
                done();
              });
            });
          });
        });
      });
    });
  };
});

View file

@@ -1,48 +0,0 @@
define(function (require) {
  var createCourier = require('test_utils/create_courier');
  var _ = require('lodash');

  return function extendCourierSuite() {
    describe('source merging', function () {
      it('merges the state of one data source with its parents', function () {
        var courier = createCourier();

        var root = courier.createSource('search')
          .index('people')
          .type('students')
          .filter({
            term: {
              school: 'high school'
            }
          });

        var math = courier.createSource('search')
          .inherits(root)
          .filter({
            terms: {
              classes: ['algebra', 'calculus', 'geometry'],
              execution: 'or'
            }
          })
          .on('results', _.noop);

        var query = math._flatten();

        expect(query.index).to.eql('people');
        expect(query.type).to.eql('students');
        expect(query.body).to.eql({
          query: {
            filtered: {
              query: { 'match_all': {} },
              filter: { bool: {
                must: [
                  { terms: { classes: ['algebra', 'calculus', 'geometry'], execution: 'or' } },
                  { term: { school: 'high school' } }
                ]
              } }
            }
          }
        });
      });
    });
  };
});

View file

@@ -1,80 +0,0 @@
define(function (require) {
  var createCourier = require('test_utils/create_courier');
  var sinon = require('test_utils/auto_release_sinon');
  var stubbedClient = require('test_utils/stubbed_client');
  var _ = require('lodash');

  return function extendCourierSuite() {
    var HastyRefresh;
    before(function () {
      inject(function (couriersErrors) {
        HastyRefresh = couriersErrors.HastyRefresh;
      });
    });

    describe('#start', function () {
      it('triggers a fetch and begins the fetch cycle', function (done) {
        var clock = sinon.useFakeTimers();
        var client = stubbedClient();
        var courier = createCourier({
          client: client
        });

        // TODO: check that tests that listen for results and call courier.fetch are running async
        courier
          .createSource('search')
          .on('results', function () { done(); });

        courier.start();
        expect(client.callCount).to.equal(1); // just msearch, no mget
        expect(clock.timeoutCount()).to.equal(2); // one for search and one for doc
      });

      it('restarts the courier if it is already running', function () {
        var clock = sinon.useFakeTimers();
        var courier = createCourier({
          client: stubbedClient()
        });

        courier.on('error', function (err) {
          // since we are calling start before the first query returns
          expect(err).to.be.a(HastyRefresh);
        });

        // set the intervals to known values
        courier.fetchInterval(10);
        courier.docInterval(10);

        courier.start();
        // one for doc, one for search
        expect(clock.timeoutCount()).to.eql(2);
        // timeouts should be scheduled for 10 ticks out
        expect(_.where(clock.timeoutList(), { callAt: 10 }).length).to.eql(2);

        clock.tick(1);
        courier.start();
        // still two
        expect(clock.timeoutCount()).to.eql(2);
        // but new timeouts, due to tick(1);
        expect(_.where(clock.timeoutList(), { callAt: 11 }).length).to.eql(2);
      });
    });

    describe('#stop', function () {
      it('cancels current and future fetches', function () {
        var clock = sinon.useFakeTimers();
        var courier = createCourier({
          client: stubbedClient()
        });

        courier.start();
        expect(clock.timeoutCount()).to.eql(2);
        courier.stop();
        expect(clock.timeoutCount()).to.eql(0);
      });
    });
  };
});