Преглед изворни кода

Merge remote-tracking branch 'origin/master' into stash_mongo25

Conflicts:
	README.md
	lib/index.js
	lib/pipeline/Value.js
	lib/pipeline/documentSources/SortDocumentSource.js
	lib/pipeline/expressions/NaryExpression.js
	lib/pipeline/expressions/SubtractExpression.js
	npm_scripts/test/test.sh
	test/lib/aggregate.js
	test/lib/pipeline/Pipeline.js
	test/lib/pipeline/expressions/FieldRangeExpression.js
	test/lib/pipeline/expressions/NaryExpression.js
Kyle P Davis пре 11 година
родитељ
комит
80f1f3ed57

+ 26 - 0
NOTES.md

@@ -0,0 +1,26 @@
+Differences
+-----------
+Here is a list of the major items where we have deviated from the MongoDB code and a little bit about why:
+
+* **General**
+	* DESIGN: A lot of these things are packed into a single `.cpp` file in the MongoDB code but to keep things clean and separate they have been broken out into files named the same and only rarely is there more than one class within a single file
+	* `BSON` vs `JSON`
+	* DESIGN: Basically all of the `BSON`-specific code has become equivalent `JSON`-specific code since that's what we're working with (no need for needless conversions)
+	* DESIGN: A lot of these have a `addToBson...` and other `BSONObjBuilder`-related methods that take in an instance to be modified but it's owned by the caller; in `mungedb-aggregate` we build a new `Object` and return it because it's simpler and that's how they're generally used anyhow
+	* TESTING: Many of the tests have been written without the use of the testing base classes as they are in the MongoDB code to try and simplify and make things more clear (but never less complete)
+* **Pipeline components**
+	* `Document` class
+	* DESIGN: `Document` now provides static helpers rather than instance helpers to avoid unnecessary boxing/unboxing since that seems to make more sense here (we treat any `Object` like a `Document`)
+	* `Value` class
+	* DESIGN: `Value` now provides static helpers rather than instance helpers to avoid unnecessary boxing/unboxing since that seems to make more sense here (we treat any `Object` like a `Value`)
+	* NAMING: `Value#get{TYPE}` methods have been renamed to `Value.verify{TYPE}` since that seemed to make more sense given what they're really doing for us as statics
+	* DESIGN: `Value.coerceToDate` static returns a JavaScript `Date` object rather than milliseconds since that seems to make more sense where possible
+	* `Expression` classes
+	* `Expression` base class
+		* DESIGN: The nested `ObjectCtx` class no longer uses constants and bitmask flags, instead it takes an `Object` with similarly named `Boolean`s; e.g., `{isDocumentOk:true}` rather than `DOCUMENT_OK`
+	* NAMING: The `Expression{FOO}` classes have all been renamed to `{FOO}Expression` to satisfy my naming OCD.
+	* DESIGN: The `{FOO}Expression` classes do not provide `create` statics since calling new is easy enough
+		* DESIGN: To further this, the `CompareExpression` class doesn't provide any of its various `create{FOO}` helpers; to compensate I am just binding the appropriate args to the `constructor` to create a similar factory
+	* `DocumentSource` classes
+	* DESIGN: We have implemented a `reset` method for all document sources so that we can reuse them against different streams of data
+	* DESIGN: GroupDocumentSource stores copies of all unique _id's that it accumulates to dodge a javascript Stringify/Parse issue with dates

+ 57 - 110
README.md

@@ -1,129 +1,76 @@
 mungedb-aggregate
 =================
-A JavaScript data aggregation pipeline based on the MongoDB aggregation framework.
+A JavaScript data aggregation pipeline based on the [MongoDB][MongoDB]
+database's [aggregation framework][Aggregation].
 
-In general, this code is a port from the MongoDB C++ code (v2.4.0) to JavaScript.
+Based on the MongoDB C++ code (v2.4.0).
 
+Updating to v2.6 soon.
 
-WHY?!
------
-MongoDB is awesome. JavaScript is awesome(ish). So we decided to put them together.
 
-Now, with the ease of JavaScript and the power of the MongoDB aggregation pipeline, we can provide a single API for data munging, regardless of where execution occurs.
 
+Why
+---
+MongoDB Aggregation and JavaScript are both awesome. We put them together.
 
-EXAMPLE
+Now, with the ease of JavaScript and the power of the MongoDB aggregation
+pipeline, we can provide a single API for data munging, regardless of where
+execution occurs. You can extend the base functionality to suit your needs.
+
+
+
+Example
 -------
-```
-// REQUIRE STUFF
-var assert = require("assert");
+```javascript
 var aggregate = require("mungedb-aggregate");
 
-// SETUP SOME VARIABLES
-var inputs = [{val:1}, {val:2}, {val:3}, {val:4}, {val:5}],
-	pipeline = [
-		{$match:{
-			val: {$gte:3}
-		}},
-		{$project:{
-			square: {$multiply:["$val", "$val"]}
-		}}
-	];
-
-// SINGLE INPUT USAGE
-aggregate(pipeline, inputs, function(err, results){
-	assert.deepEqual(results, [{square:9}, {square:16}, {square:25}]);	// look ma, no server!
-});
-
-// MULTI INPUT USAGE
-var aggregator = aggregate(pipeline);
-aggregator(inputs, function(err, results){
-	assert.deepEqual(results, [{square:9}, {square:16}, {square:25}]);	// look ma, no server!
-});
-aggregator(inputs, function(err, results){
-	assert.deepEqual(results, [{square:9}, {square:16}, {square:25}]);	// look ma, no server!
-});
-
+var inputs = [
+  {v: 1},
+  {v: 2},
+  {v: 3},
+  {v: 4},
+  {v: 5}
+];
+
+var pipeline = [
+  {$match:{
+    v: {$gte: 3}
+  }},
+  {$project:{
+    v2: {$multiply: ["$v", "$v"]}
+  }}
+];
+
+aggregate(pipeline, inputs);  // => [{v2:9}, {v2:16}, {v2:25}]
 ```
 
 
-EXPORTS
--------
-Main publics:
+API
+---
+
+Public parts:
 
-* `aggregate` -- this is also the root of the package exports
-* `version`  --  The MongoDB version that this code represents
-* `gitVersion`  --  And, if you want to get really specific, the MongoDB git version that this code represents
+* `aggregate(pipeline, [inputs], [callback])` - The data aggregator
+ - `pipeline`   - The [aggregation][Aggregation] pipeline to apply to `inputs`
+ - `[inputs]`   - The input Objects to aggregate or return curried if omitted
+ - `[callback]` - The callback if needed (for extensions using async calls)
+* `version` - The MongoDB version that this code represents
+* `gitVersion` - The MongoDB git revision that this code represents
 
 Inner workings:
 
-* `Cursor` -- Used internally to go through data (by `PipelineD` and `CursorDocumentSource`)
+* `Cursor` - Used to go thru data (by `PipelineD` and `CursorDocumentSource`)
 * `pipeline`
-  * `Pipeline`  --  The pipeline handler
-  * `PipelineD`  --  The pipeline data reader helper
-  * `FieldPath`  --  Represents a path to a field within a document
-  * `Document`  --  Document helpers used throughout the code
-  * `Value`  --  Value helpers used throughout the code
-  * `accumulators`  --  All of the `Accumulator` classes, which are used for `$group`
-  * `documentSources`  --  All of the `DocumentSource` classes, which are used as the top-level pipeline components / stages
-  * `expressions`  --  All of the `Expression` classes, which are used as the building blocks for many things, but especially for `$project`
-
-
-DEVIATIONS
-----------
-Here is a list of the major items where we have deviated from the MongoDB code and a little bit about why:
-
-  * **General**
-    * DESIGN: A lot of these things are packed into a single `.cpp` file in the MongoDB code but to keep things clean and separate they have been broken out into files named the same and only rarely is there more than one class within a single file
-    * `BSON` vs `JSON`
-      * DESIGN: Basically all of the `BSON`-specific code has become equivalent `JSON`-specific code since that's what we're working with (no need for needless conversions)
-      * DESIGN: A lot of these have a `addToBson...` and other `BSONObjBuilder`-related methods that take in an instance to be modified but it's owned by the caller; in `mungedb-aggregate` we build a new `Object` and return it because it's simpler and that's how they're generally used anyhow
-    * TESTING: Many of the tests have been written without the use of the testing base classes as they are in the MongoDB code to try and simplify and make things more clear (but never less complete)
-  * **Pipeline components**
-    * `Document` class
-      * DESIGN: `Document` now provides static helpers rather than instance helpers to avoid unecessary boxing/unboxing since that seems to make more sense here (we treat any `Object` like a `Document`)
-    * `Value` class
-      * DESIGN: `Value` now provides static helpers rather than instance helpers to avoid unecessary boxing/unboxing since that seems to make more sense here (we treat any `Object` like a `Value)
-      * NAMING: `Value#get{TYPE}` methods have been renamed to `Value.verify{TYPE}` since that seemed to make more sense given what they're really doing for us as statics
-      * DESIGN: `Value.coerceToDate` static returns a JavaScript `Date` object rather than milliseconds since that seems to make more sense where possible
-    * `Expression` classes
-      * `Expression` base class
-        * DESIGN: The nested `ObjectCtx` class no longer uses contants and bitmask flags, instead it takes an `Object` with similarly named `Boolean`s; e.g., `{isDocumentOk:true}` rather than `DOCUMENT_OK`
-      * NAMING: The `Expression{FOO}` classes have all been renamed to `{FOO}Expression` to satisfy my naming OCD.
-      * DESIGN: The `{FOO}Expression` classes do not provide `create` statics since calling new is easy enough
-        * DESIGN: To further this, the `CompareExpression` class doesn't provide any of it's various `create{FOO}` helpers so compensate I am just binding the appropriate args to the `constructor` to create a similar factory
-    * `DocumentSource` classes
-      * DESIGN: We have implemented a `reset` method for all document sources so that we can reuse them against different streams of data
-	  * DESIGN: GroupDocumentSource stores copies of all unique _id's that it accumulates to dodge a javascript Stringify/Parse issue with dates
-	  * DESIGN: We have implemented an EOF class to signify the end of document streams where Mongo uses boost::none
-  * **2.5 Matcher components**
-    * `ElementPath`
-	  * `getFieldDottedOrArray: ElementPath.getFieldDottedOrArray (was unattached function, now static function on ElementPath class)
-	  * `isAllDigits`: ElementPath.isAllDigits ( now static function )
-	  * `elementPath iteration`: This is now encapsulated inside the elementPath class as a static method. It needs an input function to check the item at the end of the path.
-	* `Status` return
-	  * Status objects are now generic objects having a 'code' property that is a string of the error code that mongo uses (e.g. {code:'OK'}).
-	  * StatusWith is now rolled into status as a result field. status.getValue() is now status.result
-	* `debugString`
-	  * debugString takes one argument 'level', as does debugAddSpace. Debug statements are printed to console.debug()
-    * `.get()`, `.release()`, and `.reset()` methods
-	  * All of these are for manual memory management, and are no longer necessary
-
-
-TODO
-----
-Here is a list of global items that I know about that may need to be done in the future:
-
-  * Go through the TODOs....
-  * NAMING: need to go back through and make sure that places referencing <Document> in the C++ code are represented here by referencing a var called "doc" or similar
-  * Go through test cases and try to turn `assert.equal()` calls into `assert.strictEqual()` calls
-  * Go through and modify classes to use advanced OO property settings properly (`seal`, `freeze`, etc.) where appropriate
-  * Make sure that nobody is using private (underscored) variables that they shouldn't be ...might have broken encapsulation somewhere along the way...
-  * Make sure that all of the pure `virtual`s (i.e., `/virtual .* = 0;$/`) are implemented as a proto with a throw new Error("NOT IMPLEMENTED BY INHERITOR") or similar
-  * Go through uses of `throw` and make them actually use `UserException` vs `SystemException` (or whatever they're called)
-  * Currently using the `sift` package to fake the `MatchDocumentSource` class but need to actually port the real code
-  * Async support has been partially implemented but this needs to go deeper into the APIs; all layers need async capabilities (but not requirements), and layers that require it but recieve no callback should throw an Error()
-  * Consider ditching `PipelineD` entirely here; might be more confusing than helpful and can still achieve the same results with ease
-  * Setup a browserify build step to create a browser version of this or something
-  * $group and $group.$addToSet both use JSON.stringify for key checks but really need a deepEqual (via Document.compare) or maybe use jsonplus (faster?) ... fix me now!
-  * Consider moving async stuff out of here and up to a higher level package if possible just to keep things clean and as close to the MongoDB implementations as possible
+  - `Pipeline`        - The pipeline handler
+  - `PipelineD`       - The pipeline data reader helper
+  - `FieldPath`       - Represents a path to a field within a document
+  - `Document`        - Document helpers used throughout the code
+  - `Value`           - Value helpers used throughout the code
+  - `accumulators`    - The `Accumulator` classes (used in `$group`)
+  - `documentSources` - The `DocumentSource` classes (upper pipeline objects)
+  - `expressions`     - The `Expression` classes (used in `$project`)
+
+
+
+[MongoDB]: http://www.mongodb.org
+[Aggregation]: http://docs.mongodb.org/manual/core/aggregation-introduction/

+ 16 - 0
TODO.md

@@ -0,0 +1,16 @@
+TODO
+----
+Here is a list of global items that I know about that may need to be done in the future:
+
+* NAMING: need to go back through and make sure that places referencing <Document> in the C++ code are represented here by referencing a var called "doc" or similar
+* Go through test cases and try to turn `assert.equal()` calls into `assert.strictEqual()` calls
+* Go through and modify classes to use advanced OO property settings properly (`seal`, `freeze`, etc.) where appropriate
+* Make sure that nobody is using private (underscored) variables that they shouldn't be ...might have broken encapsulation somewhere along the way...
+* Make sure that all of the pure `virtual`s (i.e., `/virtual .* = 0;$/`) are implemented as a proto with a throw new Error("NOT IMPLEMENTED BY INHERITOR") or similar
+* Go through uses of `throw` and make them actually use `UserException` vs `SystemException` (or whatever they're called)
+* Currently using the `sift` package to fake the `MatchDocumentSource` class but need to actually port the real code
+* Async support has been partially implemented but this needs to go deeper into the APIs; all layers need async capabilities (but not requirements), and layers that require it but receive no callback should throw an Error()
+* Consider ditching `PipelineD` entirely here; might be more confusing than helpful and can still achieve the same results with ease
+* Setup a browserify build step to create a browser version of this or something
+* $group and $group.$addToSet both use JSON.stringify for key checks but really need a deepEqual (via Document.compare) or maybe use jsonplus (faster?) ... fix me now!
+* Consider moving async stuff out of here and up to a higher level package if possible just to keep things clean and as close to the MongoDB implementations as possible

+ 6 - 4
lib/Cursor.js

@@ -6,19 +6,21 @@
  **/
 var klass = module.exports = function Cursor(items){
 	if (!(items instanceof Array)) throw new Error("arg `items` must be an Array");
-	this.cachedData = items.slice(0);	// keep a copy
+	this.cachedData = items.slice(0);	// keep a copy so array changes when using async doc srcs do not cause side effects
+	this.length = items.length;
+	this.offset = 0;
 }, base = Object, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
 
 proto.ok = function ok(){
-	return this.cachedData.length > 0 || this.hasOwnProperty("curr");
+	return (this.offset < this.length) || this.hasOwnProperty("curr");
 };
 
 proto.advance = function advance(){
-	if (this.cachedData.length === 0){
+	if (this.offset >= this.length){
 		delete this.curr;
 		return false;
 	}
-	this.curr = this.cachedData.shift();
+	this.curr = this.cachedData[this.offset++];
 	return this.curr;
 };
 

+ 33 - 19
lib/index.js

@@ -11,29 +11,42 @@
  * @method aggregate
  * @namespace mungedb
  * @module mungedb-aggregate
- * @param pipeline  {Array}  The list of pipeline document sources in JSON format
- * @param [inputs]  {Array}  Optional inputs to pass through the `docSrcs` pipeline
- * @param [callback]               {Function}                                 Optional callback if using async extensions, called when done
+ * @param pipeline  {Array}   The list of pipeline document sources in JSON format
+ * @param [ctx]     {Object}  Optional context object to pass through to pipeline
+ * @param [inputs]  {Array}   Optional inputs to pass through the `docSrcs` pipeline
+ * @param [callback]             {Function}                                 Optional callback if using async extensions, called when done
  * @param   callback.err           {Error}                                    The Error if one occurred
  * @param   callback.docs          {Array}                                    The resulting documents
  **/
-exports = module.exports = function aggregate(pipeline, inputs, callback) {	// function-style interface; i.e., return the utility function directly as the require
-	var ctx = {}, //not used yet
-		pipelineInst,
-		aggregator = function aggregator(inputs, callback) {
+exports = module.exports = function aggregate(pipeline, ctx, inputs, callback) {	// function-style interface; i.e., return the utility function directly as the require
+	var DocumentSource = exports.pipeline.documentSources.DocumentSource;
+	if (ctx instanceof Array || ctx instanceof DocumentSource) callback = inputs, inputs = ctx, ctx = {};
+	var pipelineInst = exports.pipeline.Pipeline.parseCommand({
+			pipeline: pipeline
+		}, ctx),
+		aggregator = function aggregator(ctx, inputs, callback) {
+			if (ctx instanceof Array || ctx instanceof DocumentSource) callback = inputs, inputs = ctx, ctx = {};
+			if (!callback) callback = exports.SYNC_CALLBACK;
 			if (!inputs) return callback("arg `inputs` is required");
 
 			// rebuild the pipeline on subsequent calls
-			pipelineInst = exports.pipeline.Pipeline.parseCommand({
-				pipeline: pipeline
-			}, ctx);
+			if (!pipelineInst) {
+				pipelineInst = exports.pipeline.Pipeline.parseCommand({
+					pipeline: pipeline
+				}, ctx);
+			}
 
 			// use or build input src
-			try{
-				ctx.ns = inputs;	//NOTE: use the given `inputs` directly; hacking so that the cursor source will be our inputs instead of the context namespace
-				exports.pipeline.PipelineD.prepareCursorSource(pipelineInst, ctx);
-			}catch(err){
-				return callback(err);
+			var src;
+			if(inputs instanceof DocumentSource){
+				src = inputs;
+			}else{
+				try{
+					ctx.ns = inputs;	//NOTE: use the given `inputs` directly; hacking so that the cursor source will be our inputs instead of the context namespace
+					src = exports.pipeline.PipelineD.prepareCursorSource(pipelineInst, ctx);
+				}catch(err){
+					return callback(err);
+				}
 			}
 
 			var runCallback;
@@ -42,9 +55,9 @@ exports = module.exports = function aggregate(pipeline, inputs, callback) {	// f
 				pipelineInst.SYNC_MODE = true;
 			} else {
 				runCallback = function aggregated(err, results){
-					if(err) return callback(err);
-					return callback(null, results.result);
-				};
+				if(err) return callback(err);
+				return callback(null, results.result);
+		};
 			}
 
 			// run the pipeline against
@@ -52,13 +65,14 @@ exports = module.exports = function aggregate(pipeline, inputs, callback) {	// f
 			var results = pipelineInst.run(runCallback);
 			return results ? results.result : undefined;
 		};
-	if(inputs) return aggregator(inputs, callback);
+	if(inputs) return aggregator(ctx, inputs, callback);
 	return aggregator;
 };
 
 // sync callback for aggregate if none was provided
 exports.SYNC_CALLBACK = function(err, docs){
 	if (err) throw err;
+	return docs;
 };
 
 // package-style interface; i.e., return a function underneath of the require

+ 26 - 18
lib/pipeline/Value.js

@@ -45,6 +45,16 @@ klass._coerceToNumber = function _coerceToNumber(value) { //NOTE: replaces .coer
 		return 0;
 	case "number":
 		return value;
+	case "object":
+		switch (value.constructor.name) {
+			case "Long":
+				return parseInt(value.toString(), 10);
+			case "Double":
+				return parseFloat(value.value, 10);
+			default:
+				throw new Error("can't convert from BSON type " + value.constructor.name + " to int; codes 16003, 16004, 16005");
+		}
+		return value;
 	default:
 		throw new Error("can't convert from BSON type " + typeof(value) + " to int; codes 16003, 16004, 16005");
 	}
@@ -82,27 +92,22 @@ klass.coerceToString = function coerceToString(value) {
  **/
 var Document;  // loaded lazily below //TODO: a dirty hack; need to investigate and clean up
 klass.compare = function compare(l, r) {
-	var lt = typeof(l),
-		rt = typeof(r);
+	//NOTE: deviation from mongo code: we have to do some coercing for null "types" because of javascript
+	var lt = l === null ? "null" : typeof(l),
+		rt = r === null ? "null" : typeof(r),
+		ret;
+
+	// NOTE: deviation from mongo code: javascript types do not work quite the same, so for proper results we always canonicalize, and we don't need the "speed" hack
+	ret = (klass.cmp(klass.canonicalize(l), klass.canonicalize(r)));
+
+	if(ret !== 0) return ret;
 
-	// Special handling for Undefined and NULL values ...
-	if (lt === "undefined") {
-		if (rt === "undefined") return 0;
-		return -1;
-	}
-	if (l === null) {
-		if (rt === "undefined") return 1;
-		if (r === null) return 0;
-		return -1;
-	}
-	// We know the left value isn't Undefined, because of the above. Count a NULL value as greater than an undefined one.
-	if (rt === "undefined" || r === null) return 1;
 	// Numbers
 	if (lt === "number" && rt === "number"){
 		//NOTE: deviation from Mongo code: they handle NaN a bit differently
 		if (isNaN(l)) return isNaN(r) ? 0 : -1;
 		if (isNaN(r)) return 1;
-		return l < r ? -1 : l > r ? 1 : 0;
+		return klass.cmp(l,r);
 	}
 	// Compare MinKey and MaxKey cases
 	if(l.constructor && l.constructor.name in {'MinKey':1,'MaxKey':1} ){
@@ -127,9 +132,12 @@ klass.compare = function compare(l, r) {
 	case "number":
 		throw new Error("number types should have been handled earlier!");
 	case "string":
-		return l < r ? -1 : l > r ? 1 : 0;
+		return klass.cmp(l,r);
 	case "boolean":
 		return l == r ? 0 : l ? 1 : -1;
+	case "undefined": //NOTE: deviation from mongo code: we are comparing null to null or undefined to undefined (otherwise the ret stuff above would have caught it)
+	case "null":
+		return 0;
 	case "object":
 		if (l instanceof Array) {
 			for (var i = 0, ll = l.length, rl = r.length; true ; ++i) {
@@ -144,8 +152,8 @@ klass.compare = function compare(l, r) {
 
 			throw new Error("logic error in Value.compare for Array types!");
 		}
-		if (l instanceof Date) return l < r ? -1 : l > r ? 1 : 0;
-		if (l instanceof RegExp) return l < r ? -1 : l > r ? 1 : 0;
+		if (l instanceof Date) return klass.cmp(l,r);
+		if (l instanceof RegExp) return klass.cmp(l,r);
 		if (Document === undefined) Document = require("./Document");	//TODO: a dirty hack; need to investigate and clean up
 		return Document.compare(l, r);
 	default:

+ 3 - 3
lib/pipeline/documentSources/SortDocumentSource.js

@@ -185,15 +185,15 @@ proto.populate = function populate(callback) {
 			return next !== DocumentSource.EOF;
 		},
 		function(err) {
-			/* sort the list */
+	/* sort the list */
 			self.documents.sort(SortDocumentSource.prototype.compare.bind(self));
 
-			/* start the sort iterator */
+	/* start the sort iterator */
 			self.docIterator = 0;
 
 			self.populated = true;
 			return callback();
-		}
+	}
 	);
 };
 

+ 11 - 11
lib/pipeline/expressions/NaryExpression.js

@@ -12,8 +12,8 @@ var Expression = require("./Expression");
 
 var NaryExpression = module.exports = function NaryExpression(){
     if (arguments.length !== 0) throw new Error("Zero args expected");
-    this.operands = [];
-    base.call(this);
+	this.operands = [];
+	base.call(this);
 }, klass = NaryExpression, base = Expression, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
 
 klass.parse = function(SubClass) {
@@ -58,7 +58,7 @@ var ConstantExpression = require("./ConstantExpression");
 proto.evaluate = undefined; // evaluate(doc){ ... defined by inheritor ... }
 
 proto.getOpName = function getOpName(doc){
-        throw new Error("NOT IMPLEMENTED BY INHERITOR");
+	throw new Error("NOT IMPLEMENTED BY INHERITOR");
 };
 
 proto.optimize = function optimize(){
@@ -68,18 +68,18 @@ proto.optimize = function optimize(){
     for(var ii = 0; ii < n; ii++) {
         if(this.operands[ii] instanceof ConstantExpression) {
             constantCount++;
-        } else {
+		} else {
             this.operands[ii] = this.operands[ii].optimize();
-        }
-    }
+						}
+					}
 
     if(constantCount === n) {
         return new ConstantExpression(this.evaluateInternal({}));
-    }
+				}
 
     if(!this.isAssociativeAndCommutative) {
         return this;
-    }
+	}
 
     // Flatten and inline nested operations of the same type
 
@@ -101,8 +101,8 @@ proto.optimize = function optimize(){
 };
 
 proto.addDependencies = function addDependencies(deps){
-        for(var i = 0, l = this.operands.length; i < l; ++i)
-                this.operands[i].addDependencies(deps);
+	for(var i = 0, l = this.operands.length; i < l; ++i)
+		this.operands[i].addDependencies(deps);
 };
 
 /**
@@ -111,7 +111,7 @@ proto.addDependencies = function addDependencies(deps){
  * @param pExpression the expression to add
  **/
 proto.addOperand = function addOperand(expr) {
-        this.operands.push(expr);
+	this.operands.push(expr);
 };
 
 proto.serialize = function serialize() {

+ 9 - 9
lib/pipeline/expressions/SubtractExpression.js

@@ -1,6 +1,6 @@
 "use strict";
 
-/**
+/** 
  * A $subtract pipeline expression.
  * @see evaluateInternal
  * @class SubtractExpression
@@ -8,9 +8,9 @@
  * @module mungedb-aggregate
  * @constructor
  **/
-var SubtractExpression = module.exports = function SubtractExpression() {
+var SubtractExpression = module.exports = function SubtractExpression(){
     this.nargs = 2;
-    base.call(this);
+	base.call(this);
 }, klass = SubtractExpression,
     base = require("./NaryExpression"),
     proto = klass.prototype = Object.create(base.prototype, {
@@ -24,18 +24,18 @@ var Value = require("../Value"),
                 Expression = require("./Expression");
 
 // PROTOTYPE MEMBERS
-proto.getOpName = function getOpName() {
-                return "$subtract";
+proto.getOpName = function getOpName(){
+	return "$subtract";
 };
 
-/**
- * Takes an array that contains a pair of numbers and subtracts the second from the first, returning their difference.
- **/
+/** 
+* Takes an array that contains a pair of numbers and subtracts the second from the first, returning their difference. 
+**/
 proto.evaluateInternal = function evaluateInternal(vars) {
                 var left = this.operands[0].evaluateInternal(vars),
                                 right = this.operands[1].evaluateInternal(vars);
                 if (left instanceof Date || right instanceof Date) throw new Error("$subtract does not support dates; code 16376");
-                return left - right;
+	return left - right;
 };
 
 /** Register Expression */

+ 68 - 69
npm_scripts/test/test.sh

@@ -3,7 +3,7 @@
 # Configuration occurs either through the environment variables set thru the
 # config section of the package.json file or via identical command line options.
 ###############################################################################
-CMD_PWD=$(pwd)
+set -e
 CMD="$0"
 CMD_DIR=$(cd "$(dirname "$CMD")"; pwd)
 
@@ -18,7 +18,7 @@ BUILD_SYSTEM=$BAMBOO_HOME
 # Shortcut for running echo and then exit
 die() {
 	echo "$1" 1>&2
-	[ -n "$2" ] && exit $2 || exit 1
+	[ "$2" ] && exit "$2" || exit 1
 }
 # Show help function to be used below
 show_help() {
@@ -35,20 +35,20 @@ NARGS=-1; while [ "$#" -ne "$NARGS" ]; do NARGS=$#; case $1 in
 	-h|--help)        # This help message
 		show_help; exit 1; ;;
 	-d|--debug)       # Enable debugging messages (implies verbose)
-		DEBUG=$(( $DEBUG + 1 )) && VERBOSE="$DEBUG" && shift && echo "#-INFO: DEBUG=$DEBUG (implies VERBOSE=$VERBOSE)"; ;;
+		DEBUG=$(( DEBUG + 1 )) && VERBOSE="$DEBUG" && shift && echo "#-INFO: DEBUG=$DEBUG (implies VERBOSE=$VERBOSE)"; ;;
 	-v|--verbose)     # Enable verbose messages
-		VERBOSE=$(( $VERBOSE + 1 )) && shift && echo "#-INFO: VERBOSE=$VERBOSE"; ;;
+		VERBOSE=$(( VERBOSE + 1 )) && shift && echo "#-INFO: VERBOSE=$VERBOSE"; ;;
 	-S|--no-syntax)   # Disable syntax tests
-		NO_SYNTAX=$(( $NO_SYNTAX + 1 )) && shift && echo "#-INFO: NO_SYNTAX=$NO_SYNTAX"; ;;
+		NO_SYNTAX=$(( NO_SYNTAX + 1 )) && shift && echo "#-INFO: NO_SYNTAX=$NO_SYNTAX"; ;;
 	-U|--no-unit)     # Disable unit tests
-		NO_UNIT=$(( $NO_UNIT + 1 )) && shift && echo "#-INFO: NO_UNIT=$NO_UNIT"; ;;
+		NO_UNIT=$(( NO_UNIT + 1 )) && shift && echo "#-INFO: NO_UNIT=$NO_UNIT"; ;;
 	-C|--no-coverage) # Enable coverage tests
-		NO_COVERAGE=$(( $NO_COVERAGE + 1 )) && shift && echo "#-INFO: NO_COVERAGE=$NO_COVERAGE"; ;;
+		NO_COVERAGE=$(( NO_COVERAGE + 1 )) && shift && echo "#-INFO: NO_COVERAGE=$NO_COVERAGE"; ;;
 	-B|--build-system) # Enable options needed for the build system
-		BUILD_SYSTEM=$(( $BUILD_SYSTEM + 1 )) && shift && echo "#-INFO: BUILD_SYSTEM=$BUILD_SYSTEM"; ;;
+		BUILD_SYSTEM=$(( BUILD_SYSTEM + 1 )) && shift && echo "#-INFO: BUILD_SYSTEM=$BUILD_SYSTEM"; ;;
 	# PAIRS
 #	-t|--thing)	 # Set a thing to a value (DEFAULT: $THING)
-#		shift && THING="$1" && shift && [ -n "$VERBOSE" ] && echo "#-INFO: THING=$THING"; ;;
+#		shift && THING="$1" && shift && [ "$VERBOSE" ] && echo "#-INFO: THING=$THING"; ;;
 esac; done
 
 ###############################################################################
@@ -57,11 +57,11 @@ esac; done
 
 # Enable debug messages in silly mode
 [ "$npm_config_loglevel" = "silly" ] && DEBUG=1
-[ -n "$DEBUG" ] && set -x
+[ "$DEBUG" ] && set -x
 
 # Show all of the package config variables for debugging if non-standard loglevel
-[ -n "$npm_config_loglevel" ] && [ "$npm_config_loglevel" != "http" ] && VERBOSE=1
-[ -n "$VERBOSE" ] && env | egrep -i '^(npm|jenkins)_' | sort | sed 's/^/#-INFO: /g'
+[ "$npm_config_loglevel" -a "$npm_config_loglevel" != "http" ] && VERBOSE=1
+[ "$VERBOSE" ] && env | egrep -i '^(npm|jenkins)_' | sort | sed 's/^/#-INFO: /g'
 
 # Change to root directory of package
 cd "$CMD_DIR/../../"	 # assuming that this is $PKG_ROOT/npm_scripts/MyAwesomeScript/MyAwesomeScript.sh or similar
@@ -72,31 +72,31 @@ cd "$CMD_DIR/../../"	 # assuming that this is $PKG_ROOT/npm_scripts/MyAwesomeScr
 
 # Determining package name
 PKG_NAME="$npm_package_name"
-[ -n "$PKG_NAME" ] || PKG_NAME="$npm_config_package_name"
-[ -n "$PKG_NAME" ] || PKG_NAME=$(node -e 'console.log(require("./package.json").name)')
-[ -n "$PKG_NAME" ] || die "ERROR: Unable to determine package name! Broken package?"
+[ "$PKG_NAME" ] || PKG_NAME="$npm_config_package_name"
+[ "$PKG_NAME" ] || PKG_NAME=$(node -e 'console.log(require("./package.json").name)')
+[ "$PKG_NAME" ] || die "ERROR: Unable to determine package name! Broken package?"
 
 # Determine code directory
 CODE_DIR="$npm_package_config_code_dir"
-[ -n "$CODE_DIR" ] && [ -d "$CODE_DIR" ] || CODE_DIR="$npm_config_default_code_dir"
-[ -n "$CODE_DIR" ] && [ -d "$CODE_DIR" ] || CODE_DIR="lib"
-[ -n "$CODE_DIR" ] && [ -d "$CODE_DIR" ] || die "ERROR: Unable to find code directory at \"$CODE_DIR\"!"
+[ "$CODE_DIR" -a -d "$CODE_DIR" ] || CODE_DIR="$npm_config_default_code_dir"
+[ "$CODE_DIR" -a -d "$CODE_DIR" ] || CODE_DIR="lib"
+[ "$CODE_DIR" -a -d "$CODE_DIR" ] || die "ERROR: Unable to find code directory at \"$CODE_DIR\"!"
 CODE_DIR=$(echo "$CODE_DIR" | sed 's/\/$//')	# remove trailing slash
-[ -n "$VERBOSE" ] && echo "CODE_DIR=$CODE_DIR"
+[ "$VERBOSE" ] && echo "CODE_DIR=$CODE_DIR"
 
 # Determine test directory
 TEST_DIR="$npm_package_config_test_dir"
-[ -n "$TEST_DIR" ] && [ -d "$TEST_DIR" ] || TEST_DIR="$npm_config_default_test_dir"
-[ -n "$TEST_DIR" ] && [ -d "$TEST_DIR" ] || TEST_DIR="test/lib"
+[ "$TEST_DIR" -a -d "$TEST_DIR" ] || TEST_DIR="$npm_config_default_test_dir"
+[ "$TEST_DIR" -a -d "$TEST_DIR" ] || TEST_DIR="test/lib"
 [ -d "$TEST_DIR" ] || die "ERROR: Unable to find test directory at \"$TEST_DIR\"!"
 TEST_DIR=$(echo "$TEST_DIR" | sed 's/\/$//')	# remove trailing slash
-[ -n "$VERBOSE" ] && echo "TEST_DIR=$TEST_DIR"
+[ "$VERBOSE" ] && echo "TEST_DIR=$TEST_DIR"
 
 # Helper to check if given file is valid XML
 XMLLINT_BIN=$(which xmllint || true)
 validate_xml() {
 	REPORT_FILE="$1"
-	if [ -n "$XMLLINT_BIN" ]; then
+	if [ "$XMLLINT_BIN" ]; then
 		if [ -s "$REPORT_FILE" ]; then
 			"$XMLLINT_BIN" --noout "$REPORT_FILE" || die "ERROR: Invalid XML in \"$REPORT_FILE\"!"
 		else
@@ -114,16 +114,16 @@ if [ -z "$NO_SYNTAX" ]; then
 
 	# Deps
 	JSHINT_BIN="$npm_package_config_jshint_bin"
-	#[ -n "$JSHINT_BIN" ] && [ -x "$JSHINT_BIN" ] || JSHINT_BIN=$(which jshint || true)
-	[ -n "$JSHINT_BIN" ] && [ -x "$JSHINT_BIN" ] || JSHINT_BIN="./node_modules/.bin/jshint"
-	[ -n "$JSHINT_BIN" ] && [ -x "$JSHINT_BIN" ] || JSHINT_BIN=$(node -e 'console.log("%s/bin/jshint",require("path").dirname(require.resolve("jshint/package.json")))')
-	[ -n "$JSHINT_BIN" ] && [ -x "$JSHINT_BIN" ] || die "ERROR: Unable to find 'jshint' binary! Install via 'npm install jshint' to proceed!"
+	#[ "$JSHINT_BIN" -a -x "$JSHINT_BIN" ] || JSHINT_BIN=$(which jshint || true)
+	[ "$JSHINT_BIN" -a -x "$JSHINT_BIN" ] || JSHINT_BIN="./node_modules/.bin/jshint"
+	[ "$JSHINT_BIN" -a -x "$JSHINT_BIN" ] || JSHINT_BIN=$(node -e 'console.log("%s/bin/jshint",require("path").dirname(require.resolve("jshint/package.json")))')
+	[ "$JSHINT_BIN" -a -x "$JSHINT_BIN" ] || die "ERROR: Unable to find 'jshint' binary! Install via 'npm install jshint' to proceed!"
 
 	# Prep
 	JSHINT_OUTPUT_DIR="$npm_package_config_jshint_output_dir"
-	[ -n "$JSHINT_OUTPUT_DIR" ] || JSHINT_OUTPUT_DIR="$npm_config_default_jshint_output_dir"
-	[ -n "$JSHINT_OUTPUT_DIR" ] || [ -n "$npm_config_default_reports_output_dir" ] && JSHINT_OUTPUT_DIR="$npm_config_default_reports_output_dir/syntax"
-	[ -n "$JSHINT_OUTPUT_DIR" ] || JSHINT_OUTPUT_DIR="reports/syntax"
+	[ "$JSHINT_OUTPUT_DIR" ] || JSHINT_OUTPUT_DIR="$npm_config_default_jshint_output_dir"
+	[ "$JSHINT_OUTPUT_DIR" ] || [ "$npm_config_default_reports_output_dir" ] && JSHINT_OUTPUT_DIR="$npm_config_default_reports_output_dir/syntax"
+	[ "$JSHINT_OUTPUT_DIR" ] || JSHINT_OUTPUT_DIR="reports/syntax"
 	[ -d "$JSHINT_OUTPUT_DIR" ] || mkdir -p "$JSHINT_OUTPUT_DIR" || die "ERROR: Unable to mkdir \"$JSHINT_OUTPUT_DIR\", the jshint output dir!"
 
 	# Exec require on all js files
@@ -136,7 +136,7 @@ if [ -z "$NO_SYNTAX" ]; then
 	REPORT_FILE="$JSHINT_OUTPUT_DIR/$PKG_NAME-jshint-jslint.xml"
 	"$JSHINT_BIN" --extra-ext ".js,.json" --jslint-reporter "$CODE_DIR" "$TEST_DIR" &> "$REPORT_FILE"	\
 		|| die "ERROR: JSHint errors on jslint reporter! $(echo; cat "$REPORT_FILE")"
-	[ -n "$VERBOSE" ] && echo "REPORT OUTPUT: $REPORT_FILE" && cat "$REPORT_FILE" && echo
+	[ "$VERBOSE" ] && echo "REPORT OUTPUT: $REPORT_FILE" && cat "$REPORT_FILE" && echo
 	validate_xml "$REPORT_FILE" || die "ERROR: INVALID REPORT FILE!"
 
 	# Exec jshint to get checkstyle output
@@ -146,7 +146,7 @@ if [ -z "$NO_SYNTAX" ]; then
 		|| die "ERROR: JSHint errors on checkstyle reporter! $(echo; cat "$REPORT_FILE")"
 	echo "    ERRORS: $(egrep -c '<error .* severity="error"' "$REPORT_FILE")"
 	echo "    WARNINGS: $(egrep -c '<error .* severity="warning"' "$REPORT_FILE")"
-	[ -n "$VERBOSE" ] && echo "REPORT OUTPUT: $REPORT_FILE" && cat "$REPORT_FILE" && echo
+	[ "$VERBOSE" ] && echo "REPORT OUTPUT: $REPORT_FILE" && cat "$REPORT_FILE" && echo
 	validate_xml "$REPORT_FILE" || die "ERROR: INVALID REPORT FILE!"
 
 	echo "  Checking custom code rules ..."
@@ -158,8 +158,8 @@ fi
 
 # Used by unit and coverage tests.
 MOCHA_BIN="$npm_package_config_mocha_bin"
-[ -n "$MOCHA_BIN" ] && [ -x "$MOCHA_BIN" ] || MOCHA_BIN=$(which mocha || true)
-[ -n "$MOCHA_BIN" ] && [ -x "$MOCHA_BIN" ] || die "ERROR: Unable to find 'mocha' binary! Install via 'npm install mocha' to proceed!"
+[ "$MOCHA_BIN" -a -x "$MOCHA_BIN" ] || MOCHA_BIN=$(which mocha || true)
+[ "$MOCHA_BIN" -a -x "$MOCHA_BIN" ] || die "ERROR: Unable to find 'mocha' binary! Install via 'npm install mocha' to proceed!"
 
 # Unit tests
 [ "$npm_package_config_test_unit" = "false" ] && NO_UNIT=1
@@ -168,19 +168,19 @@ if [ -z "$NO_UNIT" ]; then
 
 	# Prep
 	MOCHA_REPORTER="spec"
-	if [ -n "$BUILD_SYSTEM" ]; then
+	if [ "$BUILD_SYSTEM" ]; then
 		MOCHA_REPORTER="$npm_package_config_test_reporter"
-		[ -n "$MOCHA_REPORTER" ] || MOCHA_REPORTER="xunit"
+		[ "$MOCHA_REPORTER" ] || MOCHA_REPORTER="xunit"
 	fi
 	MOCHA_OUTPUT_DIR="$npm_package_config_mocha_output_dir"
-	[ -n "$MOCHA_OUTPUT_DIR" ] || MOCHA_OUTPUT_DIR="$npm_config_default_mocha_output_dir"
-	[ -n "$MOCHA_OUTPUT_DIR" ] || [ -n "$npm_config_default_reports_output_dir" ] && MOCHA_OUTPUT_DIR="$npm_config_default_reports_output_dir/unit"
-	[ -n "$MOCHA_OUTPUT_DIR" ] || MOCHA_OUTPUT_DIR="reports/unit"
+	[ "$MOCHA_OUTPUT_DIR" ] || MOCHA_OUTPUT_DIR="$npm_config_default_mocha_output_dir"
+	[ "$MOCHA_OUTPUT_DIR" ] || [ "$npm_config_default_reports_output_dir" ] && MOCHA_OUTPUT_DIR="$npm_config_default_reports_output_dir/unit"
+	[ "$MOCHA_OUTPUT_DIR" ] || MOCHA_OUTPUT_DIR="reports/unit"
 	[ -d "$MOCHA_OUTPUT_DIR" ] || mkdir -p "$MOCHA_OUTPUT_DIR" || die "ERROR: Unable to mkdir \"$MOCHA_OUTPUT_DIR\", the mocha output dir!"
 
 	# Exec
 	[ "$MOCHA_REPORTER" == "xunit" ] && UNIT_TEST_EXTENSION=xml || UNIT_TEST_EXTENSION=txt
-	[ "$MOCHA_REPORTER" == "xunit" ] && MOCHA_EXTRA_FLAGS= || MOCHA_EXTRA_FLAGS=--colors
+	[ "$MOCHA_REPORTER" == "xunit" ] && MOCHA_EXTRA_FLAGS="" || MOCHA_EXTRA_FLAGS=--colors
 
 	REPORT_FILE_BASE="$MOCHA_OUTPUT_DIR/$PKG_NAME-report"
 	REPORT_FILE="$REPORT_FILE_BASE.$UNIT_TEST_EXTENSION"
@@ -188,7 +188,7 @@ if [ -z "$NO_UNIT" ]; then
 
 	LOGGER_PREFIX='' LOGGER_LEVEL=NOTICE "$MOCHA_BIN" --ui exports --reporter "$MOCHA_REPORTER" $MOCHA_EXTRA_FLAGS --recursive "$TEST_DIR" 2> "$REPORT_FILE_ERR" 1> "$REPORT_FILE"	\
 		|| die "ERROR: Mocha errors during unit tests! $(echo; cat "$REPORT_FILE"; cat "$REPORT_FILE_ERR")"
-	[ -n "$VERBOSE" ] && echo "REPORT OUTPUT: $REPORT_FILE" && cat "$REPORT_FILE" && echo
+	[ "$VERBOSE" ] && echo "REPORT OUTPUT: $REPORT_FILE" && cat "$REPORT_FILE" && echo
 
 	[ -s "$REPORT_FILE" ] || die "ERROR: no report data, units tests probably failed!"
 
@@ -202,19 +202,19 @@ if [ -z "$NO_COVERAGE" ]; then
 
 	# Deps
 	JSCOVERAGE_BIN="$npm_package_config_jscoverage_bin"
-	#[ -n "$JSCOVERAGE_BIN" ] && [ -x "$JSCOVERAGE_BIN" ] || JSCOVERAGE_BIN=$(which jscoverage || true)
-	[ -n "$JSCOVERAGE_BIN" ] && [ -x "$JSCOVERAGE_BIN" ] || JSCOVERAGE_BIN="./node_modules/.bin/jscoverage"
-	[ -n "$JSCOVERAGE_BIN" ] && [ -x "$JSCOVERAGE_BIN" ] || JSCOVERAGE_BIN=$(node -e 'console.log("%s/bin/jscoverage",require("path").dirname(require.resolve("jscoverage/package.json")))')
-	[ -n "$JSCOVERAGE_BIN" ] && [ -x "$JSCOVERAGE_BIN" ] || die "$(cat<<-ERROR_DOCS_EOF
+	#[ "$JSCOVERAGE_BIN" -a -x "$JSCOVERAGE_BIN" ] || JSCOVERAGE_BIN=$(which jscoverage || true)
+	[ "$JSCOVERAGE_BIN" -a -x "$JSCOVERAGE_BIN" ] || JSCOVERAGE_BIN="./node_modules/.bin/jscoverage"
+	[ "$JSCOVERAGE_BIN" -a -x "$JSCOVERAGE_BIN" ] || JSCOVERAGE_BIN=$(node -e 'console.log("%s/bin/jscoverage",require("path").dirname(require.resolve("jscoverage/package.json")))')
+	[ "$JSCOVERAGE_BIN" -a -x "$JSCOVERAGE_BIN" ] || die "$(cat<<-ERROR_DOCS_EOF
 		ERROR: Unable to find node.js jscoverage binary! Run 'npm install' first!
 	ERROR_DOCS_EOF
 	)"
 
 	# Prep
 	JSCOVERAGE_OUTPUT_DIR="$npm_package_config_jscoverage_output_dir"
-	[ -n "$JSCOVERAGE_OUTPUT_DIR" ] || JSCOVERAGE_OUTPUT_DIR="$npm_config_default_jscoverage_output_dir"
-	[ -n "$JSCOVERAGE_OUTPUT_DIR" ] || [ -n "$npm_config_default_reports_output_dir" ] && JSCOVERAGE_OUTPUT_DIR="$npm_config_default_reports_output_dir/html/jscoverage"
-	[ -n "$JSCOVERAGE_OUTPUT_DIR" ] || JSCOVERAGE_OUTPUT_DIR="reports/html/jscoverage"
+	[ "$JSCOVERAGE_OUTPUT_DIR" ] || JSCOVERAGE_OUTPUT_DIR="$npm_config_default_jscoverage_output_dir"
+	[ "$JSCOVERAGE_OUTPUT_DIR" ] || [ "$npm_config_default_reports_output_dir" ] && JSCOVERAGE_OUTPUT_DIR="$npm_config_default_reports_output_dir/html/jscoverage"
+	[ "$JSCOVERAGE_OUTPUT_DIR" ] || JSCOVERAGE_OUTPUT_DIR="reports/html/jscoverage"
 	[ -d "$JSCOVERAGE_OUTPUT_DIR" ] || mkdir -p "$JSCOVERAGE_OUTPUT_DIR" || die "ERROR: Unable to mkdir \"$MOCHA_OUTPUT_DIR\", the mocha output dir!"
 	JSCOVERAGE_TMP_DIR="$CODE_DIR.jscoverage"
 	if [ -d "$JSCOVERAGE_TMP_DIR" ]; then
@@ -225,7 +225,7 @@ if [ -z "$NO_COVERAGE" ]; then
 	#JSCOVERAGE_EXCLUDES="$(find "$CODE_DIR" -type f -not -path '*/.svn/*' -not -name '*.js' | xargs -n1 basename | sort -u | tr '\n' , | sed 's/,$//')"
 	"$JSCOVERAGE_BIN" "$CODE_DIR" "$JSCOVERAGE_TMP_DIR" --exclude "$JSCOVERAGE_EXCLUDES"
 	# - Backup the actual code and replace it with jscoverage results
-	[ -n "$VERBOSE" ] && echo "Replacing $CODE_DIR with $JSCOVERAGE_TMP_DIR ..."
+	[ "$VERBOSE" ] && echo "Replacing $CODE_DIR with $JSCOVERAGE_TMP_DIR ..."
 
 	REPORT_FILE_BASE="$JSCOVERAGE_OUTPUT_DIR/$PKG_NAME-coverage"
 	REPORT_FILE="$REPORT_FILE_BASE.html"
@@ -233,26 +233,26 @@ if [ -z "$NO_COVERAGE" ]; then
 
 	mv "$CODE_DIR" "$CODE_DIR.ORIGINAL"	\
 		&& mv "$JSCOVERAGE_TMP_DIR" "$CODE_DIR"	\
-		&& LOGGER_PREFIX='' LOGGER_LEVEL=NOTICE "$MOCHA_BIN" --ui "exports" --reporter "html-cov" --recursive "$TEST_DIR" 2> "$REPORT_FILE_ERR" | sed 's|'"`pwd`/lib/"'||g' > "$REPORT_FILE"	\
+		&& LOGGER_PREFIX='' LOGGER_LEVEL=NOTICE "$MOCHA_BIN" --ui "exports" --reporter "html-cov" --recursive "$TEST_DIR" 2> "$REPORT_FILE_ERR" | sed 's|'"$(pwd)/lib/"'||g' > "$REPORT_FILE"	\
 		|| echo "WARNING: JSCoverage: insufficient coverage (exit code $?)."
 #		|| die "ERROR: JSCoverage errors during coverage tests! $(rm -fr "$CODE_DIR" && mv "$CODE_DIR.ORIGINAL" "$CODE_DIR"; echo; cat "$REPORT_FILE")"
-#	[ -n "$VERBOSE" ] && echo "REPORT OUTPUT: $REPORT_FILE" && cat "$REPORT_FILE" && echo
+#	[ "$VERBOSE" ] && echo "REPORT OUTPUT: $REPORT_FILE" && cat "$REPORT_FILE" && echo
 
 	LOGGER_PREFIX='' LOGGER_LEVEL=NOTICE "$MOCHA_BIN" --ui "exports" --reporter "json-cov" --recursive "$TEST_DIR" 2> "$REPORT_FILE_ERR" > "$REPORT_FILE_BASE.json"
 
 	# Cleanup
-	rm -rf "$CODE_DIR"	\
-		&& mv "$CODE_DIR.ORIGINAL" "$CODE_DIR"	\
+	rm -rf "$CODE_DIR" || die "ERROR: Unable to remove temp copy of \"$CODE_DIR\""
+	mv "$CODE_DIR.ORIGINAL" "$CODE_DIR"	\
 		|| die "ERROR: Unable to put code directory \"$CODE_DIR.ORIGNAL\" back where it belongs!"
 
-	node -e "if (JSON.parse(require('fs').readFileSync('$REPORT_FILE_BASE.json')).coverage < 100) { console.error('Less than 100% code coverage! See code coverage report at https://bamboo.rd.rcg.local/$bamboo_buildplanname-$bamboo_buildnumber/artifact/JOB1/code-coverage/$PKG_NAME-coverage.html'); process.exit(1); }"
+	node -e "if (JSON.parse(require('fs').readFileSync('$REPORT_FILE_BASE.json')).coverage < 91) { console.error('Less than 91% code coverage! See code coverage report at https://bamboo.rd.rcg.local/$bamboo_buildplanname-$bamboo_buildnumber/artifact/JOB1/code-coverage/$PKG_NAME-coverage.html'); process.exit(1); }"
 
 	echo
 fi
 
 # This is used by both the PMD and jscheckstyle.
 ANALYSIS_TARGET="$npm_package_config_analyze_dirs"
-[ -n "$ANALYSIS_TARGET" ] || ANALYSIS_TARGET="$CODE_DIR"
+[ "$ANALYSIS_TARGET" ] || ANALYSIS_TARGET="$CODE_DIR"
 
 # Static analysis.
 [ "$npm_package_config_test_static_analysis" = "false" ] && NO_STATIC_ANALYSIS=1
@@ -260,14 +260,14 @@ if [ -z "$NO_STATIC_ANALYSIS" ]; then
 	echo "Running static analysis ..."
 
 	PMD_BIN="$npm_package_config_pmd_bin"
-	[ -n "$PMD_BIN" ] && [ -x "$PMD_BIN" ] || PMD_BIN="/srv/jenkins/tools/pmd/bin/run.sh"
+	[ "$PMD_BIN" -a -x "$PMD_BIN" ] || PMD_BIN="/srv/jenkins/tools/pmd/bin/run.sh"
 
-	if [ -n "$PMD_BIN" ] && [ -x "$PMD_BIN" ]; then
+	if [ "$PMD_BIN" -a -x "$PMD_BIN" ]; then
 
         PMD_OUTPUT_DIR="$npm_package_config_pmd_output_dir"
-        [ -n "$PMD_OUTPUT_DIR" ] || PMD_OUTPUT_DIR="$npm_package_config_pmd_output_dir"
-        [ -n "$PMD_OUTPUT_DIR" ] || [ -n "$npm_config_default_reports_output_dir" ] && PMD_OUTPUT_DIR="$npm_config_default_reports_output_dir/static-analysis"
-        [ -n "$PMD_OUTPUT_DIR" ] || PMD_OUTPUT_DIR="reports/static-analysis"
+        [ "$PMD_OUTPUT_DIR" ] || PMD_OUTPUT_DIR="$npm_package_config_pmd_output_dir"
+        [ "$PMD_OUTPUT_DIR" ] || [ "$npm_config_default_reports_output_dir" ] && PMD_OUTPUT_DIR="$npm_config_default_reports_output_dir/static-analysis"
+        [ "$PMD_OUTPUT_DIR" ] || PMD_OUTPUT_DIR="reports/static-analysis"
         [ -d "$PMD_OUTPUT_DIR" ] || mkdir -p "$PMD_OUTPUT_DIR" || die "ERROR: Unable to mkdir \"$PMD_OUTPUT_DIR\", the PMD static analysis output dir!"
 
         REPORT_FILE="$PMD_OUTPUT_DIR/$PKG_NAME-cpd.xml"
@@ -285,20 +285,19 @@ if [ -z "$NO_JSCHECKSTYLE" ]; then
 	echo "Running jscheckstyle ..."
 
 	JSCHECKSTYLE_BIN="$npm_package_config_jscheckstyle_bin"
-	#[ -n "$JSCHECKSTYLE_BIN" ] && [ -x "$JSCHECKSTYLE_BIN" ] || JSCHECKSTYLE_BIN=$(which jscheckstyle || true)
-	[ -n "$JSCHECKSTYLE_BIN" ] && [ -x "$JSCHECKSTYLE_BIN" ] || JSCHECKSTYLE_BIN="./node_modules/.bin/jscheckstyle"
-	[ -n "$JSCHECKSTYLE_BIN" ] && [ -x "$JSCHECKSTYLE_BIN" ] || JSCHECKSTYLE_BIN=$(node -e 'console.log("%s/bin/jscheckstyle",require("path").dirname(require.resolve("jscheckstyle/package.json")))')
-	[ -n "$JSCHECKSTYLE_BIN" ] && [ -x "$JSCHECKSTYLE_BIN" ] || die "ERROR: Unable to find 'jscheckstyle' binary! Install via 'npm install jscheckstyle' to proceed!"
+	#[ "$JSCHECKSTYLE_BIN" -a -x "$JSCHECKSTYLE_BIN" ] || JSCHECKSTYLE_BIN=$(which jscheckstyle || true)
+	[ "$JSCHECKSTYLE_BIN" -a -x "$JSCHECKSTYLE_BIN" ] || JSCHECKSTYLE_BIN="./node_modules/.bin/jscheckstyle"
+	[ "$JSCHECKSTYLE_BIN" -a -x "$JSCHECKSTYLE_BIN" ] || JSCHECKSTYLE_BIN=$(node -e 'console.log("%s/bin/jscheckstyle",require("path").dirname(require.resolve("jscheckstyle/package.json")))')
+	[ "$JSCHECKSTYLE_BIN" -a -x "$JSCHECKSTYLE_BIN" ] || die "ERROR: Unable to find 'jscheckstyle' binary! Install via 'npm install jscheckstyle' to proceed!"
 
 	JSCHECKSTYLE_OUTPUT_DIR="$npm_package_config_jscheckstyle_output_dir"
-	[ -n "$JSCHECKSTYLE_OUTPUT_DIR" ] || JSCHECKSTYLE_OUTPUT_DIR="$npm_package_config_jscheckstyle_output_dir"
-	[ -n "$JSCHECKSTYLE_OUTPUT_DIR" ] || [ -n "$npm_config_default_reports_output_dir" ] && JSCHECKSTYLE_OUTPUT_DIR="$npm_config_default_reports_output_dir/jscheckstyle"
-	[ -n "$JSCHECKSTYLE_OUTPUT_DIR" ] || JSCHECKSTYLE_OUTPUT_DIR="reports/jscheckstyle"
+	[ "$JSCHECKSTYLE_OUTPUT_DIR" ] || JSCHECKSTYLE_OUTPUT_DIR="$npm_package_config_jscheckstyle_output_dir"
+	[ "$JSCHECKSTYLE_OUTPUT_DIR" ] || [ "$npm_config_default_reports_output_dir" ] && JSCHECKSTYLE_OUTPUT_DIR="$npm_config_default_reports_output_dir/jscheckstyle"
+	[ "$JSCHECKSTYLE_OUTPUT_DIR" ] || JSCHECKSTYLE_OUTPUT_DIR="reports/jscheckstyle"
 	[ -d "$JSCHECKSTYLE_OUTPUT_DIR" ] || mkdir -p "$JSCHECKSTYLE_OUTPUT_DIR" || die "ERROR: Unable to mkdir \"$JSCHECKSTYLE_OUTPUT_DIR\", the jscheckstyle output dir!"
 
     REPORT_FILE="$JSCHECKSTYLE_OUTPUT_DIR/$PKG_NAME-jscheckstyle.xml"
 
-    "$JSCHECKSTYLE_BIN" --checkstyle $ANALYSIS_TARGET 2> /dev/null 1> "$REPORT_FILE" || echo "WARNING: jscheckstyle: code is too complex"
+    "$JSCHECKSTYLE_BIN" --checkstyle $ANALYSIS_TARGET 2>/dev/null 1>"$REPORT_FILE" || echo "WARNING: jscheckstyle: code is too complex"
 	validate_xml "$REPORT_FILE" || die "ERROR: INVALID REPORT FILE!"
 fi
-

+ 9 - 4
package.json

@@ -1,23 +1,28 @@
 {
   "name": "mungedb-aggregate",
-  "version": "0.5.7+2013.09.23",
+  "version": "2.4.0-rc0",
   "description": "A JavaScript data aggregation pipeline based on the MongoDB aggregation framework.",
   "author": "Rivera Group <support@riverainc.com>",
   "contributors": [
     "Adam Bell <ABell@riverainc.com>",
-    "Kyle Davis <KDavis@riverainc.com>",
+    "Charles Ezell <CEzell@riverainc.com>",
+    "Chris Sexton <CSexton@riverainc.com>",
+    "Jake Delaney <JDelaney@riverainc.com>",
+    "Jared Hall <JHall@riverainc.com>",
+    "Kyle P Davis <KDavis@riverainc.com>",
     "Phil Murray <PMurray@riverainc.com>",
     "Spencer Rathbun <SRathbun@riverainc.com>",
-    "Charles Ezell <CEzell@riverainc.com>"
+    "Tony Ennis <TEnnis@riverainc.com>"
   ],
   "main": "./mungedb-aggregate.js",
   "scripts": {
     "test": "npm_scripts/test/test.sh"
   },
   "repository": {
-    "url": "git+https://source.rd.rcg.local/git/private/mungedb-aggregate.git#develop"
+    "url": "git+https://github.com/RiveraGroup/mungedb-aggregate.git#master"
   },
   "keywords": [
+    "aggregation",
     "manipulation",
     "alteration"
   ],

+ 113 - 10
test/lib/aggregate.js

@@ -9,8 +9,18 @@ function testAggregate(opts){
 	// SYNC: test one-off usage
 	var results = aggregate(opts.pipeline, opts.inputs);
 	assert.equal(JSON.stringify(results), JSON.stringify(opts.expected));
+
+	// SYNC: test one-off usage with context
+	results = aggregate(opts.pipeline, {hi: "there"}, opts.inputs);
+	assert.equal(JSON.stringify(results), JSON.stringify(opts.expected));
+
+	// SYNC: test use with context
+	var aggregator = aggregate(opts.pipeline, {hi: "there"});
+	results = aggregator(opts.inputs);
+	assert.equal(JSON.stringify(results), JSON.stringify(opts.expected));
+
 	// SYNC: test reusable aggregator functionality
-	var aggregator = aggregate(opts.pipeline);
+	aggregator = aggregate(opts.pipeline);
 	results = aggregator(opts.inputs);
 	assert.equal(JSON.stringify(results), JSON.stringify(opts.expected));
 
@@ -23,19 +33,34 @@ function testAggregate(opts){
 		assert.ifError(err);
 		assert.equal(JSON.stringify(results), JSON.stringify(opts.expected));
 
-		// ASYNC: test reusable aggregator functionality
-		var aggregator = aggregate(opts.pipeline);
-		aggregator(opts.inputs, function(err, results){
+		// ASYNC: test one-off usage with context
+		aggregate(opts.pipeline, {hi: "there"}, opts.inputs, function(err, results){
 			assert.ifError(err);
 			assert.equal(JSON.stringify(results), JSON.stringify(opts.expected));
 
-			// ASYNC: test that it is actually reusable
-			aggregator(opts.inputs, function(err, results){
+			// ASYNC: test reusable aggregator functionality with context
+			var aggregator = aggregate(opts.pipeline);
+			aggregator({hi: "there"}, opts.inputs, function(err, results){
 				assert.ifError(err);
-				assert.equal(JSON.stringify(results), JSON.stringify(opts.expected), "Reuse of aggregator should yield the same results!");
+				assert.equal(JSON.stringify(results), JSON.stringify(opts.expected));
+
+				// ASYNC: test reusable aggregator functionality
+				var aggregator = aggregate(opts.pipeline);
+				aggregator(opts.inputs, function(err, results){
+					assert.ifError(err);
+					assert.equal(JSON.stringify(results), JSON.stringify(opts.expected));
+
+					// ASYNC: test that it is actually reusable
+					aggregator(opts.inputs, function(err, results){
+						assert.ifError(err);
+						assert.equal(JSON.stringify(results), JSON.stringify(opts.expected), "Reuse of aggregator should yield the same results!");
+
+						// success!
+						return opts.next();
+					});
+
+				});
 
-				// success!
-				return opts.next();
 			});
 
 		});
@@ -261,8 +286,86 @@ module.exports = {
 				],
 				next: next
 			});
-		}
+		},
+
+		"should be able to successfully use comparisions of objects to nulls without throwing an exception": function(next){
+			testAggregate({
+				inputs: [
+					{
+						cond:{$or:[
+							{$eq:["$server","Starmetal.demo.com"]},
+						]},
+						value:"PII"
+					},
+					{
+						cond:{$or:[
+							{$eq:["$server","Specium.demo.com"]},
+							{$eq:["$server","Germanium.demo.com"]},
+							{$eq:["$server","Runite.demo.com"]}
+						]},
+						value:"PI"
+					},
+					{
+						cond:{$or:[
+							{$eq:["$server","Primal.demo.com"]}
+						]},
+						value:"Confidential"
+					},
+					{
+						cond:{$or:[
+							{$eq:["$server","Polarite.demo.com"]},
+							{$eq:["$server","Ryanium.demo.com"]}
+						]},
+						value:"Proprietary"
+					},
+					{
+						cond:{$or:[
+							{$eq:["$server","Phazon.demo.com"]}
+						]},
+						value:"PHI"
+					},
+					{
+						cond:null,
+						value:"Authorized"
+					}
+				],
+				pipeline: [
+					{$skip:1},
+					{$limit:1},
+					{$project:{
+						retValue:{$cond:[
+							{$ne:["$cond", null]},
+							null,
+							"$value"
+						]}
+					}}
+				],
+				expected: [{"retValue":null}],
+				next: next
+			});
+		},
 
+		"should be able to successfully compare a null to a null": function(next){
+			testAggregate({
+				inputs: [
+					{
+						cond:null,
+						value:"Authorized"
+					}
+				],
+				pipeline: [
+					{$project:{
+						retValue:{$cond:[
+							{$eq:["$cond", null]},
+							"$value",
+							null
+						]}
+					}}
+				],
+				expected: [{"retValue":"Authorized"}],
+				next: next
+			});
+		},
 	}
 
 };

+ 56 - 28
test/lib/pipeline/Pipeline.js

@@ -8,10 +8,10 @@ module.exports = {
 
 	"Pipeline": {
 
-		before: function(){
+		before: function () {
 
-			Pipeline.stageDesc.$test = (function(){
-				var klass = function TestDocumentSource(options, ctx){
+			Pipeline.stageDesc.$test = (function () {
+				var klass = function TestDocumentSource(options, ctx) {
 					base.call(this, ctx);
 
 					this.shouldCoalesce = options.coalesce;
@@ -21,17 +21,17 @@ module.exports = {
 
 					this.current = 5;
 
-				}, TestDocumentSource = klass, base = DocumentSource, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
+				}, TestDocumentSource = klass, base = DocumentSource, proto = klass.prototype = Object.create(base.prototype, {constructor: {value: klass}});
 
 
-				proto.coalesce = function(){
+				proto.coalesce = function () {
 					this.coalesceWasCalled = true;
 					var c = this.shouldCoalesce;//only coalesce with the first thing we find
 					this.shouldCoalesce = false;
 					return c;
 				};
 
-				proto.optimize = function(){
+				proto.optimize = function () {
 					this.optimizeWasCalled = true;
 				};
 
@@ -49,7 +49,7 @@ module.exports = {
 					}
 				};
 
-				klass.createFromJson = function(options, ctx){
+				klass.createFromJson = function (options, ctx) {
 					return new TestDocumentSource(options, ctx);
 				};
 
@@ -61,42 +61,59 @@ module.exports = {
 		"parseCommand": {
 
 			"should throw Error if given non-objects in the array": function () {
-				assert.throws(function(){
-					Pipeline.parseCommand({pipeline:[5]});
+				assert.throws(function () {
+					Pipeline.parseCommand({pipeline: [5]});
 				});
 			},
 
 			"should throw Error if given objects with more / less than one field": function () {
-				assert.throws(function(){
-					Pipeline.parseCommand({pipeline:[{}]});
-					Pipeline.parseCommand({pipeline:[{a:1,b:2}]});
+				assert.throws(function () {
+					Pipeline.parseCommand({pipeline: [
+						{}
+					]});
+					Pipeline.parseCommand({pipeline: [
+						{a: 1, b: 2}
+					]});
 				});
 			},
 
 			"should throw Error on unknown document sources": function () {
-				assert.throws(function(){
-					Pipeline.parseCommand({pipeline:[{$foo:"$sdfdf"}]});
+				assert.throws(function () {
+					Pipeline.parseCommand({pipeline: [
+						{$foo: "$sdfdf"}
+					]});
 				});
 			},
 
 			"should swap $match and $sort if the $match immediately follows the $sort": function () {
-				var p = Pipeline.parseCommand({pipeline:[{$sort:{"xyz":1}}, {$match:{}}]});
-				assert.equal(p.sources[0].constructor.matchName, "$match");
-				assert.equal(p.sources[1].constructor.sortName, "$sort");
+				var p = Pipeline.parseCommand({pipeline: [
+					{$sort: {"xyz": 1}},
+					{$match: {}}
+				]});
+				assert.equal(p.sourceVector[0].constructor.matchName, "$match");
+				assert.equal(p.sourceVector[1].constructor.sortName, "$sort");
 			},
 
 			"should attempt to coalesce all sources": function () {
-				var p = Pipeline.parseCommand({pipeline:[{$test:{coalesce:false}}, {$test:{coalesce:true}}, {$test:{coalesce:false}}, {$test:{coalesce:false}}]});
-				assert.equal(p.sources.length, 3);
-				p.sources.slice(0,-1).forEach(function(source){
+				var p = Pipeline.parseCommand({pipeline: [
+					{$test: {coalesce: false}},
+					{$test: {coalesce: true}},
+					{$test: {coalesce: false}},
+					{$test: {coalesce: false}}
+				]});
+				assert.equal(p.sourceVector.length, 3);
+				p.sourceVector.slice(0, -1).forEach(function (source) {
 					assert.equal(source.coalesceWasCalled, true);
 				});
 				assert.equal(p.sources[p.sources.length -1].coalesceWasCalled, false);
 			},
 
 			"should optimize all sources": function () {
-				var p = Pipeline.parseCommand({pipeline:[{$test:{coalesce:false}}, {$test:{coalesce:false}}]});
-				p.sources.forEach(function(source){
+				var p = Pipeline.parseCommand({pipeline: [
+					{$test: {coalesce: false}},
+					{$test: {coalesce: false}}
+				]});
+				p.sourceVector.forEach(function (source) {
 					assert.equal(source.optimizeWasCalled, true);
 				});
 			}
@@ -109,7 +126,7 @@ module.exports = {
 				p.stitch();
 				assert.equal(p.sources[1].source, p.sources[0]);
 			}
-		},
+			},
 
 		"#_runSync": {
 
@@ -117,12 +134,23 @@ module.exports = {
 				var p = Pipeline.parseCommand({pipeline:[{$test:{coalesce:false}}, {$test:{coalesce:false}}, {$test:{coalesce:false}}]}),
 					results = p.run(function(err, results) {
 						assert.deepEqual(results.result, [ { val: 5 }, { val: 4 }, { val: 3 }, { val: 2 }, { val: 1 } ]);
-					});
+				});
+			},
+
+			"should catch parse errors": function () {
+				// The $foo part is invalid and causes a throw.
+				assert.throws(function () {
+					Pipeline.parseCommand({pipeline: [
+						{$match: {$foo: {bar: "baz"}}}
+					]});
+				});
 			},
 
 			"should call callback with errors from pipeline components": function (next) {
-				var p = Pipeline.parseCommand({pipeline:[{$test:{coalesce:false}}, {$test:{coalesce:false}}, {$test:{coalesce:false,works:false}}]});
-				p.run(function(err, results){
+				var p = Pipeline.parseCommand({pipeline: [
+					{$match: {foo: {bar: "baz"}}}
+				]});
+				p.run(new DocumentSource({}), function (err, results) {
 					assert(err instanceof Error);
 					return next();
 				});
@@ -136,7 +164,7 @@ module.exports = {
 					results = p.run(function(err, results) {
 						assert.deepEqual(results.result, [ { val: 5 }, { val: 4 }, { val: 3 }, { val: 2 }, { val: 1 } ]);
 					});
-			}
+		}
 		},
 
 		"#addInitialSource": {
@@ -153,7 +181,7 @@ module.exports = {
 				p.addInitialSource(initialSource);
 				p.stitch();
 				assert.equal(p.sources[1].source, p.sources[0]);
-			}
+	}
 		}
 
 	}

+ 1 - 1
test/lib/pipeline/expressions/FieldRangeExpression.js

@@ -106,7 +106,7 @@ module.exports = {
 				assert.throws(function(){
 					new FieldRangeExpression(new FieldPathExpression("a"), "$eq", 0).evaluate({a:[1,0,2]});
 				});
-			}
+            }
 
 		},
 

+ 45 - 45
test/lib/pipeline/expressions/NaryExpression.js

@@ -2,42 +2,42 @@
 var assert = require("assert"),
     VariablesParseState = require("../../../../lib/pipeline/expressions/VariablesParseState"),
     VariablesIdGenerator = require("../../../../lib/pipeline/expressions/VariablesIdGenerator"),
-    NaryExpression = require("../../../../lib/pipeline/expressions/NaryExpression"),
-    ConstantExpression = require("../../../../lib/pipeline/expressions/ConstantExpression"),
-    FieldPathExpression = require("../../../../lib/pipeline/expressions/FieldPathExpression"),
-    Expression = require("../../../../lib/pipeline/expressions/Expression");
+	NaryExpression = require("../../../../lib/pipeline/expressions/NaryExpression"),
+	ConstantExpression = require("../../../../lib/pipeline/expressions/ConstantExpression"),
+	FieldPathExpression = require("../../../../lib/pipeline/expressions/FieldPathExpression"),
+	Expression = require("../../../../lib/pipeline/expressions/Expression");
 
 
 // A dummy child of NaryExpression used for testing
 var TestableExpression = (function(){
-        // CONSTRUCTOR
+	// CONSTRUCTOR
     var klass = function TestableExpression(operands, haveFactory){
-        base.call(this);
-        if (operands) {
-            var self = this;
-            operands.forEach(function(operand) {
-                self.addOperand(operand);
-            });
-        }
-        this.haveFactory = !!haveFactory;
-    }, base = NaryExpression, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
-
-    // PROTOTYPE MEMBERS
+		base.call(this);
+		if (operands) {
+			var self = this;
+			operands.forEach(function(operand) {
+				self.addOperand(operand);
+			});
+		}
+		this.haveFactory = !!haveFactory;
+	}, base = NaryExpression, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
+
+	// PROTOTYPE MEMBERS
     proto.evaluateInternal = function evaluateInternal(vps) {
-        // Just put all the values in a list.  This is not associative/commutative so
-        // the results will change if a factory is provided and operations are reordered.
-        return this.operands.map(function(operand) {
+		// Just put all the values in a list.  This is not associative/commutative so
+		// the results will change if a factory is provided and operations are reordered.
+		return this.operands.map(function(operand) {
             return operand.evaluateInternal(vps);
-        });
-    };
+		});
+	};
 
     proto.isAssociativeAndCommutative = function isAssociativeAndCommutative(){
         return this.isAssociativeAndCommutative;
-    };
+	};
 
-    proto.getOpName = function getOpName() {
-        return "$testable";
-    };
+	proto.getOpName = function getOpName() {
+		return "$testable";
+	};
 
     klass.createFromOperands = function(operands) {
         var vps = new VariablesParseState(new VariablesIdGenerator()),
@@ -48,19 +48,19 @@ var TestableExpression = (function(){
         return testable;
     };
 
-    return klass;
+	return klass;
 })();
 
 
 module.exports = {
 
-        "NaryExpression": {
+	"NaryExpression": {
 
-                "constructor()": {
+		"constructor()": {
 
-                },
+		},
 
-                "#optimize()": {
+		"#optimize()": {
                     "should suboptimize": function() {
                         var testable = TestableExpression.createFromOperands([{"$and": []}, "$abc"], true);
                         testable = testable.optimize();
@@ -76,14 +76,14 @@ module.exports = {
                         var testable = TestableExpression.createFromOperands([55,65, "$path"], true);
                         testable = testable.optimize();
                         assert.deepEqual(testable.serialize(), {$testable:["$path", [55,66]]});
-                    },
+		},
 
                     "should flatten two layers" : function() {
                         var testable = TestableExpression.createFromOperands([55, "$path", {$add: [5,6,"$q"]}], true);
                         testable.addOperand(TestableExpression.createFromOperands([99,100,"$another_path"], true));
                         testable = testable.optimize();
                         assert.deepEqual(testable.serialize(), {$testable: ["$path", {$add: [5,6,"$q"]}, "$another_path", [55,66,[99,100]]]});
-                    },
+		},
 
                     "should flatten three layers": function(){
                         var bottom = TestableExpression.createFromOperands([5,6,"$c"], true),
@@ -105,7 +105,7 @@ module.exports = {
                     assert.deepEqual(baz,foo);
                         assert.deepEqual(new TestableExpression([new ConstantExpression(9)]).serialize(), {"$testable":[{"$const":9}]});
                         assert.deepEqual(new TestableExpression([new FieldPathExpression("ab.c")]).serialize(), {$testable:["$ab.c"]});
-                },
+		},
 
 
             "#serialize() should convert an object to json": function(){
@@ -117,35 +117,35 @@ module.exports = {
 
 
 
-                //the following test case is eagerly awaiting ObjectExpression
-                "#addDependencies()": function testDependencies(){
-                    var testableExpr = new TestableExpression();
+		//the following test case is eagerly awaiting ObjectExpression
+		"#addDependencies()": function testDependencies(){
+			var testableExpr = new TestableExpression();
                     var deps = {};
-                    // no arguments
+			// no arguments
                     testableExpr.addDependencies(deps);
                     assert.deepEqual(deps, {});
 
-                    // add a constant argument
-                    testableExpr.addOperand(new ConstantExpression(1));
+			// add a constant argument
+			testableExpr.addOperand(new ConstantExpression(1));
 
                     deps = {};
                     testableExpr.addDependencies(deps);
                     assert.deepEqual(deps, {});
 
-                    // add a field path argument
-                    testableExpr.addOperand(new FieldPathExpression("ab.c"));
+			// add a field path argument
+			testableExpr.addOperand(new FieldPathExpression("ab.c"));
                     deps = {};
                     testableExpr.addDependencies(deps);
                     assert.deepEqual(deps, {"ab.c":1});
 
-                    // add an object expression
-                    testableExpr.addOperand(Expression.parseObject({a:"$x",q:"$r"}, new Expression.ObjectCtx({isDocumentOk:1})));
+			// add an object expression
+			testableExpr.addOperand(Expression.parseObject({a:"$x",q:"$r"}, new Expression.ObjectCtx({isDocumentOk:1})));
                     deps = {};
                     testableExpr.addDependencies(deps);
                     assert.deepEqual(deps, {"ab.c":1, "x":1, "r":1});
-                }
+		}
 
-        }
+	}
 
 };