Просмотр исходного кода

Merge pull request #133 from RiveraGroup/feature/mongo_2.6.5_documentSource

Feature/mongo 2.6.5 document source
Chris Sexton 11 лет назад
Родитель
Commit
0d62b3ca5c
43 измененных файлов с 3154 добавлено и 1728 удалено
  1. 0 30
      lib/Cursor.js
  2. 1 1
      lib/index.js
  3. 1 1
      lib/pipeline/Document.js
  4. 2 1
      lib/pipeline/ParsedDeps.js
  5. 147 82
      lib/pipeline/Pipeline.js
  6. 65 42
      lib/pipeline/PipelineD.js
  7. 177 166
      lib/pipeline/documentSources/CursorDocumentSource.js
  8. 7 21
      lib/pipeline/documentSources/DocumentSource.js
  9. 4 0
      lib/pipeline/documentSources/GeoNearDocumentSource.js
  10. 225 110
      lib/pipeline/documentSources/GroupDocumentSource.js
  11. 29 15
      lib/pipeline/documentSources/LimitDocumentSource.js
  12. 44 10
      lib/pipeline/documentSources/MatchDocumentSource.js
  13. 19 3
      lib/pipeline/documentSources/OutDocumentSource.js
  14. 46 47
      lib/pipeline/documentSources/ProjectDocumentSource.js
  15. 20 13
      lib/pipeline/documentSources/RedactDocumentSource.js
  16. 127 28
      lib/pipeline/documentSources/SkipDocumentSource.js
  17. 285 82
      lib/pipeline/documentSources/SortDocumentSource.js
  18. 148 192
      lib/pipeline/documentSources/UnwindDocumentSource.js
  19. 1 1
      lib/pipeline/expressions/Expression.js
  20. 89 0
      lib/query/ArrayRunner.js
  21. 93 0
      lib/query/DocumentSourceRunner.js
  22. 222 0
      lib/query/Runner.js
  23. 21 0
      lib/query/index.js
  24. 0 93
      test/lib/Cursor.js
  25. 6 12
      test/lib/pipeline/ParsedDeps.js
  26. 103 41
      test/lib/pipeline/Pipeline.js
  27. 67 67
      test/lib/pipeline/PipelineD.js
  28. 37 85
      test/lib/pipeline/documentSources/CursorDocumentSource.js
  29. 2 0
      test/lib/pipeline/documentSources/DocumentSource.js
  30. 7 6
      test/lib/pipeline/documentSources/GeoNearDocumentSource.js
  31. 24 42
      test/lib/pipeline/documentSources/GroupDocumentSource.js
  32. 40 49
      test/lib/pipeline/documentSources/LimitDocumentSource.js
  33. 101 33
      test/lib/pipeline/documentSources/MatchDocumentSource.js
  34. 10 12
      test/lib/pipeline/documentSources/OutDocumentSource.js
  35. 218 221
      test/lib/pipeline/documentSources/ProjectDocumentSource.js
  36. 152 23
      test/lib/pipeline/documentSources/RedactDocumentSource.js
  37. 67 29
      test/lib/pipeline/documentSources/SkipDocumentSource.js
  38. 254 156
      test/lib/pipeline/documentSources/SortDocumentSource.js
  39. 47 0
      test/lib/pipeline/documentSources/TestBase.js
  40. 9 12
      test/lib/pipeline/documentSources/UnwindDocumentSource.js
  41. 1 2
      test/lib/pipeline/expressions/CompareExpression_test.js
  42. 88 0
      test/lib/query/ArrayRunner.js
  43. 148 0
      test/lib/query/DocumentSourceRunner.js

+ 0 - 30
lib/Cursor.js

@@ -1,30 +0,0 @@
-"use strict";
-
-/**
- * This class is a simplified implementation of the cursors used in MongoDB for reading from an Array of documents.
- * @param	{Array}	items	The array source of the data
- **/
-var klass = module.exports = function Cursor(items){
-	if (!(items instanceof Array)) throw new Error("arg `items` must be an Array");
-	this.cachedData = items.slice(0);	// keep a copy so array changes when using async doc srcs do not cause side effects
-	this.length = items.length;
-	this.offset = 0;
-}, base = Object, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
-
-proto.ok = function ok(){
-	return (this.offset < this.length) || this.hasOwnProperty("curr");
-};
-
-proto.advance = function advance(){
-	if (this.offset >= this.length){
-		delete this.curr;
-		return false;
-	}
-	this.curr = this.cachedData[this.offset++];
-	return this.curr;
-};
-
-proto.current = function current(){
-	if (!this.hasOwnProperty("curr")) this.advance();
-	return this.curr;
-};

+ 1 - 1
lib/index.js

@@ -79,8 +79,8 @@ exports.SYNC_CALLBACK = function(err, docs){
 exports.aggregate = exports;
 
 //Expose these so that mungedb-aggregate can be extended.
-exports.Cursor = require("./Cursor");
 exports.pipeline = require("./pipeline/");
+exports.query = require("./query/");
 
 // version info
 exports.version = "r2.5.4";

+ 1 - 1
lib/pipeline/Document.js

@@ -151,7 +151,7 @@ klass.cloneDeep = function cloneDeep(doc) {	//there are casese this is actually
 	for (var key in doc) {
 		if (doc.hasOwnProperty(key)) {
 			var val = doc[key];
-			obj[key] = val instanceof Object && val.constructor === Object ? Document.clone(val) : val;
+			obj[key] = val instanceof Object && val.constructor === Object ? Document.cloneDeep(val) : val;
 		}
 	}
 	return obj;

+ 2 - 1
lib/pipeline/ParsedDeps.js

@@ -38,7 +38,8 @@ proto.extractFields = function extractFields(input) {
 proto._arrayHelper = function _arrayHelper(array, neededFields) {
 	var values = [];
 
-	for (var it in array) {
+	for (var i = 0; i < array.length; i++) {
+		var it = array[i];
 		if (it instanceof Array)
 			values.push(_arrayHelper(it, neededFields));
 		else if (it instanceof Object)

+ 147 - 82
lib/pipeline/Pipeline.js

@@ -1,5 +1,5 @@
 "use strict";
-
+var async = require('async');
 /**
  * mongodb "commands" (sent via db.$cmd.findOne(...)) subclass to make a command.  define a singleton object for it.
  * @class Pipeline
@@ -13,7 +13,6 @@ var Pipeline = module.exports = function Pipeline(theCtx){
 	this.explain = false;
 	this.splitMongodPipeline = false;
 	this.ctx = theCtx;
-	this.SYNC_MODE = false;
 }, klass = Pipeline, base = Object, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
 
 var DocumentSource = require("./documentSources/DocumentSource"),
@@ -26,7 +25,8 @@ var DocumentSource = require("./documentSources/DocumentSource"),
 	OutDocumentSource = require('./documentSources/OutDocumentSource'),
 	GeoNearDocumentSource = require('./documentSources/GeoNearDocumentSource'),
 	RedactDocumentSource = require('./documentSources/RedactDocumentSource'),
-	SortDocumentSource = require('./documentSources/SortDocumentSource');
+	SortDocumentSource = require('./documentSources/SortDocumentSource'),
+	DepsTracker = require('./DepsTracker');
 
 klass.COMMAND_NAME = "aggregate";
 klass.PIPELINE_NAME = "pipeline";
@@ -50,13 +50,14 @@ klass.nStageDesc = Object.keys(klass.stageDesc).length;
 
 klass.optimizations = {};
 klass.optimizations.local = {};
+klass.optimizations.sharded = {};
 
 /**
  * Moves $match before $sort when they are placed next to one another
  * @static
  * @method moveMatchBeforeSort
  * @param pipelineInst An instance of a Pipeline
- **/
+ */
 klass.optimizations.local.moveMatchBeforeSort = function moveMatchBeforeSort(pipelineInst) {
 	var sources = pipelineInst.sources;
 	for(var srcn = sources.length, srci = 1; srci < srcn; ++srci) {
@@ -77,7 +78,7 @@ klass.optimizations.local.moveMatchBeforeSort = function moveMatchBeforeSort(pip
  * @static
  * @method moveLimitBeforeSkip
  * @param pipelineInst An instance of a Pipeline
- **/
+ */
 klass.optimizations.local.moveLimitBeforeSkip = function moveLimitBeforeSkip(pipelineInst) {
 	var sources = pipelineInst.sources;
 	if(sources.length === 0) return;
@@ -110,7 +111,7 @@ klass.optimizations.local.moveLimitBeforeSkip = function moveLimitBeforeSkip(pip
  * @static
  * @method coalesceAdjacent
  * @param pipelineInst An instance of a Pipeline
- **/
+ */
 klass.optimizations.local.coalesceAdjacent = function coalesceAdjacent(pipelineInst) {
 	var sources = pipelineInst.sources;
 	if(sources.length === 0) return;
@@ -147,7 +148,7 @@ klass.optimizations.local.coalesceAdjacent = function coalesceAdjacent(pipelineI
  * @static
  * @method optimizeEachDocumentSource
  * @param pipelineInst An instance of a Pipeline
- **/
+ */
 klass.optimizations.local.optimizeEachDocumentSource = function optimizeEachDocumentSource(pipelineInst) {
 	var sources = pipelineInst.sources;
 	for(var srci = 0, srcn = sources.length; srci < srcn; ++srci) {
@@ -160,7 +161,7 @@ klass.optimizations.local.optimizeEachDocumentSource = function optimizeEachDocu
  * @static
  * @method duplicateMatchBeforeInitalRedact
  * @param pipelineInst An instance of a Pipeline
- **/
+ */
 klass.optimizations.local.duplicateMatchBeforeInitalRedact = function duplicateMatchBeforeInitalRedact(pipelineInst) {
 	var sources = pipelineInst.sources;
 	if(sources.length >= 2 && sources[0].constructor === RedactDocumentSource) {
@@ -174,6 +175,68 @@ klass.optimizations.local.duplicateMatchBeforeInitalRedact = function duplicateM
 	}
 };
 
+//SKIPPED: addRequiredPrivileges
+
+/**
+ * Perform optimizations for a pipeline through sharding
+ * @method splitForSharded
+ */
+proto.splitForSharded = function splitForSharded() {
+	var shardPipeline = new Pipeline({});
+	shardPipeline.explain = this.explain;
+
+	klass.optimizations.sharded.findSplitPoint(shardPipeline, this);
+	klass.optimizations.sharded.moveFinalUnwindFromShardsToMerger(shardPipeline, this);
+	//klass.optimizations.sharded.limitFieldsSentFromShardsToMerger(shardPipeline, this);
+	return shardPipeline;
+};
+
+/**
+ * Split the source into Merge sources and Shard sources
+ * @static
+ * @method findSplitPoint
+ * @param shardPipe Shard sources
+ * @param mergePipe Merge sources
+ */
+klass.optimizations.sharded.findSplitPoint = function findSplitPoint(shardPipe, mergePipe) {
+	while(mergePipe.sources.length > 0) {
+		var current = mergePipe.sources[0];
+		mergePipe.sources.splice(0, 1);
+
+		if (current.isSplittable && current.isSplittable()) {
+			var shardSource = current.getShardSource(),
+				mergeSource = current.getMergeSource();
+			//if (typeof shardSource != "undefined") { shardPipe.sources.push(shardSource); }		//push_back
+			if (shardSource) { shardPipe.sources.push(shardSource); }		//push_back
+			//if (typeof mergeSource != "undefined") { mergePipe.sources.unshift(mergeSource); }	//push_front
+			if (mergeSource) { mergePipe.sources.unshift(mergeSource); }	//push_front
+			break;
+		}
+		else {
+			if (!shardPipe.sources) { shardPipe.sources = []; }
+			shardPipe.sources.push(current);
+		}
+	}
+};
+
+/**
+ * Optimize pipeline through moving unwind to the end
+ * @static
+ * @method moveFinalUnwindFromShardsToMerger
+ * @param shardPipe shard sources
+ * @param mergePipe merge sources
+ */
+klass.optimizations.sharded.moveFinalUnwindFromShardsToMerger = function moveFinalUnwindFromShardsToMerger(shardPipe, mergePipe) {
+	if (true) {
+		while(shardPipe.sources !== null && (shardPipe.sources.length > 0 && shardPipe.sources[shardPipe.sources.length-1] instanceof UnwindDocumentSource)) {
+			mergePipe.sources.unshift(shardPipe.sources.pop());
+		}
+	}
+};
+
+//SKIPPED: optimizations.sharded.limitFieldsSentFromShardsToMerger.  Somehow what this produces is not handled by Expression.js (err 16404)
+
+
 /**
  * Create an `Array` of `DocumentSource`s from the given JSON pipeline
  * // NOTE: DEVIATION FROM MONGO: split out into a separate function to better allow extensions (was in parseCommand)
@@ -181,7 +244,7 @@ klass.optimizations.local.duplicateMatchBeforeInitalRedact = function duplicateM
  * @method parseDocumentSources
  * @param pipeline  {Array}  The JSON pipeline
  * @returns {Array}  The parsed `DocumentSource`s
- **/
+ */
 klass.parseDocumentSources = function parseDocumentSources(pipeline, ctx){
 	var sources = [];
 	for (var nSteps = pipeline.length, iStep = 0; iStep < nSteps; ++iStep) {
@@ -197,7 +260,7 @@ klass.parseDocumentSources = function parseDocumentSources(pipeline, ctx){
 
 		// Create a DocumentSource pipeline stage from 'stageSpec'.
 		var desc = klass.stageDesc[stageName];
-		if (!desc) throw new Error("Unrecognized pipeline stage name: '" + stageName + "'; code 16435");
+		if (!desc) throw new Error("Unrecognized pipeline stage name: '" + stageName + "'; uassert code 16436");
 
 		// Parse the stage
 		var stage = desc(stageSpec, ctx);
@@ -205,7 +268,7 @@ klass.parseDocumentSources = function parseDocumentSources(pipeline, ctx){
 		sources.push(stage);
 
 		if(stage.constructor === OutDocumentSource && iStep !== nSteps - 1) {
-			throw new Error("$out can only be the final stage in the pipeline; code 16435");
+			throw new Error("$out can only be the final stage in the pipeline; code 16991");
 		}
 	}
 	return sources;
@@ -223,7 +286,7 @@ klass.parseDocumentSources = function parseDocumentSources(pipeline, ctx){
  * @param   cmdObj.splitMongodPipeline	{Boolean}  should split?
  * @param ctx     {Object}  Not used yet in mungedb-aggregate
  * @returns	{Array}	the pipeline, if created, otherwise a NULL reference
- **/
+ */
 klass.parseCommand = function parseCommand(cmdObj, ctx){
 	var pipelineNamespace = require("./"),
 		Pipeline = pipelineNamespace.Pipeline,	// using require in case Pipeline gets replaced with an extension
@@ -248,10 +311,9 @@ klass.parseCommand = function parseCommand(cmdObj, ctx){
 	/**
 	 * If we get here, we've harvested the fields we expect for a pipeline
 	 * Set up the specified document source pipeline.
-	 **/
+	 */
 	// NOTE: DEVIATION FROM MONGO: split this into a separate function to simplify and better allow for extensions (now in parseDocumentSources)
-	var sources = pipelineInst.sources = Pipeline.parseDocumentSources(pipeline, ctx);
-
+	pipelineInst.sources = Pipeline.parseDocumentSources(pipeline, ctx);
 	klass.optimizations.local.moveMatchBeforeSort(pipelineInst);
 	klass.optimizations.local.moveLimitBeforeSkip(pipelineInst);
 	klass.optimizations.local.coalesceAdjacent(pipelineInst);
@@ -261,12 +323,6 @@ klass.parseCommand = function parseCommand(cmdObj, ctx){
 	return pipelineInst;
 };
 
-// sync callback for Pipeline#run if omitted
-klass.SYNC_CALLBACK = function(err, results){
-	if (err) throw err;
-	return results.result;
-};
-
 function ifError(err) {
 	if (err) throw err;
 }
@@ -277,7 +333,7 @@ function ifError(err) {
  * @param	inputSource		{DocumentSource}	The input document source for the pipeline
  * @param	[callback]		{Function}			Optional callback function if using async extensions
  * @return {Object}	An empty object or the match spec
-**/
+ */
 proto.getInitialQuery = function getInitialQuery() {
 	var sources = this.sources;
 	if(sources.length === 0) {
@@ -297,15 +353,18 @@ proto.getInitialQuery = function getInitialQuery() {
  * @param	inputSource		{DocumentSource}	The input document source for the pipeline
  * @param	[callback]		{Function}			Optional callback function if using async extensions
  * @return {Object}	An empty object or the match spec
-**/
+ */
 proto.serialize = function serialize() {
 	var serialized = {},
 		array = [];
 
 	// create an array out of the pipeline operations
-	this.sources.forEach(function(source) {
-		source.serializeToArray(array);
-	});
+	if (this.sources) {
+		for (var i = 0; i < this.sources.length; i++) {
+		//this.sources.forEach(function(source) {
+			this.sources[i].serializeToArray(array);
+		}
+	}
 
 	serialized[klass.COMMAND_NAME] = this.ctx && this.ctx.ns && this.ctx.ns.coll ? this.ctx.ns.coll : '';
 	serialized[klass.PIPELINE_NAME] = array;
@@ -318,7 +377,7 @@ proto.serialize = function serialize() {
 /**
  * Points each source at its previous source
  * @method stitch
-**/
+ */
 proto.stitch = function stitch() {
 	if(this.sources.length <= 0) throw new Error("should not have an empty pipeline; massert code 16600");
 
@@ -334,20 +393,31 @@ proto.stitch = function stitch() {
 /**
  * Run the pipeline
  * @method run
- * @param callback {Function} Optional. Run the pipeline in async mode; callback(err, result)
- * @return result {Object} The result of executing the pipeline
-**/
+ * @param callback {Function} gets called once for each document result from the pipeline
+ */
 proto.run = function run(callback) {
 	// should not get here in the explain case
 	if(this.explain) throw new Error("Should not be running a pipeline in explain mode!");
 
-	/* NOTE: DEVIATION FROM MONGO SOURCE. WE'RE SUPPORTING SYNC AND ASYNC */
-	if(this.SYNC_MODE) {
-		callback();
-		return this._runSync();
-	} else {
-		return this._runAsync(callback);
-	}
+	var doc = null,
+		error = null,
+		finalSource = this._getFinalSource();
+
+	async.doWhilst(
+		function iterator(next){
+			return finalSource.getNext(function (err, obj){
+				callback(err, obj);
+				doc = obj;
+				error = err;
+				next();
+			});
+		},
+		function test(){
+			return doc !== null && !error;
+		},
+		function done(err){
+			//nothing to do here
+		});
 };
 
 /**
@@ -355,58 +425,16 @@ proto.run = function run(callback) {
  * @method _getFinalSource
  * @return {Object}		The DocumentSource at the end of the pipeline
  * @private
-**/
+ */
 proto._getFinalSource = function _getFinalSource() {
 	return this.sources[this.sources.length - 1];
 };
 
-/**
- * Run the pipeline synchronously
- * @method _runSync
- * @return {Object}		The results object {result:resultArray}
- * @private
-**/
-proto._runSync = function _runSync(callback) {
-	var resultArray = [],
-		finalSource = this._getFinalSource(),
-		handleErr = function(err) {
-			if(err) throw err;
-		},
-		next;
-	while((next = finalSource.getNext(handleErr)) !== DocumentSource.EOF) {
-		resultArray.push(next);
-	}
-	return {result:resultArray};
-};
-
-/**
- * Run the pipeline asynchronously
- * @method _runAsync
- * @param callback {Function} callback(err, resultObject)
- * @private
-**/
-proto._runAsync = function _runAsync(callback) {
-	var resultArray = [],
-		finalSource = this._getFinalSource(),
-		gotNext = function(err, doc) {
-			if(err) return callback(err);
-			if(doc !== DocumentSource.EOF) {
-				resultArray.push(doc);
-				return setImmediate(function() { //setImmediate to avoid callstack size issues
-					finalSource.getNext(gotNext);
-				});
-			} else {
-				return callback(null, {result:resultArray});
-			}
-		};
-	finalSource.getNext(gotNext);
-};
-
 /**
  * Get the pipeline explanation
  * @method writeExplainOps
  * @return {Array}	An array of source explanations
-**/
+ */
 proto.writeExplainOps = function writeExplainOps() {
 	var array = [];
 	this.sources.forEach(function(source) {
@@ -419,7 +447,44 @@ proto.writeExplainOps = function writeExplainOps() {
  * Set the source of documents for the pipeline
  * @method addInitialSource
  * @param source {DocumentSource}
-**/
+ */
 proto.addInitialSource = function addInitialSource(source) {
 	this.sources.unshift(source);
 };
+
+//SKIPPED: canRunInMongos
+
+//Note: Deviation from Mongo: Mongo 2.6.5 passes a param to getDependencies
+//	to calculate TextScore.  mungedb-aggregate doesn't do this, so no param is needed.
+proto.getDependencies = function getDependencies () {
+    var deps = new DepsTracker(),
+		knowAllFields = false;
+
+    //NOTE: Deviation from Mongo -- We aren't using Meta and textscore
+    for (var i=0; i < this.sources.length && !knowAllFields; i++) {
+        var localDeps = new DepsTracker(),
+        	status = this.sources[i].getDependencies(localDeps);
+
+        if (status === DocumentSource.GetDepsReturn.NOT_SUPPORTED) {
+            // Assume this stage needs everything. We may still know something about our
+            // dependencies if an earlier stage returned either EXHAUSTIVE_FIELDS or
+            // EXHAUSTIVE_META.
+            break;
+        }
+
+        if (!knowAllFields) {
+            for (var key in localDeps.fields)
+            	deps.fields[key] = localDeps.fields[key];
+
+            if (localDeps.needWholeDocument)
+                deps.needWholeDocument = true;
+            knowAllFields = status & DocumentSource.GetDepsReturn.EXHAUSTIVE_FIELDS;
+        }
+
+    }
+
+    if (!knowAllFields)
+        deps.needWholeDocument = true; // don't know all fields we need
+
+    return deps;
+};

+ 65 - 42
lib/pipeline/PipelineD.js

@@ -14,7 +14,9 @@ var PipelineD = module.exports = function PipelineD(){
 // DEPENDENCIES
 var DocumentSource = require('./documentSources/DocumentSource'),
 	CursorDocumentSource = require('./documentSources/CursorDocumentSource'),
-	Cursor = require('../Cursor');
+	SortDocumentSource = require('./documentSources/SortDocumentSource'),
+	MatchDocumentSource = require('./documentSources/MatchDocumentSource'),
+	getRunner = require('../query').getRunner;
 
 /**
  * Create a Cursor wrapped in a DocumentSourceCursor, which is suitable to be the first source for a pipeline to begin with.
@@ -35,60 +37,81 @@ var DocumentSource = require('./documentSources/DocumentSource'),
 **/
 klass.prepareCursorSource = function prepareCursorSource(pipeline, expCtx){
 
+	// We will be modifying the source vector as we go
 	var sources = pipeline.sources;
 
-	// NOTE: SKIPPED: look for initial match
-	// NOTE: SKIPPED: create a query object
-
-	// Look for an initial simple project; we'll avoid constructing Values for fields that won't make it through the projection
-	var projection = {};
-	var dependencies;
-	var deps = {};
-	var status = DocumentSource.GetDepsReturn.SEE_NEXT;
-	for (var i=0; i < sources.length && status !== DocumentSource.GetDepsReturn.EXHAUSTIVE; i++) {
-		status = sources[i].getDependencies(deps);
-		if(Object.keys(deps).length === 0) {
-			status = DocumentSource.GetDepsReturn.NOT_SUPPORTED;
-		}
-	}
-	if (status === DocumentSource.GetDepsReturn.EXHAUSTIVE) {
-		projection = DocumentSource.depsToProjection(deps);
-		dependencies = DocumentSource.parseDeps(deps);
-	}
+	// Inject a MongodImplementation to sources that need them.
+	// NOTE: SKIPPED
 
-	// NOTE: SKIPPED: Look for an initial sort
-	// NOTE: SKIPPED: Create the sort object
+	// Don't modify the pipeline if we got a DocumentSourceMergeCursor
+	// NOTE: SKIPPED
 
-	//get the full "namespace" name
-	// var fullName = dbName + "." + pipeline.collectionName;
 
-	// NOTE: SKIPPED: if(DEV) log messages
+	// Look for an initial match. This works whether we got an initial query or not.
+	// If not, it results in a "{}" query, which will be what we want in that case.
+	var queryObj = pipeline.getInitialQuery(),
+		match;
+	if (queryObj && queryObj instanceof Object && Object.keys(queryObj).length) {
+		// This will get built in to the Cursor we'll create, so
+		// remove the match from the pipeline
+		match = sources.shift();
+	}
 
-	// Create the necessary context to use a Cursor
-	// NOTE: SKIPPED: pSortedCursor bit
-	// NOTE: SKIPPED: pUnsortedCursor bit
+	// Find the set of fields in the source documents depended on by this pipeline.
+	var deps = pipeline.getDependencies(queryObj);
+
+	// Passing query an empty projection since it is faster to use ParsedDeps::extractFields().
+	// This will need to change to support covering indexes (SERVER-12015). There is an
+	// exception for textScore since that can only be retrieved by a query projection.
+	var projectionForQuery = deps.needTextScore ? deps.toProjection() : {};
+
+	/*
+	Look for an initial sort; we'll try to add this to the
+	Cursor we create.  If we're successful in doing that (further down),
+	we'll remove the $sort from the pipeline, because the documents
+	will already come sorted in the specified order as a result of the
+	index scan.
+	*/
+	var sortStage,
+		sortObj,
+		sortInRunner = false;
+	if (sources.length) {
+		sortStage = sources[0] instanceof SortDocumentSource ? sources[0] : undefined;
+		if (sortStage) {
+			// build the sort key
+			sortObj = sortStage.serializeSortKey(/*explain*/false);
+			sortInRunner = true;
+		}
+	}
+	
+	//munge deviation: the runner is (usually) not actually handling the initial query, so we need to add it back to the pipeline
+	if (match){
+		sources.unshift(match);
+	}
 
-	// NOTE: Deviating from mongo here. We're passing in a source or set of documents instead of collection name in the ctx.ns field
-	var source;
-	if(expCtx.ns instanceof DocumentSource){
-		source = expCtx.ns;
-	} else {
-		var cursorWithContext = new CursorDocumentSource.CursorWithContext(/*fullName*/);
+	// Create the Runner.
+	// NOTE: the logic here is simplified for munge
+	var runner = getRunner(expCtx.ns, queryObj, sortObj, projectionForQuery, sources);
 
-		// Now add the Cursor to cursorWithContext
-		cursorWithContext._cursor = new Cursor( expCtx.ns );	//NOTE: collectionName will likely be an array of documents in munge
+	// DocumentSourceCursor expects a yielding Runner that has had its state saved.
+	//runner.setYieldPolicy(Runner.RunnerState.YIELD_AUTO); //Skipped as we don't really support yielding yet
+	runner.saveState();
 
-		// wrap the cursor with a DocumentSource and return that
-		source = new CursorDocumentSource( cursorWithContext, expCtx );
+	// Put the Runner into a DocumentSourceCursor and add it to the front of the pipeline.
+	var source = new CursorDocumentSource("", runner, expCtx);
 
-		// NOTE: SKIPPED: Note the query and sort
+	// Note the query, sort, and projection for explain.
+	source.setQuery(queryObj);
+	if (sortInRunner)
+		source.setSort(sortObj);
 
-		if (Object.keys(projection).length) source.setProjection(projection, dependencies);
+	source.setProjection(deps.toProjection(), deps.toParsedDeps());
 
-		while(sources.length > 0 && source.coalesce(sources[0])) { //Note: Attempting to coalesce into the cursor source
-			sources.shift();
-		}
+	while (sources.length && source.coalesce(sources[0])) {
+		sources.shift();
 	}
 
 	pipeline.addInitialSource(source);
+
+	return runner;
 };

+ 177 - 166
lib/pipeline/documentSources/CursorDocumentSource.js

@@ -1,14 +1,13 @@
 "use strict";
 
-var DocumentSource = require('./DocumentSource'),
+var async = require('async'),
+	Value = require('../Value'),
+	Runner = require('../../query/Runner'),
+	DocumentSource = require('./DocumentSource'),
 	LimitDocumentSource = require('./LimitDocumentSource');
 
-// Mimicking max memory size from mongo/db/query/new_find.cpp
-// Need to actually decide some size for this?
-var MAX_BATCH_DOCS = 150;
-
 /**
- * Constructs and returns Documents from the objects produced by a supplied Cursor.
+ * Constructs and returns Documents from the BSONObj objects produced by a supplied Runner.
  * An object of this type may only be used by one thread, see SERVER-6123.
  *
  * This is usually put at the beginning of a chain of document sources
@@ -20,46 +19,40 @@ var MAX_BATCH_DOCS = 150;
  * @constructor
  * @param	{CursorDocumentSource.CursorWithContext}	cursorWithContext the cursor to use to fetch data
  **/
-var CursorDocumentSource = module.exports = CursorDocumentSource = function CursorDocumentSource(cursorWithContext, expCtx){
+var CursorDocumentSource = module.exports = CursorDocumentSource = function CursorDocumentSource(namespace, runner, expCtx){
 	base.call(this, expCtx);
 
-	this.current = null;
+	this._docsAddedToBatches = 0;
+	this._ns = namespace;
+	this._runner = runner;
 
-//	this.ns = null;
-//	/*
-//	The bson dependencies must outlive the Cursor wrapped by this
-//	source.  Therefore, bson dependencies must appear before pCursor
-//	in order cause its destructor to be called *after* pCursor's.
-//	*/
-//	this.query = null;
-//	this.sort = null;
+}, klass = CursorDocumentSource, base = DocumentSource, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
 
-	this._projection = null;
+klass.MaxDocumentsToReturnToClientAtOnce = 150; //DEVIATION: we are using documents instead of bytes
 
-	this._cursorWithContext = cursorWithContext;
-	this._curIdx = 0;
-	this._currentBatch = [];
-	this._limit = undefined;
-	this._docsAddedToBatches = 0;
+klass.create = function create(ns, runner, expCtx) {
+	return new CursorDocumentSource(ns, runner, expCtx);
+};
 
-	if (!this._cursorWithContext || !this._cursorWithContext._cursor) throw new Error("CursorDocumentSource requires a valid cursorWithContext");
+proto._currentBatch = [];
+proto._currentBatchIndex = 0;
 
-}, klass = CursorDocumentSource, base = require('./DocumentSource'), proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
+// BSONObj members must outlive _projection and cursor.
+proto._query = undefined;
+proto._sort = undefined;
+proto._projection = undefined;
+proto._dependencies = undefined;
+proto._limit = undefined;
+proto._docsAddedToBatches = undefined; // for _limit enforcement
 
+proto._ns = undefined;
+proto._runner = undefined; // PipelineRunner holds a weak_ptr to this.
 
-klass.CursorWithContext = (function (){
-	/**
-	 * Holds a Cursor and all associated state required to access the cursor.
-	 * @class CursorWithContext
-	 * @namespace mungedb-aggregate.pipeline.documentSources.CursorDocumentSource
-	 * @module mungedb-aggregate
-	 * @constructor
-	 **/
-	var klass = function CursorWithContext(ns){
-		this._cursor = null;
-	};
-	return klass;
-})();
+
+
+proto.isValidInitialSource = function(){
+	return true;
+};
 
 /**
  * Release the Cursor and the read lock it requires, but without changing the other data.
@@ -69,189 +62,207 @@ klass.CursorWithContext = (function (){
  * @method	dispose
  **/
 proto.dispose = function dispose() {
-	this._cursorWithContext = null;
+	if (this._runner) this._runner.reset();
 	this._currentBatch = [];
-	this._curIdx = 0;
 };
 
+/**
+ * Get the source's name.
+ * @method	getSourceName
+ * @returns	{String}	the string name of the source as a constant string; this is static, and there's no need to worry about adopting it
+ **/
 proto.getSourceName = function getSourceName() {
 	return "$cursor";
 };
 
+/**
+ * Returns the next Document if there is one
+ *
+ * @method	getNext
+ **/
 proto.getNext = function getNext(callback) {
-	if (!callback) throw new Error(this.getSourceName() + ' #getNext() requires callback');
-
-	if (this._currentBatch.length <= this._curIdx) {
-		this.loadBatch();
-
-		if (this._currentBatch.length <= this._curIdx) {
-			callback(null, DocumentSource.EOF);
-			return DocumentSource.EOF;
-		}
+	if (this.expCtx && this.expCtx.checkForInterrupt && this.expCtx.checkForInterrupt()){
+		return callback(new Error('Interrupted'));
 	}
 
-	// Don't unshift. It's expensiver.
-	var out = this._currentBatch[this._curIdx];
-	this._curIdx++;
-
-	callback(null, out);
-	return out;
+	var self = this;
+	if (self._currentBatchIndex >= self._currentBatch.length) {
+		self._currentBatchIndex = 0;
+		self._currentBatch = [];
+		return self.loadBatch(function(err){
+			if (err) return callback(err);
+			if (self._currentBatch.length === 0)
+				return callback(null, null);
+
+			return callback(null, self._currentBatch[self._currentBatchIndex++]);
+		});
+	}
+	return callback(null, self._currentBatch[self._currentBatchIndex++]);
 };
 
+/**
+ * Attempt to coalesce this DocumentSource with any $limits that it encounters
+ *
+ * @method	coalesce
+ * @param	{DocumentSource}	nextSource	the next source in the document processing chain.
+ * @returns	{Boolean}	whether or not the attempt to coalesce was successful or not; if the attempt was not successful, nothing has been changed
+ **/
 proto.coalesce = function coalesce(nextSource) {
-	if (this._limit) {
+	// Note: Currently we assume the $limit is logically after any $sort or
+	// $match. If we ever pull in $match or $sort using this method, we
+	// will need to keep track of the order of the sub-stages.
+
+	if (!this._limit) {
+		if (nextSource instanceof LimitDocumentSource) {
+			this._limit = nextSource;
+			return this._limit;
+		}
+		return false;// false if next is not a $limit
+	}
+	else {
 		return this._limit.coalesce(nextSource);
-	} else if (nextSource instanceof LimitDocumentSource) {
-		this._limit = nextSource;
-		return this._limit;
-	} else {
-		return false;
 	}
+
+	return false;
 };
 
-///**
-// * Record the namespace.  Required for explain.
-// *
-// * @method	setNamespace
-// * @param	{String}	ns	the namespace
-// **/
-//proto.setNamespace = function setNamespace(ns) {}
-//
-///**
-// * Record the query that was specified for the cursor this wraps, if any.
-// * This should be captured after any optimizations are applied to
-// * the pipeline so that it reflects what is really used.
-// * This gets used for explain output.
-// *
-// * @method	setQuery
-// * @param	{Object}	pBsonObj	the query to record
-// **/
+
+/**
+ * Record the query that was specified for the cursor this wraps, if
+ * any.
+ *
+ * This should be captured after any optimizations are applied to
+ * the pipeline so that it reflects what is really used.
+ *
+ * This gets used for explain output.
+ *
+ * @method	setQuery
+ * @param	{Object}	pBsonObj	the query to record
+ **/
 proto.setQuery = function setQuery(query) {
 	this._query = query;
 };
 
-///**
-// * Record the sort that was specified for the cursor this wraps, if any.
-// * This should be captured after any optimizations are applied to
-// * the pipeline so that it reflects what is really used.
-// * This gets used for explain output.
-// *
-// * @method	setSort
-// * @param	{Object}	pBsonObj	the query to record
-// **/
-//proto.setSort = function setSort(pBsonObj) {};
+/**
+ * Record the sort that was specified for the cursor this wraps, if
+ * any.
+ *
+ * This should be captured after any optimizations are applied to
+ * the pipeline so that it reflects what is really used.
+ *
+ * This gets used for explain output.
+ *
+ * @method	setSort
+ * @param	{Object}	pBsonObj	the query to record
+ **/
+proto.setSort = function setSort(sort) {
+	this._sort = sort;
+};
 
 /**
- * setProjection method
+ * Informs this object of projection and dependency information.
  *
  * @method	setProjection
  * @param	{Object}	projection
  **/
 proto.setProjection = function setProjection(projection, deps) {
-
-	if (this._projection){
-		throw new Error("projection is already set");
-	}
-
-
-	//dont think we need this yet
-
-//	this._projection = new Projection();
-//	this._projection.init(projection);
-//
-//	this.cursor().fields = this._projection;
-
-	this._projection = projection;  //just for testing
+	this._projection = projection;
 	this._dependencies = deps;
 };
 
-//----------------virtuals from DocumentSource--------------
-
 /**
- * Set the underlying source this source should use to get Documents
- * from.
- * It is an error to set the source more than once.  This is to
- * prevent changing sources once the original source has been started;
- * this could break the state maintained by the DocumentSource.
- * This pointer is not reference counted because that has led to
- * some circular references.  As a result, this doesn't keep
- * sources alive, and is only intended to be used temporarily for
- * the lifetime of a Pipeline::run().
  *
  * @method setSource
  * @param source   {DocumentSource}  the underlying source to use
  * @param callback  {Function}        a `mungedb-aggregate`-specific extension to the API to half-way support reading from async sources
  **/
 proto.setSource = function setSource(theSource) {
-	if (theSource) throw new Error("CursorDocumentSource doesn't take a source"); //TODO: This needs to put back without the if once async is fully and properly supported
+	throw new Error('this doesnt take a source');
 };
 
 proto.serialize = function serialize(explain) {
-	if (!explain)
-		return null;
 
-	if (!this._cursorWithContext)
-		throw new Error("code 17135; Cursor deleted.");
+	// we never parse a documentSourceCursor, so we only serialize for explain
+	if (!explain)
+		return {};
 
-	// A stab at what mongo wants
-	return {
+	var out = {};
+	out[this.getSourceName()] = {
 		query: this._query,
 		sort: this._sort ? this._sort : null,
-		limit: this._limit ? this._limit : null,
+		limit: this._limit ? this._limit.getLimit() : null,
 		fields: this._projection ? this._projection : null,
-		indexonly: false,
-		cursorType: this._cursorWithContext ? "cursor" : null
+		plan: this._runner.getInfo(explain)
 	};
+	return out;
 };
 
-// LimitDocumentSource has the setLimit function which trickles down to any documentsource
+/**
+ * returns -1 for no limit
+ *
+ * @method getLimit
+**/
 proto.getLimit = function getLimit() {
 	return this._limit ? this._limit.getLimit() : -1;
 };
 
-//----------------private--------------
-
-//proto.chunkMgr = function chunkMgr(){};
-
-//proto.canUseCoveredIndex = function canUseCoveredIndex(){};
-
-//proto.yieldSometimes = function yieldSometimes(){};
-
-proto.loadBatch = function loadBatch() {
-	var nDocs = 0,
-		cursor = this._cursorWithContext ? this._cursorWithContext._cursor : null;
-
-	if (!cursor)
-		return this.dispose();
-
-	for(;cursor.ok(); cursor.advance()) {
-		if (!cursor.ok())
-			break;
-
-		// these methods do not exist
-		// if (!cursor.currentMatches() || cursor.currentIsDup())
-		// continue;
-
-		var next = cursor.current();
-		this._currentBatch.push(this._projection ? base.documentFromJsonWithDeps(next, this._dependencies) : next);
-
-		if (this._limit) {
-			this._docsAddedToBatches++;
-			if (this._docsAddedToBatches == this._limit.getLimit())
-				break;
+/**
+ * Load a batch of documents from the Runner into the internal array
+ *
+ * @method loadBatch
+**/
+proto.loadBatch = function loadBatch(callback) {
+	if (!this._runner) {
+		this.dispose();
+		return callback;
+	}
 
-			if (this._docsAddedToBatches >= this._limit.getLimit()) {
-				throw new Error("added documents to the batch over limit size");
+	this._runner.restoreState();
+
+	var self = this,
+		whileBreak = false,		// since we are in an async loop instead of a normal while loop, need to mimic the
+		whileReturn = false;	// functionality.  These flags are similar to saying 'break' or 'return' from inside the loop
+	return async.whilst(
+		function test(){
+			return !whileBreak && !whileReturn;
+		},
+		function(next) {
+			return self._runner.getNext(function(err, obj, state){
+				if (err) return next(err);
+				if (state === Runner.RunnerState.RUNNER_ADVANCED) {
+					if (self._dependencies) {
+						self._currentBatch.push(self._dependencies.extractFields(obj));
+					} else {
+						self._currentBatch.push(obj);
+					}
+
+					if (self._limit) {
+						if (++self._docsAddedToBatches === self._limit.getLimit()) {
+							whileBreak = true;
+							return next();
+						}
+						//this was originally a 'verify' in the mongo code
+						if (self._docsAddedToBatches > self._limit.getLimit()){
+							return next(new Error('documents collected past the end of the limit'));
+						}
+					}
+
+					if (self._currentBatch >= klass.MaxDocumentsToReturnToClientAtOnce) {
+						// End this batch and prepare Runner for yielding.
+						self._runner.saveState();
+						whileReturn = true;
+					}
+				} else {
+					whileBreak = true;
+				}
+				return next();
+			});
+		},
+		function(err){
+			if (!whileReturn){
+				self._runner.reset();
 			}
+			callback(err);
 		}
-
-		// Mongo uses number of bytes, but that doesn't make sense here. Yield when nDocs is over a threshold
-		if (nDocs > MAX_BATCH_DOCS) {
-			this._curIdx++; // advance the deque
-			nDocs++;
-			return;
-		}
-	}
-
-	this._cursorWithContext = undefined;	//NOTE: Trying to emulate erasing the cursor; not exactly how mongo does it
+	);
 };

+ 7 - 21
lib/pipeline/documentSources/DocumentSource.js

@@ -37,22 +37,6 @@ var DocumentSource = module.exports = function DocumentSource(expCtx){
 
 }, klass = DocumentSource, base = Object, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
 
-/**
- * Use EOF as boost::none for document sources to signal the end of their document stream.
- **/
-klass.EOF = (function() {
-	/**
-	 * Represents a non-value in a document stream
-	 * @class EOF
-	 * @namespace mungedb-aggregate.pipeline.documentSources.DocumentSource
-	 * @module mungedb-aggregate
-	 * @constructor
-	 **/
-	var klass = function EOF(){
-	};
-	return klass;
-})();
-
 /*
 class DocumentSource :
 public IntrusiveCounterUnsigned,
@@ -83,7 +67,7 @@ proto.getPipelineStep = function getPipelineStep() {
 };
 
 /**
- * Returns the next Document if there is one or DocumentSource.EOF if at EOF.
+ * Returns the next Document if there is one or null if at EOF.
  *
  * some implementations do the equivalent of verify(!eof()) so check eof() first
  * @method	getNext
@@ -168,9 +152,11 @@ proto.optimize = function optimize() {
 };
 
 klass.GetDepsReturn = {
-	NOT_SUPPORTED: "NOT_SUPPORTED", // This means the set should be ignored
-	EXHAUSTIVE: "EXHAUSTIVE", // This means that everything needed should be in the set
-	SEE_NEXT: "SEE_NEXT" // Add the next Source's deps to the set
+	NOT_SUPPORTED: 0x0, // The full object and all metadata may be required
+	SEE_NEXT: 0x1, // Later stages could need either fields or metadata
+	EXHAUSTIVE_FIELDS: 0x2, // Later stages won't need more fields from input
+	EXHAUSTIVE_META: 0x4, // Later stages won't need more metadata from input
+	EXHAUSTIVE_ALL: 0x6 // Later stages won't need either
 };
 
 /**
@@ -204,7 +190,7 @@ proto.serializeToArray = function serializeToArray(array, explain) {
  * @method GET_NEXT_PASS_THROUGH
  * @param callback {Function}
  * @param callback.err {Error} An error or falsey
- * @param callback.doc {Object} The source's next object or DocumentSource.EOF
+ * @param callback.doc {Object} The source's next object or null
  **/
 klass.GET_NEXT_PASS_THROUGH = function GET_NEXT_PASS_THROUGH(callback) {
 	if (!callback) throw new Error(this.getSourceName() + ' #getNext() requires callback');

+ 4 - 0
lib/pipeline/documentSources/GeoNearDocumentSource.js

@@ -25,6 +25,10 @@ var GeoNearDocumentSource = module.exports = function GeoNearDocumentSource(ctx)
 
 klass.geoNearName = "$geoNear";
 
+klass.create = function create(expCtx) {
+	return new GeoNearDocumentSource(expCtx);
+};
+
 proto.getSourceName = function() {
 	return klass.geoNearName;
 };

+ 225 - 110
lib/pipeline/documentSources/GroupDocumentSource.js

@@ -21,22 +21,30 @@ var DocumentSource = require("./DocumentSource"),
  **/
 var GroupDocumentSource = module.exports = function GroupDocumentSource(expCtx) {
 	if (arguments.length > 1) throw new Error("up to one arg expected");
+	expCtx = !expCtx ? {} : expCtx;
 	base.call(this, expCtx);
 
 	this.populated = false;
-	this.idExpression = null;
+	this.doingMerge = false;
+	this.spilled = false;
+	this.extSortAllowed = expCtx.extSortAllowed && !expCtx.inRouter;
+
+	this.accumulatorFactories = [];
+	this.currentAccumulators = [];
 	this.groups = {}; // GroupsType Value -> Accumulators[]
 	this.groupsKeys = []; // This is to faciliate easier look up of groups
-	this.originalGroupsKeys = []; // This stores the original group key un-hashed/stringified/whatever
-	this._variables = null;
+	this.originalGroupsKeys = [];
+	this.variables = null;
 	this.fieldNames = [];
-	this.accumulatorFactories = [];
+	this.idFieldNames = [];
 	this.expressions = [];
-	this.currentDocument = null;
+	this.idExpressions = [];
 	this.currentGroupsKeysIndex = 0;
-
 }, klass = GroupDocumentSource, base = DocumentSource, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
 
+klass.isSplittableDocumentSource = true;
+
+// TODO: Do we need this?
 klass.groupOps = {
 	"$addToSet": Accumulators.AddToSetAccumulator.create,
 	"$avg": Accumulators.AvgAccumulator.create,
@@ -72,12 +80,16 @@ proto.getSourceName = function getSourceName() {
 };
 
 /**
- * Gets the next document or DocumentSource.EOF if none
+ * Gets the next document or null if none
  *
  * @method getNext
  * @return {Object}
  **/
 proto.getNext = function getNext(callback) {
+	if (!callback) throw new Error(this.getSourceName() + ' #getNext() requires callback.');
+	if (this.expCtx.checkForInterrupt && this.expCtx.checkForInterrupt() === false)
+		return callback(new Error("Interrupted"));
+
 	var self = this;
 	async.series([
 		function(next) {
@@ -89,25 +101,31 @@ proto.getNext = function getNext(callback) {
 				return next();
 		},
 		function(next) {
-			if(Object.keys(self.groups).length === 0) {
-				return next(null, DocumentSource.EOF);
-			}
-
-			//Note: Skipped the spilled logic
-
-			if(self.currentGroupsKeysIndex === self.groupsKeys.length) {
-				return next(null, DocumentSource.EOF);
-			}
-
-			var id = self.groupsKeys[self.currentGroupsKeysIndex],
-				accumulators = self.groups[id],
-				out = self.makeDocument(id, accumulators /*,mergeableOutput*/);
+			// NOTE: Skipped the spilled functionality
+			if (self.spilled) {
+				throw new Error("Spilled is not implemented.");
+			} else {
+				if(self.currentGroupsKeysIndex === self.groupsKeys.length) {
+					return next(null, null);
+				}
+				
+				var out;
+				try {
+					var id = self.originalGroupsKeys[self.currentGroupsKeysIndex],
+						stringifiedId = self.groupsKeys[self.currentGroupsKeysIndex],
+						accumulators = self.groups[stringifiedId];
+						
+					out = self.makeDocument(id, accumulators, self.expCtx.inShard);
+
+					if(++self.currentGroupsKeysIndex === self.groupsKeys.length) {
+						self.dispose();
+					}
+				} catch (ex) {
+					return next(ex);
+				}
 
-			if(++self.currentGroupsKeysIndex === self.groupsKeys.length) {
-				self.dispose();
+				return next(null, out);
 			}
-
-			return next(null, out);
 		}
 	], function(err, results) {
 		callback(err, results[1]);
@@ -134,8 +152,14 @@ proto.dispose = function dispose() {
  * @method optimize
  **/
 proto.optimize = function optimize() {
+	// TODO if all _idExpressions are ExpressionConstants after optimization, then we know there
+	// will only be one group. We should take advantage of that to avoid going through the hash
+	// table.
 	var self = this;
-	self.idExpression = self.idExpression.optimize();
+	self.idExpressions.forEach(function(expression, i) {
+		self.idExpressions[i] = expression.optimize();
+	});
+
 	self.expressions.forEach(function(expression, i) {
 		self.expressions[i] = expression.optimize();
 	});
@@ -149,25 +173,38 @@ proto.optimize = function optimize() {
  * @param explain {Boolean} Create explain output
  **/
 proto.serialize = function serialize(explain) {
-	var insides = {};
+	var self = this,
+		insides = {};
 
 	// add the _id
-	insides._id = this.idExpression.serialize(explain);
+	if (self.idFieldNames.length === 0) {
+		if (self.idExpressions.length !== 1) throw new Error("Should only have one _id field");
+		insides._id = self.idExpressions[0].serialize(explain);
+	} else {
+		if (self.idExpressions.length !== self.idFieldNames.length)
+			throw new Error("Should have the same number of idExpressions and idFieldNames.");
+
+		var md = {};
+		self.idExpressions.forEach(function(expression, i) {
+			md[self.idFieldNames[i]] = expression.serialize(explain);
+		});
+		insides._id = md;
+	}
 
 	//add the remaining fields
-	var aFacs = this.accumulatorFactories,
+	var aFacs = self.accumulatorFactories,
 		aFacLen = aFacs.length;
 
 	for(var i=0; i < aFacLen; i++) {
-		var aFac = aFacs[i](),
-			serialExpression = this.expressions[i].serialize(explain), //Get the accumulator's expression
+		var aFac = new aFacs[i](),
+			serialExpression = self.expressions[i].serialize(explain), //Get the accumulator's expression
 			serialAccumulator = {}; //Where we'll put the expression
 		serialAccumulator[aFac.getOpName()] = serialExpression;
-		insides[this.fieldNames[i]] = serialAccumulator;
+		insides[self.fieldNames[i]] = serialAccumulator;
 	}
 
 	var serialSource = {};
-	serialSource[this.getSourceName()] = insides;
+	serialSource[self.getSourceName()] = insides;
 	return serialSource;
 };
 
@@ -192,31 +229,13 @@ klass.createFromJson = function createFromJson(elem, expCtx) {
 			var groupField = groupObj[groupFieldName];
 
 			if (groupFieldName === "_id") {
-
 				if(idSet) throw new Error("15948 a group's _id may only be specified once");
 
-				if (groupField instanceof Object && groupField.constructor === Object) {
-					/*
-						Use the projection-like set of field paths to create the
-						group-by key.
-					*/
-					var objCtx = new Expression.ObjectCtx({isDocumentOk:true});
-					group.setIdExpression(Expression.parseObject(groupField, objCtx, vps));
-					idSet = true;
-
-				} else if (typeof groupField === "string") {
-					if (groupField[0] === "$") {
-						group.setIdExpression(FieldPathExpression.parse(groupField, vps));
-						idSet = true;
-					}
-				}
-
-				if (!idSet) {
-					// constant id - single group
-					group.setIdExpression(ConstantExpression.create(groupField));
-					idSet = true;
-				}
+				group.parseIdExpression(groupField, vps);
+				idSet = true;
 
+			} else if (groupFieldName === '$doingMerge' && groupField) {
+				throw new Error("17030 $doingMerge should be true if present");
 			} else {
 				/*
 					Treat as a projection field with the additional ability to
@@ -255,7 +274,7 @@ klass.createFromJson = function createFromJson(elem, expCtx) {
 
 	if (!idSet) throw new Error("15955 a group specification must include an _id");
 
-	group._variables = new Variables(idGenerator.getIdCount());
+	group.variables = new Variables(idGenerator.getIdCount());
 
 	return group;
 };
@@ -269,6 +288,7 @@ klass.createFromJson = function createFromJson(elem, expCtx) {
  **/
 proto.populate = function populate(callback) {
 	var numAccumulators = this.accumulatorFactories.length;
+	// NOTE: this is not in mongo, does it belong here?
 	if(numAccumulators !== this.expressions.length) {
 		callback(new Error("Must have equal number of accumulators and expressions"));
 	}
@@ -277,52 +297,55 @@ proto.populate = function populate(callback) {
 		self = this;
 	async.whilst(
 		function() {
-			return input !== DocumentSource.EOF;
+			return input !== null;
 		},
 		function(cb) {
 			self.source.getNext(function(err, doc) {
 				if(err) return cb(err);
-				if(doc === DocumentSource.EOF) {
+				if(doc === null) {
 					input = doc;
 					return cb(); //Need to stop now, no new input
 				}
+				try {
+					input = doc;
+					self.variables.setRoot(input);
 
-				input = doc;
-				self._variables.setRoot(input);
-
-				/* get the _id value */
-				var id = self.idExpression.evaluate(self._variables);
+					/* get the _id value */
+					var id = self.computeId(self.variables);
 
-				if(undefined === id) id = null;
+					if(undefined === id) id = null;
 
-				var groupKey = JSON.stringify(id),
-					group = self.groups[JSON.stringify(id)];
+					var groupKey = JSON.stringify(id),
+						group = self.groups[groupKey];
 
-				if(!group) {
-					self.groupsKeys.push(groupKey);
-					group = [];
-					self.groups[groupKey] = group;
-					// Add the accumulators
-					for(var afi = 0; afi<self.accumulatorFactories.length; afi++) {
-						group.push(self.accumulatorFactories[afi]());
+					if(!group) {
+						self.originalGroupsKeys.push(id);
+						self.groupsKeys.push(groupKey);
+						group = [];
+						self.groups[groupKey] = group;
+						// Add the accumulators
+						for(var afi = 0; afi<self.accumulatorFactories.length; afi++) {
+							group.push(new self.accumulatorFactories[afi]());
+						}
 					}
-				}
-				//NOTE: Skipped memory usage stuff for case when group already existed
-
-				if(numAccumulators !== group.length) {
-					throw new Error('Group must have one of each accumulator');
-				}
+					//NOTE: Skipped memory usage stuff for case when group already existed
 
-				//NOTE: passing the input to each accumulator
-				for(var gi=0; gi<group.length; gi++) {
-					group[gi].process(self.expressions[gi].evaluate(self._variables /*, doingMerge*/));
-				}
+					if(numAccumulators !== group.length) {
+						throw new Error('Group must have one of each accumulator');
+					}
 
-				// We are done with the ROOT document so release it.
-				self._variables.clearRoot();
+					//NOTE: passing the input to each accumulator
+					for(var gi=0; gi<group.length; gi++) {
+						group[gi].process(self.expressions[gi].evaluate(self.variables, self.doingMerge));
+					}
 
-				//NOTE: Skipped the part about sorted files
+					// We are done with the ROOT document so release it.
+					self.variables.clearRoot();
 
+					//NOTE: Skipped the part about sorted files
+				} catch (ex) {
+					return cb(ex);
+				}
 				return cb();
 			});
 		},
@@ -336,20 +359,6 @@ proto.populate = function populate(callback) {
 	);
 };
 
-/**
- * Get the type of something. Handles objects specially to return their true type; i.e. their constructor
- *
- * @method populate
- * @param obj {Object} The object to get the type of
- * @return {String} The type of the object as a string
- * @async
- **/
-proto._getTypeStr = function _getTypeStr(obj) {
-	var typeofStr = typeof obj,
-		typeStr = (typeofStr == "object" && obj !== null) ? obj.constructor.name : typeofStr;
-	return typeStr;
-};
-
 /**
  * Get the dependencies of the group
  *
@@ -361,13 +370,15 @@ proto._getTypeStr = function _getTypeStr(obj) {
 proto.getDependencies = function getDependencies(deps) {
 	var self = this;
 	// add _id
-	this.idExpression.addDependencies(deps);
+	this.idExpressions.forEach(function(expression, i) {
+		expression.addDependencies(deps);
+	});
 	// add the rest
 	this.fieldNames.forEach(function (field, i) {
 		self.expressions[i].addDependencies(deps);
 	});
 
-	return DocumentSource.GetDepsReturn.EXHAUSTIVE;
+	return DocumentSource.GetDepsReturn.EXHAUSTIVE_ALL;
 };
 
 /**
@@ -392,16 +403,16 @@ proto.addAccumulator = function addAccumulator(fieldName, accumulatorFactory, ex
  * @param accums {Array} An array of accumulators
  * @param epxression {Expression} The expression to be evaluated on incoming documents before they are accumulated
  **/
-proto.makeDocument = function makeDocument(id, accums /*,mergeableOutput*/) {
+proto.makeDocument = function makeDocument(id, accums, mergeableOutput) {
 	var out = {};
 
 	/* add the _id field */
-	out._id = id;
+	out._id = this.expandId(id);
 
 	/* add the rest of the fields */
 	this.fieldNames.forEach(function(fieldName, i) {
-		var val = accums[i].getValue(/*mergeableOutput*/);
-		if(!val) {
+		var val = accums[i].getValue(mergeableOutput);
+		if (!val) {
 			out[fieldName] = null;
 		} else {
 			out[fieldName] = val;
@@ -412,11 +423,115 @@ proto.makeDocument = function makeDocument(id, accums /*,mergeableOutput*/) {
 };
 
 /**
- * Sets the id expression for the group
+ * Computes the internal representation of the group key.
+ *
+ * @method computeId
+ * @param vars a VariablesParseState
+ * @return vals
+ */
+proto.computeId = function computeId(vars) {
+	var self = this;
+	// If only one expression return result directly
+	if (self.idExpressions.length === 1)
+		return self.idExpressions[0].evaluate(vars); // NOTE: this will probably need to be async soon
+
+	// Multiple expressions get results wrapped in an array
+	var vals = [];
+	self.idExpressions.forEach(function(expression, i) {
+		vals.push(expression.evaluate(vars));
+	});
+
+	return vals;
+};
+
+/**
+ * Converts the internal representation of the group key to the _id shape specified by the
+ * user.
+ *
+ * @method expandId
+ * @param val
+ * @return document representing an id
+ */
+proto.expandId = function expandId(val) {
+	var self = this;
+	// _id doesn't get wrapped in a document
+	if (self.idFieldNames.length === 0)
+		return val;
+
+	var doc = {};
+
+	// _id is a single-field document containing val
+	if (self.idFieldNames.length === 1) {
+		doc[self.idFieldNames[0]] = val;
+		return doc;
+	}
+
+	// _id is a multi-field document containing the elements of val
+	val.forEach(function(v, i) {
+		doc[self.idFieldNames[i]] = v;
+	});
+
+	return doc;
+};
+
+/**
+ * Parses the raw id expression into _idExpressions and possibly _idFieldNames.
+ *
+ * @method parseIdExpression
+ * @param groupField {Object} The object with the spec
+ */
+proto.parseIdExpression = function parseIdExpression(groupField, vps) {
+	var self = this;
+	if (self._getTypeStr(groupField) === 'Object' && Object.keys(groupField).length !== 0) {
+		// {_id: {}} is treated as grouping on a constant, not an expression
+
+		var idKeyObj = groupField;
+		if (Object.keys(idKeyObj)[0][0] == '$') {
+			var objCtx = new Expression.ObjectCtx({});
+			self.idExpressions.push(Expression.parseObject(idKeyObj, objCtx, vps));
+		} else {
+			Object.keys(idKeyObj).forEach(function(key, i) {
+				var field = {}; //idKeyObj[key];
+				field[key] = idKeyObj[key];
+				self.idFieldNames.push(key);
+				self.idExpressions.push(Expression.parseOperand(field[key], vps));
+			});
+		}
+	} else if (self._getTypeStr(groupField) === 'string' && groupField[0] === '$') {
+		self.idExpressions.push(FieldPathExpression.parse(groupField, vps));
+	} else {
+		self.idExpressions.push(ConstantExpression.create(groupField));
+	}
+};
+
+/**
+ * Get the type of something. Handles objects specially to return their true type; i.e. their constructor
  *
- * @method setIdExpression
- * @param epxression {Expression} The expression to set
+ * @method _getTypeStr
+ * @param obj {Object} The object to get the type of
+ * @return {String} The type of the object as a string
  **/
-proto.setIdExpression = function setIdExpression(expression) {
-	this.idExpression = expression;
+proto._getTypeStr = function _getTypeStr(obj) {
+	var typeofStr = typeof obj,
+		typeStr = (typeofStr == "object" && obj !== null) ? obj.constructor.name : typeofStr;
+	return typeStr;
+};
+
+proto.getShardSource = function getShardSource() {
+	return this;
+};
+
+proto.getMergeSource = function getMergeSource() {
+	var self = this,
+		merger = klass.create(this.expCtx);
+
+	var idGenerator = new VariablesIdGenerator(),
+		vps = new VariablesParseState(idGenerator);
+
+	merger.idExpressions.push(FieldPathExpression.parse("$$ROOT._id", vps));
+	for (var i = 0; i < self.fieldNames.length; i++) {
+		merger.addAccumulator(self.fieldNames[i], self.accumulatorFactories[i], FieldPathExpression.create("$$ROOT." + self.fieldNames[i], vps));
+	}
+
+	return merger;
 };

+ 29 - 15
lib/pipeline/documentSources/LimitDocumentSource.js

@@ -10,10 +10,10 @@ var DocumentSource = require('./DocumentSource');
  * @constructor
  * @param [ctx] {ExpressionContext}
  **/
-var LimitDocumentSource = module.exports = function LimitDocumentSource(ctx){
-	if (arguments.length > 1) throw new Error("up to one arg expected");
+var LimitDocumentSource = module.exports = function LimitDocumentSource(ctx, limit){
+	if (arguments.length > 2) throw new Error("up to two args expected");
 	base.call(this, ctx);
-	this.limit = 0;
+	this.limit = limit;
 	this.count = 0;
 }, klass = LimitDocumentSource, base = require('./DocumentSource'), proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
 
@@ -44,32 +44,46 @@ proto.coalesce = function coalesce(nextSource) {
 	return true;
 };
 
+/* Passes the next document from the underlying source to the
+* callback, or null once the limit has been exceeded.
+* @param {function} callback
+* @return {bool} indicating end of documents reached
+*/
 proto.getNext = function getNext(callback) {
 	if (!callback) throw new Error(this.getSourceName() + ' #getNext() requires callback');
 
+	if (this.expCtx instanceof Object && this.expCtx.checkForInterrupt && this.expCtx.checkForInterrupt() === false)
+		return callback(new Error("Interrupted"));
+
 	if (++this.count > this.limit) {
 		this.source.dispose();
-		callback(null, DocumentSource.EOF);
-		return DocumentSource.EOF;
+		callback(null, null);
+		return null;
 	}
 
 	return this.source.getNext(callback);
 };
 
 /**
- * Creates a new LimitDocumentSource with the input number as the limit
- * @param {Number} JsonElement this thing is *called* Json, but it expects a number
- **/
-klass.createFromJson = function createFromJson(jsonElement, ctx) {
-	if (typeof jsonElement !== "number") throw new Error("code 15957; the limit must be specified as a number");
+Create a limiting DocumentSource from JSON.
 
-	var Limit = proto.getFactory(),
-		nextLimit = new Limit(ctx);
+This is a convenience method that uses the above, and operates on
+a JSONElement that has been determined to be an Object with an
+element named $limit.
 
-	nextLimit.limit = jsonElement;
-	if ((nextLimit.limit <= 0) || isNaN(nextLimit.limit)) throw new Error("code 15958; the limit must be positive");
+@param jsonElement the JSONELement that defines the limit
+@param ctx the expression context
+@returns the grouping DocumentSource
+*/
+klass.createFromJson = function createFromJson(jsonElement, ctx) {
+	if (typeof jsonElement !== "number") throw new Error("code 15957; the limit must be specified as a number");
+	var limit = jsonElement;
+	return klass.create(ctx, limit);
+};
 
-	return nextLimit;
+klass.create = function create(ctx, limit){
+	if ((limit <= 0) || isNaN(limit)) throw new Error("code 15958; the limit must be positive");
+	return new LimitDocumentSource(ctx, limit);
 };
 
 proto.getLimit = function getLimit(newLimit) {

+ 44 - 10
lib/pipeline/documentSources/MatchDocumentSource.js

@@ -22,6 +22,13 @@ var MatchDocumentSource = module.exports = function MatchDocumentSource(query, c
 	base.call(this, ctx);
 	this.query = query; // save the query, so we can check it for deps later. THIS IS A DEVIATION FROM THE MONGO IMPLEMENTATION
 	this.matcher = new matcher(query);
+
+	// not supporting currently $text operator
+	// set _isTextQuery to false.
+	// TODO: update after we implement $text.
+	if (klass.isTextQuery(query)) throw new Error("$text pipeline operation not supported");
+	this._isTextQuery = false;
+
 }, klass = MatchDocumentSource, base = require('./DocumentSource'), proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
 
 klass.matchName = "$match";
@@ -33,15 +40,19 @@ proto.getSourceName = function getSourceName(){
 proto.getNext = function getNext(callback) {
 	if (!callback) throw new Error(this.getSourceName() + ' #getNext() requires callback');
 
+	if (this.expCtx.checkForInterrupt && this.expCtx.checkForInterrupt() === false) {
+		return callback(new Error('Interrupted'));
+	}
+
 	var self = this,
 		next,
 		test = function test(doc) {
 			return self.matcher.matches(doc);
 		},
 		makeReturn = function makeReturn(doc) {
-			if(doc !== DocumentSource.EOF && test(doc)) { // Passes the match criteria
+			if(doc !== null && test(doc)) { // Passes the match criteria
 				return doc;
-			} else if(doc === DocumentSource.EOF){ // Got EOF
+			} else if(doc === null){ // Got EOF
 				return doc;
 			}
 			return undefined; // Didn't match, but not EOF
@@ -49,15 +60,19 @@ proto.getNext = function getNext(callback) {
 	async.doUntil(
 		function(cb) {
 			self.source.getNext(function(err, doc) {
-				if(err) return callback(err);
-				if (makeReturn(doc)) {
-					next = doc;
+				if(err) return cb(err);
+				try {
+					if (makeReturn(doc) !== undefined) {
+						next = doc;
+					}
+				} catch (ex) {
+					return cb(ex);
 				}
 				return cb();
 			});
 		},
 		function() {
-			var foundDoc = (next === DocumentSource.EOF || next !== undefined);
+			var foundDoc = (next === null || next !== undefined);
 			return foundDoc; //keep going until doc is found
 		},
 		function(err) {
@@ -86,11 +101,11 @@ klass.uassertNoDisallowedClauses = function uassertNoDisallowedClauses(query) {
 	for(var key in query){
 		if(query.hasOwnProperty(key)){
 			// can't use the Matcher API because this would segfault the constructor
-			if (query[key] == "$where") throw new Error("code 16395; $where is not allowed inside of a $match aggregation expression");
+			if (key === "$where") throw new Error("code 16395; $where is not allowed inside of a $match aggregation expression");
 			// geo breaks if it is not the first portion of the pipeline
-			if (query[key] == "$near") throw new Error("code 16424; $near is not allowed inside of a $match aggregation expression");
-			if (query[key] == "$within") throw new Error("code 16425; $within is not allowed inside of a $match aggregation expression");
-			if (query[key] == "$nearSphere") throw new Error("code 16426; $nearSphere is not allowed inside of a $match aggregation expression");
+			if (key === "$near") throw new Error("code 16424; $near is not allowed inside of a $match aggregation expression");
+			if (key === "$within") throw new Error("code 16425; $within is not allowed inside of a $match aggregation expression");
+			if (key === "$nearSphere") throw new Error("code 16426; $nearSphere is not allowed inside of a $match aggregation expression");
 			if (query[key] instanceof Object && query[key].constructor === Object) this.uassertNoDisallowedClauses(query[key]);
 		}
 	}
@@ -103,6 +118,25 @@ klass.createFromJson = function createFromJson(jsonElement, ctx) {
 	return matcher;
 };
 
+proto.isTextQuery = function isTextQuery() {
+    return this._isTextQuery;
+};
+
+klass.isTextQuery = function isTextQuery(query) {
+    for (var key in query) {
+        var fieldName = key;
+        if (fieldName === "$text") return true;
+        if (query[key] instanceof Object && query[key].constructor === Object && this.isTextQuery(query[key])) {
+            return true;
+        }
+    }
+    return false;
+};
+
+klass.setSource = function setSource (source) {
+	this.setSource(source);
+};
+
 proto.getQuery = function getQuery() {
 	return this.matcher._pattern;
 };

+ 19 - 3
lib/pipeline/documentSources/OutDocumentSource.js

@@ -10,13 +10,13 @@ var DocumentSource = require('./DocumentSource');
  * @param [ctx] {ExpressionContext}
  **/
 var OutDocumentSource = module.exports = function OutDocumentSource(outputNs, ctx){
-	if (arguments.length > 2) throw new Error("up to two arg expected");
+	if (arguments.length > 2) throw new Error("up to two args expected");
 	base.call(this, ctx);
 	// defaults
 	this._done = false;
 	this._outputNs = outputNs;
 	this._collectionName = "";
-}, klass = OutDocumentSource, base = require('./DocumentSource'), proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
+}, klass = OutDocumentSource, base = DocumentSource, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
 
 klass.outName = "$out";
 
@@ -47,7 +47,23 @@ klass.createFromJson = function(jsonElement, ctx) {
 	return out;
 };
 
+// SplittableDocumentSource implementation.
+// Mongo doesn't fully implement SplittableDocumentSource on DocumentSourceOut.
+//	It doesn't implement getShardSource or getMergeSource
+klass.isSplittableDocumentSource = true;
+
+proto.getShardSource = function getShardSource() {
+	return null;
+};
+
+proto.getMergeSource = function getMergeSource() {
+	return this;
+};
+
+//NeedsMongodDocumentSource implementation
+klass.needsMongodDocumentSource = true;
+
 proto.getDependencies = function(deps) {
 	deps.needWholeDocument = true;
-	return DocumentSource.GetDepsReturn.EXHAUSTIVE;
+	return DocumentSource.GetDepsReturn.EXHAUSTIVE_ALL;
 };

+ 46 - 47
lib/pipeline/documentSources/ProjectDocumentSource.js

@@ -10,11 +10,12 @@ var DocumentSource = require('./DocumentSource');
  * @constructor
  * @param [ctx] {ExpressionContext}
  **/
-var ProjectDocumentSource = module.exports = function ProjectDocumentSource(ctx){
-	if (arguments.length > 1) throw new Error("up to one arg expected");
+var ProjectDocumentSource = module.exports = function ProjectDocumentSource(ctx, exprObj){
+	if (arguments.length > 2) throw new Error("up to two args expected");
 	base.call(this, ctx);
 	this.OE = ObjectExpression.create();
 	this._raw = undefined;
+	this._variables = undefined;
 }, klass = ProjectDocumentSource, base = require('./DocumentSource'), proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
 
 // DEPENDENCIES
@@ -45,9 +46,9 @@ proto.getNext = function getNext(callback) {
 		if (err)
 			return callback(null, err);
 
-		if (input === DocumentSource.EOF) {
+		if (input === null) {
 			out = input;
-			return callback(null, DocumentSource.EOF);
+			return callback(null, null);
 		}
 
 		/* create the result document */
@@ -59,9 +60,13 @@ proto.getNext = function getNext(callback) {
 		 * If we're excluding fields at the top level, leave out the _id if
 		 * it is found, because we took care of it above.
 		 **/
-		self._variables.setRoot(input);
-		self.OE.addToDocument(out, input, self._variables);
-		self._variables.clearRoot();
+		try {
+			self._variables.setRoot(input);
+			self.OE.addToDocument(out, input, self._variables);
+			self._variables.clearRoot();
+		} catch (ex){
+			return callback(ex);
+		}
 
 		return callback(null, out);
 	});
@@ -69,11 +74,11 @@ proto.getNext = function getNext(callback) {
 };
 
 /**
- * Returns the object that was used to construct the ProjectDocumentSource
- * @return {object} the object that was used to construct the ProjectDocumentSource
+ * Optimizes the internal ObjectExpression
+ * @return
  **/
-proto.getRaw = function getRaw() {
-	return this._raw;
+proto.optimize = function optimize() {
+	this.OE = this.OE.optimize();
 };
 
 proto.serialize = function serialize(explain) {
@@ -82,38 +87,13 @@ proto.serialize = function serialize(explain) {
 	return out;
 };
 
-/**
- * Optimizes the internal ObjectExpression
- * @return
- **/
-proto.optimize = function optimize() {
-	this.OE.optimize();
-};
-
-proto.toJSON = function toJSON(){
-	var obj = {};
-	this.sourceToJson(obj);
-	return obj;
-};
-
-/**
- * Places a $project key inside the builder object with value of this.OE
- * @method sourceToJson
- * @param {builder} An object (was ported from BsonBuilder)
- * @return
- **/
-proto.sourceToJson = function sourceToJson(builder, explain) {
-	var insides = this.OE.toJSON(true);
-	builder[this.getSourceName()] = insides;
-};
-
 /**
  * Builds a new ProjectDocumentSource from an object
  * @method createFromJson
  * @return {ProjectDocmentSource} a ProjectDocumentSource instance
  **/
-klass.createFromJson = function(jsonElement, expCtx) {
-	if (!(jsonElement instanceof Object) || jsonElement.constructor !== Object) throw new Error('Error 15969. Specification must be an object but was ' + typeof jsonElement);
+klass.createFromJson = function(elem, expCtx) {
+	if (!(elem instanceof Object) || elem.constructor !== Object) throw new Error('Error 15969. Specification must be an object but was ' + typeof elem);
 
 	var objectContext = new Expression.ObjectCtx({
 		isDocumentOk: true,
@@ -121,17 +101,22 @@ klass.createFromJson = function(jsonElement, expCtx) {
 		isInclusionOk: true
 	});
 
-	var project = new ProjectDocumentSource(expCtx),
-		idGenerator = new VariablesIdGenerator(),
-		vps = new VariablesParseState(idGenerator);
+	var idGenerator = new VariablesIdGenerator(),
+		vps = new VariablesParseState(idGenerator),
+		parsed = Expression.parseObject(elem, objectContext, vps),
+		exprObj = parsed;
 
-	project._raw = jsonElement;
-	var parsed = Expression.parseObject(jsonElement, objectContext, vps);
-	var exprObj = parsed;
 	if (!exprObj instanceof ObjectExpression) throw new Error("16402, parseObject() returned wrong type of Expression");
-	if (!exprObj.getFieldCount()) throw new Error("16403, $projection requires at least one output field");
-	project.OE = exprObj;
+	//if (!exprObj.getFieldCount() ) throw new Error("uassert 16403: $project requires at least one output field");
+
+	var project = new ProjectDocumentSource(expCtx, exprObj);
 	project._variables = new Variables(idGenerator.getIdCount());
+
+	var projectObj = elem;
+	project.OE = exprObj;
+
+	project._raw = elem;
+
 	return project;
 };
 
@@ -143,5 +128,19 @@ klass.createFromJson = function(jsonElement, expCtx) {
 proto.getDependencies = function getDependencies(deps) {
 	var path = [];
 	this.OE.addDependencies(deps, path);
-	return base.GetDepsReturn.EXHAUSTIVE;
+	return base.GetDepsReturn.EXHAUSTIVE_FIELDS;
+};
+
+/**
+ * Returns the object that was used to construct the ProjectDocumentSource
+ * @return {object} the object that was used to construct the ProjectDocumentSource
+ **/
+proto.getRaw = function getRaw() {
+	return this._raw;
+};
+
+proto.toJSON = function toJSON(){
+	var obj = {};
+	this.sourceToJson(obj);
+	return obj;
 };

+ 20 - 13
lib/pipeline/documentSources/RedactDocumentSource.js

@@ -37,25 +37,32 @@ proto.getNext = function getNext(callback) {
 		doc;
 	async.whilst(
 		function() {
-			return doc !== DocumentSource.EOF;
+			return doc !== null;
 		},
 		function(cb) {
 			self.source.getNext(function(err, input) {
 				doc = input;
-				if (input === DocumentSource.EOF)
+				if (input === null)
 					return cb();
-				self._variables.setRoot(input);
-				self._variables.setValue(self._currentId, input);
-				var result = self.redactObject();
-				if (result !== DocumentSource.EOF)
+				var result;
+				try {
+					self._variables.setRoot(input);
+					self._variables.setValue(self._currentId, input);
+					result = self.redactObject();
+				} catch (ex) {
+					return cb(ex);
+				}
+				if (result !== null)
 					return cb(result); //Using the err argument to pass the result document; this lets us break out without having EOF
 				return cb();
 			});
 		},
 		function(doc) {
-			if (doc)
-				return callback(null, doc);
-			return callback(null, DocumentSource.EOF);
+			if (doc){
+				if (doc instanceof Error) return callback(doc);
+				else return callback(null, doc);
+			}
+			return callback(null, null);
 		}
 	);
 	return doc;
@@ -64,7 +71,7 @@ proto.getNext = function getNext(callback) {
 proto.redactValue = function redactValue(input) {
 	// reorder to make JS happy with types
 	if (input instanceof Array) {
-		var newArr,
+		var newArr = [],
 			arr = input;
 		for (var i = 0; i < arr.length; i++) {
 			if ((arr[i] instanceof Object && arr[i].constructor === Object) || arr[i] instanceof Array) {
@@ -79,7 +86,7 @@ proto.redactValue = function redactValue(input) {
 	} else if (input instanceof Object && input.constructor === Object) {
 		this._variables.setValue(this._currentId, input);
 		var result = this.redactObject();
-		if (result !== DocumentSource.EOF)
+		if (result !== null)
 			return result;
 		return null;
 	} else {
@@ -96,10 +103,10 @@ proto.redactObject = function redactObject() {
 	if (expressionResult === KEEP_VAL) {
 		return this._variables.getDocument(this._currentId);
 	} else if (expressionResult === PRUNE_VAL) {
-		return DocumentSource.EOF;
+		return null;
 	} else if (expressionResult === DESCEND_VAL) {
 		var input = this._variables.getDocument(this._currentId);
-		var out;
+		var out = {};
 
 		var inputKeys = Object.keys(input);
 		for (var i = 0; i < inputKeys.length; i++) {

+ 127 - 28
lib/pipeline/documentSources/SkipDocumentSource.js

@@ -4,64 +4,95 @@ var async = require('async'),
 	DocumentSource = require('./DocumentSource');
 
 /**
- * A document source skipper
+ * A document source skipper.
+ *
  * @class SkipDocumentSource
  * @namespace mungedb-aggregate.pipeline.documentSources
  * @module mungedb-aggregate
  * @constructor
  * @param [ctx] {ExpressionContext}
  **/
-var SkipDocumentSource = module.exports = function SkipDocumentSource(ctx){
-	if (arguments.length > 1) throw new Error("up to one arg expected");
+var SkipDocumentSource = module.exports = function SkipDocumentSource(ctx) {
+	if (arguments.length > 1) {
+		throw new Error('Up to one argument expected.');
+	}
+
 	base.call(this, ctx);
+
 	this.skip = 0;
 	this.count = 0;
-}, klass = SkipDocumentSource, base = require('./DocumentSource'), proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
 
-klass.skipName = "$skip";
-proto.getSourceName = function getSourceName(){
+	this.needToSkip = true;
+}, klass = SkipDocumentSource, base = require('./DocumentSource'), proto = klass.prototype = Object.create(base.prototype, {constructor: {value: klass}});
+
+klass.skipName = '$skip';
+
+/**
+ * Return the source name.
+ *
+ * @returns {string}
+ */
+proto.getSourceName = function getSourceName() {
 	return klass.skipName;
 };
 
 /**
- * Coalesce skips together
- * @param {Object} nextSource the next source
- * @return {bool} return whether we can coalese together
- **/
+ * Coalesce skips together.
+ *
+ * @param nextSource
+ * @returns {boolean}
+ */
 proto.coalesce = function coalesce(nextSource) {
-	var nextSkip =	nextSource.constructor === SkipDocumentSource?nextSource:null;
+	var nextSkip =	nextSource.constructor === SkipDocumentSource ? nextSource : null;
 
-	// if it's not another $skip, we can't coalesce
-	if (!nextSkip) return false;
+	// If it's not another $skip, we can't coalesce.
+	if (!nextSkip) {
+		return false;
+	}
 
-	// we need to skip over the sum of the two consecutive $skips
+	// We need to skip over the sum of the two consecutive $skips.
 	this.skip += nextSkip.skip;
+
 	return true;
 };
 
+/**
+ * Get the next document from the source, after skipping.
+ *
+ * @param callback
+ * @returns {*}
+ */
 proto.getNext = function getNext(callback) {
-	if (!callback) throw new Error(this.getSourceName() + ' #getNext() requires callback');
+	if (!callback) {
+		throw new Error(this.getSourceName() + ' #getNext() requires callback.');
+	}
+
+	if (this.expCtx.checkForInterrupt && this.expCtx.checkForInterrupt() === false) {
+		return callback(new Error('Interrupted'));
+	}
 
 	var self = this,
 		next;
 
-	if (this.count < this.skip) {
+	if (this.needToSkip) { // May be unnecessary.
+		this.needToSkip = false;
 
 		async.doWhilst(
-			function(cb) {
-				self.source.getNext(function(err, val) {
-					if(err) return cb(err);
-					self.count++;
+			function (cb) {
+				self.source.getNext(function (err, val) {
+					if (err) { return cb(err); }
+
+					++self.count;
 					next = val;
+
 					return cb();
 				});
 			},
 			function() {
-				return self.count < self.skip || next === DocumentSource.EOF;
+				return self.count < self.skip || next === null;
 			},
-			function(err) {
-				if (err)
-					return callback(err);
+			function (err) {
+				if (err) { return callback(err); }
 			}
 		);
 	}
@@ -69,28 +100,96 @@ proto.getNext = function getNext(callback) {
 	return this.source.getNext(callback);
 };
 
+/**
+ * Serialize the source.
+ *
+ * @param explain
+ * @returns {{}}
+ */
 proto.serialize = function serialize(explain) {
 	var out = {};
+
 	out[this.getSourceName()] = this.skip;
+
 	return out;
 };
 
+/**
+ * Get skip value.
+ *
+ * @returns {number}
+ */
 proto.getSkip = function getSkip() {
 	return this.skip;
 };
 
 /**
- * Creates a new SkipDocumentSource with the input number as the skip
+ * Set skip value.
  *
- * @param {Number} JsonElement this thing is *called* Json, but it expects a number
+ * @param newSkip
+ */
+proto.setSkip = function setSkip(newSkip) {
+	this.skip = newSkip;
+};
+
+/**
+ * Create a new SkipDocumentSource.
+ *
+ * @param expCtx
+ * @returns {SkipDocumentSource}
+ */
+klass.create = function create(expCtx) {
+	return new SkipDocumentSource(expCtx);
+};
+
+/**
+ * Creates a new SkipDocumentSource with the input number as the skip.
+ *
+ * @param {Number} JsonElement this thing is *called* JSON, but it expects a number.
  **/
 klass.createFromJson = function createFromJson(jsonElement, ctx) {
-	if (typeof jsonElement !== "number") throw new Error("code 15972; the value to skip must be a number");
+	if (typeof jsonElement !== 'number') {
+		throw new Error('code 15972; the value to skip must be a number');
+	}
 
 	var nextSkip = new SkipDocumentSource(ctx);
 
 	nextSkip.skip = jsonElement;
-	if (nextSkip.skip < 0 || isNaN(nextSkip.skip)) throw new Error("code 15956; the number to skip cannot be negative");
+
+	if (nextSkip.skip < 0 || isNaN(nextSkip.skip)) {
+		throw new Error('code 15956; the number to skip cannot be negative');
+	}
 
 	return nextSkip;
 };
+
+// SplittableDocumentSource implementation.
+klass.isSplittableDocumentSource = true;
+
+/**
+ * Get dependencies.
+ *
+ * @param deps
+ * @returns {string} a DocumentSource.GetDepsReturn value
+ */
+proto.getDependencies = function getDependencies(deps) {
+	return DocumentSource.GetDepsReturn.SEE_NEXT;
+};
+
+/**
+ * Get shard source.
+ *
+ * @returns {null}
+ */
+proto.getShardSource = function getShardSource() {
+	return null;
+};
+
+/**
+ * Get router source.
+ *
+ * @returns {SkipDocumentSource}
+ */
+proto.getRouterSource = function getRouterSource() {
+	return this;
+};

+ 285 - 82
lib/pipeline/documentSources/SortDocumentSource.js

@@ -2,7 +2,8 @@
 
 var async = require("async"),
 	DocumentSource = require("./DocumentSource"),
-	LimitDocumentSource = require("./LimitDocumentSource");
+	LimitDocumentSource = require("./LimitDocumentSource"),
+	Document = require('../Document');
 
 /**
  * A document source sorter
@@ -34,6 +35,9 @@ var SortDocumentSource = module.exports = function SortDocumentSource(ctx){
 
 // DEPENDENCIES
 var FieldPathExpression = require("../expressions/FieldPathExpression"),
+	VariablesIdGenerator = require("../expressions/VariablesIdGenerator"),
+	VariablesParseState = require("../expressions/VariablesParseState"),
+	Variables = require("../expressions/Variables"),
 	Value = require("../Value");
 
 klass.sortName = "$sort";
@@ -46,13 +50,10 @@ proto.getFactory = function getFactory(){
 	return klass;	// using the ctor rather than a separate .create() method
 };
 
-klass.GetDepsReturn = {
-	SEE_NEXT: "SEE_NEXT" // Add the next Source's deps to the set
-};
-
 proto.dispose = function dispose() {
 	this.docIterator = 0;
 	this.documents = [];
+	this._output.reset();
 	this.source.dispose();
 };
 
@@ -60,13 +61,6 @@ proto.getLimit = function getLimit() {
 	return this.limitSrc ? this.limitSrc.getLimit() : -1;
 };
 
-proto.getDependencies = function getDependencies(deps) {
-	for(var i = 0; i < this.vSortKey.length; ++i) {
-		this.vSortKey[i].addDependencies(deps);
-	}
-	return klass.GetDepsReturn.SEE_NEXT;
-};
-
 proto.coalesce = function coalesce(nextSource) {
 	if (!this.limitSrc) {
 		if (nextSource instanceof LimitDocumentSource) {
@@ -82,28 +76,33 @@ proto.coalesce = function coalesce(nextSource) {
 proto.getNext = function getNext(callback) {
 	if (!callback) throw new Error(this.getSourceName() + ' #getNext() requires callback');
 
+	if (this.expCtx instanceof Object && this.expCtx.checkForInterrupt && this.expCtx.checkForInterrupt() === false)
+		return callback(new Error("Interrupted"));
+
 	var self = this,
 		out;
 	async.series(
 		[
 			function(next) {
 				if (!self.populated)
+				{
 					self.populate(function(err) {
 						return next(err);
 					});
-				else
-					next();
+				} else {
+					return next();
+				}
 			},
 			function(next) {
 				if (self.docIterator >= self.documents.length) {
-					out = DocumentSource.EOF;
-					return next(null, DocumentSource.EOF);
+					out = null;
+					return next(null, null);
 				}
 
 				var output = self.documents[self.docIterator++];
-				if (!output || output === DocumentSource.EOF) {
-					out = DocumentSource.EOF;
-					return next(null, DocumentSource.EOF);
+				if (!output || output === null) {
+					out = null;
+					return next(null, null);
 				}
 
 				out = output;
@@ -118,18 +117,26 @@ proto.getNext = function getNext(callback) {
 	return out;
 };
 
+/**
+* Serialize to Array.
+*
+* @param {Array} array
+* @param {bool} explain
+**/
 proto.serializeToArray = function serializeToArray(array, explain) {
 	var doc = {};
-	if (explain) {
-		doc.sortKey = this.serializeSortKey();
+	if (explain) { // always one obj for combined $sort + $limit
+		doc.sortKey = this.serializeSortKey(explain);
 		doc.mergePresorted = this._mergePresorted;
 		doc.limit = this.limitSrc ? this.limitSrc.getLimit() : undefined;
 		array.push(doc);
-	} else {
-		var inner = this.serializeSortKey();
+	} else { // one Value for $sort and maybe a Value for $limit
+		var inner = {};
+		inner = this.serializeSortKey(explain);
 		if (this._mergePresorted)
 			inner.$mergePresorted = true;
 		doc[this.getSourceName()] = inner;
+
 		array.push(doc);
 
 		if (this.limitSrc)
@@ -151,7 +158,10 @@ proto.serialize = function serialize(explain) {
 * @param {bool} ascending if true, use the key for an ascending sort, otherwise, use it for descending
 **/
 proto.addKey = function addKey(fieldPath, ascending) {
-	var pathExpr = new FieldPathExpression(fieldPath);
+	var idGenerator = new VariablesIdGenerator(),
+		vps = new VariablesParseState(idGenerator);
+
+	var pathExpr = FieldPathExpression.parse("$$ROOT." + fieldPath, vps);
 	this.vSortKey.push(pathExpr);
 	if (ascending === true || ascending === false) {
 		this.vAscending.push(ascending);
@@ -161,62 +171,169 @@ proto.addKey = function addKey(fieldPath, ascending) {
 	}
 };
 
-proto.populate = function populate(callback) {
+proto.makeSortOptions = function makeSortOptions(){
 	/* make sure we've got a sort key */
 	if (!this.vSortKey.length) throw new Error("no sort key for " + this.getSourceName());
 
-	// Skipping stuff about mergeCursors and commandShards
+	// Skipping memory checks
+
+	var opts;
+	if ( this.limitSrc)
+		opts.limit = this.limitSrc.getLimt();
+
+	return opts;
+};
+
+
+proto.populate = function populate(callback) {
+	if ( this._mergePresorted ){
+		// Skipping stuff about mergeCursors and commandShards
+		throw new Error("Merge presorted not implemented.");
+	} else {
+		/* pull everything from the underlying source */
+		var self = this,
+			next;
+
+		async.doWhilst(
+			function (cb) {
+				self.source.getNext(function(err, doc) {
+					next = doc;
+
+					// Don't add EOF; it doesn't sort well.
+					if (doc !== null)
+						self.documents.push(doc);
+
+					return cb();
+				});
+			},
+			function() {
+				return next !== null;
+			},
+			function(err) {
+				try {
+					/* sort the list */
+					self.documents.sort(SortDocumentSource.prototype.compare.bind(self));
+				} catch (ex) {
+					return callback(ex);
+				}
+				/* start the sort iterator */
+				self.docIterator = 0;
+
+				self.populated = true;
+				//self._output.reset(true);
+				return callback();
+		}
+		);
+
 
-	/* pull everything from the underlying source */
-	var self = this,
-		next;
-	async.doWhilst(
-		function (cb) {
-			self.source.getNext(function(err, doc) {
-				next = doc;
-
-				// Don't add EOF; it doesn't sort well.
-				if (doc !== DocumentSource.EOF)
-					self.documents.push(doc);
-				return cb();
-			});
-		},
-		function() {
-			return next !== DocumentSource.EOF;
-		},
-		function(err) {
-	/* sort the list */
-			self.documents.sort(SortDocumentSource.prototype.compare.bind(self));
-
-	/* start the sort iterator */
-			self.docIterator = 0;
-
-			self.populated = true;
-			return callback();
 	}
-	);
+	this.populated = true;
+};
+
+klass.IteratorFromCursor = (function(){
+	/**
+	 * Helper class to iterate documents from a cursor as merge-sort input (currently stubbed).
+	 * @param	{SortDocumentSource}	sorter the owning sort stage; cursor supplies the documents.
+	 **/
+	var klass = function IteratorFromCursor(sorter, cursor){
+		this._sorter = new SortDocumentSource(sorter);
+		//this._cursor = new DBClientCursor(cursor);
+
+	}, base = Object, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
+
+	proto.more = function more() {
+		return this._cursor.more();
+	};
+
+	proto.next = function next() {
+		// var doc = new DocumentSourceMergeCursors(this._cursor);
+		// TODO: make_pair for return
+		//return {this._sorter.extractKey(doc): doc};
+	};
+	return klass;
+})();
+
+proto.populateFromCursors = function populateFromCursors(cursors){
+	for (var i = 0; i < cursors.length; i++) {
+		// TODO Create class
+		//this.iterators.push(boost::make_shared<IteratorFromBsonArray>(this, cursors[i]));
+	}
+
+	this._output.reset( ); // TODO: MySorter::Iterator::merge(iterators, makeSortOptions(), Comparator(*this))
+
+};
+
+klass.IteratorFromBsonArray = (function(){
+	/**
+	 * Helper class to iterate documents from a BSON array as merge-sort input (currently stubbed).
+	 * @param	{SortDocumentSource}	sorter the owning sort stage; array supplies the documents.
+	 **/
+	var klass = function IteratorFromBsonArray(sorter, array){
+		this._sorter = new SortDocumentSource(sorter);
+		//this._iterator = new BSONObjIterator(array);
+
+	}, base = Object, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
+
+	proto.next = function next() {
+		// var doc = new DocumentSourceMergeCursors(this._cursor);
+		// TODO: make_pair for return
+		//return {this._sorter.extractKey(doc): doc};
+	};
+
+	proto.more = function more() {
+		return this._cursor.more();
+	};
+
+	return klass;
+})();
+
+proto.populateFromBsonArrays = function populateFromBsonArrays(arrays){
+	for (var i = 0; i < arrays.lenth; i++) {
+		// TODO Create class
+		//this.iterators.push(boost::make_shared<IteratorFromBsonArray>(this, arrays[i]));
+	}
+	this._output.reset( ); // TODO: MySorter::Iterator::merge(iterators, makeSortOptions(), Comparator(*this))
+};
+
+/**
+* Extract the key
+*
+* @param  {d} document
+* @returns {keys} extracted key
+**/
+proto.extractKey = function extractKey(d){
+	var vars = new Variables(0,d);
+
+	if ( this.vSortKey.length == 1)
+		return this.vSortKey[0].evaluate(vars);
+
+	var keys;
+	for (var i=0; i < this.vSortKey.length; i++) {
+		keys.push(this.vSortKey[i].evaluate(vars));
+	}
+	return keys;
 };
 
 /**
  * Compare two documents according to the specified sort key.
  *
- * @param {Object} pL the left side doc
- * @param {Object} pR the right side doc
+ * @param {Object} lhs the left side doc
+ * @param {Object} rhs the right side doc
  * @returns {Number} a number less than, equal to, or greater than zero, indicating pL < pR, pL == pR, or pL > pR, respectively
 **/
-proto.compare = function compare(pL,pR) {
-	/**
-	* populate() already checked that there is a non-empty sort key,
-	* so we shouldn't have to worry about that here.
-	*
-	* However, the tricky part is what to do is none of the sort keys are
-	* present.  In this case, consider the document less.
-	**/
-	var n = this.vSortKey.length;
-	for(var i = 0; i < n; ++i) {
+proto.compare = function compare(lhs,rhs) {
+	/*
+	  populate() already checked that there is a non-empty sort key,
+	  so we shouldn't have to worry about that here.
+	  However, the tricky part is what to do if none of the sort keys are
+	  present.  In this case, consider the document less.
+	*/
+
+	for(var i = 0, n = this.vSortKey.length; i < n; ++i) {
+		var pathExpr = FieldPathExpression.create(this.vSortKey[i].getFieldPath(false).fieldNames.slice(1).join('.'));
+
 		/* evaluate the sort keys */
-		var pathExpr = new FieldPathExpression(this.vSortKey[i].getFieldPath(false));
-		var left = pathExpr.evaluate(pL), right = pathExpr.evaluate(pR);
+		var left = pathExpr.evaluate(lhs), right = pathExpr.evaluate(rhs);
 
 		/*
 		Compare the two values; if they differ, return.  If they are
@@ -238,43 +355,129 @@ proto.compare = function compare(pL,pR) {
 };
 
 /**
-* Write out an object whose contents are the sort key.
+ * Write out an object whose contents are the sort key.
+ *
+ * @param {bool} explain
+ * @return {Object} key
 **/
-proto.serializeSortKey = function sortKeyToJson() {
+proto.serializeSortKey = function serializeSortKey(explain) {
 	var keyObj = {};
-
+	// add the key fields
 	var n = this.vSortKey.length;
 	for (var i = 0; i < n; i++) {
-		var fieldPath = this.vSortKey[i].getFieldPath();
-		keyObj[fieldPath] = this.vAscending[i] ? 1 : -1;
+		if ( this.vSortKey[i] instanceof FieldPathExpression ) {
+			var fieldPath = this.vSortKey[i].getFieldPath(false).fieldNames.slice(1).join('.');
+			// append a named integer based on the sort order
+			keyObj[fieldPath] = this.vAscending[i] ? 1 : -1;
+		} else {
+			// other expressions use a made-up field name
+			keyObj[{"$computed":i}] = this.vSortKey[i].serialize(explain);
+		}
 	}
 	return keyObj;
 };
 
+/**
+ * Creates a new SortDocumentSource from Json
+ *
+ * @param {Object} elem
+ * @param {Object} expCtx
+ *
+**/
+klass.createFromJson = function createFromJson(elem, expCtx) {
+	if (typeof elem !== "object") throw new Error("code 15973; the " + klass.sortName + " key specification must be an object");
+
+	return klass.create(expCtx, elem);
+};
+
 /**
  * Creates a new SortDocumentSource
- * @param {Object} jsonElement
+ *
+ * @param {Object} expCtx
+ * @param {object} sortorder
+ * @param {int} limit
+ *
 **/
-klass.createFromJson = function createFromJson(jsonElement, ctx) {
-	if (typeof jsonElement !== "object") throw new Error("code 15973; the " + klass.sortName + " key specification must be an object");
+klass.create = function create(expCtx, sortOrder, limit) {
 
 	var Sort = proto.getFactory(),
-		nextSort = new Sort(ctx);
+		nextSort = new Sort(expCtx);
 
 	/* check for then iterate over the sort object */
 	var sortKeys = 0;
-	for(var key in jsonElement) {
-		var sortOrder = 0;
+	for(var keyField in sortOrder) {
+		var fieldName = keyField.fieldName;
 
-		if (typeof jsonElement[key] !== "number") throw new Error("code 15974; " + klass.sortName + " key ordering must be specified using a number");
+		if ( fieldName === "$mergePresorted" ){
+			Sort._mergePresorted = true;
+			continue;
+		}
 
-		sortOrder = jsonElement[key];
-		if ((sortOrder != 1) && (sortOrder !== -1)) throw new Error("code 15975; " + klass.sortName + " key ordering must be 1 (for ascending) or 0 (for descending)");
+		if ( keyField instanceof Object) {
+			// this restriction is due to needing to figure out sort direction
+			throw new Error("code 17312; " + klass.sortName + "the only expression supported by $sort right now is {$meta: 'textScore'}");
+		}
 
-		nextSort.addKey(key, (sortOrder > 0));
+		if (typeof sortOrder[keyField] !== "number") throw new Error("code 15974; " + klass.sortName + "$sort key ordering must be specified using a number or {$meta: 'text'}");
+
+		// RedBeard0531 can be thanked.
+		var sortDirection = 0;
+		sortDirection = sortOrder[keyField];
+		if ((sortDirection != 1) && (sortDirection !== -1)) throw new Error("code 15975; " + klass.sortName + " $sort key ordering must be 1 (for ascending) or -1 (for descending)");
+
+		nextSort.addKey(keyField, (sortDirection > 0));
 		++sortKeys;
 	}
 
 	if (sortKeys <= 0) throw new Error("code 15976; " + klass.sortName + " must have at least one sort key");
+
+
+	if ( limit > 0) {
+		var coalesced = nextSort.coalesce( create(expCtx, limit));
+		// should always coalesce
+	}
+
 	return nextSort;
 };
+
+// SplittableDocumentSource implementation.
+klass.isSplittableDocumentSource = true;
+
+/**
+ * Get dependencies.
+ *
+ * @param deps
+ * @returns {string} a DocumentSource.GetDepsReturn value
+ */
+proto.getDependencies = function getDependencies(deps) {
+	for(var i = 0; i < this.vSortKey.length; i++) {
+		this.vSortKey[i].addDependencies(deps);
+	}
+
+	return DocumentSource.GetDepsReturn.SEE_NEXT;
+};
+
+/**
+ * Get shard source.
+ *
+ * @returns {this}
+ */
+proto.getShardSource = function getShardSource() {
+	if (this._mergePresorted) throw new Error("getShardSource", + klass.sortName + " should not be merging presorted");
+	return this;
+};
+
+/**
+ * Get merge source.
+ *
+ * @returns {SortDocumentSource}
+ */
+proto.getMergeSource = function getMergeSource() {
+	if ( this._mergingPresorted) throw new Error("getMergeSource", + klass.sortName + " should not be merging presorted");
+	var other = new SortDocumentSource();
+	other.vAscending = this.vAscending;
+	other.vSortKey = this.vSortKey;
+	other.limitSrc = this.limitSrc;
+	other._mergingPresorted = true;
+	return other;
+};

+ 148 - 192
lib/pipeline/documentSources/UnwindDocumentSource.js

@@ -1,6 +1,11 @@
 "use strict";
 
-var async = require("async");
+var async = require('async'),
+	DocumentSource = require('./DocumentSource'),
+	Expression = require('../expressions/Expression'),
+	FieldPath = require('../FieldPath'),
+	Value = require('../Value'),
+	Document = require('../Document');
 
 /**
  * A document source unwinder
@@ -11,61 +16,51 @@ var async = require("async");
  * @param [ctx] {ExpressionContext}
  **/
 var UnwindDocumentSource = module.exports = function UnwindDocumentSource(ctx){
-	if (arguments.length > 1) throw new Error("up to one arg expected");
-	base.call(this, ctx);
-
-	// Configuration state.
-	this._unwindPath = null;
+	if (arguments.length > 1) {
+		throw new Error('Up to one argument expected.');
+	}
 
-	// Iteration state.
-	this._unwinder = null;
+	base.call(this, ctx);
 
+	this._unwindPath = null; // Configuration state.
+	this._unwinder = null; // Iteration state.
 }, klass = UnwindDocumentSource, base = require('./DocumentSource'), proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
 
-var DocumentSource = base,
-	FieldPath = require('../FieldPath'),
-	Document = require('../Document'),
-	Expression = require('../expressions/Expression');
+klass.unwindName = '$unwind';
 
-klass.Unwinder = (function(){
+klass.Unwinder = (function() {
 	/**
-	 * Helper class to unwind arrays within a series of documents.
-	 * @param	{String}	unwindPath is the field path to the array to unwind.
-	 **/
-	var klass = function Unwinder(unwindPath){
-		// Path to the array to unwind.
-		this._unwindPath = unwindPath;
-		// The souce document to unwind.
-		this._document = null;
-		// Document indexes of the field path components.
-		this._unwindPathFieldIndexes = [];
-		// Iterator over the array within _document to unwind.
-		this._unwindArrayIterator = null;
-		// The last value returned from _unwindArrayIterator.
-		//this._unwindArrayIteratorCurrent = undefined; //dont define this yet
-	}, base = Object, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
+	 * Construct a new Unwinder instance. Used as a parent class for UnwindDocumentSource.
+	 *
+	 * @param unwindPath
+	 * @constructor
+	 */
+	var klass = function Unwinder(unwindPath) {
+		this._unwindPath = new FieldPath(unwindPath);
 
-	/**
-	 * Reset the unwinder to unwind a new document.
-	 * @param	{Object}	document
-	 **/
-	proto.resetDocument = function resetDocument(document){
-		if (!document) throw new Error("document is required!");
+		this._inputArray = undefined;
+		this._document = undefined;
+		this._index = undefined;
+	}, base = Object, proto = klass.prototype = Object.create(base.prototype, {constructor: {value: klass}});
 
-		// Reset document specific attributes.
+	proto.resetDocument = function resetDocument(document) {
+		if (!document) throw new Error('Document is required!');
+
+		this._inputArray = [];
 		this._document = document;
-		this._unwindPathFieldIndexes.length = 0;
-		this._unwindArrayIterator = null;
-		delete this._unwindArrayIteratorCurrent;
+		this._index = 0;
 
-		var pathValue = this.extractUnwindValue(); // sets _unwindPathFieldIndexes
-		if (!pathValue || pathValue.length === 0) return;  // The path does not exist.
+		var pathValue = Document.getNestedField(this._document, this._unwindPath);
 
-		if (!(pathValue instanceof Array)) throw new Error(UnwindDocumentSource.unwindName + ":  value at end of field path must be an array; code 15978");
+		if (!pathValue || pathValue.length === 0) {
+			return;
+		}
 
-		// Start the iterator used to unwind the array.
-		this._unwindArrayIterator = pathValue.slice(0);
-		this._unwindArrayIteratorCurrent = this._unwindArrayIterator.splice(0,1)[0];
+		if (!(pathValue instanceof Array)) {
+			throw new Error(UnwindDocumentSource.unwindName + ':  value at end of field path must be an array; code 15978');
+		}
+
+		this._inputArray = pathValue;
 	};
 
 	/**
@@ -75,199 +70,160 @@ klass.Unwinder = (function(){
 	 * than the original mongo implementation, but should get updated to follow the current API.
 	 **/
 	proto.getNext = function getNext() {
-		if (this.eof())
-			return DocumentSource.EOF;
-
-		var output = this.getCurrent();
-		this.advance();
-		return output;
-	};
-
-	/**
-	 * eof
-	 * @returns	{Boolean}	true if done unwinding the last document passed to resetDocument().
-	 **/
-	proto.eof = function eof(){
-		return !this.hasOwnProperty("_unwindArrayIteratorCurrent");
-	};
-
-	/**
-	 * Try to advance to the next document unwound from the document passed to resetDocument().
-	 * @returns	{Boolean} true if advanced to a new unwound document, but false if done advancing.
-	 **/
-	proto.advance = function advance(){
-		if (!this._unwindArrayIterator) {
-			// resetDocument() has not been called or the supplied document had no results to
-			// unwind.
-			delete this._unwindArrayIteratorCurrent;
-		} else if (!this._unwindArrayIterator.length) {
-			// There are no more results to unwind.
-			delete this._unwindArrayIteratorCurrent;
-		} else {
-			this._unwindArrayIteratorCurrent = this._unwindArrayIterator.splice(0, 1)[0];
-		}
-	};
-
-	/**
-	 * Get the current document unwound from the document provided to resetDocument(), using
-	 * the current value in the array located at the provided unwindPath.  But return
-	 * intrusive_ptr<Document>() if resetDocument() has not been called or the results to unwind
-	 * have been exhausted.
-	 *
-	 * @returns	{Object}
-	 **/
-	proto.getCurrent = function getCurrent(){
-		if (!this.hasOwnProperty("_unwindArrayIteratorCurrent")) {
+		if (this._inputArray === undefined || this._index === this._inputArray.length) {
 			return null;
 		}
 
-		// Clone all the documents along the field path so that the end values are not shared across
-		// documents that have come out of this pipeline operator.  This is a partial deep clone.
-		// Because the value at the end will be replaced, everything along the path leading to that
-		// will be replaced in order not to share that change with any other clones (or the
-		// original).
-
-		var clone = Document.clone(this._document);
-		var current = clone;
-		var n = this._unwindPathFieldIndexes.length;
-		if (!n) throw new Error("unwindFieldPathIndexes are empty");
-		for (var i = 0; i < n; ++i) {
-			var fi = this._unwindPathFieldIndexes[i];
-			var fp = current[fi];
-			if (i + 1 < n) {
-				// For every object in the path but the last, clone it and continue on down.
-				var next = Document.clone(fp);
-				current[fi] = next;
-				current = next;
-			} else {
-				// In the last nested document, subsitute the current unwound value.
-				current[fi] = this._unwindArrayIteratorCurrent;
-			}
-		}
-
-		return clone;
-	};
-
-	/**
-	 * Get the value at the unwind path, otherwise an empty pointer if no such value
-	 * exists.  The _unwindPathFieldIndexes attribute will be set as the field path is traversed
-	 * to find the value to unwind.
-	 *
-	 * @returns	{Object}
-	 **/
-	proto.extractUnwindValue = function extractUnwindValue() {
-		var current = this._document;
-		var pathValue;
-		var pathLength = this._unwindPath.getPathLength();
-		for (var i = 0; i < pathLength; ++i) {
-
-			var idx = this._unwindPath.getFieldName(i);
-
-			if (!current.hasOwnProperty(idx)) return null; // The target field is missing.
-
-			// Record the indexes of the fields down the field path in order to quickly replace them
-			// as the documents along the field path are cloned.
-			this._unwindPathFieldIndexes.push(idx);
-
-			pathValue = current[idx];
+		this._document = Document.cloneDeep(this._document);
+		Document.setNestedField(this._document, this._unwindPath, this._inputArray[this._index++]);
 
-			if (i < pathLength - 1) {
-				if (typeof pathValue !== 'object') return null; // The next field in the path cannot exist (inside a non object).
-				current = pathValue; // Move down the object tree.
-			}
-		}
-
-		return pathValue;
+		return this._document;
 	};
 
 	return klass;
 })();
 
 /**
- * Specify the field to unwind.
-**/
-proto.unwindPath = function unwindPath(fieldPath){
-	// Can't set more than one unwind path.
-	if (this._unwindPath) throw new Error(this.getSourceName() + " can't unwind more than one path; code 15979");
-
-	// Record the unwind path.
-	this._unwindPath = new FieldPath(fieldPath);
-	this._unwinder = new klass.Unwinder(this._unwindPath);
-};
-
-klass.unwindName = "$unwind";
-
-proto.getSourceName = function getSourceName(){
+ * Get the document source name.
+ *
+ * @method getSourceName
+ * @returns {string}
+ */
+proto.getSourceName = function getSourceName() {
 	return klass.unwindName;
 };
 
 /**
- * Get the fields this operation needs to do its job.
- * Deps should be in "a.b.c" notation
+ * Get the next source.
  *
- * @method	getDependencies
- * @param	{Object} deps	set (unique array) of strings
- * @returns	DocumentSource.GetDepsReturn
-**/
-proto.getDependencies = function getDependencies(deps) {
-	if (!this._unwindPath) throw new Error("unwind path does not exist!");
-	deps[this._unwindPath.getPath(false)] = 1;
-	return DocumentSource.GetDepsReturn.SEE_NEXT;
-};
-
+ * @method getNext
+ * @param callback
+ * @returns {*}
+ */
 proto.getNext = function getNext(callback) {
-	if (!callback) throw new Error(this.getSourceName() + ' #getNext() requires callback');
+	if (!callback) {
+		throw new Error(this.getSourceName() + ' #getNext() requires callback.');
+	}
+
+	if (this.expCtx.checkForInterrupt && this.expCtx.checkForInterrupt() === false) {
+		return callback(new Error('Interrupted'));
+	}
 
 	var self = this,
-		out = this._unwinder.getNext(),
+		out,
 		exhausted = false;
+		
+	try {
+		out = this._unwinder.getNext();
+	} catch (ex) {
+		return callback(ex);
+	}
 
 	async.until(
-		function() {
-			if(out === DocumentSource.EOF && exhausted) return true;	// Really is EOF, not just an empty unwinder
-			else if(out !== DocumentSource.EOF) return true; // Return whatever we got that wasn't EOF
+		function () {
+			if (out !== null || exhausted) {
+				return true;
+			}
+
 			return false;
 		},
-		function(cb) {
-			self.source.getNext(function(err, doc) {
-				if(err) return cb(err);
-				out = doc;
-				if(out === DocumentSource.EOF) { // Our source is out of documents, we're done
-					exhausted = true;
-					return cb();
-				} else {
-					self._unwinder.resetDocument(doc);
-					out = self._unwinder.getNext();
-					return cb();
+		function (cb) {
+			self.source.getNext(function (err, doc) {
+				if (err) {
+					return cb(err);
 				}
+
+				try {
+					if (doc === null) {
+						exhausted = true;
+					} else {
+						self._unwinder.resetDocument(doc);
+						out = self._unwinder.getNext();
+					}
+				} catch (ex) {
+					return cb(ex);
+				}
+
+				return cb();
 			});
 		},
 		function(err) {
-			if(err) return callback(err);
+			if (err) {
+				return callback(err);
+			}
+
 			return callback(null, out);
 		}
 	);
 
-	return out; //For sync mode
+	return out;
 };
 
+/**
+ * Serialize the data.
+ *
+ * @method serialize
+ * @param explain
+ * @returns {{}}
+ */
 proto.serialize = function serialize(explain) {
-	if (!this._unwindPath) throw new Error("unwind path does not exist!");
+	if (!this._unwindPath) {
+		throw new Error('unwind path does not exist!');
+	}
+
 	var doc = {};
+
 	doc[this.getSourceName()] = this._unwindPath.getPath(true);
+
 	return doc;
 };
 
+/**
+ * Get the fields this operation needs to do its job.
+ *
+ * @method getDependencies
+ * @param deps
+ * @returns {DocumentSource.GetDepsReturn.SEE_NEXT|*}
+ */
+proto.getDependencies = function getDependencies(deps) {
+	if (!this._unwindPath) {
+		throw new Error('unwind path does not exist!');
+	}
+
+	deps[this._unwindPath.getPath(false)] = 1;
+
+	return DocumentSource.GetDepsReturn.SEE_NEXT;
+};
+
+/**
+ * Unwind path.
+ *
+ * @method unwindPath
+ * @param fieldPath
+ */
+proto.unwindPath = function unwindPath(fieldPath) {
+	if (this._unwindPath) {
+		throw new Error(this.getSourceName() + ' can\'t unwind more than one path; code 15979');
+	}
+
+	// Record the unwind path.
+	this._unwindPath = new FieldPath(fieldPath);
+	this._unwinder = new klass.Unwinder(fieldPath);
+};
+
 /**
  * Creates a new UnwindDocumentSource with the input path as the path to unwind
+ * @method createFromJson
  * @param {String} JsonElement this thing is *called* Json, but it expects a string
 **/
 klass.createFromJson = function createFromJson(jsonElement, ctx) {
-	// The value of $unwind should just be a field path.
-	if (jsonElement.constructor !== String) throw new Error("the " + klass.unwindName + " field path must be specified as a string; code 15981");
+	if (jsonElement.constructor !== String) {
+		throw new Error('the ' + klass.unwindName + ' field path must be specified as a string; code 15981');
+	}
+
+	var pathString = Expression.removeFieldPrefix(jsonElement),
+		unwind = new UnwindDocumentSource(ctx);
 
-	var pathString = Expression.removeFieldPrefix(jsonElement);
-	var unwind = new UnwindDocumentSource(ctx);
 	unwind.unwindPath(pathString);
 
 	return unwind;

+ 1 - 1
lib/pipeline/expressions/Expression.js

@@ -88,7 +88,7 @@ klass.parseObject = function parseObject(obj, ctx, vps) {
 		OPERATOR = 2,
 		kind = UNKNOWN;
 
-	if (obj === undefined || obj === null || (obj instanceof Object && Object.keys(obj).length === 0)) return new ObjectExpression();
+	if (obj === undefined || obj === null || (obj instanceof Object && Object.keys(obj).length === 0)) return new ObjectExpression({});
 	var fieldNames = Object.keys(obj);
 	for (var fieldCount = 0, n = fieldNames.length; fieldCount < n; ++fieldCount) {
 		var fieldName = fieldNames[fieldCount];

+ 89 - 0
lib/query/ArrayRunner.js

@@ -0,0 +1,89 @@
+"use strict";
+
+var Runner = require('./Runner');
+
+/**
+ * This class is an array runner used to run a pipeline against a static array of data
+ * @param	{Array}	array	The array source of the data
+ **/
+var klass = module.exports = function ArrayRunner(array){
+	base.call(this);
+	
+	if (!array || array.constructor !== Array ) throw new Error('Array runner requires an array');
+	this._array = array;
+	this._position = 0;
+	this._state = Runner.RunnerState.RUNNER_ADVANCED;
+}, base = Runner, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
+
+/**
+ * Get the next result from the array.
+ * 
+ * @method getNext
+ * @param [callback] {Function}
+ */
+proto.getNext = function getNext(callback) {
+	var obj, err;
+	try {
+		if (this._state === Runner.RunnerState.RUNNER_ADVANCED) {
+			if (this._position < this._array.length){
+				obj = this._array[this._position++];
+			} else {
+				this._state = Runner.RunnerState.RUNNER_EOF;
+			}
+		}
+	} catch (ex) {
+		err = ex;
+		this._state = Runner.RunnerState.RUNNER_ERROR;
+	}
+	
+	return callback(err, obj, this._state);
+};
+
+/**
+ * Save any state required to yield.
+ * 
+ * @method saveState
+ */
+proto.saveState = function saveState() {
+	//nothing to do here
+};
+
+/**
+ * Restore saved state, possibly after a yield.  Return true if the runner is OK, false if
+ * it was killed.
+ * 
+ * @method restoreState
+ */
+proto.restoreState = function restoreState() {
+	//nothing to do here
+};
+
+/**
+ * Returns a description of the Runner
+ * 
+ * @method getInfo
+ * @param [explain]
+ * @param [planInfo]
+ */
+proto.getInfo = function getInfo(explain) {
+	if (explain){
+		return {
+			type: this.constructor.name,
+			nDocs: this._array.length,
+			position: this._position,
+			state: this._state
+		};
+	}
+	return undefined;
+};
+
+/**
+ * dispose of the Runner.
+ * 
+ * @method reset
+ */
+proto.reset = function reset(){
+	this._array = [];
+	this._position = 0;
+	this._state = Runner.RunnerState.RUNNER_DEAD;
+};

+ 93 - 0
lib/query/DocumentSourceRunner.js

@@ -0,0 +1,93 @@
+"use strict";
+
+var Runner = require('./Runner'),
+	DocumentSource = require('../pipeline/documentSources/DocumentSource');
+
+/**
+ * This class is a runner used to wrap a document source
+ * @param	{DocumentSource}	docSrc	The document source to wrap
+ * @param	{Array}	[pipeline]	Remaining pipeline stages to attempt to coalesce into the source
+ **/
+var klass = module.exports = function DocumentSourceRunner(docSrc, pipeline){
+	base.call(this);
+
+	if (!docSrc || !(docSrc instanceof DocumentSource) ) throw new Error('DocumentSource runner requires a DocumentSource');
+	if (pipeline && pipeline.constructor != Array ) throw new Error('DocumentSource runner requires pipeline to be an Array');
+	
+	this._docSrc = docSrc;
+	this._pipeline = pipeline || [];
+	
+	while (this._pipeline.length && this._docSrc.coalesce(this._pipeline[0])) {
+		this._pipeline.shift();
+	}
+	
+	this._state = Runner.RunnerState.RUNNER_ADVANCED;
+}, base = Runner, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
+
+/**
+ * Get the next result from the array.
+ * 
+ * @method getNext
+ * @param [callback] {Function}
+ */
+proto.getNext = function getNext(callback) {
+	var self = this;
+	if (self._state === Runner.RunnerState.RUNNER_ADVANCED) {
+		return self._docSrc.getNext(function (err, obj){
+			if (err){
+				self._state = Runner.RunnerState.RUNNER_ERROR;
+			}
+			if (obj === null){
+				self._state = Runner.RunnerState.RUNNER_EOF;
+			}
+			return callback(err, obj, self._state);
+		});
+	}
+	return callback(null, null, self._state);
+};
+
+/**
+ * Save any state required to yield.
+ * 
+ * @method saveState
+ */
+proto.saveState = function saveState() {
+	//nothing to do here
+};
+
+/**
+ * Restore saved state, possibly after a yield.  Return true if the runner is OK, false if
+ * it was killed.
+ * 
+ * @method restoreState
+ */
+proto.restoreState = function restoreState() {
+	//nothing to do here
+};
+
+/**
+ * Returns a description of the Runner
+ * 
+ * @method getInfo
+ * @param [explain]
+ * @param [planInfo]
+ */
+proto.getInfo = function getInfo(explain) {
+	if (explain){
+		return {
+			type: this.constructor.name,
+			docSrc: this._docSrc.serialize(explain),
+			state: this._state
+		};
+	}
+	return undefined;
+};
+
+/**
+ * dispose of the Runner.
+ * 
+ * @method reset
+ */
+proto.reset = function reset(){
+	this._docSrc.dispose();
+	this._state = Runner.RunnerState.RUNNER_DEAD;
+};

+ 222 - 0
lib/query/Runner.js

@@ -0,0 +1,222 @@
+"use strict";
+
+/**
+ * This class is an implementation of the base class for runners used in MongoDB
+ * 
+ * Note that a lot of stuff here is not used by our code yet.  Check the existing implementations
+ * for what we currently use
+ * 
+ **/
+var klass = module.exports = function Runner(){
+	
+}, base = Object, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
+
+klass.RunnerState = {
+	// We successfully populated the out parameter.
+	RUNNER_ADVANCED: "RUNNER_ADVANCED",
+
+	// We're EOF.  We won't return any more results (edge case exception: capped+tailable).
+	RUNNER_EOF: "RUNNER_EOF",
+
+	// We were killed or had an error.
+	RUNNER_DEAD: "RUNNER_DEAD",
+
+	// getNext was asked for data it cannot provide, or the underlying PlanStage had an
+	// unrecoverable error.
+	// If the underlying PlanStage has any information on the error, it will be available in
+	// the objOut parameter. Call WorkingSetCommon::toStatusString() to retrieve the error
+	// details from the output BSON object.
+	RUNNER_ERROR: "RUNNER_ERROR"
+};
+
+klass.YieldPolicy = {
+	// Any call to getNext() may yield.  In particular, the runner may be killed during any
+	// call to getNext().  If this occurs, getNext() will return RUNNER_DEAD.
+	//
+	// If you are enabling autoyield, you must register the Runner with ClientCursor via
+	// ClientCursor::registerRunner and deregister via ClientCursor::deregisterRunner when
+	// done.  Registered runners are informed about DiskLoc deletions and Namespace
+	// invalidations and other important events.
+	//
+	// Exception: This is not required if the Runner is cached inside of a ClientCursor.
+	// This is only done if the Runner is cached and can be referred to by a cursor id.
+	// This is not a popular thing to do.
+	YIELD_AUTO: "YIELD_AUTO",
+
+	// Owner must yield manually if yields are requested.  How to yield yourself:
+	//
+	// 0. Let's say you have Runner* runner.
+	//
+	// 1. Register your runner with ClientCursor.  Registered runners are informed about
+	// DiskLoc deletions and Namespace invalidation and other important events.  Do this by
+	// calling ClientCursor::registerRunner(runner).  This could be done once when you get
+	// your runner, or per-yield.
+	//
+	// 2. Call runner->saveState() before you yield.
+	//
+	// 3. Call RunnerYieldPolicy::staticYield(runner->ns(), NULL) to yield.  Any state that
+	// may change between yields must be checked by you.  (For example, DiskLocs may not be
+	// valid across yielding, indices may be dropped, etc.)
+	//
+	// 4. Call runner->restoreState() before using the runner again.
+	//
+	// 5. Your runner's next call to getNext may return RUNNER_DEAD.
+	//
+	// 6. When you're done with your runner, deregister it from ClientCursor via
+	// ClientCursor::deregister(runner).
+	YIELD_MANUAL: "YIELD_MANUAL"
+};
+
+
+/**
+ * Set the yielding policy of the underlying runner.  See the RunnerYieldPolicy enum above.
+ * 
+ * @method setYieldPolicy
+ * @param [policy]
+ */
+proto.setYieldPolicy = function setYieldPolicy(policy) {
+	throw new Error('Not implemented');
+};
+
+/**
+ * Get the next result from the query.
+ *
+ * If objOut is not NULL, only results that have a BSONObj are returned.  The BSONObj may
+ * point to on-disk data (isOwned will be false) and must be copied by the caller before
+ * yielding.
+ *
+ * If dlOut is not NULL, only results that have a valid DiskLoc are returned.
+ *
+ * If both objOut and dlOut are not NULL, only results with both a valid BSONObj and DiskLoc
+ * will be returned.  The BSONObj is the object located at the DiskLoc provided.
+ *
+ * If the underlying query machinery produces a result that does not have the data requested
+ * by the user, it will be silently dropped.
+ *
+ * If the caller is running a query, they probably only care about the object.
+ * If the caller is an internal client, they may only care about DiskLocs (index scan), or
+ * about object + DiskLocs (collection scan).
+ *
+ * Some notes on objOut and ownership:
+ *
+ * objOut may be an owned object in certain cases: invalidation of the underlying DiskLoc,
+ * the object is created from covered index key data, the object is projected or otherwise
+ * the result of a computation.
+ *
+ * objOut will also be owned when the underlying PlanStage has provided error details in the
+ * event of a RUNNER_ERROR. Call WorkingSetCommon::toStatusString() to convert the object
+ * to a loggable format.
+ *
+ * objOut will be unowned if it's the result of a fetch or a collection scan.
+ * 
+ * @method getNext
+ * @param [callback] {Function}
+ */
+proto.getNext = function getNext(callback) {
+	throw new Error('Not implemented');
+};
+
+
+/**
+ * Will the next call to getNext() return EOF?  It's useful to know if the runner is done
+ * without having to take responsibility for a result.
+ * 
+ * @method isEOF
+ */
+proto.isEOF = function isEOF(){
+	throw new Error('Not implemented');
+};
+
+/**
+ * Inform the runner about changes to DiskLoc(s) that occur while the runner is yielded.
+ * The runner must take any actions required to continue operating correctly, including
+ * broadcasting the invalidation request to the PlanStage tree being run.
+ *
+ * Called from CollectionCursorCache::invalidateDocument.
+ *
+ * See db/invalidation_type.h for InvalidationType.
+ * 
+ * @method invalidate
+ * @param [dl]
+ * @param [type]
+ */
+proto.invalidate = function invalidate(dl, type) {
+	throw new Error('Not implemented');
+};
+
+/**
+ * Mark the Runner as no longer valid.  Can happen when a runner yields and the underlying
+ * database is dropped/indexes removed/etc.  All future to calls to getNext return
+ * RUNNER_DEAD. Every other call is a NOOP.
+ *
+ * The runner must guarantee as a postcondition that future calls to collection() will
+ * return NULL.
+ * 
+ * @method kill
+ */
+proto.kill = function kill() {
+	throw new Error('Not implemented');
+};
+
+/**
+ * Save any state required to yield.
+ * 
+ * @method saveState
+ */
+proto.saveState = function saveState() {
+	throw new Error('Not implemented');
+};
+
+/**
+ * Restore saved state, possibly after a yield.  Return true if the runner is OK, false if
+ * it was killed.
+ * 
+ * @method restoreState
+ */
+proto.restoreState = function restoreState() {
+	throw new Error('Not implemented');
+};
+
+/**
+ * Return the NS that the query is running over.
+ * 
+ * @method ns
+ */
+proto.ns = function ns() {
+	throw new Error('Not implemented');
+};
+
+/**
+ * Return the Collection that the query is running over.
+ * 
+ * @method collection
+ */
+proto.collection = function collection() {
+	throw new Error('Not implemented');
+};
+
+/**
+ * Returns OK, allocating and filling '*explain' or '*planInfo' with a description of the
+ * chosen plan, depending on which is non-NULL (one of the two should be NULL). Caller
+ * takes ownership of either '*explain' and '*planInfo'. Otherwise, returns
+ * a detailed error status.
+ *
+ * If 'explain' is NULL, then this out-parameter is ignored. Similarly, if 'staticInfo'
+ * is NULL, then no static debug information is produced.
+ * 
+ * @method getInfo
+ * @param [explain]
+ * @param [planInfo]
+ */
+proto.getInfo = function getInfo(explain, planInfo) {
+	throw new Error('Not implemented');
+};
+
+/**
+ * dispose of the Runner.
+ * 
+ * @method reset
+ */
+proto.reset = function reset(){
+	throw new Error('Not implemented');
+};

+ 21 - 0
lib/query/index.js

@@ -0,0 +1,21 @@
+"use strict";
+
+var DocumentSource = require('../pipeline/documentSources/DocumentSource'),
+	Runner = require("./Runner.js"),
+	ArrayRunner = require("./ArrayRunner.js"),
+	DocumentSourceRunner = require("./DocumentSourceRunner.js");
+
+module.exports = {
+	Runner: Runner,
+	ArrayRunner: ArrayRunner,
+	DocumentSourceRunner: DocumentSourceRunner,
+	getRunner: function(data, queryObj, sortObj, projectionForQuery, sources){
+		if (data && data.constructor === Array){
+			return new ArrayRunner(data);
+		} else if (data && data instanceof DocumentSource){
+			return new DocumentSourceRunner(data, sources);
+		} else {
+			throw new Error('could not construct Runner from given data');
+		}
+	}
+};

+ 0 - 93
test/lib/Cursor.js

@@ -1,93 +0,0 @@
-"use strict";
-var assert = require("assert"),
-	Cursor = require("../../lib/Cursor");
-
-module.exports = {
-
-	"Cursor": {
-
-		"constructor(data)": {
-			"should throw an exception if it does not get a valid array or stream": function(){
-				assert.throws(function(){
-					var c = new Cursor();
-				});
-				assert.throws(function(){
-					var c = new Cursor(5);
-				});
-			}
-		},
-
-		"#ok": {
-			"should return true if there is still data in the array": function(){
-				var c = new Cursor([1,2,3,4,5]);
-				assert.equal(c.ok(), true);
-			},
-			"should return false if there is no data left in the array": function(){
-				var c = new Cursor([]);
-				assert.equal(c.ok(), false);
-			},
-			"should return true if there is no data left in the array, but there is still a current value": function(){
-				var c = new Cursor([1,2]);
-				c.advance();
-				c.advance();
-				assert.equal(c.ok(), true);
-				c.advance();
-				assert.equal(c.ok(), false);
-			}
-//			,
-//			"should return true if there is still data in the stream": function(){
-//				
-//			},
-//			"should return false if there is no data left in the stream": function(){
-//				
-//			}
-
-		},
-		
-		"#advance": {
-			"should return true if there is still data in the array": function(){
-				var c = new Cursor([1,2,3,4,5]);
-				assert.equal(c.advance(), true);
-			},
-			"should return false if there is no data left in the array": function(){
-				var c = new Cursor([1]);
-				c.advance();
-				assert.equal(c.advance(), false);
-			},
-			"should update the current object to the next item in the array": function(){
-				var c = new Cursor([1,"2"]);
-				c.advance();
-				assert.strictEqual(c.current(), 1);
-				c.advance();
-				assert.strictEqual(c.current(), "2");
-				c.advance();
-				assert.strictEqual(c.current(), undefined);
-			}
-//,			"should return true if there is still data in the stream": function(){
-//				
-//			},
-//			"should return false if there is no data left in the stream": function(){
-//				
-//			},
-//			"should update the current object to the next item in the stream": function(){
-//				
-//			}
-		},
-		
-		"#current": {
-			"should return the first value if the cursor has not been advanced yet": function(){
-				var c = new Cursor([1,2,3,4,5]);
-				assert.equal(c.current(), 1);
-			},
-			"should return the first value if the cursor has been advanced once": function(){
-				var c = new Cursor([1,2,3,4,5]);
-				c.advance();
-				assert.equal(c.current(), 1);
-			}
-		}
-
-	}
-
-};
-
-if (!module.parent)(new(require("mocha"))()).ui("exports").reporter("spec").addFile(__filename).run();

+ 6 - 12
test/lib/pipeline/ParsedDeps.js

@@ -30,14 +30,10 @@ module.exports = {
 				assert.deepEqual(expected, parse._documentHelper(json, neededFields));
 			},
 			"should call _arrayHelper on values that are arrays": function() {
-				var json = {'foo':[{'bar':'baz'}]},
+				var json = {'foo':[{'bar':'baz'}], 'a': 'b'},
 					neededFields = {'foo':true},
 					parse = new ParsedDeps(),
-					expected = {'foo':true};
-				// TODO: mock out _arrayHelper to return true
-				parse._arrayHelper = function() {
-					return true;
-				};
+					expected = {'foo':[{bar:'baz'}]};
 				assert.deepEqual(expected, parse._documentHelper(json, neededFields));
 			},
 			"should recurse on values that are objects": function() {
@@ -53,11 +49,7 @@ module.exports = {
 				var array = [{'foo':'bar'}],
 					neededFields = {'foo':true},
 					parse = new ParsedDeps(),
-					expected = [true];
-				// TODO: mock out _documentHelper to return true
-				parse._documentHelper = function() {
-					return true;
-				};
+					expected = [{foo:'bar'}];
 				assert.deepEqual(expected, parse._arrayHelper(array, neededFields));
 			},
 			"should recurse on values that are arrays": function() {
@@ -65,7 +57,9 @@ module.exports = {
 					neededFields = {'foo':true},
 					parse = new ParsedDeps(),
 					expected = [[{'foo':'bar'}]];
-				assert.deepEqual(expected, parse._arrayHelper(array, neededFields));
+
+				var actual = parse._arrayHelper(array, neededFields);
+				assert.deepEqual(actual, expected);
 			}
 		}
 	}

+ 103 - 41
test/lib/pipeline/Pipeline.js

@@ -2,7 +2,33 @@
 var assert = require("assert"),
 	Pipeline = require("../../../lib/pipeline/Pipeline"),
 	FieldPath = require("../../../lib/pipeline/FieldPath"),
-	DocumentSource = require('../../../lib/pipeline/documentSources/DocumentSource');
+	DocumentSource = require('../../../lib/pipeline/documentSources/DocumentSource'),
+	CursorDocumentSource = require("../../../lib/pipeline/documentSources/CursorDocumentSource"),
+	ProjectDocumentSource = require("../../../lib/pipeline/documentSources/ProjectDocumentSource"),
+	ArrayRunner = require("../../../lib/query/ArrayRunner");
+
+var addSource = function addSource(match, data) {
+	var cds = new CursorDocumentSource(null, new ArrayRunner(data), null);
+	match.setSource(cds);
+};
+
+var shardedTest = function(inputPipeString, expectedMergePipeString, expectedShardPipeString) {
+	inputPipeString = '{"pipeline": ' + inputPipeString + '}';
+	expectedMergePipeString = '{"pipeline": ' + expectedMergePipeString + '}';
+	expectedShardPipeString = '{"pipeline": ' + expectedShardPipeString + '}';
+	var inputPipe = JSON.parse(inputPipeString),
+		expectedMergePipe = JSON.parse(expectedMergePipeString),
+		expectedShardPipe = JSON.parse(expectedShardPipeString);
+
+	var mergePipe = Pipeline.parseCommand(inputPipe, {});
+	assert.notEqual(mergePipe, null);
+
+	var shardPipe = mergePipe.splitForSharded();
+	assert.notEqual(shardPipe, null);
+
+	assert.deepEqual(shardPipe.serialize().pipeline, expectedShardPipe.pipeline);
+	assert.deepEqual(mergePipe.serialize().pipeline, expectedMergePipe.pipeline);
+};
 
 module.exports = {
 
@@ -36,7 +62,7 @@ module.exports = {
 				};
 
 				proto.getNext = function(callback){
-					var answer = this.current > 0 ? {val:this.current--} : DocumentSource.EOF,
+					var answer = this.current > 0 ? {val:this.current--} : null,
 						err = null;
 
 					if (!this.works)
@@ -90,8 +116,8 @@ module.exports = {
 					{$sort: {"xyz": 1}},
 					{$match: {}}
 				]});
-				assert.equal(p.sourceVector[0].constructor.matchName, "$match");
-				assert.equal(p.sourceVector[1].constructor.sortName, "$sort");
+				assert.equal(p.sources[0].constructor.matchName, "$match");
+				assert.equal(p.sources[1].constructor.sortName, "$sort");
 			},
 
 			"should attempt to coalesce all sources": function () {
@@ -101,8 +127,8 @@ module.exports = {
 					{$test: {coalesce: false}},
 					{$test: {coalesce: false}}
 				]});
-				assert.equal(p.sourceVector.length, 3);
-				p.sourceVector.slice(0, -1).forEach(function (source) {
+				assert.equal(p.sources.length, 3);
+				p.sources.slice(0, -1).forEach(function (source) {
 					assert.equal(source.coalesceWasCalled, true);
 				});
 				assert.equal(p.sources[p.sources.length -1].coalesceWasCalled, false);
@@ -113,10 +139,48 @@ module.exports = {
 					{$test: {coalesce: false}},
 					{$test: {coalesce: false}}
 				]});
-				p.sourceVector.forEach(function (source) {
+				p.sources.forEach(function (source) {
 					assert.equal(source.optimizeWasCalled, true);
 				});
 			}
+		},
+
+		"sharded": {
+
+			"should handle empty pipeline for sharded": function () {
+				var inputPipe = "[]",
+					expectedMergePipe = "[]",
+					expectedShardPipe = "[]";
+				shardedTest(inputPipe, expectedShardPipe, expectedMergePipe);
+			},
+
+			"should handle one unwind": function () {
+				var inputPipe = '[{"$unwind":"$a"}]',
+					expectedShardPipe = "[]",
+					expectedMergePipe = '[{"$unwind":"$a"}]';
+				shardedTest(inputPipe, expectedMergePipe, expectedShardPipe);
+			},
+
+			"should handle two unwinds": function () {
+				var inputPipe = '[{"$unwind":"$a"}, {"$unwind":"$b"}]',
+					expectedShardPipe = "[]",
+					expectedMergePipe = '[{"$unwind": "$a"}, {"$unwind": "$b"}]';
+				shardedTest(inputPipe, expectedMergePipe, expectedShardPipe);
+			},
+
+			"should handle unwind not final": function () {
+				var inputPipe = '[{"$unwind": "$a"}, {"$match": {"a":1}}]',
+					expectedShardPipe = '[]',
+					expectedMergePipe = '[{"$unwind": "$a"}, {"$match": {"a":1}}]';
+				shardedTest(inputPipe, expectedShardPipe, expectedMergePipe);
+			},
+
+			"should handle unwind with other": function () {
+				var inputPipe = '[{"$match": {"a":1}}, {"$unwind": "$a"}]',
+					expectedShardPipe = '[{"$match":{"a":1}}]',
+					expectedMergePipe = '[{"$unwind":"$a"}]';
+				shardedTest(inputPipe,expectedMergePipe, expectedShardPipe);
+			}
 
 		},
 
@@ -126,45 +190,31 @@ module.exports = {
 				p.stitch();
 				assert.equal(p.sources[1].source, p.sources[0]);
 			}
-			},
+		},
 
-		"#_runSync": {
+		"#run": {
 
-			"should iterate through sources and return resultant array": function () {
+			"should iterate through sources and return resultant array": function (done) {
 				var p = Pipeline.parseCommand({pipeline:[{$test:{coalesce:false}}, {$test:{coalesce:false}}, {$test:{coalesce:false}}]}),
-					results = p.run(function(err, results) {
-						assert.deepEqual(results.result, [ { val: 5 }, { val: 4 }, { val: 3 }, { val: 2 }, { val: 1 } ]);
-				});
-			},
-
-			"should catch parse errors": function () {
-				// The $foo part is invalid and causes a throw.
-				assert.throws(function () {
-					Pipeline.parseCommand({pipeline: [
-						{$match: {$foo: {bar: "baz"}}}
-					]});
+					results = [];
+				p.run(function(err, doc) {
+					if (err) throw err;
+					if (!doc){
+						assert.deepEqual(results, [ { val: 5 }, { val: 4 }, { val: 3 }, { val: 2 }, { val: 1 } ]);
+						done();
+					} else {
+						results.push(doc);
+					}
 				});
 			},
-
-			"should call callback with errors from pipeline components": function (next) {
-				var p = Pipeline.parseCommand({pipeline: [
-					{$match: {foo: {bar: "baz"}}}
-				]});
-				p.run(new DocumentSource({}), function (err, results) {
-					assert(err instanceof Error);
-					return next();
+			"should handle sources that return errors": function (done) {
+				var p = Pipeline.parseCommand({pipeline:[{$test:{works:false}}]}),
+					results = [];
+				p.run(function(err, doc) {
+					assert(err);
+					done();
 				});
 			}
-
-		},
-
-		"#_runAsync": {
-			"should iterate through sources and return resultant array asynchronously": function () {
-				var p = Pipeline.parseCommand({pipeline:[{$test:{coalesce:false}}, {$test:{coalesce:false}}, {$test:{coalesce:false}}]}),
-					results = p.run(function(err, results) {
-						assert.deepEqual(results.result, [ { val: 5 }, { val: 4 }, { val: 3 }, { val: 2 }, { val: 1 } ]);
-					});
-		}
 		},
 
 		"#addInitialSource": {
@@ -181,9 +231,21 @@ module.exports = {
 				p.addInitialSource(initialSource);
 				p.stitch();
 				assert.equal(p.sources[1].source, p.sources[0]);
-	}
-		}
+			}
+		},
+
+		"#getDependencies()": {
 
+			"should properly detect dependencies": function testGetDependencies() {
+				var p = Pipeline.parseCommand({pipeline: [
+					{$sort: {"xyz": 1}},
+					{$project: {"a":"$xyz"}}
+				]});
+				var depsTracker = p.getDependencies();
+				assert.equal(Object.keys(depsTracker.fields).length, 2);
+			}
+
+		}
 	}
 
 };

+ 67 - 67
test/lib/pipeline/PipelineD.js

@@ -10,78 +10,19 @@ module.exports = {
 
 	"PipelineD": {
 
-		before: function(){
-
-			Pipeline.stageDesc.$test = (function(){
-
-				var klass = function TestDocumentSource(options, ctx){
-					base.call(this, ctx);
-
-					this.shouldCoalesce = options.coalesce;
-					this.coalesceWasCalled = false;
-					this.optimizeWasCalled = false;
-					this.resetWasCalled = false;
-
-					this.current = 5;
-				}, TestDocumentSource = klass, base = DocumentSource, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
-
-				proto.coalesce = function(){
-					this.coalesceWasCalled = true;
-					var c = this.shouldCoalesce;//only coalesce with the first thing we find
-					this.shouldCoalesce = false;
-					return c;
-				};
-
-				proto.optimize = function(){
-					this.optimizeWasCalled = true;
-				};
-
-				proto.eof = function(){
-					return this.current < 0;
-				};
-
-				proto.advance = function(){
-					this.current = this.current - 1;
-					return !this.eof();
-				};
-
-				proto.getCurrent = function(){
-					return this.current;
-				};
-
-				proto.reset = function(){
-					this.resetWasCalled = true;
-				};
-
-				proto.getDependencies = function(deps){
-					if (!deps.testDep){
-						deps.testDep = 1;
-						return DocumentSource.GetDepsReturn.EXHAUSTIVE;
-					}
-					return DocumentSource.GetDepsReturn.SEE_NEXT;
-				};
-
-				klass.createFromJson = function(options, ctx){
-					return new TestDocumentSource(options, ctx);
-				};
-
-				return klass;
-			})().createFromJson;
-
-		},
-
 		"prepareCursorSource": {
 
 			"should place a CursorDocumentSource in pipeline": function () {
-				var p = Pipeline.parseCommand({pipeline:[{$test:{coalesce:false}}, {$test:{coalesce:false}}], aggregate:[]}),
+				var p = Pipeline.parseCommand({pipeline:[{$match:{a:true}}], aggregate:[]}),
 					cs = PipelineD.prepareCursorSource(p, {ns:[1,2,3,4,5]});
 				assert.equal(p.sources[0].constructor, CursorDocumentSource);
 			},
 
 			"should get projection from all sources": function () {
-				var p = Pipeline.parseCommand({pipeline:[{$test:{coalesce:false}}, {$test:{coalesce:false}}], aggregate:[]}),
+				var p = Pipeline.parseCommand({pipeline:[{$project:{a:"$x"}}], aggregate:[]}),
 					cs = PipelineD.prepareCursorSource(p, {ns:[1,2,3,4,5]});
-				assert.deepEqual(p.sources[0]._projection, {"_id":0,"testDep":1});
+				assert.deepEqual(p.sources[0]._projection, {x:1, _id:1});
+				assert.deepEqual(p.sources[0]._dependencies, {_fields:{_id:true, x:true}});
 			},
 
 			"should get projection's deps": function () {
@@ -105,9 +46,9 @@ module.exports = {
 				};
 				var p = Pipeline.parseCommand(cmdObj),
 					cs = PipelineD.prepareCursorSource(p, {ns:[1,2,3,4,5]});
-				assert.equal(JSON.stringify(p.sources[0]._projection), JSON.stringify({'a.b.c': 1, d: 1, 'e.f.g': 1, _id: 1}));
+				assert.deepEqual(p.sources[0]._projection, {'a.b.c': 1, d: 1, 'e.f.g': 1, _id: 1});
+				assert.deepEqual(p.sources[0]._dependencies, {"_fields":{"_id":true,"a":{"b":{"c":true}},"d":true,"e":{"f":{"g":true}}}});
 			},
-
 			"should get group's deps": function(){
 				var cmdObj = {
 					aggregate: [],
@@ -131,8 +72,67 @@ module.exports = {
 				};
 				var p = Pipeline.parseCommand(cmdObj),
 					cs = PipelineD.prepareCursorSource(p, {ns:[1,2,3,4,5]});
-				assert.equal(JSON.stringify(p.sources[0]._projection), JSON.stringify({ _id: 0, a: 1, b: 1, 'x.y.z': 1 }));
-			}
+				assert.equal(JSON.stringify(p.sources[0]._projection), JSON.stringify({ a: 1, b: 1, 'x.y.z': 1, _id: 0 }));
+				assert.deepEqual(p.sources[0]._dependencies, {"_fields":{"a":true,"b":true,"x":{"y":{"z":true}}}});
+			},
+			"should set the queryObj on the Cursor": function(){
+				var cmdObj = {
+					aggregate: [],
+					pipeline: [
+						{$match:{
+							x:{$exists:true},
+							y:{$exists:false}
+						}}
+					]
+				};
+				var p = Pipeline.parseCommand(cmdObj),
+					cs = PipelineD.prepareCursorSource(p, {ns:[1,2,3,4,5]});
+				assert.deepEqual(p.sources[0]._query, {x:{$exists: true}, y:{$exists:false}});
+			},
+			"should set the sort on the Cursor": function(){
+				var cmdObj = {
+					aggregate: [],
+					pipeline: [
+						{$sort:{
+							x:1,
+							y:-1
+						}}
+					]
+				};
+				var p = Pipeline.parseCommand(cmdObj),
+					cs = PipelineD.prepareCursorSource(p, {ns:[1,2,3,4,5]});
+				assert.deepEqual(p.sources[0]._sort, {x:1, y:-1});
+			},
+			"should set the sort on the Cursor if there is a match first": function(){
+				var cmdObj = {
+					aggregate: [],
+					pipeline: [
+						{$match:{
+							x:{$exists:true},
+							y:{$exists:false}
+						}},
+						{$sort:{
+							x:1,
+							y:-1
+						}}
+					]
+				};
+				var p = Pipeline.parseCommand(cmdObj),
+					cs = PipelineD.prepareCursorSource(p, {ns:[1,2,3,4,5]});
+				assert.deepEqual(p.sources[0]._sort, {x:1, y:-1});
+			},
+			"should coalesce the Cursor with the rest of the pipeline": function(){
+				var cmdObj = {
+					aggregate: [],
+					pipeline: [
+						{$limit:1}
+					]
+				};
+				var p = Pipeline.parseCommand(cmdObj),
+					cs = PipelineD.prepareCursorSource(p, {ns:[1,2,3,4,5]});
+				assert.equal(p.sources[0].getLimit(), 1);
+				assert.equal(p.sources.length, 1);
+			},
 		}
 	}
 

+ 37 - 85
test/lib/pipeline/documentSources/CursorDocumentSource.js

@@ -5,14 +5,13 @@ var assert = require("assert"),
 	CursorDocumentSource = require("../../../../lib/pipeline/documentSources/CursorDocumentSource"),
 	LimitDocumentSource = require("../../../../lib/pipeline/documentSources/LimitDocumentSource"),
 	SkipDocumentSource = require("../../../../lib/pipeline/documentSources/SkipDocumentSource"),
-	Cursor = require("../../../../lib/Cursor");
-
-var getCursor = function(values) {
-	if (!values)
-		values = [1,2,3,4,5];
-	var cwc = new CursorDocumentSource.CursorWithContext();
-	cwc._cursor = new Cursor( values );
-	return new CursorDocumentSource(cwc);
+	ProjectDocumentSource = require("../../../../lib/pipeline/documentSources/ProjectDocumentSource"),
+	DepsTracker = require("../../../../lib/pipeline/DepsTracker"),
+	ArrayRunner = require("../../../../lib/query/ArrayRunner");
+
+var getCursorDocumentSource = function(values) {
+	values = values || [1,2,3,4,5];
+	return new CursorDocumentSource(null, new ArrayRunner(values), null);
 };
 
 
@@ -21,24 +20,15 @@ module.exports = {
 	"CursorDocumentSource": {
 
 		"constructor(data)": {
-			"should fail if CursorWithContext is not provided": function(){
-				assert.throws(function(){
-					var cds = new CursorDocumentSource();
-				});
-			},
 			"should get a accept a CursorWithContext and set it internally": function(){
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				cwc._cursor = new Cursor( [] );
-
-				var cds = new CursorDocumentSource(cwc);
-
-				assert.ok(cds._cursorWithContext);
+				var cds = getCursorDocumentSource([]);
+				assert.ok(cds._runner);
 			}
 		},
 
 		"#coalesce": {
 			"should be able to coalesce a limit into itself": function (){
-				var cds = getCursor(),
+				var cds = getCursorDocumentSource(),
 					lds = LimitDocumentSource.createFromJson(2);
 
 				assert.equal(cds.coalesce(lds) instanceof LimitDocumentSource, true);
@@ -46,7 +36,7 @@ module.exports = {
 			},
 
 			"should keep original limit if coalesced to a larger limit": function() {
-				var cds = getCursor();
+				var cds = getCursorDocumentSource();
 				cds.coalesce(LimitDocumentSource.createFromJson(2));
 				cds.coalesce(LimitDocumentSource.createFromJson(3));
 				assert.equal(cds.getLimit(), 2);
@@ -54,7 +44,7 @@ module.exports = {
 
 
 			"cursor only returns $limit number when coalesced": function(next) {
-				var cds = getCursor(),
+				var cds = getCursorDocumentSource(),
 					lds = LimitDocumentSource.createFromJson(2);
 
 
@@ -69,40 +59,29 @@ module.exports = {
 						});
 					},
 					function() {
-						return docs[i++] !== DocumentSource.EOF;
+						return docs[i++] !== null;
 					},
 					function(err) {
-						assert.deepEqual([1, 2, DocumentSource.EOF], docs);
+						if (err) throw err;
+						assert.deepEqual([1, 2, null], docs);
 						next();
 					}
 				);
 			},
 
 			"should leave non-limit alone": function () {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				cwc._cursor = new Cursor( [] );
-
 				var sds = new SkipDocumentSource(),
-					cds = new CursorDocumentSource(cwc);
+					cds = getCursorDocumentSource([]);
 
 				assert.equal(cds.coalesce(sds), false);
 			}
 		},
 
 		"#getNext": {
-			"should throw an error if no callback is given": function() {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				cwc._cursor = new Cursor( [1,2,3,4] );
-				var cds = new CursorDocumentSource(cwc);
-				assert.throws(cds.getNext.bind(cds));
-			},
-
 			"should return the current cursor value async": function(next){
 				var expected = JSON.stringify([1,2]);
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				cwc._cursor = new Cursor( [1,2,3,4] );
 
-				var cds = new CursorDocumentSource(cwc);
+				var cds = getCursorDocumentSource([1,2,3,4]);
 				async.series([
 						cds.getNext.bind(cds),
 						cds.getNext.bind(cds),
@@ -111,18 +90,16 @@ module.exports = {
 						cds.getNext.bind(cds),
 					],
 					function(err,res) {
-						assert.deepEqual([1,2,3,4,DocumentSource.EOF], res);
+						assert.deepEqual([1,2,3,4,null], res);
 						next();
 					}
 				);
 			},
 			"should return values past the batch limit": function(next){
-				var cwc = new CursorDocumentSource.CursorWithContext(),
-					n = 0,
+				var n = 0,
 					arr = Array.apply(0, new Array(200)).map(function() { return n++; });
-				cwc._cursor = new Cursor( arr );
 
-				var cds = new CursorDocumentSource(cwc);
+				var cds = getCursorDocumentSource(arr);
 				async.each(arr,
 					function(a,next) {
 						cds.getNext(function(err,val) {
@@ -135,25 +112,24 @@ module.exports = {
 					}
 				);
 				cds.getNext(function(err,val) {
-					assert.equal(val, DocumentSource.EOF);
+					assert.equal(val, null);
 					next();
 				});
 			},
 		},
 		"#dispose": {
 			"should empty the current cursor": function(next){
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				cwc._cursor = new Cursor( [1,2,3] );
-
-				var cds = new CursorDocumentSource(cwc);
+				var cds = getCursorDocumentSource();
 				async.series([
 						cds.getNext.bind(cds),
 						cds.getNext.bind(cds),
-						cds.getNext.bind(cds),
-						cds.getNext.bind(cds),
+						function(next){
+							cds.dispose();
+							return cds.getNext(next);
+						}
 					],
 					function(err,res) {
-						assert.deepEqual([1,2,3,DocumentSource.EOF], res);
+						assert.deepEqual([1,2,null], res);
 						next();
 					}
 				);
@@ -162,44 +138,20 @@ module.exports = {
 
 		"#setProjection": {
 
-			"should set a projection": function() {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				cwc._cursor = new Cursor( [1,2,3] );
-
-				var cds = new CursorDocumentSource(cwc);
-				cds.setProjection({a:1}, {a:true});
-				assert.deepEqual(cds._projection, {a:1});
-				assert.deepEqual(cds._dependencies, {a:true});
-			},
-
-			"should throw an error if projection is already set": function (){
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				cwc._cursor = new Cursor( [1,2,3] );
-
-				var cds = new CursorDocumentSource(cwc);
-				cds.setProjection({a:1}, {});
-				assert.throws(function() {
-					cds.setProjection({a:1}, {});
-				});
-			},
-
-			"should project properly": function(next) {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				cwc._cursor = new Cursor( [{a:1},{a:2,b:3},{c:4,d:5}] );
-
-				var cds = new CursorDocumentSource(cwc);
-				cds.setProjection({a:1}, {a:true});
-				assert.deepEqual(cds._projection, {a:1});
-				assert.deepEqual(cds._dependencies, {a:true});
-
+			"should set a projection": function(next) {
+				var cds = getCursorDocumentSource([{a:1, b:2},{a:2, b:3}]),
+					deps = new DepsTracker(),
+					project = ProjectDocumentSource.createFromJson({"a":1});
+				project.getDependencies(deps);
+				cds.setProjection(deps.toProjection(), deps.toParsedDeps());
+				
 				async.series([
 						cds.getNext.bind(cds),
 						cds.getNext.bind(cds),
-						cds.getNext.bind(cds),
-						cds.getNext.bind(cds),
+						cds.getNext.bind(cds)
 					],
 					function(err,res) {
-						assert.deepEqual([{a:1},{a:2},{},DocumentSource.EOF], res);
+						assert.deepEqual([{a:1},{a:2},null], res);
 						next();
 					}
 				);
@@ -211,4 +163,4 @@ module.exports = {
 
 };
 
-if (!module.parent)(new(require("mocha"))()).ui("exports").reporter("spec").addFile(__filename).run();
+if (!module.parent)(new(require("mocha"))()).ui("exports").reporter("spec").addFile(__filename).grep(process.env.MOCHA_GREP || '').run(process.exit);

+ 2 - 0
test/lib/pipeline/documentSources/DocumentSource.js

@@ -7,6 +7,8 @@ module.exports = {
 
 	"DocumentSource": {
 
+		"should be tested via subclasses": function() {}
+
 	}
 
 };

+ 7 - 6
test/lib/pipeline/documentSources/GeoNearDocumentSource.js

@@ -3,13 +3,17 @@ var assert = require("assert"),
 	DocumentSource = require("../../../../lib/pipeline/documentSources/DocumentSource"),
 	GeoNearDocumentSource = require("../../../../lib/pipeline/documentSources/GeoNearDocumentSource"),
 	CursorDocumentSource = require("../../../../lib/pipeline/documentSources/CursorDocumentSource"),
-	Cursor = require("../../../../lib/Cursor"),
+	ArrayRunner = require("../../../../lib/query/ArrayRunner"),
 	FieldPath = require("../../../../lib/pipeline/FieldPath");
 
 var createGeoNear = function(ctx) {
 	var ds = new GeoNearDocumentSource(ctx);
 	return ds;
 };
+var addSource = function addSource(ds, data) {
+	var cds = new CursorDocumentSource(null, new ArrayRunner(data), null);
+	ds.setSource(cds);
+};
 
 module.exports = {
 
@@ -55,14 +59,11 @@ module.exports = {
 		"#setSource()":{
 
 			"check that setting source of GeoNearDocumentSource throws error":function() {
-				var cwc = new CursorDocumentSource.CursorWithContext();
 				var input = [{}];
-				cwc._cursor = new Cursor( input );
-				var cds = new CursorDocumentSource(cwc);
 				var gnds = createGeoNear();
 
 				assert.throws(function(){
-					gnds.setSource(cds);
+					addSource(gnds, input);
 				});
 			}
 
@@ -95,4 +96,4 @@ module.exports = {
 	}
 };
 
-if (!module.parent)(new(require("mocha"))()).ui("exports").reporter("spec").addFile(__filename).grep(process.env.MOCHA_GREP || '').run(process.exit);
+if (!module.parent)(new(require("mocha"))()).ui("exports").reporter("spec").addFile(__filename).grep(process.env.MOCHA_GREP || '').run(process.exit);

+ 24 - 42
test/lib/pipeline/documentSources/GroupDocumentSource.js

@@ -2,21 +2,13 @@
 var assert = require("assert"),
 	DocumentSource = require("../../../../lib/pipeline/documentSources/DocumentSource"),
 	CursorDocumentSource = require("../../../../lib/pipeline/documentSources/CursorDocumentSource"),
-	Cursor = require("../../../../lib/Cursor"),
 	GroupDocumentSource = require("../../../../lib/pipeline/documentSources/GroupDocumentSource"),
-	async = require('async');
+	ArrayRunner = require("../../../../lib/query/ArrayRunner"),
+	async = require('async'),
+	utils = require("../expressions/utils"),
+	expressions = require("../../../../lib/pipeline/expressions");
 
 
-/**
- * Tests if the given spec is the same as what the DocumentSource resolves to as JSON.
- * MUST CALL WITH A DocumentSource AS THIS (e.g. checkJsonRepresentation.call(this, spec) where this is a DocumentSource and spec is the JSON used to create the source).
- **/
-var checkJsonRepresentation = function checkJsonRepresentation(self, spec) {
-	var rep = {};
-	self.serialize(rep, true);
-	assert.deepEqual(rep, {$group: spec});
-};
-
 /// An assertion for `ObjectExpression` instances based on Mongo's `ExpectedResultBase` class
 function assertExpectedResult(args) {
 	{// check for required args
@@ -29,21 +21,19 @@ function assertExpectedResult(args) {
 	// run implementation
 	if(args.expected && args.docs){
 		var gds = GroupDocumentSource.createFromJson(args.spec),
-			cwc = new CursorDocumentSource.CursorWithContext();
-		cwc._cursor = new Cursor( args.docs );
-		var next,
+			next,
 			results = [],
-			cds = new CursorDocumentSource(cwc);
+			cds = new CursorDocumentSource(null, new ArrayRunner(args.docs), null);
 		gds.setSource(cds);
 		async.whilst(
 			function() {
-				next !== DocumentSource.EOF;
+				return next !== null;
 			},
 			function(done) {
 				gds.getNext(function(err, doc) {
 					if(err) return done(err);
 					next = doc;
-					if(next === DocumentSource.EOF) {
+					if(next === null) {
 						return done();
 					} else {
 						results.push(next);
@@ -52,8 +42,7 @@ function assertExpectedResult(args) {
 				});
 			},
 			function(err) {
-				assert.deepEqual(results, args.expected);
-				checkJsonRepresentation(gds, args.spec);
+				assert.equal(JSON.stringify(results), JSON.stringify(args.expected));
 				if(args.done) {
 					return args.done();
 				}
@@ -67,7 +56,6 @@ function assertExpectedResult(args) {
 		} else {
 			assert.doesNotThrow(function(){
 				var gds = GroupDocumentSource.createFromJson(args.spec);
-				checkJsonRepresentation(gds, args.spec);
 			});
 		}
 	}
@@ -97,7 +85,6 @@ module.exports = {
 
 			// $group _id is an empty object
 			"should not throw when _id is an empty object": function advanceTest(){
-				//NOTE: This is broken until expressions get #serialize methods
 				assertExpectedResult({spec:{_id:{}}, "throw":false});
 			},
 
@@ -116,13 +103,11 @@ module.exports = {
 
 			// $group _id is the empty string
 			"should not throw when _id is an empty string": function advanceTest(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				assertExpectedResult({spec:{_id:""}, "throw":false});
 			},
 
 			// $group _id is a string constant
 			"should not throw when _id is a string constant": function advanceTest(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				assertExpectedResult({spec:{_id:"abc"}, "throw":false});
 			},
 
@@ -133,55 +118,46 @@ module.exports = {
 
 			// $group _id is a numeric constant
 			"should not throw when _id is a numeric constant": function advanceTest(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				assertExpectedResult({spec:{_id:2}, "throw":false});
 			},
 
 			// $group _id is an array constant
 			"should not throw when _id is an array constant": function advanceTest(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				assertExpectedResult({spec:{_id:[1,2]}, "throw":false});
 			},
 
 			// $group _id is a regular expression (not supported)
-			"should throw when _id is a regex": function advanceTest(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
-				assertExpectedResult({spec:{_id:/a/}});
+			"should not throw when _id is a regex": function advanceTest(){
+				assertExpectedResult({spec:{_id:/a/}, "throw":false});
 			},
 
 			// The name of an aggregate field is specified with a $ prefix
 			"should throw when aggregate field spec is specified with $ prefix": function advanceTest(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				assertExpectedResult({spec:{_id:1, $foo:{$sum:1}}});
 			},
 
 			// An aggregate field spec that is not an object
 			"should throw when aggregate field spec is not an object": function advanceTest(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				assertExpectedResult({spec:{_id:1, a:1}});
 			},
 
 			// An aggregate field spec that is not an object
 			"should throw when aggregate field spec is an empty object": function advanceTest(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				assertExpectedResult({spec:{_id:1, a:{}}});
 			},
 
 			// An aggregate field spec with an invalid accumulator operator
 			"should throw when aggregate field spec is an invalid accumulator": function advanceTest(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				assertExpectedResult({spec:{_id:1, a:{$bad:1}}});
 			},
 
 			// An aggregate field spec with an array argument
 			"should throw when aggregate field spec with an array as an argument": function advanceTest(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				assertExpectedResult({spec:{_id:1, a:{$sum:[]}}});
 			},
 
 			// Multiple accumulator operators for a field
 			"should throw when aggregate field spec with multiple accumulators": function advanceTest(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				assertExpectedResult({spec:{_id:1, a:{$sum:1, $push:1}}});
 			}
 
@@ -202,7 +178,6 @@ module.exports = {
 
 			// $group _id is computed from an object expression
 			"should compute _id from an object expression": function testAdvance_ObjectExpression(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				assertExpectedResult({
 					docs: [{a:6}],
 					spec: {_id:{z:"$a"}},
@@ -212,7 +187,6 @@ module.exports = {
 
 			// $group _id is a field path expression
 			"should compute _id from a field path expression": function testAdvance_FieldPathExpression(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				assertExpectedResult({
 					docs: [{a:5}],
 					spec: {_id:"$a"},
@@ -222,7 +196,6 @@ module.exports = {
 
 			// $group _id is a field path expression
 			"should compute _id from a Date": function testAdvance_Date(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				var d = new Date();
 				assertExpectedResult({
 					docs: [{a:d}],
@@ -233,7 +206,6 @@ module.exports = {
 
 			// Aggregate the value of an object expression
 			"should aggregate the value of an object expression": function testAdvance_ObjectExpression(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				assertExpectedResult({
 					docs: [{a:6}],
 					spec: {_id:0, z:{$first:{x:"$a"}}},
@@ -243,7 +215,6 @@ module.exports = {
 
 			// Aggregate the value of an operator expression
 			"should aggregate the value of an operator expression": function testAdvance_OperatorExpression(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				assertExpectedResult({
 					docs: [{a:6}],
 					spec: {_id:0, z:{$first:"$a"}},
@@ -253,7 +224,6 @@ module.exports = {
 
 			// Aggregate the value of an operator expression
 			"should aggregate the value of an operator expression with a null id": function testAdvance_Null(){
-				//NOTE: This is broken until expressions get ported to 2.5; specifically, until they get a #create method
 				assertExpectedResult({
 					docs: [{a:6}],
 					spec: {_id:null, z:{$first:"$a"}},
@@ -274,7 +244,7 @@ module.exports = {
 			"should make one group with two values": function TwoValuesSingleKey() {
 				assertExpectedResult({
 					docs: [{a:1}, {a:2}],
-					spec: {_id:"$_id", a:{$push:"$a"}},
+					spec: {_id:0, a:{$push:"$a"}},
 					expected: [{_id:0, a:[1,2]}]
 				});
 			},
@@ -331,6 +301,18 @@ module.exports = {
 					spec: {_id:0, first:{$first:"$missing"}},
 					expected: [{_id:0, first:null}]
 				});
+			},
+			
+			"should return errors in the callback": function(done){
+				var gds = GroupDocumentSource.createFromJson({_id:null, sum: {$sum:"$a"}}),
+					next,
+					results = [],
+					cds = new CursorDocumentSource(null, new ArrayRunner([{"a":"foo"}]), null);
+				gds.setSource(cds);
+				gds.getNext(function(err, doc) {
+					assert(err, "Expected Error");
+					done();
+				});
 			}
 		}
 

+ 40 - 49
test/lib/pipeline/documentSources/LimitDocumentSource.js

@@ -1,8 +1,14 @@
 "use strict";
 var assert = require("assert"),
 	DocumentSource = require("../../../../lib/pipeline/documentSources/DocumentSource"),
-	LimitDocumentSource = require("../../../../lib/pipeline/documentSources/LimitDocumentSource");
+	LimitDocumentSource = require("../../../../lib/pipeline/documentSources/LimitDocumentSource"),
+	CursorDocumentSource = require("../../../../lib/pipeline/documentSources/CursorDocumentSource"),
+	ArrayRunner = require("../../../../lib/query/ArrayRunner");
 
+var addSource = function addSource(ds, data) {
+	var cds = new CursorDocumentSource(null, new ArrayRunner(data), null);
+	ds.setSource(cds);
+};
 
 module.exports = {
 
@@ -10,136 +16,121 @@ module.exports = {
 
 		"constructor()": {
 
-			"should not throw Error when constructing without args": function testConstructor(){
+			"should not throw Error when constructing without args": function testConstructor(next){
 				assert.doesNotThrow(function(){
 					new LimitDocumentSource();
+					return next();
 				});
 			}
-
 		},
 
+ 		/** A limit does not introduce any dependencies. */
 		"#getDependencies": {
-			"limits do not create dependencies": function() {
-				var lds = LimitDocumentSource.createFromJson(1),
+			"limits do not create dependencies": function(next) {
+				var lds = LimitDocumentSource.createFromJson(1, null),
 					deps = {};
 
 				assert.equal(DocumentSource.GetDepsReturn.SEE_NEXT, lds.getDependencies(deps));
 				assert.equal(0, Object.keys(deps).length);
+				return next();
 			}
 		},
 
 		"#getSourceName()": {
 
-			"should return the correct source name; $limit": function testSourceName(){
+			"should return the correct source name; $limit": function testSourceName(next){
 				var lds = new LimitDocumentSource();
 				assert.strictEqual(lds.getSourceName(), "$limit");
+				return next();
 			}
-
 		},
 
 		"#getFactory()": {
 
-			"should return the constructor for this class": function factoryIsConstructor(){
+			"should return the constructor for this class": function factoryIsConstructor(next){
 				assert.strictEqual(new LimitDocumentSource().getFactory(), LimitDocumentSource);
+				return next();
 			}
-
 		},
 
 		"#coalesce()": {
 
-			"should return false if nextSource is not $limit": function dontSkip(){
+			"should return false if nextSource is not $limit": function dontSkip(next){
 				var lds = new LimitDocumentSource();
 				assert.equal(lds.coalesce({}), false);
+				return next();
 			},
-			"should return true if nextSource is $limit": function changeLimit(){
+			"should return true if nextSource is $limit": function changeLimit(next){
 				var lds = new LimitDocumentSource();
 				assert.equal(lds.coalesce(new LimitDocumentSource()), true);
+				return next();
 			}
-
 		},
 
 		"#getNext()": {
 
-			"should throw an error if no callback is given": function() {
+			"should throw an error if no callback is given": function(next) {
 				var lds = new LimitDocumentSource();
 				assert.throws(lds.getNext.bind(lds));
+				return next();
 			},
 
+			/** Exhausting a DocumentSourceLimit disposes of the limit's source. */
 			"should return the current document source": function currSource(next){
-				var lds = new LimitDocumentSource();
+				var lds = new LimitDocumentSource({"$limit":[{"a":1},{"a":2}]});
 				lds.limit = 1;
-				lds.source = {getNext:function(cb){cb(null,{ item:1 });}};
+				addSource(lds, [{item:1}]);
 				lds.getNext(function(err,val) {
 					assert.deepEqual(val, { item:1 });
-					next();
+					return next();
 				});
 			},
 
+			/** Exhausting a DocumentSourceLimit disposes of the pipeline's DocumentSourceCursor. */
 			"should return EOF for no sources remaining": function noMoar(next){
-				var lds = new LimitDocumentSource();
-				lds.limit = 10;
-				lds.source = {
-					calls: 0,
-					getNext:function(cb) {
-						if (lds.source.calls)
-							return cb(null,DocumentSource.EOF);
-						lds.source.calls++;
-						return cb(null,{item:1});
-					},
-					dispose:function() { return true; }
-				};
+				var lds = new LimitDocumentSource({"$match":[{"a":1},{"a":1}]});
+				lds.limit = 1;
+				addSource(lds, [{item:1}]);
 				lds.getNext(function(){});
 				lds.getNext(function(err,val) {
-					assert.strictEqual(val, DocumentSource.EOF);
-					next();
+					assert.strictEqual(val, null);
+					return next();
 				});
 			},
 
 			"should return EOF if we hit our limit": function noMoar(next){
 				var lds = new LimitDocumentSource();
 				lds.limit = 1;
-				lds.source = {
-					calls: 0,
-					getNext:function(cb) {
-						if (lds.source.calls)
-							return cb(null,DocumentSource.EOF);
-						return cb(null,{item:1});
-					},
-					dispose:function() { return true; }
-				};
+				addSource(lds, [{item:1},{item:2}]);
 				lds.getNext(function(){});
 				lds.getNext(function (err,val) {
-					assert.strictEqual(val, DocumentSource.EOF);
-					next();
+					assert.strictEqual(val, null);
+					return next();
 				});
 			}
-
 		},
 
 		"#serialize()": {
 
-			"should create an object with a key $limit and the value equal to the limit": function sourceToJsonTest(){
+			"should create an object with a key $limit and the value equal to the limit": function sourceToJsonTest(next){
 				var lds = new LimitDocumentSource();
 				lds.limit = 9;
 				var actual = lds.serialize(false);
 				assert.deepEqual(actual, { "$limit": 9 });
+				return next();
 			}
-
 		},
 
 		"#createFromJson()": {
 
-			"should return a new LimitDocumentSource object from an input number": function createTest(){
+			"should return a new LimitDocumentSource object from an input number": function createTest(next){
 				var t = LimitDocumentSource.createFromJson(5);
 				assert.strictEqual(t.constructor, LimitDocumentSource);
 				assert.strictEqual(t.limit, 5);
+				return next();
 			}
-
 		}
-
-
 	}
-
 };
 
 if (!module.parent)(new(require("mocha"))()).ui("exports").reporter("spec").addFile(__filename).run(process.exit);

+ 101 - 33
test/lib/pipeline/documentSources/MatchDocumentSource.js

@@ -2,13 +2,18 @@
 var assert = require("assert"),
 	async = require("async"),
 	DocumentSource = require("../../../../lib/pipeline/documentSources/DocumentSource"),
-	MatchDocumentSource = require("../../../../lib/pipeline/documentSources/MatchDocumentSource");
+	MatchDocumentSource = require("../../../../lib/pipeline/documentSources/MatchDocumentSource"),
+	CursorDocumentSource = require("../../../../lib/pipeline/documentSources/CursorDocumentSource"),
+	ArrayRunner = require("../../../../lib/query/ArrayRunner");
 
 var testRedactSafe = function testRedactSafe(input, safePortion) {
 	var match = MatchDocumentSource.createFromJson(input);
 	assert.deepEqual(match.redactSafePortion(), safePortion);
 };
-
+var addSource = function addSource(match, data) {
+	var cds = new CursorDocumentSource(null, new ArrayRunner(data), null);
+	match.setSource(cds);
+};
 
 module.exports = {
 
@@ -20,6 +25,12 @@ module.exports = {
 				assert.throws(function(){
 					new MatchDocumentSource();
 				});
+			},
+
+			"should throw Error when trying to use a $text operator": function testTextOp () {
+				assert.throws(function(){
+					new MatchDocumentSource({packet:{ $text:"thisIsntImplemented" } });
+				});
 			}
 
 		},
@@ -61,7 +72,7 @@ module.exports = {
 
 			"should return the current document source": function currSource(next){
 				var mds = new MatchDocumentSource({item: 1});
-				mds.source = {getNext:function(cb){cb(null,{ item:1 });}};
+				addSource(mds, [{ item:1 }]);
 				mds.getNext(function(err,val) {
 					assert.deepEqual(val, { item:1 });
 					next();
@@ -71,15 +82,7 @@ module.exports = {
 			"should return matched sources remaining": function (next){
 				var mds = new MatchDocumentSource({ item: {$lt: 5} }),
 					items = [ 1,2,3,4,5,6,7,8,9 ];
-				mds.source = {
-					calls: 0,
-					getNext:function(cb) {
-						if (this.calls >= items.length)
-							return cb(null,DocumentSource.EOF);
-						return cb(null,{item: items[this.calls++]});
-					},
-					dispose:function() { return true; }
-				};
+				addSource(mds, items.map(function(i){return {item:i};}));
 
 				async.series([
 						mds.getNext.bind(mds),
@@ -89,7 +92,7 @@ module.exports = {
 						mds.getNext.bind(mds),
 					],
 					function(err,res) {
-						assert.deepEqual([{item:1},{item:2},{item:3},{item:4},DocumentSource.EOF], res);
+						assert.deepEqual([{item:1},{item:2},{item:3},{item:4},null], res);
 						next();
 					}
 				);
@@ -98,15 +101,7 @@ module.exports = {
 			"should not return matched out documents for sources remaining": function (next){
 				var mds = new MatchDocumentSource({ item: {$gt: 5} }),
 					items = [ 1,2,3,4,5,6,7,8,9 ];
-				mds.source = {
-					calls: 0,
-					getNext:function(cb) {
-						if (this.calls >= items.length)
-							return cb(null,DocumentSource.EOF);
-						return cb(null,{item: items[this.calls++]});
-					},
-					dispose:function() { return true; }
-				};
+				addSource(mds, items.map(function(i){return {item:i};}));
 
 				async.series([
 						mds.getNext.bind(mds),
@@ -116,7 +111,7 @@ module.exports = {
 						mds.getNext.bind(mds),
 					],
 					function(err,res) {
-						assert.deepEqual([{item:6},{item:7},{item:8},{item:9},DocumentSource.EOF], res);
+						assert.deepEqual([{item:6},{item:7},{item:8},{item:9},null], res);
 						next();
 					}
 				);
@@ -125,21 +120,13 @@ module.exports = {
 			"should return EOF for no sources remaining": function (next){
 				var mds = new MatchDocumentSource({ item: {$gt: 5} }),
 					items = [ ];
-				mds.source = {
-					calls: 0,
-					getNext:function(cb) {
-						if (this.calls >= items.length)
-							return cb(null,DocumentSource.EOF);
-						return cb(null,{item: items[this.calls++]});
-					},
-					dispose:function() { return true; }
-				};
+				addSource(mds, items.map(function(i){return {item:i};}));
 
 				async.series([
 						mds.getNext.bind(mds),
 					],
 					function(err,res) {
-						assert.deepEqual([DocumentSource.EOF], res);
+						assert.deepEqual([null], res);
 						next();
 					}
 				);
@@ -353,6 +340,87 @@ module.exports = {
 					{});
 			}
 
+		},
+
+		"#isTextQuery()": {
+
+			"should return true when $text operator is first stage in pipeline": function () {
+				var query = {$text:'textQuery'};
+				assert.ok(MatchDocumentSource.isTextQuery(query)); // true
+			},
+
+			"should return true when $text operator is nested in the pipeline": function () {
+				var query = {$stage:{$text:'textQuery'}};
+				assert.ok(MatchDocumentSource.isTextQuery(query)); // true
+			},
+
+			"should return false when $text operator is not in pipeline": function () {
+				var query = {$notText:'textQuery'};
+				assert.ok(!MatchDocumentSource.isTextQuery(query)); // false
+			}
+
+		},
+
+		"#uassertNoDisallowedClauses()": {
+
+			"should throw if invalid stage is in match expression": function () {
+				var whereQuery = {$where:'where'};
+				assert.throws(function(){
+					MatchDocumentSource.uassertNoDisallowedClauses(whereQuery);
+				});
+
+				var nearQuery = {$near:'near'};
+				assert.throws(function(){
+					MatchDocumentSource.uassertNoDisallowedClauses(nearQuery);
+				});
+
+				var withinQuery = {$within:'within'};
+				assert.throws(function(){
+					MatchDocumentSource.uassertNoDisallowedClauses(withinQuery);
+				});
+
+				var nearSphereQuery = {$nearSphere:'nearSphere'};
+				assert.throws(function(){
+					MatchDocumentSource.uassertNoDisallowedClauses(nearSphereQuery);
+				});
+			},
+
+			"should throw if invalid stage is nested in the match expression": function () {
+				var whereQuery = {$validStage:{$where:'where'}};
+				assert.throws(function(){
+					MatchDocumentSource.uassertNoDisallowedClauses(whereQuery);
+				});
+
+				var nearQuery = {$validStage:{$near:'near'}};
+				assert.throws(function(){
+					MatchDocumentSource.uassertNoDisallowedClauses(nearQuery);
+				});
+
+				var withinQuery = {$validStage:{$within:'within'}};
+				assert.throws(function(){
+					MatchDocumentSource.uassertNoDisallowedClauses(withinQuery);
+				});
+
+				var nearSphereQuery = {$validStage:{$nearSphere:'nearSphere'}};
+				assert.throws(function(){
+					MatchDocumentSource.uassertNoDisallowedClauses(nearSphereQuery);
+				});
+			},
+
+			"should not throw if invalid stage is not in match expression": function () {
+				var query = {$valid:'valid'};
+				assert.doesNotThrow(function(){
+					MatchDocumentSource.uassertNoDisallowedClauses(query);
+				});
+			},
+
+			"should not throw if invalid stage is not nested in the match expression": function () {
+				var query = {$valid:{$anotherValid:'valid'}};
+				assert.doesNotThrow(function(){
+					MatchDocumentSource.uassertNoDisallowedClauses(query);
+				});
+			},
+
 		}
 
 	}

+ 10 - 12
test/lib/pipeline/documentSources/OutDocumentSource.js

@@ -4,12 +4,16 @@ var assert = require("assert"),
 	DocumentSource = require("../../../../lib/pipeline/documentSources/DocumentSource"),
 	OutDocumentSource = require("../../../../lib/pipeline/documentSources/OutDocumentSource"),
 	CursorDocumentSource = require("../../../../lib/pipeline/documentSources/CursorDocumentSource"),
-	Cursor = require("../../../../lib/Cursor");
+	ArrayRunner = require("../../../../lib/query/ArrayRunner");
 
 var createOut = function(ctx) {
 	var ds = new OutDocumentSource(ctx);
 	return ds;
 };
+var addSource = function addSource(ds, data) {
+	var cds = new CursorDocumentSource(null, new ArrayRunner(data), null);
+	ds.setSource(cds);
+};
 
 module.exports = {
 
@@ -41,14 +45,11 @@ module.exports = {
 				assert.throws(ods.getNext.bind(ods));
 			},
 
-			"should act ass passthrough (for now)": function(next) {
+			"should act as passthrough (for now)": function(next) {
 				var ods = OutDocumentSource.createFromJson("test"),
-					cwc = new CursorDocumentSource.CursorWithContext(),
 					l = [{_id:0,a:[{b:1},{b:2}]}, {_id:1,a:[{b:1},{b:1}]} ];
 
-				cwc._cursor = new Cursor( l );
-				var cds = new CursorDocumentSource(cwc);
-				ods.setSource(cds);
+				addSource(ods, l);
 
 				var docs = [], i = 0;
 				async.doWhilst(
@@ -59,10 +60,10 @@ module.exports = {
 						});
 					},
 					function() {
-						return docs[i++] !== DocumentSource.EOF;
+						return docs[i++] !== null;
 					},
 					function(err) {
-						assert.deepEqual([{_id:0,a:[{b:1},{b:2}]}, {_id:1,a:[{b:1},{b:1}]}, DocumentSource.EOF], docs);
+						assert.deepEqual([{_id:0,a:[{b:1},{b:2}]}, {_id:1,a:[{b:1},{b:1}]}, null], docs);
 						next();
 					}
 				);
@@ -83,13 +84,10 @@ module.exports = {
 		"#serialize()":{
 
 			"serialize":function() {
-				var cwc = new CursorDocumentSource.CursorWithContext();
 				var input = [{_id: 0, a: 1}, {_id: 1, a: 2}];
-				cwc._cursor = new Cursor( input );
-				var cds = new CursorDocumentSource(cwc);
 				var title = "CognitiveScientists";
 				var ods = OutDocumentSource.createFromJson(title);
-				ods.setSource(cds);
+				addSource(ods, input);
 				var srcNm = ods.getSourceName();
 				var serialize = {};
 				serialize[srcNm] = title;

+ 218 - 221
test/lib/pipeline/documentSources/ProjectDocumentSource.js

@@ -1,10 +1,14 @@
 "use strict";
 var assert = require("assert"),
 	async = require("async"),
+	DepsTracker = require("../../../../lib/pipeline/DepsTracker"),
 	DocumentSource = require("../../../../lib/pipeline/documentSources/DocumentSource"),
 	ProjectDocumentSource = require("../../../../lib/pipeline/documentSources/ProjectDocumentSource"),
 	CursorDocumentSource = require("../../../../lib/pipeline/documentSources/CursorDocumentSource"),
-	Cursor = require("../../../../lib/Cursor");
+	ArrayRunner = require("../../../../lib/query/ArrayRunner"),
+	TestBase = require("./TestBase"),
+	And = require("../../../../lib/pipeline/expressions/AndExpression"),
+	Add = require("../../../../lib/pipeline/expressions/AddExpression");
 
 
 /**
@@ -12,8 +16,7 @@ var assert = require("assert"),
  *   MUST CALL WITH A PDS AS THIS (e.g. checkJsonRepresentation.call(this, rep) where this is a PDS)
  **/
 var checkJsonRepresentation = function checkJsonRepresentation(self, rep) {
-	var pdsRep = {};
-	self.sourceToJson(pdsRep, true);
+	var pdsRep = self.serialize();
 	assert.deepEqual(pdsRep, rep);
 };
 
@@ -28,260 +31,254 @@ var createProject = function createProject(projection) {
 			"$project": projection
 		},
 		specElement = projection,
-		project = ProjectDocumentSource.createFromJson(specElement);
-	checkJsonRepresentation(project, spec);
-	return project;
+		_project = ProjectDocumentSource.createFromJson(specElement);
+	checkJsonRepresentation(_project, spec);
+	return _project;
 };
 
 //TESTS
 module.exports = {
 
-	"ProjectDocumentSource": {
+	"constructor()": {
 
-		"constructor()": {
+		"should not throw Error when constructing without args": function testConstructor() {
+			assert.doesNotThrow(function() {
+				new ProjectDocumentSource();
+			});
+		},
 
-			"should not throw Error when constructing without args": function testConstructor() {
-				assert.doesNotThrow(function() {
-					new ProjectDocumentSource();
-				});
-			}
+		"should throw Error when constructing with more than 1 arg": function testConstructor() {
+			assert.throws(function() {
+				new ProjectDocumentSource("a", "b", "c");
+			});
+		}
 
-		},
+	},
 
-		"#getSourceName()": {
+	"#getSourceName()": {
 
-			"should return the correct source name; $project": function testSourceName() {
-				var pds = new ProjectDocumentSource();
-				assert.strictEqual(pds.getSourceName(), "$project");
-			}
+		"should return the correct source name; $project": function testSourceName() {
+			var pds = new ProjectDocumentSource();
+			assert.strictEqual(pds.getSourceName(), "$project");
+		}
+
+	},
 
+	"#getNext()": {
+
+		"should return errors in the callback": function Errors() {
+			var input = [{_id: 0, a: "foo"}];
+			var cds = new CursorDocumentSource(null, new ArrayRunner(input), null);
+			var pds = ProjectDocumentSource.createFromJson({x:{"$add":["$a", "$a"]}});
+			pds.setSource(cds);
+			pds.getNext(function(err, actual) {
+				assert(err, "Expected error");
+			});
 		},
 
-		"#getNext()": {
+		"should return EOF": function testEOF(next) {
+			var pds = createProject({});
+			pds.setSource({
+				getNext: function getNext(cb) {
+					return cb(null, null);
+				}
+			});
+			pds.getNext(function(err, doc) {
+				assert.equal(null, doc);
+				next();
+			});
+		},
 
-			"should return EOF": function testEOF(next) {
-				var pds = createProject();
-				pds.setSource({
-					getNext: function getNext(cb) {
-						return cb(null, DocumentSource.EOF);
-					}
-				});
-				pds.getNext(function(err, doc) {
-					assert.equal(DocumentSource.EOF, doc);
-					next();
-				});
-			},
-
-			"iterator state accessors consistently report the source is exhausted": function assertExhausted() {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var input = [{}];
-				cwc._cursor = new Cursor( input );
-				var cds = new CursorDocumentSource(cwc);
-				var pds = createProject();
-				pds.setSource(cds);
-				pds.getNext(function(err, actual) {
-					pds.getNext(function(err, actual1) {
-						assert.equal(DocumentSource.EOF, actual1);
-						pds.getNext(function(err, actual2) {
-							assert.equal(DocumentSource.EOF, actual2);
-							pds.getNext(function(err, actual3) {
-								assert.equal(DocumentSource.EOF, actual3);
-							});
+		"iterator state accessors consistently report the source is exhausted": function assertExhausted() {
+			var input = [{}];
+			var cds = new CursorDocumentSource(null, new ArrayRunner(input), null);
+			var pds = createProject();
+			pds.setSource(cds);
+			pds.getNext(function(err, actual) {
+				pds.getNext(function(err, actual1) {
+					assert.equal(null, actual1);
+					pds.getNext(function(err, actual2) {
+						assert.equal(null, actual2);
+						pds.getNext(function(err, actual3) {
+							assert.equal(null, actual3);
 						});
 					});
 				});
-			},
-
-			"callback is required": function requireCallback() {
-				var pds = createProject();
-				assert.throws(pds.getNext.bind(pds));
-			},
-
-			"should not return EOF when a document is still in cursor": function testNotEOFTrueIfDocPresent() {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var input = [{_id: 0, a: 1}, {_id: 1, a: 2}];
-					cwc._cursor = new Cursor( input );
-				var cds = new CursorDocumentSource(cwc);
-				var pds = createProject();
-				pds.setSource(cds);
-				pds.getNext(function(err,actual) {
-					// first go round
-					assert.notEqual(actual, DocumentSource.EOF);
-				});
-			},
+			});
+		},
+
+		"callback is required": function requireCallback() {
+			var pds = createProject();
+			assert.throws(pds.getNext.bind(pds));
+		},
 
-			"can retrieve second document from source": function testAdvanceFirst() {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var input = [{_id: 0, a: 1}, {_id: 1, a: 2}];
-				cwc._cursor = new Cursor( input );
-				var cds = new CursorDocumentSource(cwc);
-				var pds = createProject();
-				pds.setSource(cds);
+		"should not return EOF when a document is still in cursor": function testNotEOFTrueIfDocPresent() {
+			var input = [{_id: 0, a: 1}, {_id: 1, a: 2}];
+			var cds = new CursorDocumentSource(null, new ArrayRunner(input), null);
+			var pds = createProject();
+			pds.setSource(cds);
+			pds.getNext(function(err,actual) {
+				// first go round
+				assert.notEqual(actual, null);
+			});
+		},
 
-				pds.getNext(function(err,val) {
-					// eh, ignored
-					pds.getNext(function(err,val) {
-						assert.equal(2, val.a);
-					});
-				});
-			},
-
-			"should get the first document out of a cursor": function getCurrentCalledFirst() {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var input = [{_id: 0, a: 1}];
-				cwc._cursor = new Cursor( input );
-				var cds = new CursorDocumentSource(cwc);
-				var pds = createProject();
-				pds.setSource(cds);
-				pds.getNext(function(err, actual) {
-					assert.equal(1, actual.a);
-				});
-			},
-
-			"The a and c.d fields are included but the b field is not": function testFullProject1(next) {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var input = [{
-					_id: 0,
-					a: 1,
-					b: 1,
-					c: {
-						d: 1
-					}
-				}];
-				cwc._cursor = new Cursor(input);
-				var cds = new CursorDocumentSource(cwc);
-				var pds = createProject({
-						a: true,
-						c: {
-							d: true
-						}
-					}),
-					expected = {a:1, c:{ d: 1 }};
-				pds.setSource(cds);
+		"can retrieve second document from source": function testAdvanceFirst() {
+			var input = [{_id: 0, a: 1}, {_id: 1, a: 2}];
+			var cds = new CursorDocumentSource(null, new ArrayRunner(input), null);
+			var pds = createProject();
+			pds.setSource(cds);
 
+			pds.getNext(function(err,val) {
+				// eh, ignored
 				pds.getNext(function(err,val) {
-					assert.deepEqual(expected, val);
-					next();
+					assert.equal(2, val.a);
 				});
-			},
-
-			"Two documents": function testTwoDocumentsProject(next) {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var input = [{
-					a: 1,
-					b: 2
-				}, {
-					a: 3,
-					b: 4
-				}],
-				expected = [
-					{a:1},
-					{a:3},
-					DocumentSource.EOF
-				];
-				cwc._cursor = new Cursor(input);
-				var cds = new CursorDocumentSource(cwc);
-				var pds = createProject({
+			});
+		},
+
+		"should get the first document out of a cursor": function getCurrentCalledFirst() {
+			var input = [{_id: 0, a: 1}];
+			var cds = new CursorDocumentSource(null, new ArrayRunner(input), null);
+			var pds = createProject();
+			pds.setSource(cds);
+			pds.getNext(function(err, actual) {
+				assert.equal(1, actual.a);
+			});
+		},
+
+		"The a and c.d fields are included but the b field is not": function testFullProject1(next) {
+			var input = [{
+				_id:0,
+				a: 1,
+				b: 1,
+				c: {
+					d: 1
+				}
+			}];
+			var cds = new CursorDocumentSource(null, new ArrayRunner(input), null);
+			var pds = createProject({
 					a: true,
 					c: {
 						d: true
 					}
-				});
-				pds.setSource(cds);
-
-				async.series([
-						pds.getNext.bind(pds),
-						pds.getNext.bind(pds),
-						pds.getNext.bind(pds),
-					],
-					function(err,res) {
-						assert.deepEqual(expected, res);
-						next();
-					}
-				);
-			}
+				}),
+				expected = {_id: 0, a:1, c:{ d: 1 }};
+			pds.setSource(cds);
+
+			pds.getNext(function(err,val) {
+				assert.deepEqual(expected, val);
+				next();
+			});
 		},
 
-		"#optimize()": {
+		"Two documents": function testTwoDocumentsProject(next) {
+			var input = [{
+				a: 1,
+				b: 2
+			}, {
+				a: 3,
+				b: 4
+			}],
+			expected = [
+				{a:1},
+				{a:3},
+				null
+			];
+			var cds = new CursorDocumentSource(null, new ArrayRunner(input), null);
+			var pds = createProject({
+				a: true,
+				c: {
+					d: true
+				}
+			});
+			pds.setSource(cds);
+
+			async.series([
+					pds.getNext.bind(pds),
+					pds.getNext.bind(pds),
+					pds.getNext.bind(pds),
+				],
+				function(err,res) {
+					assert.deepEqual(expected, res);
+					next();
+				}
+			);
+		}
+	},
 
-			"Optimize the projection": function optimizeProject() {
-				var pds = createProject({
-					a: {
-						$and: [true]
-					}
-				});
-				pds.optimize();
-				checkJsonRepresentation(pds, {
-					$project: {
-						a: {
-							$const: true
-						}
-					}
-				});
-			}
+	"#optimize()": {
 
-		},
+		"Optimize the projection": function optimizeProject() {
+			var pds = createProject({
+				a: {
+					$and: [{$const:true}]
+				}
+			});
 
-		"#createFromJson()": {
+			pds.optimize();
+			checkJsonRepresentation(pds, {$project:{a:{$const:true}}});
+		}
 
-			"should error if called with non-object": function testNonObjectPassed() {
-				//String as arg
-				assert.throws(function() {
-					var pds = createProject("not an object");
-				});
-				//Date as arg
-				assert.throws(function() {
-					var pds = createProject(new Date());
-				});
-				//Array as arg
-				assert.throws(function() {
-					var pds = createProject([]);
-				});
-				//Empty args
-				assert.throws(function() {
-					var pds = ProjectDocumentSource.createFromJson();
-				});
-				//Top level operator
-				assert.throws(function() {
-					var pds = createProject({
-						$add: []
-					});
+	},
+
+	"#createFromJson()": {
+
+		"should error if called with non-object": function testNonObjectPassed() {
+			//String as arg
+			assert.throws(function() {
+				var pds = createProject("not an object");
+			});
+			//Date as arg
+			assert.throws(function() {
+				var pds = createProject(new Date());
+			});
+			//Array as arg
+			assert.throws(function() {
+				var pds = createProject([]);
+			});
+			//Empty args
+			assert.throws(function() {
+				var pds = ProjectDocumentSource.createFromJson();
+			});
+			//Top level operator
+			assert.throws(function() {
+				var pds = createProject({
+					$add: []
 				});
-				//Invalid spec
-				assert.throws(function() {
-					var pds = createProject({
-						a: {
-							$invalidOperator: 1
-						}
-					});
+			});
+			//Invalid spec
+			assert.throws(function() {
+				var pds = createProject({
+					a: {
+						$invalidOperator: 1
+					}
 				});
+			});
 
-			}
-
-		},
-
-		"#getDependencies()": {
-
-			"should properly detect dependencies in project": function testGetDependencies() {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var input = {
-					a: true,
-					x: '$b',
-					y: {
-						$and: ['$c', '$d']
-					}
-				};
-				var pds = createProject(input);
-				var dependencies = {};
-				assert.equal(DocumentSource.GetDepsReturn.EXHAUSTIVE, pds.getDependencies(dependencies));
-				assert.equal(5, Object.keys(dependencies).length);
-				assert.ok(dependencies._id);
-				assert.ok(dependencies.a);
-				assert.ok(dependencies.b);
-				assert.ok(dependencies.c);
-				assert.ok(dependencies.d);
-			}
+		}
 
+	},
+
+	"#getDependencies()": {
+
+		"should properly detect dependencies in project": function testGetDependencies() {
+			var input = {
+				a: true,
+				x: '$b',
+				y: {
+					$and: ['$c', '$d']
+				}
+			};
+			var pds = createProject(input);
+			var dependencies = new DepsTracker();
+			assert.equal(DocumentSource.GetDepsReturn.EXHAUSTIVE_FIELDS, pds.getDependencies(dependencies));
+			assert.equal(5, Object.keys(dependencies.fields).length);
+			assert.ok(dependencies.fields._id);
+			assert.ok(dependencies.fields.a);
+			assert.ok(dependencies.fields.b);
+			assert.ok(dependencies.fields.c);
+			assert.ok(dependencies.fields.d);
 		}
 
 	}

+ 152 - 23
test/lib/pipeline/documentSources/RedactDocumentSource.js

@@ -4,23 +4,26 @@ var assert = require("assert"),
 	DocumentSource = require("../../../../lib/pipeline/documentSources/DocumentSource"),
 	RedactDocumentSource = require("../../../../lib/pipeline/documentSources/RedactDocumentSource"),
 	CursorDocumentSource = require("../../../../lib/pipeline/documentSources/CursorDocumentSource"),
-	Cursor = require("../../../../lib/Cursor");
-
-var exampleRedact = {$cond: [
-	{$gt:[3, 0]},
-	"$$DESCEND",
-	"$$PRUNE"]
+	ArrayRunner = require("../../../../lib/query/ArrayRunner"),
+	Expressions = require("../../../../lib/pipeline/expressions");
+
+var exampleRedact = {$cond:{
+	if:{$gt:[0,4]},
+	then:"$$DESCEND",
+	else:"$$PRUNE"
+}};
+
+var createCursorDocumentSource = function createCursorDocumentSource (input) {
+	if (!input || input.constructor !== Array) throw new Error('invalid');
+	return new CursorDocumentSource(null, new ArrayRunner(input), null);
 };
 
-////////////////////////////////////////////////////////////////////////////////
-////////////////////////////////////////////////////////////////////////////////
-//////////////////////////////////// BUSTED ////////////////////////////////////
-//           This DocumentSource is busted without new Expressions            //
-////////////////////////////////////////////////////////////////////////////////
-////////////////////////////////////////////////////////////////////////////////
-////////////////////////////////////////////////////////////////////////////////
+var createRedactDocumentSource = function createRedactDocumentSource (src, expression) {
+	var rds = RedactDocumentSource.createFromJson(expression);
+	rds.setSource(src);
+	return rds;
+};
 
-//TESTS
 module.exports = {
 
 	"RedactDocumentSource": {
@@ -50,29 +53,39 @@ module.exports = {
 				var rds = RedactDocumentSource.createFromJson(exampleRedact);
 				rds.setSource({
 					getNext: function getNext(cb) {
-						return cb(null, DocumentSource.EOF);
+						return cb(null, null);
 					}
 				});
 				rds.getNext(function(err, doc) {
-					assert.equal(DocumentSource.EOF, doc);
+					assert.equal(null, doc);
+					next();
+				});
+			},
+			"should return Error in callback": function testError(next) {
+				var rds = RedactDocumentSource.createFromJson({$cond:{
+					if:{$gt:[0,{$add:["$a", 3]}]},
+					then:"$$DESCEND",
+					else:"$$PRUNE"
+				}});
+				rds.setSource(createCursorDocumentSource([{a:"foo"}]));
+				rds.getNext(function(err, doc) {
+					assert(err, "Expected Error");
 					next();
 				});
 			},
 
 			"iterator state accessors consistently report the source is exhausted": function assertExhausted() {
-				var cwc = new CursorDocumentSource.CursorWithContext();
 				var input = [{}];
-				cwc._cursor = new Cursor( input );
-				var cds = new CursorDocumentSource(cwc);
+				var cds = createCursorDocumentSource(input);
 				var rds = RedactDocumentSource.createFromJson(exampleRedact);
 				rds.setSource(cds);
 				rds.getNext(function(err, actual) {
 					rds.getNext(function(err, actual1) {
-						assert.equal(DocumentSource.EOF, actual1);
+						assert.equal(null, actual1);
 						rds.getNext(function(err, actual2) {
-							assert.equal(DocumentSource.EOF, actual2);
+							assert.equal(null, actual2);
 							rds.getNext(function(err, actual3) {
-								assert.equal(DocumentSource.EOF, actual3);
+								assert.equal(null, actual3);
 							});
 						});
 					});
@@ -83,6 +96,7 @@ module.exports = {
 				var rds = new RedactDocumentSource();
 				assert.throws(rds.getNext.bind(rds));
 			},
+
 		},
 
 		"#optimize()": {
@@ -109,9 +123,124 @@ module.exports = {
 			}
 
 		},
+
+		"#redact()": {
+
+			"should redact subsection where tag does not match": function (done) {
+				var cds = createCursorDocumentSource([{
+					_id: 1,
+					title: "123 Department Report",
+					tags: ["G", "STLW"],
+					year: 2014,
+					subsections: [
+						{
+							subtitle: "Section 1: Overview",
+							tags: ["SI", "G"],
+							content: "Section 1: This is the content of section 1."
+						},
+						{
+							subtitle: "Section 2: Analysis",
+							tags: ["STLW"],
+							content: "Section 2: This is the content of section 2."
+						},
+						{
+							subtitle: "Section 3: Budgeting",
+							tags: ["TK"],
+							content: {
+								text: "Section 3: This is the content of section3.",
+								tags: ["HCS"]
+							}
+						}
+					]
+				}]);
+
+				var expression = {$cond:{
+					if:{$gt: [{$size: {$setIntersection: ["$tags", [ "STLW", "G" ]]}},0]},
+					then:"$$DESCEND",
+					else:"$$PRUNE"
+				}};
+
+				var rds = createRedactDocumentSource(cds, expression);
+
+				var result = {
+					"_id": 1,
+					"title": "123 Department Report",
+					"tags": ["G", "STLW"],
+					"year": 2014,
+					"subsections": [{
+						"subtitle": "Section 1: Overview",
+						"tags": ["SI", "G"],
+						"content": "Section 1: This is the content of section 1."
+					}, {
+						"subtitle": "Section 2: Analysis",
+						"tags": ["STLW"],
+						"content": "Section 2: This is the content of section 2."
+					}]
+				};
+
+				rds.getNext(function (err, actual) {
+					assert.deepEqual(actual, result);
+					done();
+				});
+
+			},
+
+			"should redact an entire subsection based on a defined access level": function (done) {
+				var cds = createCursorDocumentSource([{
+					_id: 1,
+					level: 1,
+					acct_id: "xyz123",
+					cc: {
+						level: 5,
+						type: "yy",
+						exp_date: new Date("2015-11-01"),
+						billing_addr: {
+							level: 5,
+							addr1: "123 ABC Street",
+							city: "Some City"
+						},
+						shipping_addr: [
+							{
+								level: 3,
+								addr1: "987 XYZ Ave",
+								city: "Some City"
+							},
+							{
+								level: 3,
+								addr1: "PO Box 0123",
+								city: "Some City"
+							}
+						]
+					},
+					status: "A"
+				}]);
+
+				var expression = {$cond:{
+					if:{$eq:["$level",5]},
+					then:"$$PRUNE",
+					else:"$$DESCEND"
+				}};
+
+				var rds = createRedactDocumentSource(cds, expression);
+
+				var result = {
+					_id:1,
+					level:1,
+					acct_id:"xyz123",
+					status:"A"
+				};
+
+				rds.getNext(function (err, actual) {
+					assert.deepEqual(actual, result);
+					done();
+				});
+
+			}
+
+		}
+
 	}
 
 };
 
 if (!module.parent)(new(require("mocha"))()).ui("exports").reporter("spec").addFile(__filename).grep(process.env.MOCHA_GREP || '').run(process.exit);
-

+ 67 - 29
test/lib/pipeline/documentSources/SkipDocumentSource.js

@@ -1,10 +1,15 @@
 "use strict";
 var assert = require("assert"),
 	async = require("async"),
-	Cursor = require("../../../../lib/Cursor"),
 	DocumentSource = require("../../../../lib/pipeline/documentSources/DocumentSource"),
+	SkipDocumentSource = require("../../../../lib/pipeline/documentSources/SkipDocumentSource"),
 	CursorDocumentSource = require("../../../../lib/pipeline/documentSources/CursorDocumentSource"),
-	SkipDocumentSource = require("../../../../lib/pipeline/documentSources/SkipDocumentSource");
+	ArrayRunner = require("../../../../lib/query/ArrayRunner");
+
+var addSource = function addSource(ds, data) {
+	var cds = new CursorDocumentSource(null, new ArrayRunner(data), null);
+	ds.setSource(cds);
+};
 
 
 module.exports = {
@@ -21,6 +26,15 @@ module.exports = {
 
 		},
 
+		'#create()': {
+			'should create a direct copy of a SkipDocumentSource created through the constructor': function () {
+				var sds1 = new SkipDocumentSource(),
+					sds2 = SkipDocumentSource.create();
+
+				assert.strictEqual(JSON.stringify(sds1), JSON.stringify(sds2));
+			}
+		},
+
 		"#getSourceName()": {
 
 			"should return the correct source name; $skip": function testSourceName(){
@@ -30,6 +44,24 @@ module.exports = {
 
 		},
 
+		'#getSkip()': {
+			'should return the skips': function () {
+				var sds = new SkipDocumentSource();
+
+				assert.strictEqual(sds.getSkip(), 0);
+			}
+		},
+
+		'#setSkip()': {
+			'should return the skips': function () {
+				var sds = new SkipDocumentSource();
+
+				sds.setSkip(10);
+
+				assert.strictEqual(sds.getSkip(), 10);
+			}
+		},
+
 		"#coalesce()": {
 
 			"should return false if nextSource is not $skip": function dontSkip(){
@@ -57,19 +89,15 @@ module.exports = {
 
 				var expected = [
 					{val:4},
-					DocumentSource.EOF
+					null
 				];
-
-				var cwc = new CursorDocumentSource.CursorWithContext();
 				var input = [
 					{val:1},
 					{val:2},
 					{val:3},
 					{val:4},
 				];
-				cwc._cursor = new Cursor( input );
-				var cds = new CursorDocumentSource(cwc);
-				sds.setSource(cds);
+				addSource(sds, input);
 
 				async.series([
 						sds.getNext.bind(sds),
@@ -81,20 +109,17 @@ module.exports = {
 					}
 				);
 				sds.getNext(function(err, actual) {
-					assert.equal(actual, DocumentSource.EOF);
+					assert.equal(actual, null);
 				});
 			},
 			"should return documents if skip count is not hit and there are more documents": function hitSkip(next){
 				var sds = SkipDocumentSource.createFromJson(1);
 
-				var cwc = new CursorDocumentSource.CursorWithContext();
 				var input = [{val:1},{val:2},{val:3}];
-				cwc._cursor = new Cursor( input );
-				var cds = new CursorDocumentSource(cwc);
-				sds.setSource(cds);
+				addSource(sds, input);
 
 				sds.getNext(function(err,actual) {
-					assert.notEqual(actual, DocumentSource.EOF);
+					assert.notEqual(actual, null);
 					assert.deepEqual(actual, {val:2});
 					next();
 				});
@@ -103,11 +128,8 @@ module.exports = {
 			"should return the current document source": function currSource(){
 				var sds = SkipDocumentSource.createFromJson(1);
 
-				var cwc = new CursorDocumentSource.CursorWithContext();
 				var input = [{val:1},{val:2},{val:3}];
-				cwc._cursor = new Cursor( input );
-				var cds = new CursorDocumentSource(cwc);
-				sds.setSource(cds);
+				addSource(sds, input);
 
 				sds.getNext(function(err, actual) {
 					assert.deepEqual(actual, { val:2 });
@@ -120,17 +142,11 @@ module.exports = {
 
 				var expected = [
 					{item:4},
-					DocumentSource.EOF
+					null
 				];
-
-				var i = 1;
-				sds.source = {
-					getNext:function(cb){
-						if (i>=5)
-							return cb(null,DocumentSource.EOF);
-						return cb(null, { item:i++ });
-					}
-				};
+				
+				var input = [{item:1},{item:2},{item:3},{item:4}];
+				addSource(sds, input);
 
 				async.series([
 						sds.getNext.bind(sds),
@@ -164,9 +180,31 @@ module.exports = {
 				assert.strictEqual(t.skip, 5);
 			}
 
-		}
+		},
 
+		'#getDependencies()': {
+			'should return SEE_NEXT': function () {
+				var sds = new SkipDocumentSource();
 
+				assert.strictEqual(sds.getDependencies(), DocumentSource.GetDepsReturn.SEE_NEXT); // Hackish. We may be getting an enum in somewhere.
+			}
+		},
+
+		'#getShardSource()': {
+			'should return null': function () {
+				var sds = new SkipDocumentSource();
+
+				assert.strictEqual(sds.getShardSource(), null);
+			}
+		},
+
+		'#getRouterSource()': {
+			'should return the instance of the SkipDocumentSource': function () {
+				var sds = new SkipDocumentSource();
+
+				assert.strictEqual(sds.getRouterSource(), sds);
+			}
+		}
 	}
 
 };

+ 254 - 156
test/lib/pipeline/documentSources/SortDocumentSource.js

@@ -5,9 +5,65 @@ var assert = require("assert"),
 	SortDocumentSource = require("../../../../lib/pipeline/documentSources/SortDocumentSource"),
 	LimitDocumentSource = require("../../../../lib/pipeline/documentSources/LimitDocumentSource"),
 	CursorDocumentSource = require("../../../../lib/pipeline/documentSources/CursorDocumentSource"),
-	Cursor = require("../../../../lib/Cursor"),
+	ArrayRunner = require("../../../../lib/query/ArrayRunner"),
 	FieldPathExpression = require("../../../../lib/pipeline/expressions/FieldPathExpression");
 
+var getCursorDocumentSource = function(values) {
+	return new CursorDocumentSource(null, new ArrayRunner(values), null);
+};
+
+
+/// An assertion for `ObjectExpression` instances based on Mongo's `ExpectedResultBase` class
+function assertExpectedResult(args) {
+	{// check for required args
+		if (args === undefined) throw new TypeError("missing arg: `args` is required");
+		if (args.spec && args.throw === undefined) args.throw = true; // Assume that spec only tests expect an error to be thrown
+		//if (args.spec === undefined) throw new Error("missing arg: `args.spec` is required");
+		if (args.expected !== undefined && args.docs === undefined) throw new Error("must provide docs with expected value");
+	}// check for required args
+
+	// run implementation
+	if(args.expected && args.docs){
+		var sds = SortDocumentSource.createFromJson(args.spec),
+			next,
+			results = [],
+			cds = new CursorDocumentSource(null, new ArrayRunner(args.docs), null);
+		sds.setSource(cds);
+		async.whilst(
+			function() {
+				return next !== null;
+			},
+			function(done) {
+				sds.getNext(function(err, doc) {
+					if(err) return done(err);
+					next = doc;
+					if(next === null) {
+						return done();
+					} else {
+						results.push(next);
+						return done();
+					}
+				});
+			},
+			function(err) {
+				assert.equal(JSON.stringify(results), JSON.stringify(args.expected));
+				if(args.done) {
+					return args.done();
+				}
+			}
+		);
+	}else{
+		if(args.throw) {
+			assert.throws(function(){
+				SortDocumentSource.createFromJson(args.spec);
+			});
+		} else {
+			assert.doesNotThrow(function(){
+				var gds = SortDocumentSource.createFromJson(args.spec);
+			});
+		}
+	}
+}
 
 module.exports = {
 
@@ -15,11 +71,28 @@ module.exports = {
 
 		"constructor()": {
 
-			"should not throw Error when constructing without args": function testConstructor(){
-				assert.doesNotThrow(function(){
-					new SortDocumentSource();
+			// $sort spec is not an object
+			"should throw Error when constructing without args": function testConstructor(){
+				assertExpectedResult({"throw":true});
+			},
+
+			// $sort spec is not an object
+			"should throw Error when $sort spec is not an object": function testConstructor(){
+				assertExpectedResult({spec:"Foo"});
+			},
+
+			// $sort spec is an empty object
+			"should throw Error when $sort spec is an empty object": function testConstructor(){
+				assertExpectedResult({spec:{}});
+			},
+
+
+			// $sort _id is specified as an invalid object expression
+			"should throw error when _id is an invalid object expression": function testConstructor(){
+				assertExpectedResult({
+					spec:{_id:{$add:1, $and:1}},
 				});
-			}
+			},
 
 		},
 
@@ -41,71 +114,82 @@ module.exports = {
 		},
 
 		"#getNext()": {
-
+			/** Assert that iterator state accessors consistently report the source is exhausted. */
 			"should return EOF if there are no more sources": function noSources(next){
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				cwc._cursor = new Cursor( [{a: 1}] );
-				var cds = new CursorDocumentSource(cwc);
-				var sds = SortDocumentSource.createFromJson({a:1});
+				var cds = getCursorDocumentSource([{"a": 1}]);
+				var sds = SortDocumentSource.createFromJson({"sort":1});
 				sds.setSource(cds);
 				sds.getNext(function(err, val) {
 					assert.deepEqual(val, {a:1});
 					sds.getNext(function(err, val) {
-						assert.equal(val, DocumentSource.EOF);
-						next();
+						if (err) throw err;
+						assert.equal(val, null);
+						return next();
 					});
 				});
+
 			},
-			"should return EOF if there are more documents": function hitSort(next){
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				cwc._cursor = new Cursor( [{a: 1}] );
-				var cds = new CursorDocumentSource(cwc);
-				var sds = SortDocumentSource.createFromJson({a:1});
+
+			"should not return EOF if there are documents": function hitSort(next){
+				var cds = getCursorDocumentSource([{a: 1}]);
+				var sds = SortDocumentSource.createFromJson({"sort":1});
 				sds.setSource(cds);
-				sds.getNext(function(err, doc) {
-					assert.notEqual(doc, DocumentSource.EOF);
-					next();
-				});
+				async.series([
+						cds.getNext.bind(cds),
+					],
+					function(err,res) {
+						if (err) throw err;
+						assert.notEqual(res, null);
+						return next();
+					}
+				);
 			},
 
 			"should return the current document source": function currSource(next){
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				cwc._cursor = new Cursor( [{a: 1}] );
-				var cds = new CursorDocumentSource(cwc);
-				var sds = SortDocumentSource.createFromJson({a:1});
+				var cds = getCursorDocumentSource([{a: 1}]);
+				var sds = SortDocumentSource.createFromJson({"sort":1});
 				sds.setSource(cds);
-				sds.getNext(function(err, doc) {
-					assert.deepEqual(doc, { a:1 });
-					next();
-				});
+				async.series([
+						cds.getNext.bind(cds),
+					],
+					function(err,res) {
+						if (err) throw err;
+						assert.deepEqual(res, [ { a: 1 } ]);
+						return next();
+					}
+				);
 			},
 
-			"should return next document when moving to the next source": function nextSource(next){
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				cwc._cursor = new Cursor( [{a: 1}, {b:2}] );
-				var cds = new CursorDocumentSource(cwc);
-				var sds = SortDocumentSource.createFromJson({a:1});
+			"should return next document when moving to the next source sorted descending": function nextSource(next){
+				var cds = getCursorDocumentSource([{a: 1}, {b:2}]);
+				var sds = SortDocumentSource.createFromJson({"sort":1});
 				sds.setSource(cds);
-				sds.getNext(function(err, doc) {
-					assert.deepEqual(doc, {b:2});
-					next();
-				});
+				async.series([
+						cds.getNext.bind(cds),
+					],
+					function(err,res) {
+						if (err) throw err;
+						assert.deepEqual(res, [ { a: 1 } ]);
+						return next();
+					}
+				);
 			},
 
-			"should return false for no sources remaining": function noMoar(next){
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				cwc._cursor = new Cursor( [{a: 1}, {b:2}] );
-				var cds = new CursorDocumentSource(cwc);
-				var sds = SortDocumentSource.createFromJson({a:1});
+			"should return false for no sources remaining sorted descending": function noMoar(next){
+				var cds = getCursorDocumentSource([{a: 1}, {b:2}]);
+				var sds = SortDocumentSource.createFromJson({"sort":1});
 				sds.setSource(cds);
-				sds.getNext(function(err, doc) {
-					sds.getNext(function(err, doc) {
-						assert.deepEqual(doc, {a:1});
-						next();
-					});
-				});
+				async.series([
+						cds.getNext.bind(cds),
+						cds.getNext.bind(cds),
+					],
+					function(err,res) {
+						if (err) throw err;
+						assert.deepEqual(res,  [ { a: 1 }, { b: 2 } ]);
+						return next();
+					}
+				);
 			}
-
 		},
 
 		"#serialize()": {
@@ -114,93 +198,108 @@ module.exports = {
 				var sds = new SortDocumentSource();
 				assert.throws(sds.serialize.bind(sds));
 			}
-
 		},
 
 		"#serializeToArray()": {
 
-			"should create an object representation of the SortDocumentSource": function serializeToArrayTest(){
+			/**
+            * Check that the BSON representation generated by the souce matches the BSON it was
+            * created with.
+            */
+            "should have equal json representation": function serializeToArrayCheck(next){
+				var sds = SortDocumentSource.createFromJson({"sort":1}, {});
+				var array = [];
+				sds.serializeToArray(array, false);
+				assert.deepEqual(array, [{"$sort":{"sort":1}}]);
+				return next();
+			},
+
+			"should create an object representation of the SortDocumentSource": function serializeToArrayTest(next){
 				var sds = new SortDocumentSource();
-				sds.vSortKey.push(new FieldPathExpression("b") );
-				var t = [];
-				sds.serializeToArray(t, false);
-				assert.deepEqual(t, [{ "$sort": { "b": -1 } }]);
+				var fieldPathVar;
+				sds.vSortKey.push(new FieldPathExpression("b", fieldPathVar) );
+				var array = [];
+				sds.serializeToArray(array, false);
+				assert.deepEqual(array, [{"$sort":{"":-1}}] );
+				return next();
 			}
 
 		},
 
 		"#createFromJson()": {
 
-			"should return a new SortDocumentSource object from an input JSON object": function createTest(){
+			"should return a new SortDocumentSource object from an input JSON object": function createTest(next){
 				var sds = SortDocumentSource.createFromJson({a:1});
 				assert.strictEqual(sds.constructor, SortDocumentSource);
 				var t = [];
 				sds.serializeToArray(t, false);
-				assert.deepEqual(t, [{ "$sort": { "a": 1 } }]);
+				assert.deepEqual(t, [{"$sort":{"a":1}}] );
+				return next();
 			},
 
-			"should return a new SortDocumentSource object from an input JSON object with a descending field": function createTest(){
+			"should return a new SortDocumentSource object from an input JSON object with a descending field": function createTest(next){
 				var sds = SortDocumentSource.createFromJson({a:-1});
 				assert.strictEqual(sds.constructor, SortDocumentSource);
 				var t = [];
 				sds.serializeToArray(t, false);
-				assert.deepEqual(t, [{ "$sort": { "a": -1 } }]);
+				assert.deepEqual(t,  [{"$sort":{"a":-1}}]);
+				return next();
 			},
 
-			"should return a new SortDocumentSource object from an input JSON object with dotted paths": function createTest(){
+			"should return a new SortDocumentSource object from an input JSON object with dotted paths": function createTest(next){
 				var sds = SortDocumentSource.createFromJson({ "a.b":1 });
 				assert.strictEqual(sds.constructor, SortDocumentSource);
 				var t = [];
 				sds.serializeToArray(t, false);
-				assert.deepEqual(t, [{ "$sort": { "a.b" : 1  } }]);
+				assert.deepEqual(t, [{"$sort":{"a.b":1}}]);
+				return next();
 			},
 
-			"should throw an exception when not passed an object": function createTest(){
+			"should throw an exception when not passed an object": function createTest(next){
 				assert.throws(function() {
 					var sds = SortDocumentSource.createFromJson(7);
 				});
+				return next();
 			},
 
-			"should throw an exception when passed an empty object": function createTest(){
+			"should throw an exception when passed an empty object": function createTest(next){
 				assert.throws(function() {
 					var sds = SortDocumentSource.createFromJson({});
 				});
+				return next();
 			},
 
-			"should throw an exception when passed an object with a non number value": function createTest(){
+			"should throw an exception when passed an object with a non number value": function createTest(next){
 				assert.throws(function() {
 					var sds = SortDocumentSource.createFromJson({a:"b"});
 				});
+				return next();
 			},
 
-			"should throw an exception when passed an object with a non valid number value": function createTest(){
+			"should throw an exception when passed an object with a non valid number value": function createTest(next){
 				assert.throws(function() {
 					var sds = SortDocumentSource.createFromJson({a:14});
 				});
+				next();
 			}
-
 		},
 
 		"#sort": {
 
 			"should sort a single document": function singleValue(next) {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				cwc._cursor = new Cursor( [{_id:0, a: 1}] );
-				var cds = new CursorDocumentSource(cwc);
+				var cds = getCursorDocumentSource([{_id:0, a: 1}]);
 				var sds = new SortDocumentSource();
 				sds.addKey("_id", false);
 				sds.setSource(cds);
 				sds.getNext(function(err, actual) {
+					if (err) throw err;
 					assert.deepEqual(actual, {_id:0, a:1});
-					next();
+					return next();
 				});
 			},
 
 			"should sort two documents": function twoValue(next) {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var l = [{_id:0, a: 1}, {_id:1, a:0}];
-				cwc._cursor = new Cursor( l );
-				var cds = new CursorDocumentSource(cwc);
+				var cds = getCursorDocumentSource([{_id:0, a: 1}, {_id:1, a:0}]);
 				var sds = new SortDocumentSource();
 				sds.addKey("_id", false);
 				sds.setSource(cds);
@@ -210,17 +309,15 @@ module.exports = {
 						sds.getNext.bind(sds),
 					],
 					function(err,res) {
-						assert.deepEqual([{_id:1, a: 0}, {_id:0, a:1}], res);
-						next();
+						if (err) throw err;
+						assert.deepEqual([ { _id: 1, a: 0 }, { _id: 0, a: 1 } ], res);
+						return next();
 					}
 				);
 			},
 
 			"should sort two documents in ascending order": function ascendingValue(next) {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var l = [{_id:0, a: 1}, {_id:5, a:12}, {_id:1, a:0}];
-				cwc._cursor = new Cursor( l );
-				var cds = new CursorDocumentSource(cwc);
+				var cds = getCursorDocumentSource([{_id:0, a: 1}, {_id:5, a:12}, {_id:1, a:0}]);
 				var sds = new SortDocumentSource();
 				sds.addKey("_id", true);
 				sds.setSource(cds);
@@ -234,21 +331,20 @@ module.exports = {
 						});
 					},
 					function() {
-						return docs[i++] !== DocumentSource.EOF;
+						return docs[i++] !== null;
 					},
 					function(err) {
-						assert.deepEqual([{_id:0, a: 1}, {_id:1, a:0}, {_id:5, a:12}, DocumentSource.EOF], docs);
-						next();
+						if (err) throw err;
+						assert.deepEqual([{_id:0, a: 1}, {_id:1, a:0}, {_id:5, a:12}, null], docs);
+						return next();
 					}
 				);
 			},
 
 			"should sort documents with a compound key": function compoundKeySort(next) {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var l = [{_id:0, a: 1, b:3}, {_id:5, a:12, b:7}, {_id:1, a:0, b:2}];
-				cwc._cursor = new Cursor( l );
-				var cds = new CursorDocumentSource(cwc);
-				var sds = new SortDocumentSource();
+				var cds = getCursorDocumentSource([{_id:0, a: 1, b:3}, {_id:5, a:12, b:7}, {_id:1, a:0, b:2}]);
+				var sds = SortDocumentSource.createFromJson({"sort":1});
+
 				sds.addKey("a", false);
 				sds.addKey("b", false);
 				sds.setSource(cds);
@@ -262,20 +358,18 @@ module.exports = {
 						});
 					},
 					function() {
-						return docs[i++] !== DocumentSource.EOF;
+						return docs[i++] !== null;
 					},
 					function(err) {
-						assert.deepEqual([{_id:5, a:12, b:7}, {_id:0, a:1, b:3}, {_id:1, a:0, b:2}, DocumentSource.EOF], docs);
-						next();
+						if (err) throw err;
+						assert.deepEqual([{_id:5, a:12, b:7}, {_id:0, a:1, b:3}, {_id:1, a:0, b:2}, null], docs);
+						return next();
 					}
 				);
 			},
 
 			"should sort documents with a compound key in ascending order": function compoundAscendingKeySort(next) {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var l = [{_id:0, a: 1, b:3}, {_id:5, a:12, b:7}, {_id:1, a:0, b:2}];
-				cwc._cursor = new Cursor( l );
-				var cds = new CursorDocumentSource(cwc);
+				var cds = getCursorDocumentSource([{_id:0, a: 1, b:3}, {_id:5, a:12, b:7}, {_id:1, a:0, b:2}]);
 				var sds = new SortDocumentSource();
 				sds.addKey("a", true);
 				sds.addKey("b", true);
@@ -290,20 +384,18 @@ module.exports = {
 						});
 					},
 					function() {
-						return docs[i++] !== DocumentSource.EOF;
+						return docs[i++] !== null;
 					},
 					function(err) {
-						assert.deepEqual([{_id:1, a:0, b:2}, {_id:0, a:1, b:3}, {_id:5, a:12, b:7}, DocumentSource.EOF], docs);
-						next();
+						if (err) throw err;
+						assert.deepEqual([{_id:1, a:0, b:2}, {_id:0, a:1, b:3}, {_id:5, a:12, b:7}, null], docs);
+						return next();
 					}
 				);
 			},
 
 			"should sort documents with a compound key in mixed order": function compoundMixedKeySort(next) {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var l = [{_id:0, a: 1, b:3}, {_id:5, a:12, b:7}, {_id:1, a:0, b:2}, {_id:8, a:7, b:42}];
-				cwc._cursor = new Cursor( l );
-				var cds = new CursorDocumentSource(cwc);
+				var cds = getCursorDocumentSource([{_id:0, a: 1, b:3}, {_id:5, a:12, b:7}, {_id:1, a:0, b:2}, {_id:8, a:7, b:42}]);
 				var sds = new SortDocumentSource();
 				sds.addKey("a", true);
 				sds.addKey("b", false);
@@ -318,30 +410,26 @@ module.exports = {
 						});
 					},
 					function() {
-						return docs[i++] !== DocumentSource.EOF;
+						return docs[i++] !== null;
 					},
 					function(err) {
-						assert.deepEqual([{_id:1, a:0, b:2}, {_id:0, a:1, b:3}, {_id:8, a:7, b:42}, {_id:5, a:12, b:7}, DocumentSource.EOF], docs);
-						next();
+						if (err) throw err;
+						assert.deepEqual([{_id:1, a:0, b:2}, {_id:0, a:1, b:3}, {_id:8, a:7, b:42}, {_id:5, a:12, b:7}, null], docs);
+						return next();
 					}
 				);
 			},
 
-			"should not sort different types": function diffTypesSort() {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var l = [{_id:0, a: 1}, {_id:1, a:"foo"}];
-				cwc._cursor = new Cursor( l );
-				var cds = new CursorDocumentSource(cwc);
+			"should not sort different types": function diffTypesSort(next) {
+				var cds = getCursorDocumentSource([{_id:0, a: 1}, {_id:1, a:"foo"}]);
 				var sds = new SortDocumentSource();
 				sds.addKey("a", false);
 				assert.throws(function() { sds.setSource(cds); });
+				return next();
 			},
 
 			"should sort docs with missing fields": function missingFields(next) {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var l = [{_id:0, a: 1}, {_id:1}];
-				cwc._cursor = new Cursor( l );
-				var cds = new CursorDocumentSource(cwc);
+				var cds = getCursorDocumentSource([{_id:0, a: 1}, {_id:1}]);
 				var sds = new SortDocumentSource();
 				sds.addKey("a", true);
 				sds.setSource(cds);
@@ -355,20 +443,18 @@ module.exports = {
 						});
 					},
 					function() {
-						return docs[i++] !== DocumentSource.EOF;
+						return docs[i++] !== null;
 					},
 					function(err) {
-						assert.deepEqual([{_id:1}, {_id:0, a:1}, DocumentSource.EOF], docs);
-						next();
+						if (err) throw err;
+						assert.deepEqual([{_id:1}, {_id:0, a:1}, null], docs);
+						return next();
 					}
 				);
 			},
 
 			"should sort docs with null fields": function nullFields(next) {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var l = [{_id:0, a: 1}, {_id:1, a: null}];
-				cwc._cursor = new Cursor( l );
-				var cds = new CursorDocumentSource(cwc);
+				var cds = getCursorDocumentSource([{_id:0, a: 1}, {_id:1, a: null}]);
 				var sds = new SortDocumentSource();
 				sds.addKey("a", true);
 				sds.setSource(cds);
@@ -382,20 +468,18 @@ module.exports = {
 						});
 					},
 					function() {
-						return docs[i++] !== DocumentSource.EOF;
+						return docs[i++] !== null;
 					},
 					function(err) {
-						assert.deepEqual([{_id:1, a:null}, {_id:0, a:1}, DocumentSource.EOF], docs);
-						next();
+						if (err) throw err;
+						assert.deepEqual([{_id:1, a:null}, {_id:0, a:1}, null], docs);
+						return next();
 					}
 				);
 			},
 
-			"should not support a missing object nested in an array": function missingObjectWithinArray() {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var l = [{_id:0, a: [1]}, {_id:1, a:[0]}];
-				cwc._cursor = new Cursor( l );
-				var cds = new CursorDocumentSource(cwc);
+			"should not support a missing object nested in an array": function missingObjectWithinArray(next) {
+				var cds = getCursorDocumentSource([{_id:0, a: [1]}, {_id:1, a:[0]}]);
 				var sds = new SortDocumentSource();
 				assert.throws(function() {
 					sds.addKey("a.b", false);
@@ -406,13 +490,11 @@ module.exports = {
 						sds.advance();
 					}
 				});
+				return next();
 			},
 
 			"should compare nested values from within an array": function extractArrayValues(next) {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var l = [{_id:0,a:[{b:1},{b:2}]}, {_id:1,a:[{b:1},{b:1}]} ];
-				cwc._cursor = new Cursor( l );
-				var cds = new CursorDocumentSource(cwc);
+				var cds = getCursorDocumentSource([{_id:0,a:[{b:1},{b:2}]}, {_id:1,a:[{b:1},{b:1}]}]);
 				var sds = new SortDocumentSource();
 				sds.addKey("a.b", true);
 				sds.setSource(cds);
@@ -426,33 +508,37 @@ module.exports = {
 						});
 					},
 					function() {
-						return docs[i++] !== DocumentSource.EOF;
+						return docs[i++] !== null;
 					},
 					function(err) {
-						assert.deepEqual([{_id:1,a:[{b:1},{b:1}]},{_id:0,a:[{b:1},{b:2}]}, DocumentSource.EOF], docs);
-						next();
+						if (err) throw err;
+						assert.deepEqual([{_id:1,a:[{b:1},{b:1}]},{_id:0,a:[{b:1},{b:2}]}, null], docs);
+						return next();
 					}
 				);
 			}
-
 		},
 
 		"#coalesce()": {
-			"should return false when coalescing a non-limit source": function nonLimitSource() {
-				var cwc = new CursorDocumentSource.CursorWithContext();
-				var l = [{_id:0,a:[{b:1},{b:2}]}, {_id:1,a:[{b:1},{b:1}]} ];
-				cwc._cursor = new Cursor( l );
-				var cds = new CursorDocumentSource(cwc),
-					sds = SortDocumentSource.createFromJson({a:1});
+			"should return false when coalescing a non-limit source": function nonLimitSource(next) {
+				var cds = getCursorDocumentSource([{_id:0,a:[{b:1},{b:2}]}, {_id:1,a:[{b:1},{b:1}]} ]);
+				var	sds = SortDocumentSource.createFromJson({a:1});
 
 				var newSrc = sds.coalesce(cds);
 				assert.equal(newSrc, false);
+				return next();
 			},
 
-			"should return limit source when coalescing a limit source": function limitSource() {
+
+			"should return limit source when coalescing a limit source": function limitSource(next) {
 				var sds = SortDocumentSource.createFromJson({a:1}),
 					lds = LimitDocumentSource.createFromJson(1);
 
+				// TODO: add missing test cases.
+				// array json getLimit
+				// getShardSource
+				// getMergeSource
+
 				var newSrc = sds.coalesce(LimitDocumentSource.createFromJson(10));
 				assert.ok(newSrc instanceof LimitDocumentSource);
 				assert.equal(sds.getLimit(), 10);
@@ -464,24 +550,36 @@ module.exports = {
 				var arr = [];
 				sds.serializeToArray(arr);
 				assert.deepEqual(arr, [{$sort: {a:1}}, {$limit: 5}]);
+
+				// TODO: add missing test cases
+				// doc array get limit
+				// getShardSource
+				// get MergeSource
+				return next();
 			},
 		},
 
 		"#dependencies": {
-			"should have Dependant field paths": function dependencies() {
-				var sds = new SortDocumentSource();
-				sds.addKey("a", true);
-				sds.addKey("b.c", false);
-				var deps = {};
-				assert.equal("SEE_NEXT", sds.getDependencies(deps));
-				assert.equal(2, Object.keys(deps).length);
-				assert.ok(deps.a);
-				assert.ok(deps["b.c"]);
+			/** Dependant field paths. */
+			"should have Dependant field paths": function dependencies(next) {
+			 	var sds = SortDocumentSource.createFromJson({sort: 1});
+
+				sds.addKey('a', true);
+			 	sds.addKey('b.c', false);
+
+				var deps = {fields: {}, needWholeDocument: false, needTextScore: false};
+
+				assert.equal(DocumentSource.GetDepsReturn.SEE_NEXT, sds.getDependencies(deps));
+				// Sort keys are now part of deps fields.
+				assert.equal(3, Object.keys(deps.fields).length);
+			 	assert.equal(1, deps.fields.a);
+				assert.equal(1, deps.fields['b.c']);
+				assert.equal(false, deps.needWholeDocument);
+				assert.equal(false, deps.needTextScore);
+				return next();
 			}
 		}
-
 	}
-
 };
 
 if (!module.parent)(new(require("mocha"))()).ui("exports").reporter("spec").addFile(__filename).grep(process.env.MOCHA_GREP || '').run(process.exit);

+ 47 - 0
test/lib/pipeline/documentSources/TestBase.js

@@ -0,0 +1,47 @@
+var assert = require("assert"),
+	DocumentSource = require("../../../../lib/pipeline/documentSources/DocumentSource"),
+	CursorDocumentSource = require("../../../../lib/pipeline/documentSources/CursorDocumentSource"),
+	ProjectDocumentSource = require("../../../../lib/pipeline/documentSources/ProjectDocumentSource");
+
+var TestBase = (function() {
+	var klass = function TestBase(overrides) {
+			//NOTE: DEVIATION FROM MONGO: using this base class to make things easier to initialize
+			for (var key in overrides){
+				this[key] = overrides[key];
+			}
+		},
+		proto = klass.prototype;
+	proto.createSource = function() {
+		//TODO: Fix this once we know proper API
+		this._source = CursorDocumentSource.create();
+	};
+	proto.source = function() {
+		return this._source;
+	};
+	proto.createProject = function(projection) {
+		projection = projection || {a:true};
+		var spec = {$project:projection};
+		this._project = new ProjectDocumentSource(spec /*,ctx()*/);
+		this.checkJsonRepresentation(spec);
+		this._project.setSource(this.source());
+	};
+	proto.project = function() {
+		return this._project;
+	};
+	proto.assertExhausted = function() {
+		var self = this;
+		self._project.getNext(function(err, input1) {
+			assert.strictEqual(input1, DocumentSource.EOF);
+			self._project.getNext(function(err, input2) {
+				assert.strictEqual(input2, DocumentSource.EOF);
+				self._project.getNext(function(err, input3) {
+					assert.strictEqual(input3, DocumentSource.EOF);
+				});
+			});
+		});
+	};
+	proto.checkJsonRepresentation = function(spec) {
+		var arr = [];
+		this._project.serializeToArray(arr);
+		var generatedSpec = arr[0];
+		assert.deepEqual(generatedSpec, spec);
+	};
+	return klass;
+})();
+
+module.exports = TestBase;

+ 9 - 12
test/lib/pipeline/documentSources/UnwindDocumentSource.js

@@ -4,7 +4,7 @@ var assert = require("assert"),
 	DocumentSource = require("../../../../lib/pipeline/documentSources/DocumentSource"),
 	UnwindDocumentSource = require("../../../../lib/pipeline/documentSources/UnwindDocumentSource"),
 	CursorDocumentSource = require("../../../../lib/pipeline/documentSources/CursorDocumentSource"),
-	Cursor = require("../../../../lib/Cursor");
+	ArrayRunner = require("../../../../lib/query/ArrayRunner");
 
 
 //HELPERS
@@ -35,10 +35,7 @@ var createUnwind = function createUnwind(unwind) {
 };
 
 var addSource = function addSource(unwind, data) {
-	var cwc = new CursorDocumentSource.CursorWithContext();
-	cwc._cursor = new Cursor(data);
-	var cds = new CursorDocumentSource(cwc);
-	var pds = new UnwindDocumentSource();
+	var cds = new CursorDocumentSource(null, new ArrayRunner(data), null);
 	unwind.setSource(cds);
 };
 
@@ -53,7 +50,7 @@ var checkResults = function checkResults(data, expectedResults, path, next) {
 
 	expectedResults = expectedResults || [];
 
-	expectedResults.push(DocumentSource.EOF);
+	expectedResults.push(null);
 
 	//Load the results from the DocumentSourceUnwind
 	var docs = [], i = 0;
@@ -65,7 +62,7 @@ var checkResults = function checkResults(data, expectedResults, path, next) {
 			});
 		},
 		function() {
-			return docs[i++] !== DocumentSource.EOF;
+			return docs[i++] !== null;
 		},
 		function(err) {
 			assert.deepEqual(expectedResults, docs);
@@ -111,7 +108,7 @@ module.exports = {
 				var pds = createUnwind();
 				addSource(pds, []);
 				pds.getNext(function(err,doc) {
-					assert.strictEqual(doc, DocumentSource.EOF);
+					assert.strictEqual(doc, null);
 					next();
 				});
 			},
@@ -120,7 +117,7 @@ module.exports = {
 				var pds = createUnwind();
 				addSource(pds, [{_id:0, a:[1]}]);
 				pds.getNext(function(err,doc) {
-					assert.notStrictEqual(doc, DocumentSource.EOF);
+					assert.notStrictEqual(doc, null);
 					next();
 				});
 			},
@@ -129,7 +126,7 @@ module.exports = {
 				var pds = createUnwind();
 				addSource(pds, [{_id:0, a:[1,2]}]);
 				pds.getNext(function(err,doc) {
-					assert.notStrictEqual(doc, DocumentSource.EOF);
+					assert.notStrictEqual(doc, null);
 					assert.strictEqual(doc.a, 1);
 					pds.getNext(function(err,doc) {
 						assert.strictEqual(doc.a, 2);
@@ -151,10 +148,10 @@ module.exports = {
 						});
 					},
 					function() {
-						return docs[i++] !== DocumentSource.EOF;
+						return docs[i++] !== null;
 					},
 					function(err) {
-						assert.deepEqual([{_id:0, a:1},{_id:0, a:2},DocumentSource.EOF], docs);
+						assert.deepEqual([{_id:0, a:1},{_id:0, a:2},null], docs);
 						next();
 					}
 				);

+ 1 - 2
test/lib/pipeline/expressions/CompareExpression_test.js

@@ -1,7 +1,6 @@
 "use strict";
 var assert = require("assert"),
-	pipeline = require("../../../../lib/pipeline"),
-	expressions = pipeline.expressions,
+	expressions = require("../../../../lib/pipeline/expressions/"),
 	Expression = expressions.Expression,
 	CompareExpression = require("../../../../lib/pipeline/expressions/CompareExpression"),
 	VariablesParseState = require("../../../../lib/pipeline/expressions/VariablesParseState"),

+ 88 - 0
test/lib/query/ArrayRunner.js

@@ -0,0 +1,88 @@
+"use strict";
+var assert = require("assert"),
+	Runner = require("../../../lib/query/Runner"),
+	ArrayRunner = require("../../../lib/query/ArrayRunner");
+
+module.exports = {
+
+	"ArrayRunner": {
+		"#constructor": {
+			"should accept an array of data": function(){
+				assert.doesNotThrow(function(){
+					var ar = new ArrayRunner([1,2,3]);
+				});
+			},
+			"should fail if not given an array": function(){
+				assert.throws(function(){
+					var ar = new ArrayRunner();
+				});
+				assert.throws(function(){
+					var ar = new ArrayRunner(123);
+				});
+			}
+		},
+		"#getNext": {
+			"should return the next item in the array": function(done){
+				var ar = new ArrayRunner([1,2,3]);
+				
+				ar.getNext(function(err, out, state){
+					assert.strictEqual(state, Runner.RunnerState.RUNNER_ADVANCED);
+					assert.strictEqual(out, 1);
+					ar.getNext(function(err, out, state){
+						assert.strictEqual(state, Runner.RunnerState.RUNNER_ADVANCED);
+						assert.strictEqual(out, 2);
+						ar.getNext(function(err, out, state){
+							assert.strictEqual(state, Runner.RunnerState.RUNNER_ADVANCED);
+							assert.strictEqual(out, 3);
+							done();
+						});
+					});
+				});
+			},
+			"should return EOF if there is nothing left in the array": function(done){
+				var ar = new ArrayRunner([1]);
+				
+				ar.getNext(function(err, out, state){
+					assert.strictEqual(state, Runner.RunnerState.RUNNER_ADVANCED);
+					assert.strictEqual(out, 1);
+					ar.getNext(function(err, out, state){
+						assert.strictEqual(state, Runner.RunnerState.RUNNER_EOF);
+						assert.strictEqual(out, undefined);
+						done();
+					});
+				});
+			}
+		},
+		"#getInfo": {
+			"should return nothing if explain flag is not set": function(){
+				var ar = new ArrayRunner([1,2,3]);
+				assert.strictEqual(ar.getInfo(), undefined);
+			},
+			"should return information about the runner if explain flag is set": function(){
+				var ar = new ArrayRunner([1,2,3]);
+				assert.deepEqual(ar.getInfo(true), {
+					"type":"ArrayRunner",
+					"nDocs":3,
+					"position":0,
+					"state": Runner.RunnerState.RUNNER_ADVANCED
+				});
+			}
+		},
+		"#reset": {
+			"should clear out the runner": function(){
+				var ar = new ArrayRunner([1,2,3]);
+				ar.reset();
+				
+				assert.deepEqual(ar.getInfo(true), {
+					"type":"ArrayRunner",
+					"nDocs":0,
+					"position":0,
+					"state": Runner.RunnerState.RUNNER_DEAD
+				});				
+			}
+		}
+	}
+
+};
+
+if (!module.parent)(new(require("mocha"))()).ui("exports").reporter("spec").addFile(__filename).run();

+ 148 - 0
test/lib/query/DocumentSourceRunner.js

@@ -0,0 +1,148 @@
+"use strict";
+var assert = require("assert"),
+	Runner = require("../../../lib/query/Runner"),
+	CursorDocumentSource = require("../../../lib/pipeline/documentSources/CursorDocumentSource"),
+	LimitDocumentSource = require("../../../lib/pipeline/documentSources/LimitDocumentSource"),
+	MatchDocumentSource = require("../../../lib/pipeline/documentSources/MatchDocumentSource"),
+	ArrayRunner = require("../../../lib/query/ArrayRunner"),
+	DocumentSourceRunner = require("../../../lib/query/DocumentSourceRunner");
+
+
+module.exports = {
+
+	"DocumentSourceRunner": {
+		"#constructor": {
+			"should accept an array of data": function(){
+				var cds = new CursorDocumentSource(null, new ArrayRunner([]), null),
+					pipeline = [];
+				assert.doesNotThrow(function(){
+					var ar = new DocumentSourceRunner(cds, pipeline);
+				});
+			},
+			"should fail if not given a document source or pipeline": function(){
+				var cds = new CursorDocumentSource(null, new ArrayRunner([]), null);
+				
+				assert.throws(function(){
+					var ar = new DocumentSourceRunner();
+				});
+				assert.throws(function(){
+					var ar = new DocumentSourceRunner(123);
+				});
+				assert.throws(function(){
+					var ar = new DocumentSourceRunner(cds, 123);
+				});
+			},
+			"should coalesce the pipeline into the given documentsource": function(){
+				var cds = new CursorDocumentSource(null, new ArrayRunner([]), null),
+					pipeline = [new LimitDocumentSource({}, 3), new MatchDocumentSource({"a":true})],
+					expected = [{$match:{a:true}}];
+				
+				var ds = new DocumentSourceRunner(cds, pipeline);
+				var actual = pipeline.map(function(d){return d.serialize();});
+				
+				assert.deepEqual(expected, actual);
+			}
+		},
+		"#getNext": {
+			"should return the next item in the given documentsource": function(done){
+				var cds = new CursorDocumentSource(null, new ArrayRunner([1,2,3]), null),
+					pipeline = [new LimitDocumentSource({}, 3)];
+				
+				var ds = new DocumentSourceRunner(cds, pipeline);
+				
+				ds.getNext(function(err, out, state){
+					assert.strictEqual(state, Runner.RunnerState.RUNNER_ADVANCED);
+					assert.strictEqual(out, 1);
+					ds.getNext(function(err, out, state){
+						assert.strictEqual(state, Runner.RunnerState.RUNNER_ADVANCED);
+						assert.strictEqual(out, 2);
+						ds.getNext(function(err, out, state){
+							assert.strictEqual(state, Runner.RunnerState.RUNNER_ADVANCED);
+							assert.strictEqual(out, 3);
+							done();
+						});
+					});
+				});
+			},
+			"should return EOF if there is nothing left in the given documentsource": function(done){
+				var cds = new CursorDocumentSource(null, new ArrayRunner([1,2,3]), null),
+					pipeline = [new LimitDocumentSource({}, 1)];
+				
+				var ds = new DocumentSourceRunner(cds, pipeline);
+				
+				ds.getNext(function(err, out, state){
+					assert.strictEqual(state, Runner.RunnerState.RUNNER_ADVANCED);
+					assert.strictEqual(out, 1);
+					ds.getNext(function(err, out, state){
+						assert.strictEqual(state, Runner.RunnerState.RUNNER_EOF);
+						assert.strictEqual(out, null);
+						done();
+					});
+				});
+			}
+		},
+		"#getInfo": {
+			"should return nothing if explain flag is not set": function(){
+				var cds = new CursorDocumentSource(null, new ArrayRunner([1,2,3]), null),
+					pipeline = [new LimitDocumentSource({}, 1)];
+				
+				var ds = new DocumentSourceRunner(cds, pipeline);
+				assert.strictEqual(ds.getInfo(), undefined);
+			},
+			"should return information about the runner if explain flag is set": function(){
+				var cds = new CursorDocumentSource(null, new ArrayRunner([1,2,3]), null),
+					pipeline = [new LimitDocumentSource({}, 1)];
+				var ds = new DocumentSourceRunner(cds, pipeline);
+				
+				assert.deepEqual(ds.getInfo(true), {
+					"type": "DocumentSourceRunner",
+					"docSrc": {
+						"$cursor": {
+							"query": undefined,
+							"sort": null,
+							"limit": 1,
+							"fields": null,
+							"plan": {
+								"type": "ArrayRunner",
+								"nDocs": 3,
+								"position": 0,
+								"state": "RUNNER_ADVANCED"
+							}
+						}
+					},
+					"state": "RUNNER_ADVANCED"
+				});
+			}
+		},
+		"#reset": {
+			"should dispose of the documentSource": function(){
+				var cds = new CursorDocumentSource(null, new ArrayRunner([1,2,3]), null),
+					pipeline = [new LimitDocumentSource({}, 1)];
+				var ds = new DocumentSourceRunner(cds, pipeline);
+				
+				ds.reset();
+				assert.deepEqual(ds.getInfo(true), {
+					"type": "DocumentSourceRunner",
+					"docSrc": {
+						"$cursor": {
+							"query": undefined,
+							"sort": null,
+							"limit": 1,
+							"fields": null,
+							"plan": {
+								"type": "ArrayRunner",
+								"nDocs": 0,
+								"position": 0,
+								"state": "RUNNER_DEAD"
+							}
+						}
+					},
+					"state": "RUNNER_DEAD"
+				});
+			}
+		}
+	}
+
+};
+
+if (!module.parent)(new(require("mocha"))()).ui("exports").reporter("spec").addFile(__filename).run();