Просмотр исходного кода

Merge pull request #149 from RiveraGroup/feature/mongo_2.6.5_jshint_final

EAGLESIX-3157: fix remaining jshint issues
tonyennis 11 лет назад
Родитель
Commit
0b568af9fc

+ 9 - 9
lib/pipeline/Document.js

@@ -1,15 +1,15 @@
 "use strict";
 
 /**
- * Represents a `Document` (i.e., an `Object`) in `mongo` but in `munge` this is only a set of static helpers since we treat all `Object`s like `Document`s.
+ * Represents a `Document` but in `munge` this only provides static helpers to interact with Objects rather than wrap them
  * @class Document
  * @namespace mungedb-aggregate.pipeline
  * @module mungedb-aggregate
  * @constructor
  **/
-var Document = module.exports = function Document(){
-	if(this.constructor == Document) throw new Error("Never create instances! Use static helpers only.");
-}, klass = Document, base = Object, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
+var Document = module.exports = function Document() {
+	throw new Error("Never create instances! Use static helpers only.");
+}, klass = Document;
 
 var Value = require("./Value"),
 	FieldPath = require("./FieldPath");
@@ -29,14 +29,14 @@ klass.ID_PROPERTY_NAME = "_id";
  * @returns {Object} JSON representation of this Document
  **/
 klass.toJson = function toJson(doc) {
- 	return JSON.parse(JSON.stringify(doc));
+	return JSON.parse(JSON.stringify(doc));
 };
 
 //SKIPPED: metaFieldTextScore
 //SKIPPED: toBsonWithMetaData
 //SKIPPED: fromBsonWithMetaData
 
-//SKIPPED: most of MutableDocument except for getNestedField and setNestedField, squashed into Document here (because that's how they use it)
+//SKIPPED: most of MutableDocument except for getNestedField and setNestedField which were merged into this
 function getNestedFieldHelper(obj, path) {
 	// NOTE: DEVIATION FROM MONGO: from MutableDocument; similar but necessarily different
 	var keys = Array.isArray(path) ? path : (path instanceof FieldPath ? path.fieldNames : path.split(".")),
@@ -81,13 +81,13 @@ klass.setNestedField = function setNestedField(obj, path, val) {
  *           zero, depending on whether lhs < rhs, lhs == rhs, or lhs > rhs
  *  Warning: may return values other than -1, 0, or 1
  */
-klass.compare = function compare(l, r){	//TODO: might be able to replace this with a straight compare of docs using JSON.stringify()
+klass.compare = function compare(l, r) {	//TODO: might be able to replace this with a straight compare of docs using JSON.stringify()
 	var lPropNames = Object.getOwnPropertyNames(l),
 		lPropNamesLength = lPropNames.length,
 		rPropNames = Object.getOwnPropertyNames(r),
 		rPropNamesLength = rPropNames.length;
 
-	for(var i = 0; true; ++i) {
+	for (var i = 0; true; ++i) {
 		if (i >= lPropNamesLength) {
 			if (i >= rPropNamesLength) return 0; // documents are the same length
 			return -1; // left document is shorter
@@ -108,7 +108,7 @@ klass.compare = function compare(l, r){	//TODO: might be able to replace this wi
 //SKIPPED: toString
 
 klass.serializeForSorter = function serializeForSorter(doc) {
-	//NOTE: DEVIATION FROM MONGO: they take a buffer to output the current instance into, ours is static and takes a doc and returns the serialized output
+	//NOTE: DEVIATION FROM MONGO: they take a buffer to output into, ours is static and takes a doc and returns the serialized output
 	return JSON.stringify(doc);
 };
 

+ 2 - 1
lib/pipeline/FieldPath.js

@@ -14,7 +14,8 @@
  */
 var FieldPath = module.exports = function FieldPath(path) {
 	var fieldNames = typeof path === "object" && typeof path.length === "number" ? path : path.split(".");
-	if (fieldNames.length === 0) throw new Error("FieldPath cannot be constructed from an empty vector (String or Array).; massert code 16409");
+	if (fieldNames.length === 0)
+		throw new Error("FieldPath cannot be constructed from an empty vector (String or Array).; massert code 16409");
 	this.fieldNames = [];
 	for (var i = 0, n = fieldNames.length; i < n; ++i) {
 		this._pushFieldName(fieldNames[i]);

+ 47 - 39
lib/pipeline/Pipeline.js

@@ -1,5 +1,20 @@
 "use strict";
-var async = require('async');
+var async = require("async"),
+	DepsTracker = require("./DepsTracker"),
+	documentSources = require("./documentSources/"),
+	DocumentSource = documentSources.DocumentSource,
+	LimitDocumentSource = documentSources.LimitDocumentSource,
+	MatchDocumentSource = documentSources.MatchDocumentSource,
+	ProjectDocumentSource = documentSources.ProjectDocumentSource,
+	SkipDocumentSource = documentSources.SkipDocumentSource,
+	UnwindDocumentSource = documentSources.UnwindDocumentSource,
+	GroupDocumentSource = documentSources.GroupDocumentSource,
+	OutDocumentSource = documentSources.OutDocumentSource,
+	GeoNearDocumentSource = documentSources.GeoNearDocumentSource,
+	RedactDocumentSource = documentSources.RedactDocumentSource,
+	SortDocumentSource = documentSources.SortDocumentSource;
+
+
 /**
  * mongodb "commands" (sent via db.$cmd.findOne(...)) subclass to make a command.  define a singleton object for it.
  * @class Pipeline
@@ -7,26 +22,12 @@ var async = require('async');
  * @module mungedb-aggregate
  * @constructor
  **/
-// CONSTRUCTOR
 var Pipeline = module.exports = function Pipeline(theCtx){
 	this.sources = null;
 	this.explain = false;
 	this.splitMongodPipeline = false;
 	this.ctx = theCtx;
-}, klass = Pipeline, base = Object, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
-
-var DocumentSource = require("./documentSources/DocumentSource"),
-	LimitDocumentSource = require('./documentSources/LimitDocumentSource'),
-	MatchDocumentSource = require('./documentSources/MatchDocumentSource'),
-	ProjectDocumentSource = require('./documentSources/ProjectDocumentSource'),
-	SkipDocumentSource = require('./documentSources/SkipDocumentSource'),
-	UnwindDocumentSource = require('./documentSources/UnwindDocumentSource'),
-	GroupDocumentSource = require('./documentSources/GroupDocumentSource'),
-	OutDocumentSource = require('./documentSources/OutDocumentSource'),
-	GeoNearDocumentSource = require('./documentSources/GeoNearDocumentSource'),
-	RedactDocumentSource = require('./documentSources/RedactDocumentSource'),
-	SortDocumentSource = require('./documentSources/SortDocumentSource'),
-	DepsTracker = require('./DepsTracker');
+}, klass = Pipeline, proto = klass.prototype;
 
 klass.COMMAND_NAME = "aggregate";
 klass.PIPELINE_NAME = "pipeline";
@@ -138,7 +139,7 @@ klass.optimizations.local.coalesceAdjacent = function coalesceAdjacent(pipelineI
 		var lastSource = sources[sources.length-1],
 			tempSrc = tempSources[tempi];
 		if(!(lastSource && tempSrc)) {
-			throw new Error('Must have a last and current source'); // verify(lastSource && tempSrc);
+			throw new Error("Must have a last and current source"); // verify(lastSource && tempSrc);
 		}
 		if(!lastSource.coalesce(tempSrc)) sources.push(tempSrc);
 	}
@@ -229,7 +230,9 @@ klass.optimizations.sharded.findSplitPoint = function findSplitPoint(shardPipe,
  */
 klass.optimizations.sharded.moveFinalUnwindFromShardsToMerger = function moveFinalUnwindFromShardsToMerger(shardPipe, mergePipe) {
 	if (true) {
-		while(shardPipe.sources !== null && (shardPipe.sources.length > 0 && shardPipe.sources[shardPipe.sources.length-1] instanceof UnwindDocumentSource)) {
+		while (shardPipe.sources !== null &&
+				shardPipe.sources.length > 0 &&
+				shardPipe.sources[shardPipe.sources.length - 1] instanceof UnwindDocumentSource) {
 			mergePipe.sources.unshift(shardPipe.sources.pop());
 		}
 	}
@@ -255,7 +258,8 @@ klass.parseDocumentSources = function parseDocumentSources(pipeline, ctx){
 		var obj = pipeElement;
 
 		// Parse a pipeline stage from 'obj'.
-		if (Object.keys(obj).length !== 1) throw new Error("A pipeline stage specification object must contain exactly one field; code 16435");
+		if (Object.keys(obj).length !== 1)
+			throw new Error("A pipeline stage specification object must contain exactly one field; code 16435");
 		var stageName = Object.keys(obj)[0],
 			stageSpec = obj[stageName];
 
@@ -280,7 +284,7 @@ klass.parseDocumentSources = function parseDocumentSources(pipeline, ctx){
  * @static
  * @method parseCommand
  * @param cmdObj  {Object}  The command object sent from the client
- * @param   cmdObj.aggregate            {Array}    the thing to aggregate against;	// NOTE: DEVIATION FROM MONGO: expects an Array of inputs rather than a collection name
+ * @param   cmdObj.aggregate            {Array}    the thing to aggregate against // NOTE: DEVIATION FROM MONGO: not a collection name
  * @param   cmdObj.pipeline             {Object}   the JSON pipeline of `DocumentSource` specs
  * @param   cmdObj.explain              {Boolean}  should explain?
  * @param   cmdObj.fromRouter           {Boolean}  is from router?
@@ -295,26 +299,34 @@ klass.parseCommand = function parseCommand(cmdObj, ctx){
 
 	//gather the specification for the aggregation
 	var pipeline;
-	for(var fieldName in cmdObj){
+	for (var fieldName in cmdObj) { //jshint ignore:line
 		var cmdElement = cmdObj[fieldName];
-		if(fieldName[0] == "$")									continue;
-		else if(fieldName == "cursor")							continue;
-		else if(fieldName == klass.COMMAND_NAME)				continue;										//look for the aggregation command
-		else if(fieldName == klass.BATCH_SIZE_NAME)				continue;
-		else if(fieldName == klass.PIPELINE_NAME)				pipeline = cmdElement;							//check for the pipeline of JSON doc srcs
-		else if(fieldName == klass.EXPLAIN_NAME)				pipelineInst.explain = cmdElement;				//check for explain option
-		else if(fieldName == klass.FROM_ROUTER_NAME)			ctx.inShard = cmdElement;						//if the request came from the router, we're in a shard
-		else if(fieldName == "allowDiskUsage") {
-			if(typeof cmdElement !== 'boolean') throw new Error("allowDiskUsage must be a bool, not a " + typeof allowDiskUsage+ "; uassert code 16949");
-		}
-		else throw new Error("unrecognized field " + JSON.stringify(fieldName));
+		if (fieldName[0] === "$")
+			continue;
+		else if (fieldName === "cursor")
+			continue;
+		else if (fieldName === klass.COMMAND_NAME)
+			continue; //look for the aggregation command
+		else if (fieldName === klass.BATCH_SIZE_NAME)
+			continue;
+		else if (fieldName === klass.PIPELINE_NAME)
+			pipeline = cmdElement; //check for the pipeline of JSON doc srcs
+		else if (fieldName === klass.EXPLAIN_NAME)
+			pipelineInst.explain = cmdElement; //check for explain option
+		else if (fieldName === klass.FROM_ROUTER_NAME)
+			ctx.inShard = cmdElement; //if the request came from the router, we're in a shard
+		else if (fieldName === "allowDiskUsage") {
+			if (typeof cmdElement !== "boolean")
+				throw new Error("allowDiskUsage must be a bool, not a " + typeof allowDiskUsage + "; uassert code 16949");
+		} else
+			throw new Error("unrecognized field " + JSON.stringify(fieldName));
 	}
 
 	/**
 	 * If we get here, we've harvested the fields we expect for a pipeline
 	 * Set up the specified document source pipeline.
 	 */
-	// NOTE: DEVIATION FROM MONGO: split this into a separate function to simplify and better allow for extensions (now in parseDocumentSources)
+	// NOTE: DEVIATION FROM MONGO: split this into a separate function to simplify and facilitate extensions (now in parseDocumentSources)
 	pipelineInst.sources = Pipeline.parseDocumentSources(pipeline, ctx);
 	klass.optimizations.local.moveMatchBeforeSort(pipelineInst);
 	klass.optimizations.local.moveLimitBeforeSkip(pipelineInst);
@@ -325,10 +337,6 @@ klass.parseCommand = function parseCommand(cmdObj, ctx){
 	return pipelineInst;
 };
 
-function ifError(err) {
-	if (err) throw err;
-}
-
 /**
  * Gets the initial $match query when $match is the first pipeline stage
  * @method run
@@ -368,7 +376,7 @@ proto.serialize = function serialize() {
 		}
 	}
 
-	serialized[klass.COMMAND_NAME] = this.ctx && this.ctx.ns && this.ctx.ns.coll ? this.ctx.ns.coll : '';
+	serialized[klass.COMMAND_NAME] = this.ctx && this.ctx.ns && this.ctx.ns.coll ? this.ctx.ns.coll : "";
 	serialized[klass.PIPELINE_NAME] = array;
 
 	if(this.explain) serialized[klass.EXPLAIN_NAME] = this.explain;
@@ -475,7 +483,7 @@ proto.getDependencies = function getDependencies () {
         }
 
         if (!knowAllFields) {
-            for (var key in localDeps.fields)
+            for (var key in localDeps.fields) //jshint ignore:line
             	deps.fields[key] = localDeps.fields[key];
 
             if (localDeps.needWholeDocument)

+ 7 - 11
lib/pipeline/PipelineD.js

@@ -1,4 +1,7 @@
 "use strict";
+var CursorDocumentSource = require("./documentSources/CursorDocumentSource"),
+	SortDocumentSource = require("./documentSources/SortDocumentSource"),
+	getRunner = require("../query").getRunner;
 
 /**
  * Pipeline helper for reading data
@@ -8,22 +11,15 @@
  * @constructor
  **/
 var PipelineD = module.exports = function PipelineD(){
-	if(this.constructor == PipelineD) throw new Error("Never create instances of this! Use the static helpers only.");
-}, klass = PipelineD, base = Object, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});
-
-// DEPENDENCIES
-var DocumentSource = require('./documentSources/DocumentSource'),
-	CursorDocumentSource = require('./documentSources/CursorDocumentSource'),
-	SortDocumentSource = require('./documentSources/SortDocumentSource'),
-	MatchDocumentSource = require('./documentSources/MatchDocumentSource'),
-	getRunner = require('../query').getRunner;
+	throw new Error("Never create instances of this! Use the static helpers only.");
+}, klass = PipelineD;
 
 /**
  * Create a Cursor wrapped in a DocumentSourceCursor, which is suitable to be the first source for a pipeline to begin with.
  * This source will feed the execution of the pipeline.
  *
  * //NOTE: Not doing anything here, as we don't use any of these cursor source features
- * //NOTE: DEVIATION FROM THE MONGO: We don't have special optimized cursors; You could support something similar by overriding `Pipeline#run` to call `DocumentSource#coalesce` on the `inputSource` if you really need it.
+ * //NOTE: DEVIATION FROM THE MONGO: we don't have special optimized cursors but could do it if needed
  *
  * This method looks for early pipeline stages that can be folded into
  * the underlying cursor, and when a cursor can absorb those, they
@@ -83,7 +79,7 @@ klass.prepareCursorSource = function prepareCursorSource(pipeline, expCtx){
 			sortInRunner = true;
 		}
 	}
-	
+
 	//munge deviation: the runner is (usually) not actually handling the initial query, so we need to add it back to the pipeline
 	if (match){
 		sources.unshift(match);

+ 13 - 13
lib/pipeline/Value.js

@@ -1,14 +1,14 @@
 "use strict";
 
 /**
- * Represents a `Value` (i.e., an `Object`) in `mongo` but in `munge` this is only a set of static helpers since we treat all `Object`s like `Value`s.
+ * Represents a `Value` but in `munge` this only provides static helpers to interact with Objects rather than wrap them
  * @class Value
  * @namespace mungedb-aggregate.pipeline
  * @module mungedb-aggregate
  * @constructor
  **/
-var Value = module.exports = function Value(){
-	if(this.constructor === Value) throw new Error("Never create instances of this! Use the static helpers only.");
+var Value = module.exports = function Value() {
+	if (this.constructor === Value) throw new Error("Never create instances of this! Use the static helpers only.");
 }, klass = Value;
 
 var Document; // loaded lazily below //TODO: a dirty hack; need to investigate and clean up
@@ -95,7 +95,7 @@ klass.coerceToString = function coerceToString(value) {
  * @method cmp
  * @static
  */
-var cmp = klass.cmp = function cmp(left, right){
+var cmp = klass.cmp = function cmp(left, right) {
 	// The following is lifted directly from compareElementValues
 	// to ensure identical handling of NaN
 	if (left < right)
@@ -113,16 +113,16 @@ var cmp = klass.cmp = function cmp(left, right){
  * @returns an integer less than zero, zero, or an integer greater than zero, depending on whether lhs < rhs, lhs == rhs, or lhs > rhs
  * Warning: may return values other than -1, 0, or 1
  */
-klass.compare = function compare(l, r) {
+klass.compare = function compare(l, r) { //jshint maxcomplexity:24
 	var lType = Value.getType(l),
 		rType = Value.getType(r),
 		ret;
 
 	ret = lType === rType ?
-	 	0 // fast-path common case
+		0 // fast-path common case
 		: cmp(klass.canonicalize(l), klass.canonicalize(r));
 
-	if(ret !== 0)
+	if (ret !== 0)
 		return ret;
 
 	// CW TODO for now, only compare like values
@@ -150,15 +150,15 @@ klass.compare = function compare(l, r) {
 		case "Date": // signed
 			return cmp(l.getTime(), r.getTime());
 
-        // Numbers should compare by equivalence even if different types
+		// Numbers should compare by equivalence even if different types
 		case "number":
 			return cmp(l, r);
 
-        //SKIPPED: case "jstOID":----//TODO: handle case for bson.ObjectID()
+		//SKIPPED: case "jstOID":----//TODO: handle case for bson.ObjectID()
 
-        case "Code":
-        case "Symbol":
-        case "string":
+		case "Code":
+		case "Symbol":
+		case "string":
 			l = String(l);
 			r = String(r);
 			return l < r ? -1 : l > r ? 1 : 0;
@@ -239,7 +239,7 @@ klass.getType = function getType(v) {
 //NOTE: also, because of this we are not throwing if the type does not match like the mongo code would but maybe that's okay
 
 // from bsontypes
-klass.canonicalize = function canonicalize(x) {
+klass.canonicalize = function canonicalize(x) { //jshint maxcomplexity:29
 	var xType = Value.getType(x);
 	switch (xType) {
 		case "MinKey":

+ 3 - 3
lib/pipeline/ValueSet.js

@@ -41,11 +41,11 @@ proto.insertRange = function insertRange(vals) {
 };
 
 proto.equals = function equals(other) {
-	for (var key in this.set) {
+	for (var key in this.set) { //jshint ignore:line
 		if (!other.hasKey(key))
 			return false;
 	}
-	for (var otherKey in other.set) {
+	for (var otherKey in other.set) { //jshint ignore:line
 		if (!this.hasKey(otherKey))
 			return false;
 	}
@@ -54,7 +54,7 @@ proto.equals = function equals(other) {
 
 proto.values = function values() {
 	var vals = [];
-	for (var key in this.set)
+	for (var key in this.set) //jshint ignore:line
 		vals.push(this.set[key]);
 	return vals;
 };