|
@@ -1,14 +1,13 @@
|
|
|
"use strict";
|
|
"use strict";
|
|
|
|
|
|
|
|
-var DocumentSource = require('./DocumentSource'),
|
|
|
|
|
|
|
+var async = require('async'),
|
|
|
|
|
+ Value = require('../Value'),
|
|
|
|
|
+ Runner = require('../../query/Runner'),
|
|
|
|
|
+ DocumentSource = require('./DocumentSource'),
|
|
|
LimitDocumentSource = require('./LimitDocumentSource');
|
|
LimitDocumentSource = require('./LimitDocumentSource');
|
|
|
|
|
|
|
|
-// Mimicking max memory size from mongo/db/query/new_find.cpp
|
|
|
|
|
-// Need to actually decide some size for this?
|
|
|
|
|
-var MAX_BATCH_DOCS = 150;
|
|
|
|
|
-
|
|
|
|
|
/**
|
|
/**
|
|
|
- * Constructs and returns Documents from the objects produced by a supplied Cursor.
|
|
|
|
|
|
|
+ * Constructs and returns Documents from the BSONObj objects produced by a supplied Runner.
|
|
|
* An object of this type may only be used by one thread, see SERVER-6123.
|
|
* An object of this type may only be used by one thread, see SERVER-6123.
|
|
|
*
|
|
*
|
|
|
* This is usually put at the beginning of a chain of document sources
|
|
* This is usually put at the beginning of a chain of document sources
|
|
@@ -20,46 +19,36 @@ var MAX_BATCH_DOCS = 150;
|
|
|
* @constructor
|
|
* @constructor
|
|
|
* @param {CursorDocumentSource.CursorWithContext} cursorWithContext the cursor to use to fetch data
|
|
* @param {CursorDocumentSource.CursorWithContext} cursorWithContext the cursor to use to fetch data
|
|
|
**/
|
|
**/
|
|
|
var CursorDocumentSource = module.exports = CursorDocumentSource = function CursorDocumentSource(namespace, runner, expCtx){
	base.call(this, expCtx);
	this._docsAddedToBatches = 0;
	this._ns = namespace;
	this._runner = runner;
	// Per-instance batch state. Initialized here so that instances never
	// share the mutable prototype-level Array default below (a shared
	// prototype array would leak documents between pipelines).
	this._currentBatch = [];
	this._currentBatchIndex = 0;
}, klass = CursorDocumentSource, base = DocumentSource, proto = klass.prototype = Object.create(base.prototype, {constructor:{value:klass}});

klass.MaxDocumentsToReturnToClientAtOnce = 150; //DEVIATION: we are using documents instead of bytes

// Prototype-level defaults; these document the instance state. The Array
// default is a fallback only -- each instance gets its own in the ctor.
proto._currentBatch = [];
proto._currentBatchIndex = 0;

// BSONObj members must outlive _projection and cursor.
proto._query = undefined;
proto._sort = undefined;
proto._projection = undefined;
proto._dependencies = undefined;
proto._limit = undefined;
proto._docsAddedToBatches = undefined; // for _limit enforcement

proto._ns = undefined;
proto._runner = undefined; // PipelineRunner holds a weak_ptr to this.
|
|
|
|
|
|
|
+
|
|
|
|
|
/**
 * Indicates this source may sit at the head of a pipeline.
 *
 * @method isValidInitialSource
 * @returns {Boolean} always true; a cursor source feeds the pipeline directly
 **/
proto.isValidInitialSource = function isValidInitialSource() {
	return true;
};
|
|
|
|
|
|
|
|
/**
|
|
/**
|
|
|
* Release the Cursor and the read lock it requires, but without changing the other data.
|
|
* Release the Cursor and the read lock it requires, but without changing the other data.
|
|
@@ -69,189 +58,207 @@ klass.CursorWithContext = (function (){
|
|
|
* @method dispose
|
|
* @method dispose
|
|
|
**/
|
|
**/
|
|
|
proto.dispose = function dispose() {
	// Drop any buffered documents and release the runner's resources,
	// without touching the other recorded state (query/sort/projection).
	this._currentBatch = [];
	var runner = this._runner;
	if (runner) {
		runner.reset();
	}
};
|
|
|
|
|
|
|
|
|
|
/**
 * Get the source's name.
 *
 * @method getSourceName
 * @returns {String} the constant string name of this source
 **/
proto.getSourceName = function getSourceName() {
	return "$cursor";
};
|
|
|
|
|
|
|
|
|
|
/**
 * Produce the next document, refilling the internal batch from the
 * Runner whenever the current batch has been exhausted.
 *
 * @method getNext
 * @param {Function} callback node-style callback invoked with (err, doc); doc is null at EOF
 **/
proto.getNext = function getNext(callback) {
	var source = this;

	// Honor any interrupt requested via the expression context.
	if (source.expCtx && source.expCtx.checkForInterrupt && source.expCtx.checkForInterrupt()) {
		return callback(new Error('Interrupted'));
	}

	// Fast path: documents still remaining in the current batch.
	if (source._currentBatchIndex < source._currentBatch.length) {
		return callback(null, source._currentBatch[source._currentBatchIndex++]);
	}

	// Batch exhausted; reset the buffer and ask the runner for another one.
	source._currentBatchIndex = 0;
	source._currentBatch = [];
	return source.loadBatch(function(err) {
		if (err) return callback(err);
		if (source._currentBatch.length === 0) return callback(null, null);
		return callback(null, source._currentBatch[source._currentBatchIndex++]);
	});
};
|
|
|
|
|
|
|
|
|
|
/**
 * Attempt to coalesce this DocumentSource with any $limits that it encounters
 *
 * @method coalesce
 * @param {DocumentSource} nextSource the next source in the document processing chain.
 * @returns {Boolean|DocumentSource} truthy when the attempt to coalesce succeeded; false when nothing was changed
 **/
proto.coalesce = function coalesce(nextSource) {
	// Note: Currently we assume the $limit is logically after any $sort or
	// $match. If we ever pull in $match or $sort using this method, we
	// will need to keep track of the order of the sub-stages.

	if (this._limit) {
		// Already absorbed a $limit; delegate so nested limits can merge.
		return this._limit.coalesce(nextSource);
	}
	if (nextSource instanceof LimitDocumentSource) {
		this._limit = nextSource;
		return this._limit;
	}
	return false; // false if next is not a $limit
};
|
|
|
|
|
|
|
|
-///**
|
|
|
|
|
-// * Record the namespace. Required for explain.
|
|
|
|
|
-// *
|
|
|
|
|
-// * @method setNamespace
|
|
|
|
|
-// * @param {String} ns the namespace
|
|
|
|
|
-// **/
|
|
|
|
|
-//proto.setNamespace = function setNamespace(ns) {}
|
|
|
|
|
-//
|
|
|
|
|
-///**
|
|
|
|
|
-// * Record the query that was specified for the cursor this wraps, if any.
|
|
|
|
|
-// * This should be captured after any optimizations are applied to
|
|
|
|
|
-// * the pipeline so that it reflects what is really used.
|
|
|
|
|
-// * This gets used for explain output.
|
|
|
|
|
-// *
|
|
|
|
|
-// * @method setQuery
|
|
|
|
|
-// * @param {Object} pBsonObj the query to record
|
|
|
|
|
-// **/
|
|
|
|
|
|
|
+
|
|
|
|
|
/**
 * Record the query that was specified for the cursor this wraps, if any.
 *
 * This should be captured after any optimizations are applied to the
 * pipeline so that it reflects what is really used; the recorded value
 * is consumed by explain output.
 *
 * @method setQuery
 * @param {Object} query the query to record
 **/
proto.setQuery = function setQuery(query) {
	this._query = query;
};
|
|
|
|
|
|
|
|
-///**
|
|
|
|
|
-// * Record the sort that was specified for the cursor this wraps, if any.
|
|
|
|
|
-// * This should be captured after any optimizations are applied to
|
|
|
|
|
-// * the pipeline so that it reflects what is really used.
|
|
|
|
|
-// * This gets used for explain output.
|
|
|
|
|
-// *
|
|
|
|
|
-// * @method setSort
|
|
|
|
|
-// * @param {Object} pBsonObj the query to record
|
|
|
|
|
-// **/
|
|
|
|
|
-//proto.setSort = function setSort(pBsonObj) {};
|
|
|
|
|
|
|
/**
 * Record the sort that was specified for the cursor this wraps, if any.
 *
 * This should be captured after any optimizations are applied to the
 * pipeline so that it reflects what is really used; the recorded value
 * is consumed by explain output.
 *
 * @method setSort
 * @param {Object} sort the sort specification to record
 **/
proto.setSort = function setSort(sort) {
	this._sort = sort;
};
|
|
|
|
|
|
|
|
/**
 * Informs this object of projection and dependency information.
 *
 * @method setProjection
 * @param {Object} projection the projection to apply when batching documents
 * @param {Object} deps the dependency tracker used to extract needed fields
 **/
proto.setProjection = function setProjection(projection, deps) {
	this._dependencies = deps;
	this._projection = projection;
};
|
|
|
|
|
|
|
|
-//----------------virtuals from DocumentSource--------------
|
|
|
|
|
-
|
|
|
|
|
/**
 * A cursor source sits at the head of the pipeline and never accepts an
 * upstream source; calling this is always an error.
 *
 * @method setSource
 * @param theSource {DocumentSource} providing a source here is an error
 **/
proto.setSource = function setSource(theSource) {
	throw new Error('this doesnt take a source');
};
|
|
|
|
|
|
|
|
/**
 * Serialize this source for explain output.
 *
 * @method serialize
 * @param {Boolean} explain whether this is an explain serialization
 * @returns {Object} an empty object unless explaining; otherwise the recorded query/sort/limit/fields plus the runner's plan info
 **/
proto.serialize = function serialize(explain) {
	// we never parse a documentSourceCursor, so we only serialize for explain
	if (!explain)
		return {};

	// Guard against a disposed/absent runner so explain fails with a clear
	// message rather than an opaque TypeError (mirrors the old 17135 check).
	if (!this._runner)
		throw new Error("code 17135; Cursor deleted.");

	var out = {};
	out[this.getSourceName()] = {
		query: this._query,
		sort: this._sort ? this._sort : null,
		limit: this._limit ? this._limit.getLimit() : null,
		fields: this._projection ? this._projection : null,
		plan: this._runner.getInfo(explain)
	};
	return out;
};
|
|
|
|
|
|
|
|
/**
 * Report the limit absorbed from a coalesced $limit stage.
 *
 * @method getLimit
 * @returns {Number} the coalesced limit, or -1 when no limit applies
 **/
proto.getLimit = function getLimit() {
	var limitSource = this._limit;
	if (!limitSource) {
		return -1;
	}
	return limitSource.getLimit();
};
|
|
|
|
|
|
|
|
/**
 * Load a batch of documents from the Runner into the internal array
 *
 * @method loadBatch
 * @param {Function} callback node-style callback invoked when the batch is ready (or on error)
 **/
proto.loadBatch = function loadBatch(callback) {
	if (!this._runner) {
		this.dispose();
		return callback(); //BUGFIX: was `return callback;` which never invoked the callback, hanging the caller
	}

	this._runner.restoreState();

	var self = this,
		whileBreak = false, // since we are in an async loop instead of a normal while loop, need to mimic the
		whileReturn = false; // functionality. These flags are similar to saying 'break' or 'return' from inside the loop
	return async.whilst(
		function test(){
			return !whileBreak && !whileReturn;
		},
		function(next) {
			return self._runner.getNext(function(err, obj, state){
				if (err) return next(err);
				if (state === Runner.RunnerState.RUNNER_ADVANCED) {
					if (self._dependencies) {
						self._currentBatch.push(self._dependencies.extractFields(obj));
					} else {
						self._currentBatch.push(obj);
					}

					if (self._limit) {
						if (++self._docsAddedToBatches === self._limit.getLimit()) {
							whileBreak = true;
							return next();
						}
						//this was originally a 'verify' in the mongo code
						if (self._docsAddedToBatches > self._limit.getLimit()){
							return next(new Error('documents collected past the end of the limit'));
						}
					}

					//BUGFIX: compare the batch's length, not the Array object itself, to the threshold
					if (self._currentBatch.length >= klass.MaxDocumentsToReturnToClientAtOnce) {
						// End this batch and prepare Runner for yielding.
						self._runner.saveState();
						whileReturn = true;
					}
				} else {
					// Runner is exhausted (or errored out of ADVANCED); stop looping.
					whileBreak = true;
				}
				return next();
			});
		},
		function(err){
			if (!whileReturn){
				self._runner.reset();
			}
			callback(err);
		}
	);
};
|