MongoDB/Mongoose - How do I scale data storage using GridFS?


How can I store data using GridFS?

I currently store the data content inside the document itself, as shown below:

Schema:

var mongoose = require("mongoose");
var mongoosePaginate = require('mongoose-paginate');

// Declare schema
var streamSchema = new mongoose.Schema({
    user_id: {
        type: String,
        required: true
    },
    title: {
        type: String,
        required: true
    },
    description: {
        type: String,
        required: true
    },
    public_key: {
        type: String
    },
    private_key: {
        type: String
    },
    data: {
        type: Object
    },
    entries_number: {
        type: Number,
        default: 0
    },
    last_entry_at: {
        type: Date
    },
    created_at: {
        type: Date,
        default: Date.now,
        index: 1 // Note 1
    },
});

streamSchema.plugin(mongoosePaginate);

// Export schema
// Model.paginate()
mongoose.model("Stream", streamSchema);

Example stream:

{
    "_id" : ObjectId("57cfeabd8d9cc38d6d25fd60"),
    "user_id" : "579f52bc53d9e8cc14f504da",
    "title" : "Stream 3",
    "description" : "bla bla bla",
    "public_key" : "CxM2jlAaOHvhC3v4GB",
    "private_key" : "cHeELOOnr2x0WCqdo",
    "data" : {
        "particles" : [ 
            "27", 
            "3", 
            "3", 
            "8", 
            "29", 
            "4", 
            "0", 
            "0", 
            "0", 
            "0", 
            "0", 
            "0", 
            "0", 
            "2", 
            "2", 
            "2", 
            "1", 
            "32", 
            "0", 
            "7", 
            "0", 
            "5", 
            "0", 
            "0", 
            "1", 
            "0", 
            "0", 
            "0", 
            "0", 
            "0", 
            "0", 
            "1", 
            "0", 
            "0", 
            "0", 
            "0", 
            "0", 
            "32", 
            "50", 
            "52", 
            "27", 
            "52", 
            "3", 
            "3", 
            "0", 
            "0", 
            "1", 
            "3", 
            "17", 
            "2", 
            "15", 
            "0", 
            "0", 
            "1", 
            "48", 
            "21", 
            "27", 
            "7", 
            "6", 
            "6", 
            "2", 
            "4", 
            "0", 
            "0", 
            "0", 
            "8", 
            "2", 
            "0", 
            "0", 
            "3"
        ],
        "timestamp" : [ 
            1473244226641.0, 
            1473244254890.0, 
            1473244283134.0, 
            1473244311293.0, 
            1473244339536.0, 
            1473244420579.0, 
            1473246125416.0, 
            1473246153736.0, 
            1473246182281.0, 
            1473246210171.0, 
            1473246238506.0, 
            1473246266681.0, 
            1473246294915.0, 
            1473246325204.0, 
            1473246351579.0, 
            1473246379670.0, 
            1473246408000.0, 
            1473246436252.0, 
            1473246464504.0, 
            1473246492743.0, 
            1473246520906.0, 
            1473246549158.0, 
            1473246577414.0, 
            1473246605652.0, 
            1473246633917.0, 
            1473246695549.0, 
            1473246723868.0, 
            1473246752127.0, 
            1473246780382.0, 
            1473246808543.0, 
            1473246836795.0, 
            1473246865028.0, 
            1473246893295.0, 
            1473246921625.0, 
            1473246949790.0, 
            1473246978115.0, 
            1473247006374.0, 
            1473247034712.0, 
            1473247062773.0, 
            1473247091109.0, 
            1473247119278.0, 
            1473247147609.0, 
            1473247175787.0, 
            1473247204099.0, 
            1473247232287.0, 
            1473247260531.0, 
            1473247288785.0, 
            1473247346870.0, 
            1473247375027.0, 
            1473247414220.0, 
            1473247442496.0, 
            1473247470722.0, 
            1473247498963.0, 
            1473247527122.0, 
            1473247555416.0, 
            1473247583645.0, 
            1473247611975.0, 
            1473247640211.0, 
            1473247668447.0, 
            1473247696712.0, 
            1473247724866.0, 
            1473247753121.0, 
            1473247781412.0, 
            1473247809628.0, 
            1473247837876.0, 
            1473247866137.0, 
            1473247894452.0, 
            1473247922612.0, 
            1473247950961.0, 
            1473247979195.0
        ]
    },
    "created_at" : ISODate("2016-09-07T10:23:57.692Z"),
    "entries_number" : 70,
    "species" : [ 
        {
            "public_name" : "CO2",
            "code_name" : "particles"
        }
    ],
    "__v" : 0,
    "last_entry_at" : ISODate("2016-09-07T11:32:59.195Z")
}

As you can see, the data inside data can keep growing indefinitely and will eventually exceed the 16MB document size limit.

So, how can I scale the data held in data using GridFS? Is that even possible?

I use Mongoose to store and read the data. There are two GridFS packages that work with Mongoose, but I do not know how to integrate them with my existing code.
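
For reference, the basic gridfs-stream wiring on its own looks something like the sketch below (the connection URL and file name are placeholders); what I cannot see is how to tie this to the data field of my schema:

var mongoose = require("mongoose");
var Grid = require("gridfs-stream");
var fs = require("fs");

// gridfs-stream needs the underlying mongo driver used by mongoose
Grid.mongo = mongoose.mongo;

var conn = mongoose.createConnection("mongodb://localhost/test");

conn.once("open", function () {
    var gfs = Grid(conn.db);

    // Write a file into GridFS (stored as chunks in fs.files / fs.chunks)
    var writeStream = gfs.createWriteStream({ filename: "stream_data.json" });
    fs.createReadStream("./stream_data.json").pipe(writeStream);

    // Read it back
    var readStream = gfs.createReadStream({ filename: "stream_data.json" });
    readStream.pipe(process.stdout);
});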

This is how I push data into the data field:

var express = require('express');
var router = express.Router();

// Import dependencies.
var mongoose = require("mongoose");

// Import the User schema and the authentication middleware.
var Stream = mongoose.model("Stream");

// GET or POST request to push data to a stream.
// @format:
// http://127.0.0.1:8080/input/<public_key>?private_key=<private_key>&field1=<value>&field2=<value>
// @example:
// http://127.0.0.1:3000/input/ksdoLOZ99qpdL9?private_key=PSsoE6nXcHN7&particles=1.2
router.get("/", log);
router.get("/:publicKey", log);
router.post("/:publicKey", log);

function log (req, res) {

    // Get values from request arguments
    var publicKey = req.params.publicKey;

    // The private key might come in the header or as a GET var depending on the method used for sending data.
    var privateKey = req.headers['stream-private-key'] ? req.headers['stream-private-key'] : req.query.private_key;

    // Strip out cruft
    delete req.query.private_key;

    var data = {};

    if (req.method === 'GET') {
        data = req.query;
    }

    if (req.method === 'POST') {
        data = req.body;
    }

    // Check for public key
    if (!publicKey) {
        res.set('Content-Type', 'application/json');
        return res.status(404).send('stream not found');
    }

    // Check for private key
    if (!privateKey) {
        res.set('Content-Type', 'application/json');
        return res.status(403).send('forbidden: missing private key');
    }

    // Make sure they sent some data.
    // Check if the array object is empty then don't update the model.
    if (Object.keys(data).length === 0 && data.constructor === Object) {
        res.set('Content-Type', 'application/json');
        return res.status(200).send('no data received');
    }

    var updateQuery = {};

    // Send status code for each case : -1 if error, 0 if no stream found and 1 if update successful
    Stream.findOne({
        public_key:publicKey,
        private_key:privateKey
    }, function(err, stream) {

        if (err) {
            console.log("Error retrieving stream: " + err);
            return res.sendStatus(500); 
        }

        if (stream === null) {
            console.log("Either no stream was found for this API key: " + privateKey + " or the stream doesn't have any variables set");
            res.set('Content-Type', 'application/json');
            return res.status(200).send('stream not found');
        }

        // Make sure the stream data (object) has keys in
        if (!Object.keys(stream.data).length > 0) {
            res.set('Content-Type', 'application/json');
            return res.status(200).send('update failed');
        }

        // Build $push query with variables passed in POST request.
        // We check that the variables have already been registered, otherwise they'll be ignored.
        for (var property in stream.data) {
            if (data.hasOwnProperty(property) && stream.data.hasOwnProperty(property)) {
                updateQuery["data." + property] = data[property];
            } else {
                updateQuery["data." + property] = null;
            }
        }

        // Current timestamp.
        var timestamp = Date.now();

        // Insert date data.
        updateQuery["data.timestamp"] = timestamp;

        // Update stream with new values and increment entries_number
        stream.update({
            $push: updateQuery,
            $inc: {entries_number: 1},
            last_entry_at: timestamp
        }, function(err, streamID) {

            if (err) {
                console.log("Error updating stream: " + err);
                return res.sendStatus(-1);
            }

            console.log("New entry for stream with API key: " + privateKey);

            res.set('Content-Type', 'application/json');
            return res.status(200).send('success 1');
        });
    });
};
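
For reference, with a request such as GET /input/<public_key>?private_key=<private_key>&particles=1.2, the updateQuery built above ends up holding one array path per registered variable plus the timestamp, roughly like this (values are illustrative):

// Rough shape of updateQuery for a stream whose data object has a "particles" key
{
    "data.particles": "1.2",          // value taken from the request
    "data.timestamp": 1473247979195   // Date.now() at the time of the request
}
// It is then applied via { $push: updateQuery, $inc: { entries_number: 1 }, ... }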

I would like to know how to integrate mongoose-gridfs or gridfs-stream with the code above.

I believe you have misunderstood the concept of GridFS. Please read up on it: it is meant for files, not documents. It does not add any magic to get around the BSON limit; it simply splits a file of arbitrary size into small chunks and manages them, still using two ordinary collections with a predefined schema behind the scenes.

@AlexBlex So what should I do about my problem above? Should I use GridFS at all?

Apparently the schema you are using, with its ever-growing arrays, does not fit the purpose and should be changed. For example, you could create a data collection that stores {particle, timestamp, streamId} documents, but it really depends on the use case.

@AlexBlex Thanks. Creating a data collection to store {particle, timestamp, streamId} documents is what I am considering now. But could documents in that new collection also exceed 16MB one day? If so, I would be back to the same problem.

That is exactly my point: it depends on your case. I cannot give any specific advice based on a single document; take my example of how that document could be restructured as just that, an example given without any context. You may well be fine with GridFS if it suits your use case, but this is quite an important architectural decision and should be based on a good understanding of your business.
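
For illustration, the per-entry collection suggested above might be modelled along these lines (a minimal sketch; the model and field names are illustrative, not part of the original code):

var mongoose = require("mongoose");

// One document per measurement instead of ever-growing arrays on the stream.
var dataPointSchema = new mongoose.Schema({
    stream_id: {
        type: mongoose.Schema.Types.ObjectId,
        ref: "Stream",
        required: true,
        index: true
    },
    code_name: {
        type: String,   // e.g. "particles"
        required: true
    },
    value: {
        type: String,   // e.g. "27"
        required: true
    },
    timestamp: {
        type: Date,
        default: Date.now
    }
});

mongoose.model("DataPoint", dataPointSchema);

// Logging an entry then becomes an insert instead of a $push on the stream:
// mongoose.model("DataPoint").create({
//     stream_id: stream._id,
//     code_name: "particles",
//     value: data.particles
// }, callback);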