Python 在日期范围内动态计算项目数
目前,我正在处理存储在 MongoDB 中的一大批数据(从 2000 万条的大集合中取出约 200 万条的单一集合)。字段包括:id、项目名称、项目类型、项目描述和日期。需求是动态统计整个集合在每周和每月的日期范围内出现的项目数,例如从 2014-01-01 到 2014-01-07 有 20 项,从 2014-01-08 到 2014-01-16 有 50 项,等等。使用 Python 如何实现这一点?是否有现成的库可用于此,还是需要编写自定义代码?(标签:python、mongodb、mongodb-query、aggregation-framework)
或者,这应该通过 MongoDB 来完成吗?通常的方法当然是让数据库来处理聚合。如果您希望按"周范围"划分数据,有几种实现方式,取决于您实际需要的口径。按 ISO 周分组:这里仅以"2018 年 5 月"为例演示,您将得到如下结果:
# Aggregate per ISO week within May 2018: group on (calendar year, ISO week
# number) and accumulate the quantity total and document count per group.
startdate = datetime(2018, 5, 1)
enddate = datetime(2018, 6, 1)

week_group_pipeline = [
    {'$match': {'date': {'$gte': startdate, '$lt': enddate}}},
    {'$group': {
        '_id': {
            'year': {'$year': '$date'},
            'week': {'$isoWeek': '$date'},
        },
        'totalQty': {'$sum': '$qty'},
        'count': {'$sum': 1},
    }},
    {'$sort': {'_id': 1}},
]
result = db.sales.aggregate(week_group_pipeline)
这是一个相当简单的调用:根据 MongoDB 版本的实际支持情况,使用 $year 和 $isoWeek(或者可能使用 $week)运算符。您只需在 _id 分组键中指定这些值,然后根据实际需要在该分组中"累积"的内容选择其他累加器。$week 与 $isoWeek 对一周起点的定义略有不同,后者与 Python 的 isoweek 库以及其他语言的类似功能更为一致;通常可以通过加 1 在两种周编号之间进行调整。有关更多详细信息,请参阅官方文档。
在这种情况下,您可以选择让数据库执行"聚合"工作,然后根据输出获取"所需日期"。即,对于 Python,您可以使用 isoweek 库把每周的分组键转换为对应的 datetime 起止值:
# Materialise the cursor, then replace each group's compound _id
# ({year, week}) with concrete 'start'/'end' datetimes spanning that ISO
# week: Monday 00:00:00 through Sunday 23:59:59.999999.
result = list(result)
for doc in result:
    iso_week = Week(doc['_id']['year'], doc['_id']['week'])
    doc['start'] = datetime.combine(iso_week.monday(), datetime.min.time())
    doc['end'] = datetime.combine(iso_week.sunday(), datetime.max.time())
    doc.pop('_id', None)
自定义分组
如果您不想遵守 ISO 标准,那么另一种方法是定义您自己的"间隔",并在该间隔上累积"分组"。MongoDB 在这方面的主要工具是 $bucket 聚合阶段,配合一份预先计算好的边界列表:
# Build the $bucket boundary list: one cut every 7 days starting at
# startdate, with the final boundary clamped to enddate so the last
# (possibly short) interval still ends exactly at the range end.
cuts = [startdate]
date = startdate
one_week = timedelta(days=7)
while date < enddate:
    date = min(date + one_week, enddate)
    cuts.append(date)
# $bucket slots each document into the precomputed weekly boundaries; the
# $project stage then exposes the bucket's lower boundary as 'start' and
# derives an inclusive 'end' (start + 7 days - 1ms), capped at one
# millisecond before enddate for the final, possibly shorter, bucket.
WEEK_MS = 1000 * 60 * 60 * 24 * 7

bucket_pipeline = [
    {'$match': {'date': {'$gte': startdate, '$lt': enddate}}},
    {'$bucket': {
        'groupBy': '$date',
        'boundaries': cuts,
        'output': {
            'totalQty': {'$sum': '$qty'},
            'count': {'$sum': 1},
        },
    }},
    {'$project': {
        '_id': 0,
        'start': '$_id',
        'end': {
            '$cond': {
                'if': {
                    '$gt': [
                        {'$add': ['$_id', WEEK_MS - 1]},
                        enddate,
                    ]
                },
                'then': {'$add': [enddate, -1]},
                'else': {'$add': ['$_id', WEEK_MS - 1]},
            }
        },
        'totalQty': 1,
        'count': 1,
    }},
]
alternate = db.sales.aggregate(bucket_pipeline)
输出:
Week grouping
[
{
"totalQty": 449,
"count": 9,
"start": {
"$date": "2018-04-30T00:00:00Z"
},
"end": {
"$date": "2018-05-06T23:59:59.999Z"
}
},
{
"totalQty": 734,
"count": 14,
"start": {
"$date": "2018-05-07T00:00:00Z"
},
"end": {
"$date": "2018-05-13T23:59:59.999Z"
}
},
{
"totalQty": 686,
"count": 14,
"start": {
"$date": "2018-05-14T00:00:00Z"
},
"end": {
"$date": "2018-05-20T23:59:59.999Z"
}
},
{
"totalQty": 592,
"count": 12,
"start": {
"$date": "2018-05-21T00:00:00Z"
},
"end": {
"$date": "2018-05-27T23:59:59.999Z"
}
},
{
"totalQty": 205,
"count": 6,
"start": {
"$date": "2018-05-28T00:00:00Z"
},
"end": {
"$date": "2018-06-03T23:59:59.999Z"
}
}
]
Bucket grouping
[
{
"totalQty": 489,
"count": 11,
"start": {
"$date": "2018-05-01T00:00:00Z"
},
"end": {
"$date": "2018-05-07T23:59:59.999Z"
}
},
{
"totalQty": 751,
"count": 13,
"start": {
"$date": "2018-05-08T00:00:00Z"
},
"end": {
"$date": "2018-05-14T23:59:59.999Z"
}
},
{
"totalQty": 750,
"count": 15,
"start": {
"$date": "2018-05-15T00:00:00Z"
},
"end": {
"$date": "2018-05-21T23:59:59.999Z"
}
},
{
"totalQty": 493,
"count": 11,
"start": {
"$date": "2018-05-22T00:00:00Z"
},
"end": {
"$date": "2018-05-28T23:59:59.999Z"
}
},
{
"totalQty": 183,
"count": 5,
"start": {
"$date": "2018-05-29T00:00:00Z"
},
"end": {
"$date": "2018-05-31T23:59:59.999Z"
}
}
]
Cond Group
[
{
"totalQty": 489,
"count": 11,
"start": {
"$date": "2018-05-01T00:00:00Z"
},
"end": {
"$date": "2018-05-07T23:59:59.999Z"
}
},
{
"totalQty": 751,
"count": 13,
"start": {
"$date": "2018-05-08T00:00:00Z"
},
"end": {
"$date": "2018-05-14T23:59:59.999Z"
}
},
{
"totalQty": 750,
"count": 15,
"start": {
"$date": "2018-05-15T00:00:00Z"
},
"end": {
"$date": "2018-05-21T23:59:59.999Z"
}
},
{
"totalQty": 493,
"count": 11,
"start": {
"$date": "2018-05-22T00:00:00Z"
},
"end": {
"$date": "2018-05-28T23:59:59.999Z"
}
},
{
"totalQty": 183,
"count": 5,
"start": {
"$date": "2018-05-29T00:00:00Z"
},
"end": {
"$date": "2018-05-31T23:59:59.999Z"
}
}
]
可选 JavaScript 演示:由于上面的一些写法相当"pythonic",下面给出等价的 JavaScript 版本,供本主题下更习惯 JavaScript 的读者参考:
// Dependencies: mongoose for the ODM, moment for ISO-week date math.
// The original `const { Schema } = mongoose = require('mongoose')` assigned
// to an undeclared `mongoose` (implicit global; ReferenceError in strict
// mode / ES modules), so declare it properly before destructuring.
const mongoose = require('mongoose');
const { Schema } = mongoose;
const moment = require('moment');

// Connection string for the demo database.
const uri = 'mongodb://localhost/test';

mongoose.Promise = global.Promise;
//mongoose.set('debug',true);

// Minimal schema: one sale per document — a timestamp and a quantity.
const saleSchema = new Schema({
  date: Date,
  qty: Number
});

const Sale = mongoose.model('Sale', saleSchema);

// Pretty-print helper for inspecting aggregation results.
const log = data => console.log(JSON.stringify(data, undefined, 2));
// Demo driver: seeds one month of random sales, then computes weekly totals
// three different ways (ISO-week $group, $bucket, and a nested-$cond $group).
(async function() {
try {
const conn = await mongoose.connect(uri);
// Demo runs over May 2018 only.
let start = new Date("2018-05-01");
let end = new Date("2018-06-01");
let date = new Date(start.valueOf());
// Wipe every registered model's collection so each run starts empty.
// NOTE(review): Model.remove() with no filter deletes all documents.
await Promise.all(Object.entries(conn.models).map(([k,m]) => m.remove()));
// Seed: advance the clock 1-24 random hours per document and insert in
// batches of 1000; a document landing exactly on `end` is excluded.
let batch = [];
while ( date.valueOf() < end.valueOf() ) {
let hour = Math.floor(Math.random() * 24) + 1;
date = new Date(date.valueOf() + (1000 * 60 * 60 * hour));
if ( date > end )
date = end;
let qty = Math.floor(Math.random() * 100) + 1;
if (date < end)
batch.push({ date, qty });
if (batch.length >= 1000) {
await Sale.insertMany(batch);
batch = [];
}
}
// Flush any remainder below the batch threshold.
if (batch.length > 0) {
await Sale.insertMany(batch);
batch = [];
}
// Method 1: group by (calendar year, ISO week number).
let result = await Sale.aggregate([
{ "$match": { "date": { "$gte": start, "$lt": end } } },
{ "$group": {
"_id": {
"year": { "$year": "$date" },
"week": { "$isoWeek": "$date" }
},
"totalQty": { "$sum": "$qty" },
"count": { "$sum": 1 }
}},
{ "$sort": { "_id": 1 } }
]);
// Post-process: turn each { year, week } key into concrete start/end
// Dates (Monday 00:00:00.000 through Sunday 23:59:59.999) via moment.
result = result.map(({ _id: { year, week }, ...r }) =>
({
start: moment.utc([year]).isoWeek(week).startOf('isoWeek').toDate(),
end: moment.utc([year]).isoWeek(week).endOf('isoWeek').toDate(),
...r
})
);
log({ name: 'ISO group', result });
// Method 2: $bucket over explicit 7-day boundaries; the last boundary
// is clamped to `end`.
let cuts = [start];
date = start;
while ( date.valueOf() < end.valueOf() ) {
date = new Date(date.valueOf() + ( 1000 * 60 * 60 * 24 * 7 ));
if ( date.valueOf() > end.valueOf() ) date = end;
cuts.push(date);
}
let alternate = await Sale.aggregate([
{ "$match": { "date": { "$gte": start, "$lt": end } } },
{ "$bucket": {
"groupBy": "$date",
"boundaries": cuts,
"output": {
"totalQty": { "$sum": "$qty" },
"count": { "$sum": 1 }
}
}},
// Rename _id to start ($$REMOVE drops the field) and derive an
// inclusive end: bucket start + 7 days - 1ms, capped at end - 1ms for
// the final, possibly shorter, bucket.
{ "$addFields": {
"_id": "$$REMOVE",
"start": "$_id",
"end": {
"$cond": {
"if": {
"$gt": [
{ "$add": [ "$_id", ( 1000 * 60 * 60 * 24 * 7 ) - 1 ] },
end
]
},
"then": { "$add": [ end, -1 ] },
"else": {
"$add": [ "$_id", ( 1000 * 60 * 60 * 24 * 7 ) - 1 ]
}
}
}
}}
]);
log({ name: "Bucket group", result: alternate });
// Method 3 (works on pre-$bucket MongoDB): rebuild the cut list, this
// time *excluding* the final `end` boundary, then fold it into nested
// $cond expressions mapping each date to its interval's start value.
cuts = [start];
date = start;
while ( date.valueOf() < end.valueOf() ) {
date = new Date(date.valueOf() + ( 1000 * 60 * 60 * 24 * 7 ));
if ( date.valueOf() > end.valueOf() ) date = end;
if ( date.valueOf() < end.valueOf() )
cuts.push(date);
}
// Build the nested $cond right-to-left: the innermost branch handles the
// latest interval; each outer $cond tests an earlier boundary first.
let stack = [];
for ( let i = cuts.length - 1; i > 0; i-- ) {
let rec = {
"$cond": [
{ "$lt": [ "$date", cuts[i] ] },
cuts[i-1]
]
};
if ( stack.length === 0 ) {
rec['$cond'].push(cuts[i])
} else {
let lval = stack.pop();
rec['$cond'].push(lval);
}
stack.push(rec);
}
// NOTE(review): unlike the other two methods, this pipeline has no $match
// stage — it scans the whole collection (harmless here, since only May
// 2018 data was seeded).
let pipeline = [
{ "$group": {
"_id": stack[0],
"totalQty": { "$sum": "$qty" },
"count": { "$sum": 1 }
}},
{ "$sort": { "_id": 1 } },
// Same start/end derivation as Method 2, done in $project form.
{ "$project": {
"_id": 0,
"start": "$_id",
"end": {
"$cond": {
"if": {
"$gt": [
{ "$add": [ "$_id", ( 1000 * 60 * 60 * 24 * 7 ) - 1 ] },
end
]
},
"then": { "$add": [ end, -1 ] },
"else": {
"$add": [ "$_id", ( 1000 * 60 * 60 * 24 * 7 ) - 1 ]
}
}
},
"totalQty": 1,
"count": 1
}}
];
let older = await Sale.aggregate(pipeline);
log({ name: "Cond group", result: older });
mongoose.disconnect();
} catch(e) {
console.error(e)
} finally {
// Force exit: the driver may otherwise keep the event loop alive.
process.exit()
}
})()
谢谢你的详细解释和回答。我需要一点时间来消化和理解它。我会实施该解决方案后再回复您。@Jin 如果您觉得内容有用,那么您应该按照您的新权限进行操作(接受答案)。作为一个相对较新的用户帐户,接受回答时通常会有提示,但新 UI 中可能已有改动,或者您尚未意识到其重要性。另见相关说明。@Jin 我想你在投赞成票时意外地取消了"接受"勾选,除非有什么原因导致该答案不再被接受?我可能犯了错误,我现在就更新。谢谢你的澄清。另外,我在另一个单独的帖子(separate thread)里还有一个问题,你能看一下吗?
// Dependencies: mongoose for the ODM, moment for ISO-week date math.
// The original `const { Schema } = mongoose = require('mongoose')` assigned
// to an undeclared `mongoose` (implicit global; ReferenceError in strict
// mode / ES modules), so declare it properly before destructuring.
const mongoose = require('mongoose');
const { Schema } = mongoose;
const moment = require('moment');

// Connection string for the demo database.
const uri = 'mongodb://localhost/test';

mongoose.Promise = global.Promise;
//mongoose.set('debug',true);

// Minimal schema: one sale per document — a timestamp and a quantity.
const saleSchema = new Schema({
  date: Date,
  qty: Number
});

const Sale = mongoose.model('Sale', saleSchema);

// Pretty-print helper for inspecting aggregation results.
const log = data => console.log(JSON.stringify(data, undefined, 2));
// Demo driver: seeds one month of random sales, then computes weekly totals
// three different ways (ISO-week $group, $bucket, and a nested-$cond $group).
(async function() {
try {
const conn = await mongoose.connect(uri);
// Demo runs over May 2018 only.
let start = new Date("2018-05-01");
let end = new Date("2018-06-01");
let date = new Date(start.valueOf());
// Wipe every registered model's collection so each run starts empty.
// NOTE(review): Model.remove() with no filter deletes all documents.
await Promise.all(Object.entries(conn.models).map(([k,m]) => m.remove()));
// Seed: advance the clock 1-24 random hours per document and insert in
// batches of 1000; a document landing exactly on `end` is excluded.
let batch = [];
while ( date.valueOf() < end.valueOf() ) {
let hour = Math.floor(Math.random() * 24) + 1;
date = new Date(date.valueOf() + (1000 * 60 * 60 * hour));
if ( date > end )
date = end;
let qty = Math.floor(Math.random() * 100) + 1;
if (date < end)
batch.push({ date, qty });
if (batch.length >= 1000) {
await Sale.insertMany(batch);
batch = [];
}
}
// Flush any remainder below the batch threshold.
if (batch.length > 0) {
await Sale.insertMany(batch);
batch = [];
}
// Method 1: group by (calendar year, ISO week number).
let result = await Sale.aggregate([
{ "$match": { "date": { "$gte": start, "$lt": end } } },
{ "$group": {
"_id": {
"year": { "$year": "$date" },
"week": { "$isoWeek": "$date" }
},
"totalQty": { "$sum": "$qty" },
"count": { "$sum": 1 }
}},
{ "$sort": { "_id": 1 } }
]);
// Post-process: turn each { year, week } key into concrete start/end
// Dates (Monday 00:00:00.000 through Sunday 23:59:59.999) via moment.
result = result.map(({ _id: { year, week }, ...r }) =>
({
start: moment.utc([year]).isoWeek(week).startOf('isoWeek').toDate(),
end: moment.utc([year]).isoWeek(week).endOf('isoWeek').toDate(),
...r
})
);
log({ name: 'ISO group', result });
// Method 2: $bucket over explicit 7-day boundaries; the last boundary
// is clamped to `end`.
let cuts = [start];
date = start;
while ( date.valueOf() < end.valueOf() ) {
date = new Date(date.valueOf() + ( 1000 * 60 * 60 * 24 * 7 ));
if ( date.valueOf() > end.valueOf() ) date = end;
cuts.push(date);
}
let alternate = await Sale.aggregate([
{ "$match": { "date": { "$gte": start, "$lt": end } } },
{ "$bucket": {
"groupBy": "$date",
"boundaries": cuts,
"output": {
"totalQty": { "$sum": "$qty" },
"count": { "$sum": 1 }
}
}},
// Rename _id to start ($$REMOVE drops the field) and derive an
// inclusive end: bucket start + 7 days - 1ms, capped at end - 1ms for
// the final, possibly shorter, bucket.
{ "$addFields": {
"_id": "$$REMOVE",
"start": "$_id",
"end": {
"$cond": {
"if": {
"$gt": [
{ "$add": [ "$_id", ( 1000 * 60 * 60 * 24 * 7 ) - 1 ] },
end
]
},
"then": { "$add": [ end, -1 ] },
"else": {
"$add": [ "$_id", ( 1000 * 60 * 60 * 24 * 7 ) - 1 ]
}
}
}
}}
]);
log({ name: "Bucket group", result: alternate });
// Method 3 (works on pre-$bucket MongoDB): rebuild the cut list, this
// time *excluding* the final `end` boundary, then fold it into nested
// $cond expressions mapping each date to its interval's start value.
cuts = [start];
date = start;
while ( date.valueOf() < end.valueOf() ) {
date = new Date(date.valueOf() + ( 1000 * 60 * 60 * 24 * 7 ));
if ( date.valueOf() > end.valueOf() ) date = end;
if ( date.valueOf() < end.valueOf() )
cuts.push(date);
}
// Build the nested $cond right-to-left: the innermost branch handles the
// latest interval; each outer $cond tests an earlier boundary first.
let stack = [];
for ( let i = cuts.length - 1; i > 0; i-- ) {
let rec = {
"$cond": [
{ "$lt": [ "$date", cuts[i] ] },
cuts[i-1]
]
};
if ( stack.length === 0 ) {
rec['$cond'].push(cuts[i])
} else {
let lval = stack.pop();
rec['$cond'].push(lval);
}
stack.push(rec);
}
// NOTE(review): unlike the other two methods, this pipeline has no $match
// stage — it scans the whole collection (harmless here, since only May
// 2018 data was seeded).
let pipeline = [
{ "$group": {
"_id": stack[0],
"totalQty": { "$sum": "$qty" },
"count": { "$sum": 1 }
}},
{ "$sort": { "_id": 1 } },
// Same start/end derivation as Method 2, done in $project form.
{ "$project": {
"_id": 0,
"start": "$_id",
"end": {
"$cond": {
"if": {
"$gt": [
{ "$add": [ "$_id", ( 1000 * 60 * 60 * 24 * 7 ) - 1 ] },
end
]
},
"then": { "$add": [ end, -1 ] },
"else": {
"$add": [ "$_id", ( 1000 * 60 * 60 * 24 * 7 ) - 1 ]
}
}
},
"totalQty": 1,
"count": 1
}}
];
let older = await Sale.aggregate(pipeline);
log({ name: "Cond group", result: older });
mongoose.disconnect();
} catch(e) {
console.error(e)
} finally {
// Force exit: the driver may otherwise keep the event loop alive.
process.exit()
}
})()
{
"name": "ISO group",
"result": [
{
"start": "2018-04-30T00:00:00.000Z",
"end": "2018-05-06T23:59:59.999Z",
"totalQty": 576,
"count": 10
},
{
"start": "2018-05-07T00:00:00.000Z",
"end": "2018-05-13T23:59:59.999Z",
"totalQty": 707,
"count": 11
},
{
"start": "2018-05-14T00:00:00.000Z",
"end": "2018-05-20T23:59:59.999Z",
"totalQty": 656,
"count": 12
},
{
"start": "2018-05-21T00:00:00.000Z",
"end": "2018-05-27T23:59:59.999Z",
"totalQty": 829,
"count": 16
},
{
"start": "2018-05-28T00:00:00.000Z",
"end": "2018-06-03T23:59:59.999Z",
"totalQty": 239,
"count": 6
}
]
}
{
"name": "Bucket group",
"result": [
{
"totalQty": 666,
"count": 11,
"start": "2018-05-01T00:00:00.000Z",
"end": "2018-05-07T23:59:59.999Z"
},
{
"totalQty": 727,
"count": 12,
"start": "2018-05-08T00:00:00.000Z",
"end": "2018-05-14T23:59:59.999Z"
},
{
"totalQty": 647,
"count": 12,
"start": "2018-05-15T00:00:00.000Z",
"end": "2018-05-21T23:59:59.999Z"
},
{
"totalQty": 743,
"count": 15,
"start": "2018-05-22T00:00:00.000Z",
"end": "2018-05-28T23:59:59.999Z"
},
{
"totalQty": 224,
"count": 5,
"start": "2018-05-29T00:00:00.000Z",
"end": "2018-05-31T23:59:59.999Z"
}
]
}
{
"name": "Cond group",
"result": [
{
"totalQty": 666,
"count": 11,
"start": "2018-05-01T00:00:00.000Z",
"end": "2018-05-07T23:59:59.999Z"
},
{
"totalQty": 727,
"count": 12,
"start": "2018-05-08T00:00:00.000Z",
"end": "2018-05-14T23:59:59.999Z"
},
{
"totalQty": 647,
"count": 12,
"start": "2018-05-15T00:00:00.000Z",
"end": "2018-05-21T23:59:59.999Z"
},
{
"totalQty": 743,
"count": 15,
"start": "2018-05-22T00:00:00.000Z",
"end": "2018-05-28T23:59:59.999Z"
},
{
"totalQty": 224,
"count": 5,
"start": "2018-05-29T00:00:00.000Z",
"end": "2018-05-31T23:59:59.999Z"
}
]
}