Update document using the result of concatenated fields

Asked by 轻奢々 on 2021-01-16 12:26

Consider a collection with the following documents:

{
        "_id" : "aaaaaaaaaaaa",
        "title" : "Hello, World!",
        "date" : "Thursday, November 12, 2015",
        "time" : "9:30 AM",
        "endtime" : "11:30 AM"
}
{
        "_id" : "bbbbbbbbbbbb",
        "title" : "To B or not to B",
        "date" : "Thursday, November 12, 2015",
        "time" : "10:30 AM",
        "endtime" : "11:00 AM"
}

How can I update each document by adding new iso_start and iso_end date fields, built by concatenating the date field with time and endtime respectively?
1 Answer

  • Answered 2021-01-16 13:13

    You need to use the .aggregate() method, which provides access to the aggregation pipeline.

    In your $project stage you need to use the $concat operator to concatenate your fields.
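
    For example, running just that stage on the sample collection is a quick sanity check; note that $concat returns plain strings, not dates:

    db.events.aggregate([
        { "$project": {
            "iso_start": { "$concat": [ "$date", " ", "$time" ] }
        }}
    ])
    // { "_id" : "aaaaaaaaaaaa", "iso_start" : "Thursday, November 12, 2015 9:30 AM" }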

    You can then use the aggregation result to update your collection, using "bulk" operations for efficiency:

    var bulk = db.events.initializeOrderedBulkOp(); 
    var count = 0;
    db.events.aggregate([
        { "$project": {
            "iso_start": { "$concat": [ "$date", " ", "$time" ] }, 
            "iso_end": { "$concat": [ "$date", " ", "$endtime" ] }    
        }}
    ]).forEach(function(doc) { 
        bulk.find({'_id': doc._id}).updateOne({
            "$set": {
                "iso_start": new Date(doc.iso_start),
                "iso_end": new Date(doc.iso_end)
            }
        }); 
        count++; 
        if(count % 200 === 0) { 
            // update per 200 operations and re-init
            bulk.execute();     
            bulk = db.events.initializeOrderedBulkOp(); 
        } 
    })
    // Flush any remaining queued operations
    if(count > 0) bulk.execute();
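
    The date conversion itself happens on the client side: new Date() asks the shell's JavaScript engine to parse the concatenated string. A minimal sketch of that step; the UTC+3 offset here is an assumption inferred from the sample output below:

    // Parsing of this verbose format is engine-dependent, but the mongo
    // shell's JavaScript engine accepts it; local time is stored as UTC.
    new Date("Thursday, November 12, 2015 9:30 AM")
    // ISODate("2015-11-12T06:30:00Z") when the local offset is UTC+3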
    

    After this operation your documents look like this:

    {
            "_id" : "aaaaaaaaaaaa",
            "title" : "Hello, World!",
            "date" : "Thursday, November 12, 2015",
            "time" : "9:30 AM",
            "endtime" : "11:30 AM",
            "iso_start" : ISODate("2015-11-12T06:30:00Z"),
            "iso_end" : ISODate("2015-11-12T08:30:00Z")
    }
    {
            "_id" : "bbbbbbbbbbbb",
            "title" : "To B or not to B",
            "date" : "Thursday, November 12, 2015",
            "time" : "10:30 AM",
            "endtime" : "11:00 AM",
            "iso_start" : ISODate("2015-11-12T07:30:00Z"),
            "iso_end" : ISODate("2015-11-12T08:00:00Z")
    }
    

    That is not the end of the story, because the "Bulk" API and its associated methods are deprecated in the forthcoming release (version 3.2), so from that version onward we will need to use the db.collection.bulkWrite() method.

    var operations = [];
    db.events.aggregate([
        { "$project": {
            "iso_start": { "$concat": [ "$date", " ", "$time" ] }, 
            "iso_end": { "$concat": [ "$date", " ", "$endtime" ] }    
        }}
    ]).forEach(function(doc) {
        var operation = {
            updateOne: { 
                filter: { "_id": doc._id }, 
                update: { 
                    "$set":  { 
                       "iso_start": new Date(doc.iso_start),
                       "iso_end": new Date(doc.iso_end)
                    }
                }
            }
        }; 
        operations.push(operation); 
    })
    db.events.bulkWrite(operations, { ordered: true, writeConcern: { w: "majority", wtimeout: 5000 } })
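
    Note that, unlike the "Bulk" API, db.collection.bulkWrite() takes the write options (ordered, writeConcern) as a second argument rather than as an entry in the operations array, which is why they are passed separately above.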
    