...
This version adds a new constraint to MongoDB to prevent duplicate file entries. If duplicates already exist, they have to be removed before upgrading; the following script can be used for that.
```javascript
// Create a temporary index on the fields covered by the new constraint; it is dropped again at the end.
const indexName = db.fs.files.createIndex({ 'filename': 1, 'metadata._projectId': 1, 'metadata._category': 1 });

// Group files by (filename, projectId, category) and process every group that has more than one entry.
db.fs.files.aggregate([
  {
    $group: {
      _id: { filename: '$filename', category: '$metadata._category', projectId: '$metadata._projectId' },
      dups: { '$addToSet': '$_id' },
      count: { '$sum': 1 },
      latest: { '$max': '$uploadDate' }
    }
  },
  {
    $match: { count: { '$gt': 1 } }
  }
], { allowDiskUse: true }).forEach(function (document) {
  const duplicateIds = document.dups;

  // Keep the oldest upload in each group; delete every newer duplicate together with its chunks.
  db.fs.files.find({ _id: { $in: duplicateIds } })
    .sort({ uploadDate: 1 })
    .skip(1)
    .forEach(function (fileDocument) {
      db.fs.chunks.deleteMany({ files_id: fileDocument._id });
      db.fs.files.deleteMany({ _id: fileDocument._id });
    });
});

// Remove the temporary index again.
db.fs.files.dropIndex(indexName);
```
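The script can be run in a `mongosh` session connected to the database that holds the `fs.files` collection. To check beforehand whether any duplicates exist at all, or to confirm the cleanup afterwards, a read-only variant of the same grouping can be used. This is a minimal sketch and not part of the original upgrade notes; it only counts duplicate groups and changes nothing:

```javascript
// Counts (filename, projectId, category) combinations that still have more than
// one fs.files document. Prints 0 when no duplicates remain.
const remaining = db.fs.files.aggregate([
  {
    $group: {
      _id: { filename: '$filename', projectId: '$metadata._projectId', category: '$metadata._category' },
      count: { $sum: 1 }
    }
  },
  { $match: { count: { $gt: 1 } } },
  { $count: 'duplicateGroups' }
], { allowDiskUse: true }).toArray();

print(remaining.length === 0 ? 0 : remaining[0].duplicateGroups);
```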
...