v3 migration: first finished version that contains all models

Matteo Pagliazzi
2016-05-01 16:27:04 +02:00
parent 4457b1c18c
commit e8024f98e1
8 changed files with 610 additions and 59 deletions

View File

@@ -1,9 +1,4 @@
/*
members must be removed
*/
// Migrate users collection to new schema
// This should run AFTER challenges migration
// Migrate challenges collection to new schema (except for members)
// The console-stamp module must be installed (not included in package.json)
@@ -104,6 +99,8 @@ function processChallenges (afterId) {
delete oldChallenge.rewards;
delete oldChallenge.todos;
var createdAt = oldChallenge.timestamp;
oldChallenge.memberCount = oldChallenge.members.length;
if (!oldChallenge.prize || oldChallenge.prize < 0) oldChallenge.prize = 0; // ensure a non-negative prize
if (!oldChallenge.name) oldChallenge.name = 'challenge name';
@@ -114,6 +111,8 @@ function processChallenges (afterId) {
var newChallenge = new NewChallenge(oldChallenge);
newChallenge.createdAt = createdAt;
oldTasks.forEach(function (oldTask) {
oldTask._id = oldTask.id; // keep the old uuid unless duplicated
delete oldTask.id;
@@ -139,9 +138,12 @@ function processChallenges (afterId) {
batchInsertChallenges.insert(newChallenge.toObject());
});
console.log(`Saving ${oldChallenges.length} users and ${processedTasks} tasks.`);
console.log(`Saving ${oldChallenges.length} challenges and ${processedTasks} tasks.`);
return batchInsertChallenges.execute();
return Q.all([
batchInsertChallenges.execute(),
batchInsertTasks.execute(),
]);
})
.then(function () {
totoalProcessedTasks += processedTasks;
@@ -178,5 +180,5 @@ Q.all([
return processChallenges();
})
.catch(function (err) {
console.error(err);
console.error(err.stack || err);
});
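
The hunks above split each old challenge document, which embedded its tasks, into one challenge document plus standalone task documents that keep the original task uuid as their new _id. A minimal sketch of that mapping, not part of the commit: the old fields rewards, todos and timestamp appear in the hunks above, while the overall document shape is illustrative.

// Illustrative sketch only (not part of this commit).
var oldChallenge = {
  _id: 'challenge-uuid',
  timestamp: 1462111624000,
  members: ['user-1', 'user-2'],
  todos: [{id: 'task-uuid', text: 'Example todo', type: 'todo'}],
  rewards: [],
};

var createdAt = oldChallenge.timestamp; // preserved on the new document
var oldTasks = (oldChallenge.todos || []).concat(oldChallenge.rewards || []);
delete oldChallenge.todos;
delete oldChallenge.rewards;

oldChallenge.memberCount = oldChallenge.members.length; // 2

oldTasks.forEach(function (oldTask) {
  oldTask._id = oldTask.id; // the old uuid survives as the new _id
  delete oldTask.id;
  // each task becomes its own document and is inserted in the same
  // batch as the challenge (hence the Q.all over both bulk operations)
});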

View File

@@ -0,0 +1,135 @@
// Migrate challenges members
// Run AFTER users migration
// The console-stamp module must be installed (not included in package.json)
// It requires two environment variables: MONGODB_OLD and MONGODB_NEW
// Due to some big user profiles it needs more RAM than V8 allows by default (around 1.7GB).
// Run the script with --max-old-space-size=4096 to allow up to 4GB of RAM
console.log('Starting migrations/api_v3/challengesMembers.js.');
require('babel-register');
var Q = require('q');
var MongoDB = require('mongodb');
var nconf = require('nconf');
var mongoose = require('mongoose');
var _ = require('lodash');
var uuid = require('uuid');
var consoleStamp = require('console-stamp');
// Add timestamps to console messages
consoleStamp(console);
// Initialize configuration
require('../../website/src/libs/api-v3/setupNconf')();
var MONGODB_OLD = nconf.get('MONGODB_OLD');
var MONGODB_NEW = nconf.get('MONGODB_NEW');
var MongoClient = MongoDB.MongoClient;
mongoose.Promise = Q.Promise; // otherwise mongoose models won't work
// To be defined later when MongoClient connects
var mongoDbOldInstance;
var oldChallengeCollection;
var mongoDbNewInstance;
var newUserCollection;
var BATCH_SIZE = 1000;
var processedChallenges = 0;
// Only process challenges that fall in an interval, e.g. up to 0000-4000-0000-0000
var AFTER_CHALLENGE_ID = nconf.get('AFTER_CHALLENGE_ID');
var BEFORE_CHALLENGE_ID = nconf.get('BEFORE_CHALLENGE_ID');
function processChallenges (afterId) {
var processedTasks = 0;
var lastChallenge = null;
var oldChallenges;
var query = {};
if (BEFORE_CHALLENGE_ID) {
query._id = {$lte: BEFORE_CHALLENGE_ID};
}
if ((afterId || AFTER_CHALLENGE_ID) && !query._id) {
query._id = {};
}
if (afterId) {
query._id.$gt = afterId;
} else if (AFTER_CHALLENGE_ID) {
query._id.$gt = AFTER_CHALLENGE_ID;
}
console.log(`Executing challenges query.\nMatching challenges after ${afterId ? afterId : AFTER_CHALLENGE_ID} and before ${BEFORE_CHALLENGE_ID} (included).`);
return oldChallengeCollection
.find(query)
.sort({_id: 1})
.limit(BATCH_SIZE)
.toArray()
.then(function (oldChallengesR) {
oldChallenges = oldChallengesR;
var promises = [];
console.log(`Processing ${oldChallenges.length} challenges. Already processed ${processedChallenges} challenges.`);
if (oldChallenges.length === BATCH_SIZE) {
lastChallenge = oldChallenges[oldChallenges.length - 1]._id;
}
oldChallenges.forEach(function (oldChallenge) {
promises.push(newUserCollection.updateMany({
_id: {$in: oldChallenge.members},
}, {
$push: {challenges: oldChallenge._id},
}, {multi: true}));
});
console.log(`Migrating members of ${oldChallenges.length} challenges.`);
return Q.all(promises);
})
.then(function () {
processedChallenges += oldChallenges.length;
console.log(`Migrated members of ${oldChallenges.length} challenges.`);
if (lastChallenge) {
return processChallenges(lastChallenge);
} else {
return console.log('Done!');
}
});
}
// Connect to the databases
Q.all([
MongoClient.connect(MONGODB_OLD),
MongoClient.connect(MONGODB_NEW),
])
.then(function (result) {
var oldInstance = result[0];
var newInstance = result[1];
mongoDbOldInstance = oldInstance;
oldChallengeCollection = mongoDbOldInstance.collection('challenges');
mongoDbNewInstance = newInstance;
newUserCollection = mongoDbNewInstance.collection('users');
console.log(`Connected with MongoClient to ${MONGODB_OLD} and ${MONGODB_NEW}.`);
return processChallenges();
})
.catch(function (err) {
console.error(err.stack || err);
});
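
All of these migration scripts page through the old collection the same way: an _id range built from the optional BEFORE/AFTER bounds, an ascending sort, a fixed batch size, and a recursive call with the last seen _id whenever a full batch comes back. A condensed sketch of just that pattern, assuming the same Q promise library the scripts use; pageThrough, collection and processBatch are placeholder names, not identifiers from this commit.

// Condensed sketch of the paging pattern shared by these scripts.
var Q = require('q');
var BATCH_SIZE = 1000;

function pageThrough (collection, processBatch, afterId, beforeId) {
  var query = {};
  if (beforeId) query._id = {$lte: beforeId};
  if (afterId) {
    query._id = query._id || {};
    query._id.$gt = afterId;
  }
  return collection
    .find(query)
    .sort({_id: 1})
    .limit(BATCH_SIZE)
    .toArray()
    .then(function (docs) {
      return Q.when(processBatch(docs)).then(function () {
        // a full batch means more documents may follow the last _id
        if (docs.length === BATCH_SIZE) {
          return pageThrough(collection, processBatch, docs[docs.length - 1]._id, beforeId);
        }
      });
    });
}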

View File

@@ -0,0 +1,135 @@
// Migrate coupons collection to new schema
// The console-stamp module must be installed (not included in package.json)
// It requires two environment variables: MONGODB_OLD and MONGODB_NEW
// Due to some big user profiles it needs more RAM than V8 allows by default (around 1.7GB).
// Run the script with --max-old-space-size=4096 to allow up to 4GB of RAM
console.log('Starting migrations/api_v3/coupons.js.');
require('babel-register');
var Q = require('q');
var MongoDB = require('mongodb');
var nconf = require('nconf');
var mongoose = require('mongoose');
var _ = require('lodash');
var uuid = require('uuid');
var consoleStamp = require('console-stamp');
// Add timestamps to console messages
consoleStamp(console);
// Initialize configuration
require('../../website/src/libs/api-v3/setupNconf')();
var MONGODB_OLD = nconf.get('MONGODB_OLD');
var MONGODB_NEW = nconf.get('MONGODB_NEW');
var MongoClient = MongoDB.MongoClient;
mongoose.Promise = Q.Promise; // otherwise mongoose models won't work
// Load new models
var Coupon = require('../../website/src/models/coupon').model;
// To be defined later when MongoClient connects
var mongoDbOldInstance;
var oldCouponCollection;
var mongoDbNewInstance;
var newCouponCollection;
var BATCH_SIZE = 1000;
var processedCoupons = 0;
// Only process coupons that fall in an interval, e.g. up to 0000-4000-0000-0000
var AFTER_COUPON_ID = nconf.get('AFTER_COUPON_ID');
var BEFORE_COUPON_ID = nconf.get('BEFORE_COUPON_ID');
function processCoupons (afterId) {
var processedTasks = 0;
var lastCoupon = null;
var oldCoupons;
var query = {};
if (BEFORE_COUPON_ID) {
query._id = {$lte: BEFORE_COUPON_ID};
}
if ((afterId || AFTER_COUPON_ID) && !query._id) {
query._id = {};
}
if (afterId) {
query._id.$gt = afterId;
} else if (AFTER_COUPON_ID) {
query._id.$gt = AFTER_COUPON_ID;
}
var batchInsertCoupons = newCouponCollection.initializeUnorderedBulkOp();
console.log(`Executing coupons query.\nMatching coupons after ${afterId ? afterId : AFTER_COUPON_ID} and before ${BEFORE_COUPON_ID} (included).`);
return oldCouponCollection
.find(query)
.sort({_id: 1})
.limit(BATCH_SIZE)
.toArray()
.then(function (oldCouponsR) {
oldCoupons = oldCouponsR;
console.log(`Processing ${oldCoupons.length} coupons. Already processed ${processedCoupons} coupons.`);
if (oldCoupons.length === BATCH_SIZE) {
lastCoupon = oldCoupons[oldCoupons.length - 1]._id;
}
oldCoupons.forEach(function (oldCoupon) {
var newCoupon = new Coupon(oldCoupon);
batchInsertCoupons.insert(newCoupon.toObject());
});
console.log(`Saving ${oldCoupons.length} coupons.`);
return batchInsertCoupons.execute();
})
.then(function () {
processedCoupons += oldCoupons.length;
console.log(`Saved ${oldCoupons.length} coupons.`);
if (lastCoupon) {
return processCoupons(lastCoupon);
} else {
return console.log('Done!');
}
});
}
// Connect to the databases
Q.all([
MongoClient.connect(MONGODB_OLD),
MongoClient.connect(MONGODB_NEW),
])
.then(function (result) {
var oldInstance = result[0];
var newInstance = result[1];
mongoDbOldInstance = oldInstance;
oldCouponCollection = mongoDbOldInstance.collection('coupons');
mongoDbNewInstance = newInstance;
newCouponCollection = mongoDbNewInstance.collection('coupons');
console.log(`Connected with MongoClient to ${MONGODB_OLD} and ${MONGODB_NEW}.`);
return processCoupons();
})
.catch(function (err) {
console.error(err.stack || err);
});
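
Like the other scripts, the coupons migration builds each new document through the mongoose model (new Coupon(oldCoupon).toObject()) so that schema defaults and casting are applied, then writes through the driver's unordered bulk API instead of mongoose's slower save path. A small sketch of that idea with a hypothetical schema; the real Coupon schema is not shown in this diff.

// Hypothetical schema, for illustration only.
var mongoose = require('mongoose');

var exampleSchema = new mongoose.Schema({
  _id: {type: String},
  event: {type: String, default: 'example-event'},
});
var ExampleCoupon = mongoose.model('ExampleCoupon', exampleSchema);

// toObject() yields a plain object with defaults and casting applied,
// without running mongoose validation or save hooks:
var plain = new ExampleCoupon({_id: 'abc'}).toObject();
// plain => {_id: 'abc', event: 'example-event'}
// batchInsertCoupons.insert(plain); batchInsertCoupons.execute();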

View File

@@ -1,4 +1,136 @@
/*
email must be lowercase
remove unique: true from mongoose schema
*/
// Migrate unsubscriptions collection to new schema
// The console-stamp module must be installed (not included in package.json)
// It requires two environment variables: MONGODB_OLD and MONGODB_NEW
// Due to some big user profiles it needs more RAM than V8 allows by default (around 1.7GB).
// Run the script with --max-old-space-size=4096 to allow up to 4GB of RAM
console.log('Starting migrations/api_v3/unsubscriptions.js.');
require('babel-register');
var Q = require('q');
var MongoDB = require('mongodb');
var nconf = require('nconf');
var mongoose = require('mongoose');
var _ = require('lodash');
var uuid = require('uuid');
var consoleStamp = require('console-stamp');
// Add timestamps to console messages
consoleStamp(console);
// Initialize configuration
require('../../website/src/libs/api-v3/setupNconf')();
var MONGODB_OLD = nconf.get('MONGODB_OLD');
var MONGODB_NEW = nconf.get('MONGODB_NEW');
var MongoClient = MongoDB.MongoClient;
mongoose.Promise = Q.Promise; // otherwise mongoose models won't work
// Load new models
var EmailUnsubscription = require('../../website/src/models/emailUnsubscription').model;
// To be defined later when MongoClient connects
var mongoDbOldInstance;
var oldUnsubscriptionCollection;
var mongoDbNewInstance;
var newUnsubscriptionCollection;
var BATCH_SIZE = 1000;
var processedUnsubscriptions = 0;
// Only process unsubscriptions that fall in an interval, e.g. up to 0000-4000-0000-0000
var AFTER_UNSUBSCRIPTION_ID = nconf.get('AFTER_UNSUBSCRIPTION_ID');
var BEFORE_UNSUBSCRIPTION_ID = nconf.get('BEFORE_UNSUBSCRIPTION_ID');
function processUnsubscriptions (afterId) {
var processedTasks = 0;
var lastUnsubscription = null;
var oldUnsubscriptions;
var query = {};
if (BEFORE_UNSUBSCRIPTION_ID) {
query._id = {$lte: BEFORE_UNSUBSCRIPTION_ID};
}
if ((afterId || AFTER_UNSUBSCRIPTION_ID) && !query._id) {
query._id = {};
}
if (afterId) {
query._id.$gt = afterId;
} else if (AFTER_UNSUBSCRIPTION_ID) {
query._id.$gt = AFTER_UNSUBSCRIPTION_ID;
}
var batchInsertUnsubscriptions = newUnsubscriptionCollection.initializeUnorderedBulkOp();
console.log(`Executing unsubscriptions query.\nMatching unsubscriptions after ${afterId ? afterId : AFTER_UNSUBSCRIPTION_ID} and before ${BEFORE_UNSUBSCRIPTION_ID} (included).`);
return oldUnsubscriptionCollection
.find(query)
.sort({_id: 1})
.limit(BATCH_SIZE)
.toArray()
.then(function (oldUnsubscriptionsR) {
oldUnsubscriptions = oldUnsubscriptionsR;
console.log(`Processing ${oldUnsubscriptions.length} unsubscriptions. Already processed ${processedUnsubscriptions} unsubscriptions.`);
if (oldUnsubscriptions.length === BATCH_SIZE) {
lastUnsubscription = oldUnsubscriptions[oldUnsubscriptions.length - 1]._id;
}
oldUnsubscriptions.forEach(function (oldUnsubscription) {
oldUnsubscription.email = oldUnsubscription.email.toLowerCase();
var newUnsubscription = new EmailUnsubscription(oldUnsubscription);
batchInsertUnsubscriptions.insert(newUnsubscription.toObject());
});
console.log(`Saving ${oldUnsubscriptions.length} unsubscriptions.`);
return batchInsertUnsubscriptions.execute();
})
.then(function () {
processedUnsubscriptions += oldUnsubscriptions.length;
console.log(`Saved ${oldUnsubscriptions.length} unsubscriptions.`);
if (lastUnsubscription) {
return processUnsubscriptions(lastUnsubscription);
} else {
return console.log('Done!');
}
});
}
// Connect to the databases
Q.all([
MongoClient.connect(MONGODB_OLD),
MongoClient.connect(MONGODB_NEW),
])
.then(function (result) {
var oldInstance = result[0];
var newInstance = result[1];
mongoDbOldInstance = oldInstance;
oldUnsubscriptionCollection = mongoDbOldInstance.collection('emailunsubscriptions');
mongoDbNewInstance = newInstance;
newUnsubscriptionCollection = mongoDbNewInstance.collection('emailunsubscriptions');
console.log(`Connected with MongoClient to ${MONGODB_OLD} and ${MONGODB_NEW}.`);
return processUnsubscriptions();
})
.catch(function (err) {
console.error(err.stack || err);
});

View File

@@ -1,17 +1,184 @@
/*
name is required
leader is required
type is required
privacy is required
leaderOnly.challenges is required
members are not stored anymore
invites are not stored anymore
challenges are not stored anymore
balance > 0
memberCount must be checked
challengeCount must be checked
quest.leader must be present (default to party leader)
quest.key must be valid (otherwise remove)
tavern id and leader must be updated
*/
// Migrate groups collection to new schema
// Run AFTER users migration
// The console-stamp module must be installed (not included in package.json)
// It requires two environment variables: MONGODB_OLD and MONGODB_NEW
// Due to some big user profiles it needs more RAM than V8 allows by default (around 1.7GB).
// Run the script with --max-old-space-size=4096 to allow up to 4GB of RAM
console.log('Starting migrations/api_v3/groups.js.');
require('babel-register');
var Q = require('q');
var MongoDB = require('mongodb');
var nconf = require('nconf');
var mongoose = require('mongoose');
var _ = require('lodash');
var uuid = require('uuid');
var consoleStamp = require('console-stamp');
// Add timestamps to console messages
consoleStamp(console);
// Initialize configuration
require('../../website/src/libs/api-v3/setupNconf')();
var MONGODB_OLD = nconf.get('MONGODB_OLD');
var MONGODB_NEW = nconf.get('MONGODB_NEW');
var MongoClient = MongoDB.MongoClient;
mongoose.Promise = Q.Promise; // otherwise mongoose models won't work
// Load new models
var NewGroup = require('../../website/src/models/group').model;
// To be defined later when MongoClient connects
var mongoDbOldInstance;
var oldGroupCollection;
var mongoDbNewInstance;
var newGroupCollection;
var newUserCollection;
var BATCH_SIZE = 1000;
var processedGroups = 0;
// Only process groups that fall in an interval, e.g. up to 0000-4000-0000-0000
var AFTER_GROUP_ID = nconf.get('AFTER_GROUP_ID');
var BEFORE_GROUP_ID = nconf.get('BEFORE_GROUP_ID');
function processGroups (afterId) {
var processedTasks = 0;
var lastGroup = null;
var oldGroups;
var query = {};
if (BEFORE_GROUP_ID) {
query._id = {$lte: BEFORE_GROUP_ID};
}
if ((afterId || AFTER_GROUP_ID) && !query._id) {
query._id = {};
}
if (afterId) {
query._id.$gt = afterId;
} else if (AFTER_GROUP_ID) {
query._id.$gt = AFTER_GROUP_ID;
}
var batchInsertGroups = newGroupCollection.initializeUnorderedBulkOp();
console.log(`Executing groups query.\nMatching groups after ${afterId ? afterId : AFTER_GROUP_ID} and before ${BEFORE_GROUP_ID} (included).`);
return oldGroupCollection
.find(query)
.sort({_id: 1})
.limit(BATCH_SIZE)
.toArray()
.then(function (oldGroupsR) {
oldGroups = oldGroupsR;
var promises = [];
console.log(`Processing ${oldGroups.length} groups. Already processed ${processedGroups} groups.`);
if (oldGroups.length === BATCH_SIZE) {
lastGroup = oldGroups[oldGroups.length - 1]._id;
}
oldGroups.forEach(function (oldGroup) {
if (!oldGroup.members || oldGroup.members.length === 0) return; // skip empty groups (they are not migrated)
oldGroup.memberCount = oldGroup.members.length;
if (oldGroup.challenges) oldGroup.challengeCount = oldGroup.challenges.length;
if (!oldGroup.balance || oldGroup.balance < 0) oldGroup.balance = 0; // ensure a non-negative balance
if (!oldGroup.name) oldGroup.name = 'group name';
if (!oldGroup.leaderOnly) oldGroup.leaderOnly = {};
if (!oldGroup.leaderOnly.challenges) oldGroup.leaderOnly.challenges = false;
if (!oldGroup.type) {
//console.log(oldGroup);
console.error('group.type is required');
}
if (!oldGroup.leader) {
//console.log(oldGroup);
console.error('group.leader is required');
}
if (!oldGroup.privacy) {
//console.log(oldGroup);
console.error('group.privacy is required');
}
var updateMembers = {};
if (oldGroup.type === 'guild') {
updateMembers.$push = {guilds: oldGroup._id};
} else if (oldGroup.type === 'party') {
updateMembers.$set = {'party._id': oldGroup._id};
}
if (oldGroup.type) {
promises.push(newUserCollection.updateMany({
_id: {$in: oldGroup.members},
}, updateMembers, {multi: true}));
}
var newGroup = new NewGroup(oldGroup);
batchInsertGroups.insert(newGroup.toObject());
});
console.log(`Saving ${oldGroups.length} groups and migrating members to users collection.`);
promises.push(batchInsertGroups.execute());
return Q.all(promises);
})
.then(function () {
processedGroups += oldGroups.length;
console.log(`Saved ${oldGroups.length} groups and migrated their members to the user collection.`);
if (lastGroup) {
return processGroups(lastGroup);
} else {
return console.log('Done!');
}
});
}
// Connect to the databases
Q.all([
MongoClient.connect(MONGODB_OLD),
MongoClient.connect(MONGODB_NEW),
])
.then(function (result) {
var oldInstance = result[0];
var newInstance = result[1];
mongoDbOldInstance = oldInstance;
oldGroupCollection = mongoDbOldInstance.collection('groups');
mongoDbNewInstance = newInstance;
newGroupCollection = mongoDbNewInstance.collection('groups');
newUserCollection = mongoDbNewInstance.collection('users');
console.log(`Connected with MongoClient to ${MONGODB_OLD} and ${MONGODB_NEW}.`);
return processGroups();
})
.catch(function (err) {
console.error(err.stack || err);
});
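
The groups script moves membership onto the user documents instead of keeping a members array on each group: a guild's id is pushed onto every member's guilds array, and a party's id becomes party._id. A before/after sketch of one user document as implied by the $push/$set operations above; the exact new user schema fields are an assumption here.

// Illustrative only; user fields are inferred from the updateMany calls above.
var userBefore = {_id: 'user-1', guilds: [], party: {}};

// After migrating guild 'guild-1' and party 'party-1', both of which list
// 'user-1' in their members array:
var userAfter = {
  _id: 'user-1',
  guilds: ['guild-1'],     // $push: {guilds: oldGroup._id}
  party: {_id: 'party-1'}, // $set: {'party._id': oldGroup._id}
};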

View File

@@ -59,7 +59,6 @@ var BEFORE_USER_ID = nconf.get('BEFORE_USER_ID');
- groups
- invitations
- challenges' tasks
- checklists from .id to ._id (reminders too!)
*/
function processUsers (afterId) {
@@ -111,8 +110,8 @@ function processUsers (afterId) {
oldUser.tags = oldUser.tags.map(function (tag) {
return {
_id: tag.id,
name: tag.name,
id: tag.id,
name: tag.name || 'tag name',
challenge: tag.challenge,
};
});
@@ -125,7 +124,11 @@ function processUsers (afterId) {
oldTask.legacyId = oldTask.id; // store the old task id
delete oldTask.id;
oldTask.challenge = {};
oldTask.challenge = oldTask.challenge || {};
if (oldTask.challenge.id) {
oldTask.challenge.taskId = oldTask.legacyId;
}
if (!oldTask.text) oldTask.text = 'task text'; // required
oldTask.tags = _.map(oldTask.tags, function (tagPresent, tagId) {
return tagPresent && tagId;
@@ -165,30 +168,6 @@ function processUsers (afterId) {
});
}
/*
TODO var challengeTasksChangedId = {};
tasksArr.forEach(function(task){
task.challenge = task.challenge || {};
if(task.challenge.id) {
// If challengeTasksChangedId[task.legacyId] then we got one of the duplicates from the challenges migration
if (challengeTasksChangedId[task.legacyId]) {
var res = _.find(challengeTasksChangedId[task.legacyId], function(arr){
return arr[1] === task.challenge.id;
});
// If res, id changed, otherwise matches the original one
task.challenge.taskId = res ? res[0] : task.legacyId;
} else {
task.challenge.taskId = task.legacyId;
}
}
if(!task.type) console.log('Task without type ', task._id, ' user ', user._id);
});
*/
// Connect to the databases
Q.all([
MongoClient.connect(MONGODB_OLD),
@@ -210,5 +189,5 @@ Q.all([
return processUsers();
})
.catch(function (err) {
console.error(err);
console.error(err.stack || err);
});
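
The users hunk above replaces the deleted TODO block with a simpler rule: a user's copy of a challenge task points back at the challenge's task through the shared legacy uuid, and the duplicate-id remapping sketched in the removed comment is dropped. A small illustration of the resulting link; the values are made up, and the _id handling for the user copy is not shown in these hunks.

// Illustration only; values are invented.
var challengeTask = {
  _id: 'legacy-uuid', // the challenges migration keeps the old uuid as _id
  challenge: {id: 'challenge-1'},
};

var userTask = {
  legacyId: 'legacy-uuid', // old id preserved on the user's copy
  challenge: {
    id: 'challenge-1',
    taskId: 'legacy-uuid', // challenge.taskId = legacyId, as in the hunk above
  },
};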

View File

@@ -1,18 +1,15 @@
import mongoose from 'mongoose';
import common from '../../../common';
import validator from 'validator';
import baseModel from '../libs/api-v3/baseModel';
// A collection used to store mailing list unsubscriptions for non-registered email addresses
export let schema = new mongoose.Schema({
_id: {
type: String,
default: common.uuid,
},
email: {
type: String,
required: true,
trim: true,
lowercase: true, // TODO migrate existing to lowerCase
lowercase: true,
validate: [validator.isEmail, 'Invalid email.'],
},
}, {
@@ -20,4 +17,8 @@ export let schema = new mongoose.Schema({
minimize: false, // So empty objects are returned
});
schema.plugin(baseModel, {
noSet: ['_id'],
});
export let model = mongoose.model('EmailUnsubscription', schema);
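
A short usage sketch of the model above, not part of the commit: the schema trims and lowercases the address when it is set, and _id defaults to a uuid via common.uuid. The address below is illustrative.

// Usage sketch; the address is illustrative.
// var EmailUnsubscription = require('../../website/src/models/emailUnsubscription').model; // as in the migration scripts
let unsub = new EmailUnsubscription({email: '  Someone@Example.COM '});
// unsub.email === 'someone@example.com' (trim + lowercase setters)
// unsub._id is a generated uuid (schema default)
// unsub.save(); // requires an open mongoose connection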

View File

@@ -44,7 +44,7 @@ export let schema = new Schema({
*/
leaderOnly: { // restrict group actions to leader (members can't do them)
challenges: {type: Boolean, default: false, required: true},
// invites: {type:Boolean, 'default':false}
// invites: {type: Boolean, default: false, required: true},
},
memberCount: {type: Number, default: 1},
challengeCount: {type: Number, default: 0},