mirror of
https://github.com/HabitRPG/habitica.git
synced 2025-10-28 03:32:29 +01:00
Compare commits
5 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4fd5de1bf7 | ||
|
|
bd902e14cf | ||
|
|
3a0878c9a1 | ||
|
|
9b37f1e538 | ||
|
|
5774ac23e8 |
8
.babelrc
8
.babelrc
@@ -1,6 +1,10 @@
|
||||
{
|
||||
"presets": ["es2015"],
|
||||
"plugins": [
|
||||
"transform-es2015-modules-commonjs",
|
||||
"syntax-object-rest-spread",
|
||||
"transform-object-rest-spread",
|
||||
["transform-async-to-module-method", {
|
||||
"module": "bluebird",
|
||||
"method": "coroutine"
|
||||
}]
|
||||
]
|
||||
}
|
||||
|
||||
3
.bowerrc
Normal file
3
.bowerrc
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"directory": "website/client-old/bower_components"
|
||||
}
|
||||
@@ -20,7 +20,11 @@ container_commands:
|
||||
command: "touch /tmp/.babel.json"
|
||||
02_ownBabel:
|
||||
command: "chmod a+rw /tmp/.babel.json"
|
||||
03_installGulp:
|
||||
03_installBower:
|
||||
command: "$NODE_HOME/bin/npm install -g bower"
|
||||
04_installGulp:
|
||||
command: "$NODE_HOME/bin/npm install -g gulp"
|
||||
04_runGulp:
|
||||
05_runBower:
|
||||
command: "$NODE_HOME/lib/node_modules/bower/bin/bower --config.interactive=false --allow-root install -f"
|
||||
06_runGulp:
|
||||
command: "$NODE_HOME/lib/node_modules/gulp/bin/gulp.js build"
|
||||
|
||||
@@ -6,9 +6,15 @@ website/transpiled-babel/
|
||||
website/common/transpiled-babel/
|
||||
dist/
|
||||
dist-client/
|
||||
apidoc_build/
|
||||
content_cache/
|
||||
node_modules/
|
||||
|
||||
# Old migrations, disabled
|
||||
migrations/archive/*
|
||||
# Not linted
|
||||
website/client-old/
|
||||
test/client-old/spec/**/*
|
||||
|
||||
# Temporarilly disabled. These should be removed when the linting errors are fixed TODO
|
||||
migrations/*
|
||||
scripts/*
|
||||
website/common/browserify.js
|
||||
Gruntfile.js
|
||||
gulpfile.js
|
||||
gulp
|
||||
4
.github/CONTRIBUTING.md
vendored
4
.github/CONTRIBUTING.md
vendored
@@ -4,7 +4,7 @@
|
||||
|
||||
# Pull Request
|
||||
|
||||
[Please see these instructions for adding a pull request](http://habitica.fandom.com/wiki/Using_Your_Local_Install_to_Modify_Habitica%27s_Website_and_API)
|
||||
[Please see these instructions for adding a pull request](http://habitica.wikia.com/wiki/Using_Habitica_Git#Pull_Request)
|
||||
|
||||
# Requesting a feature
|
||||
|
||||
@@ -12,4 +12,4 @@ Habitica uses [Trello](https://trello.com/b/EpoYEYod/habitica) to track feature
|
||||
|
||||
# Contributing Code
|
||||
|
||||
See [Contributing to Habitica](http://habitica.fandom.com/wiki/Contributing_to_Habitica#Coders_.28Web_.26_Mobile.29)
|
||||
See [Contributing to Habitica](http://habitica.wikia.com/wiki/Contributing_to_Habitica#Coders_.28Web_.26_Mobile.29)
|
||||
|
||||
2
.github/ISSUE_TEMPLATE.md
vendored
2
.github/ISSUE_TEMPLATE.md
vendored
@@ -6,7 +6,7 @@
|
||||
|
||||
[//]: # (For more guidelines see https://github.com/HabitRPG/habitica/issues/2760)
|
||||
|
||||
[//]: # (Fill out relevant information - UUID is found from the Habitia website at User Icon > Settings > API)
|
||||
[//]: # (Fill out relevant information - UUID is found in Settings -> API)
|
||||
### General Info
|
||||
* UUID:
|
||||
* Browser:
|
||||
|
||||
8
.github/PULL_REQUEST_TEMPLATE.md
vendored
8
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -1,14 +1,14 @@
|
||||
[//]: # (Note: See http://habitica.fandom.com/wiki/Using_Your_Local_Install_to_Modify_Habitica%27s_Website_and_API for more info)
|
||||
[//]: # (Note: See http://habitica.wikia.com/wiki/Using_Habitica_Git#Pull_Request for more info)
|
||||
|
||||
[//]: # (Put Issue # here, if applicable. This will automatically close the issue if your PR is merged in)
|
||||
Fixes put_#_and_issue_numer_here
|
||||
[//]: # (Put Issue # or URL here, if applicable. This will automatically close the issue if your PR is merged in)
|
||||
Fixes put_issue_url_here
|
||||
|
||||
### Changes
|
||||
[//]: # (Describe the changes that were made in detail here. Include pictures if necessary)
|
||||
|
||||
|
||||
|
||||
[//]: # (Put User ID in here - found on the Habitica website at User Icon > Settings > API)
|
||||
[//]: # (Put User ID in here - found in Settings -> API)
|
||||
|
||||
----
|
||||
UUID:
|
||||
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -2,14 +2,11 @@
|
||||
website/client-old/gen
|
||||
website/client-old/common
|
||||
website/client-old/apidoc
|
||||
website/build
|
||||
website/client-old/js/habitrpg-shared.js*
|
||||
website/client-old/css/habitrpg-shared.css
|
||||
website/transpiled-babel/
|
||||
website/common/transpiled-babel/
|
||||
node_modules
|
||||
content_cache
|
||||
apidoc_build
|
||||
*.swp
|
||||
.idea*
|
||||
config.json
|
||||
@@ -39,8 +36,6 @@ dist-client
|
||||
test/client/unit/coverage
|
||||
test/client/e2e/reports
|
||||
test/client-old/spec/mocks/translations.js
|
||||
yarn.lock
|
||||
.gitattributes
|
||||
|
||||
# Elastic Beanstalk Files
|
||||
.elasticbeanstalk/*
|
||||
|
||||
@@ -4,7 +4,6 @@ node_modules/**
|
||||
.bower-registry/**
|
||||
website/client-old/**
|
||||
website/client/**
|
||||
website/client/store/**
|
||||
website/views/**
|
||||
website/build/**
|
||||
dist/**
|
||||
@@ -17,4 +16,3 @@ CHANGELOG.md
|
||||
newrelic_agent.log
|
||||
*.swp
|
||||
*.swx
|
||||
website/raw_sprites/**
|
||||
|
||||
26
.travis.yml
26
.travis.yml
@@ -1,28 +1,36 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
- '10'
|
||||
- '6'
|
||||
sudo: required
|
||||
dist: precise
|
||||
services:
|
||||
- mongodb
|
||||
cache:
|
||||
directories:
|
||||
- 'node_modules'
|
||||
addons:
|
||||
chrome: stable
|
||||
apt:
|
||||
sources:
|
||||
- ubuntu-toolchain-r-test
|
||||
packages:
|
||||
- g++-4.8
|
||||
before_install:
|
||||
- $CXX --version
|
||||
- npm install -g npm@5
|
||||
- if [ $REQUIRES_SERVER ]; then sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10; echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' | sudo tee /etc/apt/sources.list.d/mongodb.list; sudo apt-get update; sudo apt-get install mongodb-org-server; fi
|
||||
install:
|
||||
- npm install &> npm.install.log || (cat npm.install.log; false)
|
||||
before_script:
|
||||
- npm run test:build
|
||||
- cp config.json.example config.json
|
||||
- sleep 5
|
||||
- sleep 15
|
||||
script:
|
||||
- npm run $TEST
|
||||
- if [ $COVERAGE ]; then ./node_modules/.bin/lcov-result-merger 'coverage/**/*.info' | ./node_modules/coveralls/bin/coveralls.js; fi
|
||||
env:
|
||||
global:
|
||||
- CXX=g++-4.8
|
||||
- DISABLE_REQUEST_LOGGING=true
|
||||
matrix:
|
||||
- TEST="lint"
|
||||
- TEST="test:api:unit" REQUIRES_SERVER=true COVERAGE=true
|
||||
- TEST="test:api-v3:integration" REQUIRES_SERVER=true COVERAGE=true
|
||||
- TEST="test:api-v4:integration" REQUIRES_SERVER=true COVERAGE=true
|
||||
- TEST="test:api-v3" REQUIRES_SERVER=true COVERAGE=true
|
||||
- TEST="test:sanity"
|
||||
- TEST="test:content" COVERAGE=true
|
||||
- TEST="test:common" COVERAGE=true
|
||||
|
||||
52
Dockerfile
52
Dockerfile
@@ -1,30 +1,22 @@
|
||||
FROM node:10
|
||||
|
||||
ENV ADMIN_EMAIL admin@habitica.com
|
||||
ENV AMAZON_PAYMENTS_CLIENT_ID amzn1.application-oa2-client.68ed9e6904ef438fbc1bf86bf494056e
|
||||
ENV AMAZON_PAYMENTS_SELLER_ID AMQ3SB4SG5E91
|
||||
ENV AMPLITUDE_KEY e8d4c24b3d6ef3ee73eeba715023dd43
|
||||
ENV BASE_URL https://habitica.com
|
||||
ENV FACEBOOK_KEY 128307497299777
|
||||
ENV GA_ID UA-33510635-1
|
||||
ENV GOOGLE_CLIENT_ID 1035232791481-32vtplgnjnd1aufv3mcu1lthf31795fq.apps.googleusercontent.com
|
||||
ENV LOGGLY_CLIENT_TOKEN ab5663bf-241f-4d14-8783-7d80db77089a
|
||||
ENV NODE_ENV production
|
||||
ENV STRIPE_PUB_KEY pk_85fQ0yMECHNfHTSsZoxZXlPSwSNfA
|
||||
|
||||
# Install global packages
|
||||
RUN npm install -g gulp-cli mocha
|
||||
|
||||
# Clone Habitica repo and install dependencies
|
||||
RUN mkdir -p /usr/src/habitrpg
|
||||
WORKDIR /usr/src/habitrpg
|
||||
RUN git clone --branch release https://github.com/HabitRPG/habitica.git /usr/src/habitrpg
|
||||
RUN npm install
|
||||
RUN gulp build:prod --force
|
||||
|
||||
# Create Build dir
|
||||
RUN mkdir -p ./website/build
|
||||
|
||||
# Start Habitica
|
||||
EXPOSE 3000
|
||||
CMD ["node", "./website/transpiled-babel/index.js"]
|
||||
FROM node:boron
|
||||
|
||||
# Upgrade NPM to v5 (Yarn is needed because of this bug https://github.com/npm/npm/issues/16807)
|
||||
# The used solution is suggested here https://github.com/npm/npm/issues/16807#issuecomment-313591975
|
||||
RUN yarn global add npm@5
|
||||
# Install global packages
|
||||
RUN npm install -g gulp grunt-cli bower mocha
|
||||
|
||||
# Clone Habitica repo and install dependencies
|
||||
RUN mkdir -p /usr/src/habitrpg
|
||||
WORKDIR /usr/src/habitrpg
|
||||
RUN git clone https://github.com/HabitRPG/habitica.git /usr/src/habitrpg
|
||||
RUN cp config.json.example config.json
|
||||
RUN npm install
|
||||
RUN bower install --allow-root
|
||||
|
||||
# Create Build dir
|
||||
RUN mkdir -p ./website/build
|
||||
|
||||
# Start Habitica
|
||||
EXPOSE 3000
|
||||
CMD ["npm", "start"]
|
||||
|
||||
@@ -1,18 +0,0 @@
|
||||
FROM node:10
|
||||
|
||||
# Install global packages
|
||||
RUN npm install -g gulp-cli mocha
|
||||
|
||||
# Clone Habitica repo and install dependencies
|
||||
RUN mkdir -p /usr/src/habitrpg
|
||||
WORKDIR /usr/src/habitrpg
|
||||
RUN git clone https://github.com/HabitRPG/habitica.git /usr/src/habitrpg
|
||||
RUN cp config.json.example config.json
|
||||
RUN npm install
|
||||
|
||||
# Create Build dir
|
||||
RUN mkdir -p ./website/build
|
||||
|
||||
# Start Habitica
|
||||
EXPOSE 3000
|
||||
CMD ["npm", "start"]
|
||||
22
Dockerfile-Production
Normal file
22
Dockerfile-Production
Normal file
@@ -0,0 +1,22 @@
|
||||
FROM node:boron
|
||||
|
||||
# Upgrade NPM to v5 (Yarn is needed because of this bug https://github.com/npm/npm/issues/16807)
|
||||
# The used solution is suggested here https://github.com/npm/npm/issues/16807#issuecomment-313591975
|
||||
RUN yarn global add npm@5
|
||||
# Install global packages
|
||||
RUN npm install -g gulp grunt-cli bower mocha
|
||||
|
||||
# Clone Habitica repo and install dependencies
|
||||
RUN mkdir -p /usr/src/habitrpg
|
||||
WORKDIR /usr/src/habitrpg
|
||||
RUN git clone --branch release https://github.com/HabitRPG/habitica.git /usr/src/habitrpg
|
||||
RUN npm install
|
||||
RUN bower install --allow-root
|
||||
RUN gulp build:prod --force
|
||||
|
||||
# Create Build dir
|
||||
RUN mkdir -p ./website/build
|
||||
|
||||
# Start Habitica
|
||||
EXPOSE 3000
|
||||
CMD ["node", "./website/transpiled-babel/index.js"]
|
||||
142
Gruntfile.js
Normal file
142
Gruntfile.js
Normal file
@@ -0,0 +1,142 @@
|
||||
/*global module:false*/
|
||||
require('babel-register');
|
||||
var _ = require('lodash');
|
||||
module.exports = function(grunt) {
|
||||
|
||||
// Project configuration.
|
||||
grunt.initConfig({
|
||||
pkg: grunt.file.readJSON('package.json'),
|
||||
|
||||
karma: {
|
||||
unit: {
|
||||
configFile: 'test/client-old/spec/karma.conf.js'
|
||||
},
|
||||
continuous: {
|
||||
configFile: 'test/client-old/spec/karma.conf.js',
|
||||
singleRun: true,
|
||||
autoWatch: false
|
||||
}
|
||||
},
|
||||
|
||||
clean: {
|
||||
build: ['website/build']
|
||||
},
|
||||
|
||||
cssmin: {
|
||||
dist: {
|
||||
options: {
|
||||
report: 'gzip'
|
||||
},
|
||||
files:{
|
||||
"website/client-old/css/habitrpg-shared.css": [
|
||||
"website/assets/sprites/dist/spritesmith*.css",
|
||||
"website/assets/sprites/css/backer.css",
|
||||
"website/assets/sprites/css/Mounts.css",
|
||||
"website/assets/sprites/css/index.css"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
stylus: {
|
||||
build: {
|
||||
options: {
|
||||
compress: false, // AFTER
|
||||
'include css': true,
|
||||
paths: ['website/client-old']
|
||||
},
|
||||
files: {
|
||||
'website/build/app.css': ['website/client-old/css/index.styl'],
|
||||
'website/build/static.css': ['website/client-old/css/static.styl']
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
copy: {
|
||||
build: {
|
||||
files: [
|
||||
{expand: true, cwd: 'website/client-old/', src: 'favicon.ico', dest: 'website/build/'},
|
||||
{expand: true, cwd: 'website/client-old/', src: 'favicon_192x192.png', dest: 'website/build/'},
|
||||
{expand: true, cwd: 'website/assets/sprites/dist/', src: 'spritesmith*.png', dest: 'website/build/static/sprites'},
|
||||
{expand: true, cwd: 'website/assets/sprites/', src: 'backer-only/*.gif', dest: 'website/build/'},
|
||||
{expand: true, cwd: 'website/assets/sprites/', src: 'npc_ian.gif', dest: 'website/build/'},
|
||||
{expand: true, cwd: 'website/assets/sprites/', src: 'quest_*.gif', dest: 'website/build/'},
|
||||
{expand: true, cwd: 'website/client-old/', src: 'bower_components/bootstrap/dist/fonts/*', dest: 'website/build/'}
|
||||
]
|
||||
}
|
||||
},
|
||||
|
||||
// UPDATE IT WHEN YOU ADD SOME FILES NOT ALREADY MATCHED!
|
||||
hashres: {
|
||||
build: {
|
||||
options: {
|
||||
fileNameFormat: '${name}-${hash}.${ext}'
|
||||
},
|
||||
src: [
|
||||
'website/build/*.js',
|
||||
'website/build/*.css',
|
||||
'website/build/favicon.ico',
|
||||
'website/build/favicon_192x192.png',
|
||||
'website/build/*.png',
|
||||
'website/build/static/sprites/*.png',
|
||||
'website/build/*.gif',
|
||||
'website/build/bower_components/bootstrap/dist/fonts/*'
|
||||
],
|
||||
dest: 'website/build/*.css'
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
//Load build files from client-old/manifest.json
|
||||
grunt.registerTask('loadManifestFiles', 'Load all build files from client-old/manifest.json', function(){
|
||||
var files = grunt.file.readJSON('./website/client-old/manifest.json');
|
||||
var uglify = {};
|
||||
var cssmin = {};
|
||||
|
||||
_.each(files, function(val, key){
|
||||
|
||||
var js = uglify['website/build/' + key + '.js'] = [];
|
||||
|
||||
_.each(files[key].js, function(val){
|
||||
var path = "./";
|
||||
if( val.indexOf('common/') == -1)
|
||||
path = './website/client-old/';
|
||||
js.push(path + val);
|
||||
});
|
||||
|
||||
var css = cssmin['website/build/' + key + '.css'] = [];
|
||||
|
||||
_.each(files[key].css, function(val){
|
||||
var path = "./";
|
||||
if( val.indexOf('common/') == -1) {
|
||||
path = (val == 'app.css' || val == 'static.css') ? './website/build/' : './website/client-old/';
|
||||
}
|
||||
css.push(path + val)
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
grunt.config.set('uglify.build.files', uglify);
|
||||
grunt.config.set('uglify.build.options', {compress: false});
|
||||
|
||||
grunt.config.set('cssmin.build.files', cssmin);
|
||||
// Rewrite urls to relative path
|
||||
grunt.config.set('cssmin.build.options', {'target': 'website/client-old/css/whatever-css.css'});
|
||||
});
|
||||
|
||||
// Register tasks.
|
||||
grunt.registerTask('build:prod', ['loadManifestFiles', 'clean:build', 'uglify', 'stylus', 'cssmin', 'copy:build', 'hashres']);
|
||||
grunt.registerTask('build:dev', ['cssmin', 'stylus']);
|
||||
grunt.registerTask('build:test', ['build:dev']);
|
||||
|
||||
// Load tasks
|
||||
grunt.loadNpmTasks('grunt-contrib-uglify');
|
||||
grunt.loadNpmTasks('grunt-contrib-clean');
|
||||
grunt.loadNpmTasks('grunt-contrib-stylus');
|
||||
grunt.loadNpmTasks('grunt-contrib-cssmin');
|
||||
grunt.loadNpmTasks('grunt-contrib-copy');
|
||||
grunt.loadNpmTasks('grunt-contrib-watch');
|
||||
grunt.loadNpmTasks('grunt-hashres');
|
||||
if (process.env.NODE_ENV !== 'production') grunt.loadNpmTasks('grunt-karma');
|
||||
|
||||
};
|
||||
@@ -1,12 +1,11 @@
|
||||
Habitica [](https://travis-ci.org/HabitRPG/habitica) [](https://codeclimate.com/github/HabitRPG/habitrpg) [](https://coveralls.io/github/HabitRPG/habitica?branch=develop) [](https://www.bountysource.com/trackers/68393-habitrpg?utm_source=68393&utm_medium=shield&utm_campaign=TRACKER_BADGE) [](https://www.codetriage.com/habitrpg/habitica)
|
||||
Habitica [](https://travis-ci.org/HabitRPG/habitica) [](https://codeclimate.com/github/HabitRPG/habitrpg) [](https://coveralls.io/github/HabitRPG/habitica?branch=develop) [](https://www.bountysource.com/trackers/68393-habitrpg?utm_source=68393&utm_medium=shield&utm_campaign=TRACKER_BADGE)
|
||||
===============
|
||||
|
||||
[](https://greenkeeper.io/)
|
||||
|
||||
[Habitica](https://habitica.com) is an open source habit building program which treats your life like a Role Playing Game. Level up as you succeed, lose HP as you fail, earn money to buy weapons and armor.
|
||||
|
||||
We need more programmers! Your assistance will be greatly appreciated.
|
||||
|
||||
For an introduction to the technologies used and how the software is organized, refer to [Guidance for Blacksmiths](http://habitica.fandom.com/wiki/Guidance_for_Blacksmiths).
|
||||
For an introduction to the technologies used and how the software is organized, refer to [Guidance for Blacksmiths](http://habitica.wikia.com/wiki/Guidance_for_Blacksmiths).
|
||||
|
||||
To set up a local install of Habitica for development and testing on various platforms, see [Setting up Habitica Locally](http://habitica.wikia.com/wiki/Setting_up_Habitica_Locally).
|
||||
|
||||
To set up a local install of Habitica for development and testing on various platforms, see [Setting up Habitica Locally](http://habitica.fandom.com/wiki/Setting_up_Habitica_Locally).
|
||||
|
||||
@@ -8,4 +8,4 @@ minimal dependencies on the developer's local platform. It can be used
|
||||
on a variety of systems including Windows, Mac OS X, and Linux.
|
||||
|
||||
Instructions for using the Habitica Vagrant environment are in
|
||||
[Setting up Habitica Locally](http://habitica.fandom.com/wiki/Setting_up_Habitica_Locally).
|
||||
[Setting up Habitica Locally](http://habitica.wikia.com/wiki/Setting_up_Habitica_Locally).
|
||||
|
||||
@@ -16,7 +16,5 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
|
||||
config.vm.hostname = "habitrpg"
|
||||
config.vm.network "forwarded_port", guest: 3000, host: 3000, auto_correct: true
|
||||
config.vm.usable_port_range = (3000..3050)
|
||||
config.vm.network "forwarded_port", guest: 8080, host: 8080, auto_correct: true
|
||||
config.vm.usable_port_range = (8080..8130)
|
||||
config.vm.provision :shell, :path => "vagrant_scripts/vagrant.sh"
|
||||
end
|
||||
|
||||
56
bower.json
Normal file
56
bower.json
Normal file
@@ -0,0 +1,56 @@
|
||||
{
|
||||
"name": "HabitRPG",
|
||||
"version": "0.1.1",
|
||||
"homepage": "https://github.com/lefnire/habitrpg",
|
||||
"authors": [
|
||||
"Tyler Renelle <tylerrenelle@gmail.com>"
|
||||
],
|
||||
"private": true,
|
||||
"ignore": [
|
||||
"**/.*",
|
||||
"node_modules",
|
||||
"website/client-old/bower_components",
|
||||
"test",
|
||||
"tests"
|
||||
],
|
||||
"dependencies": {
|
||||
"Angular-At-Directive": "snicker/Angular-At-Directive#c27bae207aa06d1e",
|
||||
"angular": "1.3.9",
|
||||
"angular-bootstrap": "0.13.0",
|
||||
"angular-filter": "0.5.1",
|
||||
"angular-loading-bar": "0.6.0",
|
||||
"angular-resource": "1.3.9",
|
||||
"angular-sanitize": "1.3.9",
|
||||
"angular-ui": "0.4.0",
|
||||
"angular-ui-router": "0.2.13",
|
||||
"angular-ui-select2": "angular-ui/ui-select2#afa6589a54cb72815f",
|
||||
"angular-ui-utils": "0.1.0",
|
||||
"bootstrap": "3.1.0",
|
||||
"bootstrap-growl": "ifightcrime/bootstrap-growl#162daa41cd1155f",
|
||||
"bootstrap-tour": "0.10.1",
|
||||
"css-social-buttons": "samcollins/css-social-buttons#v1.1.1 ",
|
||||
"github-buttons": "mdo/github-buttons#v3.0.0",
|
||||
"hello": "1.14.1",
|
||||
"jquery": "2.1.0",
|
||||
"jquery-colorbox": "1.4.36",
|
||||
"jquery-ui": "1.10.3",
|
||||
"jquery.cookie": "1.4.0",
|
||||
"js-emoji": "snicker/js-emoji#f25d8a303f",
|
||||
"ngInfiniteScroll": "1.1.0",
|
||||
"pnotify": "1.3.1",
|
||||
"sticky": "1.0.3",
|
||||
"swagger-ui": "wordnik/swagger-ui#v2.0.24",
|
||||
"smart-app-banner": "78ef9c0679723b25be1a0ae04f7b4aef7cbced4f",
|
||||
"habitica-markdown": "1.2.2",
|
||||
"pusher-js-auth": "^2.0.0",
|
||||
"pusher-websocket-iso": "pusher#^3.2.0",
|
||||
"taggle": "^1.11.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"angular-mocks": "1.3.9"
|
||||
},
|
||||
"resolutions": {
|
||||
"angular": "1.3.9",
|
||||
"jquery": ">=1.9.0"
|
||||
}
|
||||
}
|
||||
@@ -1,83 +1,106 @@
|
||||
{
|
||||
"ADMIN_EMAIL": "you@example.com",
|
||||
"AMAZON_PAYMENTS_CLIENT_ID": "CLIENT_ID",
|
||||
"AMAZON_PAYMENTS_MODE": "sandbox",
|
||||
"AMAZON_PAYMENTS_MWS_KEY": "MWS_KEY",
|
||||
"AMAZON_PAYMENTS_MWS_SECRET": "MWS_SECRET",
|
||||
"AMAZON_PAYMENTS_SELLER_ID": "SELLER_ID",
|
||||
"AMPLITUDE_KEY": "AMPLITUDE_KEY",
|
||||
"AMPLITUDE_SECRET": "AMPLITUDE_SECRET",
|
||||
"BASE_URL": "http://localhost:3000",
|
||||
"CRON_SAFE_MODE": "false",
|
||||
"CRON_SEMI_SAFE_MODE": "false",
|
||||
"DISABLE_REQUEST_LOGGING": "true",
|
||||
"EMAILS_COMMUNITY_MANAGER_EMAIL": "admin@habitica.com",
|
||||
"EMAILS_PRESS_ENQUIRY_EMAIL": "admin@habitica.com",
|
||||
"EMAILS_TECH_ASSISTANCE_EMAIL": "admin@habitica.com",
|
||||
"EMAIL_SERVER_AUTH_PASSWORD": "password",
|
||||
"EMAIL_SERVER_AUTH_USER": "user",
|
||||
"EMAIL_SERVER_URL": "http://example.com",
|
||||
"ENABLE_CONSOLE_LOGS_IN_PROD": "false",
|
||||
"ENABLE_CONSOLE_LOGS_IN_TEST": "false",
|
||||
"FACEBOOK_KEY": "123456789012345",
|
||||
"FACEBOOK_SECRET": "aaaabbbbccccddddeeeeffff00001111",
|
||||
"FLAG_REPORT_EMAIL": "email@example.com, email2@example.com",
|
||||
"GA_ID": "GA_ID",
|
||||
"GOOGLE_CLIENT_ID": "123456789012345",
|
||||
"GOOGLE_CLIENT_SECRET": "aaaabbbbccccddddeeeeffff00001111",
|
||||
"IAP_GOOGLE_KEYDIR": "/path/to/google/public/key/dir/",
|
||||
"IGNORE_REDIRECT": "true",
|
||||
"ITUNES_SHARED_SECRET": "aaaabbbbccccddddeeeeffff00001111",
|
||||
"LOGGLY_CLIENT_TOKEN": "token",
|
||||
"LOGGLY_SUBDOMAIN": "example-subdomain",
|
||||
"LOGGLY_TOKEN": "example-token",
|
||||
"MAINTENANCE_MODE": "false",
|
||||
"NODE_DB_URI": "mongodb://localhost/habitrpg",
|
||||
"NODE_ENV": "development",
|
||||
"PATH": "bin:node_modules/.bin:/usr/local/bin:/usr/bin:/bin",
|
||||
"PAYPAL_BILLING_PLANS_basic_12mo": "basic_12mo",
|
||||
"PAYPAL_BILLING_PLANS_basic_3mo": "basic_3mo",
|
||||
"PAYPAL_BILLING_PLANS_basic_6mo": "basic_6mo",
|
||||
"PAYPAL_BILLING_PLANS_basic_earned": "basic_earned",
|
||||
"PAYPAL_BILLING_PLANS_google_6mo": "google_6mo",
|
||||
"PAYPAL_CLIENT_ID": "client_id",
|
||||
"PAYPAL_CLIENT_SECRET": "client_secret",
|
||||
"PAYPAL_EXPERIENCE_PROFILE_ID": "xp_profile_id",
|
||||
"PAYPAL_MODE": "sandbox",
|
||||
"PLAY_API_ACCESS_TOKEN": "aaaabbbbccccddddeeeeffff00001111",
|
||||
"PLAY_API_CLIENT_ID": "aaaabbbbccccddddeeeeffff00001111",
|
||||
"PLAY_API_CLIENT_SECRET": "aaaabbbbccccddddeeeeffff00001111",
|
||||
"PLAY_API_REFRESH_TOKEN": "aaaabbbbccccddddeeeeffff00001111",
|
||||
"PORT": 3000,
|
||||
"PUSH_CONFIGS_APN_ENABLED": "false",
|
||||
"PUSH_CONFIGS_APN_KEY": "xxxxxxxxxx",
|
||||
"PUSH_CONFIGS_APN_KEY_ID": "xxxxxxxxxx",
|
||||
"PUSH_CONFIGS_APN_TEAM_ID": "aaabbbcccd",
|
||||
"PUSH_CONFIGS_FCM_SERVER_API_KEY": "aaabbbcccd",
|
||||
"S3_ACCESS_KEY_ID": "accessKeyId",
|
||||
"S3_BUCKET": "bucket",
|
||||
"S3_SECRET_ACCESS_KEY": "secretAccessKey",
|
||||
"SESSION_SECRET": "YOUR SECRET HERE",
|
||||
"SESSION_SECRET_IV": "12345678912345678912345678912345",
|
||||
"SESSION_SECRET_KEY": "1234567891234567891234567891234567891234567891234567891234567891",
|
||||
"SITE_HTTP_AUTH_ENABLED": "false",
|
||||
"SITE_HTTP_AUTH_PASSWORD": "password",
|
||||
"SITE_HTTP_AUTH_USERNAME": "admin",
|
||||
"SLACK_FLAGGING_FOOTER_LINK": "https://habitrpg.github.io/flag-o-rama/",
|
||||
"SLACK_FLAGGING_URL": "https://hooks.slack.com/services/id/id/id",
|
||||
"SLACK_SUBSCRIPTIONS_URL": "https://hooks.slack.com/services/id/id/id",
|
||||
"SLACK_URL": "https://hooks.slack.com/services/some-url",
|
||||
"SMTP_HOST": "example.com",
|
||||
"SMTP_PASS": "password",
|
||||
"SMTP_PORT": 587,
|
||||
"SMTP_SERVICE": "Gmail",
|
||||
"SMTP_TLS": "true",
|
||||
"SMTP_USER": "user@example.com",
|
||||
"STRIPE_API_KEY": "aaaabbbbccccddddeeeeffff00001111",
|
||||
"STRIPE_PUB_KEY": "22223333444455556666777788889999",
|
||||
"TEST_DB_URI": "mongodb://localhost/habitrpg_test",
|
||||
"TRANSIFEX_SLACK_CHANNEL": "transifex",
|
||||
"WEB_CONCURRENCY": 1,
|
||||
"SKIP_SSL_CHECK_KEY": "key",
|
||||
"ENABLE_STACKDRIVER_TRACING": "false"
|
||||
"PORT":3000,
|
||||
"ENABLE_CONSOLE_LOGS_IN_PROD":"false",
|
||||
"IP":"0.0.0.0",
|
||||
"WEB_CONCURRENCY":1,
|
||||
"BASE_URL":"http://localhost:3000",
|
||||
"FACEBOOK_KEY":"123456789012345",
|
||||
"FACEBOOK_SECRET":"aaaabbbbccccddddeeeeffff00001111",
|
||||
"GOOGLE_CLIENT_ID":"123456789012345",
|
||||
"GOOGLE_CLIENT_SECRET":"aaaabbbbccccddddeeeeffff00001111",
|
||||
"PLAY_API": {
|
||||
"CLIENT_ID": "aaaabbbbccccddddeeeeffff00001111",
|
||||
"CLIENT_SECRET": "aaaabbbbccccddddeeeeffff00001111",
|
||||
"ACCESS_TOKEN":"aaaabbbbccccddddeeeeffff00001111",
|
||||
"REFRESH_TOKEN":"aaaabbbbccccddddeeeeffff00001111"
|
||||
},
|
||||
"NODE_DB_URI":"mongodb://localhost/habitrpg",
|
||||
"TEST_DB_URI":"mongodb://localhost/habitrpg_test",
|
||||
"NODE_ENV":"development",
|
||||
"ENABLE_CONSOLE_LOGS_IN_TEST": false,
|
||||
"CRON_SAFE_MODE":"false",
|
||||
"CRON_SEMI_SAFE_MODE":"false",
|
||||
"MAINTENANCE_MODE": "false",
|
||||
"SESSION_SECRET":"YOUR SECRET HERE",
|
||||
"ADMIN_EMAIL": "you@example.com",
|
||||
"SMTP_USER":"user@example.com",
|
||||
"SMTP_PASS":"password",
|
||||
"SMTP_SERVICE":"Gmail",
|
||||
"SMTP_HOST":"example.com",
|
||||
"SMTP_PORT": 587,
|
||||
"SMTP_TLS": true,
|
||||
"STRIPE_API_KEY":"aaaabbbbccccddddeeeeffff00001111",
|
||||
"STRIPE_PUB_KEY":"22223333444455556666777788889999",
|
||||
"NEW_RELIC_LICENSE_KEY":"NEW_RELIC_LICENSE_KEY",
|
||||
"NEW_RELIC_NO_CONFIG_FILE":"true",
|
||||
"NEW_RELIC_APPLICATION_ID":"NEW_RELIC_APPLICATION_ID",
|
||||
"NEW_RELIC_API_KEY":"NEW_RELIC_API_KEY",
|
||||
"GA_ID": "GA_ID",
|
||||
"AMPLITUDE_KEY": "AMPLITUDE_KEY",
|
||||
"AMAZON_PAYMENTS": {
|
||||
"SELLER_ID": "SELLER_ID",
|
||||
"CLIENT_ID": "CLIENT_ID",
|
||||
"MWS_KEY": "",
|
||||
"MWS_SECRET": ""
|
||||
},
|
||||
"FLAG_REPORT_EMAIL": "email@mod.com,email2@mod.com",
|
||||
"EMAIL_SERVER": {
|
||||
"url": "http://example.com",
|
||||
"authUser": "user",
|
||||
"authPassword": "password"
|
||||
},
|
||||
"S3":{
|
||||
"bucket":"bucket",
|
||||
"accessKeyId":"accessKeyId",
|
||||
"secretAccessKey":"secretAccessKey"
|
||||
},
|
||||
"SLACK_URL": "https://hooks.slack.com/services/some-url",
|
||||
"TRANSIFEX_SLACK_CHANNEL": "transifex",
|
||||
"PAYPAL":{
|
||||
"billing_plans": {
|
||||
"basic_earned":"basic_earned",
|
||||
"basic_3mo":"basic_3mo",
|
||||
"basic_6mo":"basic_6mo",
|
||||
"google_6mo":"google_6mo",
|
||||
"basic_12mo":"basic_12mo"
|
||||
},
|
||||
"mode":"sandbox",
|
||||
"client_id":"client_id",
|
||||
"client_secret":"client_secret",
|
||||
"experience_profile_id": ""
|
||||
},
|
||||
"IAP_GOOGLE_KEYDIR": "/path/to/google/public/key/dir/",
|
||||
"LOGGLY_TOKEN": "token",
|
||||
"LOGGLY_ACCOUNT": "account",
|
||||
"PUSH_CONFIGS": {
|
||||
"GCM_SERVER_API_KEY": "",
|
||||
"APN_ENABLED": "false",
|
||||
"FCM_SERVER_API_KEY": ""
|
||||
},
|
||||
"SITE_HTTP_AUTH": {
|
||||
"ENABLED": "false",
|
||||
"USERNAME": "admin",
|
||||
"PASSWORD": "password"
|
||||
},
|
||||
"PUSHER": {
|
||||
"ENABLED": "false",
|
||||
"APP_ID": "appId",
|
||||
"KEY": "key",
|
||||
"SECRET": "secret"
|
||||
},
|
||||
"SLACK": {
|
||||
"FLAGGING_URL": "https://hooks.slack.com/services/id/id/id",
|
||||
"FLAGGING_FOOTER_LINK": "https://habitrpg.github.io/flag-o-rama/",
|
||||
"SUBSCRIPTIONS_URL": "https://hooks.slack.com/services/id/id/id"
|
||||
},
|
||||
"ITUNES_SHARED_SECRET": "aaaabbbbccccddddeeeeffff00001111",
|
||||
"EMAILS" : {
|
||||
"COMMUNITY_MANAGER_EMAIL" : "leslie@habitica.com",
|
||||
"TECH_ASSISTANCE_EMAIL" : "admin@habitica.com",
|
||||
"PRESS_ENQUIRY_EMAIL" : "leslie@habitica.com"
|
||||
},
|
||||
"LOGGLY" : {
|
||||
"TOKEN" : "example-token",
|
||||
"SUBDOMAIN" : "exmaple-subdomain"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,100 +0,0 @@
|
||||
import max from 'lodash/max';
|
||||
import mean from 'lodash/mean';
|
||||
import monk from 'monk';
|
||||
import round from 'lodash/round';
|
||||
import sum from 'lodash/sum';
|
||||
|
||||
/*
|
||||
* Output data on subscribers' task histories, formatted for CSV.
|
||||
* User ID,Count of Dailies,Count of Habits,Total History Size,Max History Size,Mean History Size,Median History Size
|
||||
*/
|
||||
const connectionString = 'mongodb://localhost:27017/habitrpg?auto_reconnect=true'; // FOR TEST DATABASE
|
||||
|
||||
let dbUsers = monk(connectionString).get('users', { castIds: false });
|
||||
let dbTasks = monk(connectionString).get('tasks', { castIds: false });
|
||||
|
||||
function usersReport () {
|
||||
let allHistoryLengths = [];
|
||||
|
||||
console.info('User ID,Count of Dailies,Count of Habits,Total History Size,Max History Size,Mean History Size,Median History Size');
|
||||
|
||||
dbUsers.find(
|
||||
{
|
||||
$and:
|
||||
[
|
||||
{'purchased.plan.planId': {$ne:null}},
|
||||
{'purchased.plan.planId': {$ne:''}},
|
||||
],
|
||||
$or:
|
||||
[
|
||||
{'purchased.plan.dateTerminated': null},
|
||||
{'purchased.plan.dateTerminated': ''},
|
||||
{'purchased.plan.dateTerminated': {$gt:new Date()}},
|
||||
],
|
||||
},
|
||||
{
|
||||
fields: {_id: 1},
|
||||
}
|
||||
).each((user, {close, pause, resume}) => {
|
||||
let historyLengths = [];
|
||||
let habitCount = 0;
|
||||
let dailyCount = 0;
|
||||
|
||||
pause();
|
||||
return dbTasks.find(
|
||||
{
|
||||
userId: user._id,
|
||||
$or:
|
||||
[
|
||||
{type: 'habit'},
|
||||
{type: 'daily'},
|
||||
],
|
||||
},
|
||||
{
|
||||
fields: {
|
||||
type: 1,
|
||||
history: 1,
|
||||
},
|
||||
}
|
||||
).each((task) => {
|
||||
if (task.type === 'habit') {
|
||||
habitCount++;
|
||||
}
|
||||
if (task.type === 'daily') {
|
||||
dailyCount++;
|
||||
}
|
||||
if (task.history.length > 0) {
|
||||
allHistoryLengths.push(task.history.length);
|
||||
historyLengths.push(task.history.length);
|
||||
}
|
||||
}).then(() => {
|
||||
const totalHistory = sum(historyLengths);
|
||||
const maxHistory = historyLengths.length > 0 ? max(historyLengths) : 0;
|
||||
const meanHistory = historyLengths.length > 0 ? round(mean(historyLengths)) : 0;
|
||||
const medianHistory = historyLengths.length > 0 ? median(historyLengths) : 0;
|
||||
console.info(`${user._id},${dailyCount},${habitCount},${totalHistory},${maxHistory},${meanHistory},${medianHistory}`);
|
||||
resume();
|
||||
});
|
||||
}).then(() => {
|
||||
console.info(`Total Subscriber History Entries: ${sum(allHistoryLengths)}`);
|
||||
console.info(`Largest History Size: ${max(allHistoryLengths)}`);
|
||||
console.info(`Mean History Size: ${round(mean(allHistoryLengths))}`);
|
||||
console.info(`Median History Size: ${median(allHistoryLengths)}`);
|
||||
return process.exit(0);
|
||||
});
|
||||
}
|
||||
|
||||
function median(values) { // https://gist.github.com/caseyjustus/1166258
|
||||
values.sort( function(a,b) {return a - b;} );
|
||||
|
||||
var half = Math.floor(values.length/2);
|
||||
|
||||
if (values.length % 2) {
|
||||
return values[half];
|
||||
}
|
||||
else {
|
||||
return (values[half-1] + values[half]) / 2.0;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = usersReport;
|
||||
@@ -1,48 +0,0 @@
|
||||
import monk from 'monk';
|
||||
import nconf from 'nconf';
|
||||
|
||||
/*
|
||||
* Output data on users who completed all the To-Do tasks in the 2018 Back-to-School Challenge.
|
||||
* User ID,Profile Name
|
||||
*/
|
||||
const CONNECTION_STRING = nconf.get('MIGRATION_CONNECT_STRING');
|
||||
const CHALLENGE_ID = '0acb1d56-1660-41a4-af80-9259f080b62b';
|
||||
|
||||
let dbUsers = monk(CONNECTION_STRING).get('users', { castIds: false });
|
||||
let dbTasks = monk(CONNECTION_STRING).get('tasks', { castIds: false });
|
||||
|
||||
function usersReport() {
|
||||
console.info('User ID,Profile Name');
|
||||
let userCount = 0;
|
||||
|
||||
dbUsers.find(
|
||||
{challenges: CHALLENGE_ID},
|
||||
{fields:
|
||||
{_id: 1, 'profile.name': 1}
|
||||
},
|
||||
).each((user, {close, pause, resume}) => {
|
||||
pause();
|
||||
userCount++;
|
||||
let completedTodos = 0;
|
||||
return dbTasks.find(
|
||||
{
|
||||
userId: user._id,
|
||||
'challenge.id': CHALLENGE_ID,
|
||||
type: 'todo',
|
||||
},
|
||||
{fields: {completed: 1}}
|
||||
).each((task) => {
|
||||
if (task.completed) completedTodos++;
|
||||
}).then(() => {
|
||||
if (completedTodos >= 7) {
|
||||
console.info(`${user._id},${user.profile.name}`);
|
||||
}
|
||||
resume();
|
||||
});
|
||||
}).then(() => {
|
||||
console.info(`${userCount} users reviewed`);
|
||||
return process.exit(0);
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = usersReport;
|
||||
@@ -1,14 +1,3 @@
|
||||
version: "3"
|
||||
services:
|
||||
|
||||
client:
|
||||
environment:
|
||||
- NODE_ENV=development
|
||||
volumes:
|
||||
- '.:/usr/src/habitrpg'
|
||||
|
||||
server:
|
||||
environment:
|
||||
- NODE_ENV=development
|
||||
volumes:
|
||||
- '.:/usr/src/habitrpg'
|
||||
web:
|
||||
volumes:
|
||||
- '.:/usr/src/habitrpg'
|
||||
|
||||
@@ -1,36 +1,13 @@
|
||||
version: "3"
|
||||
services:
|
||||
web:
|
||||
build: .
|
||||
ports:
|
||||
- "3000:3000"
|
||||
links:
|
||||
- mongo
|
||||
environment:
|
||||
- NODE_DB_URI=mongodb://mongo/habitrpg
|
||||
|
||||
client:
|
||||
build: .
|
||||
networks:
|
||||
- habitica
|
||||
environment:
|
||||
- BASE_URL=http://server:3000
|
||||
ports:
|
||||
- "8080:8080"
|
||||
command: ["npm", "run", "client:dev"]
|
||||
depends_on:
|
||||
- server
|
||||
|
||||
server:
|
||||
build: .
|
||||
ports:
|
||||
- "3000:3000"
|
||||
networks:
|
||||
- habitica
|
||||
environment:
|
||||
- NODE_DB_URI=mongodb://mongo/habitrpg
|
||||
depends_on:
|
||||
- mongo
|
||||
|
||||
mongo:
|
||||
image: mongo:3.4
|
||||
ports:
|
||||
- "27017:27017"
|
||||
networks:
|
||||
- habitica
|
||||
|
||||
networks:
|
||||
habitica:
|
||||
driver: bridge
|
||||
mongo:
|
||||
image: mongo
|
||||
ports:
|
||||
- "27017:27017"
|
||||
|
||||
10
gulp/.eslintrc
Normal file
10
gulp/.eslintrc
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"root": true,
|
||||
"env": {
|
||||
"node": true,
|
||||
},
|
||||
"extends": [
|
||||
"habitrpg/server",
|
||||
"habitrpg/babel"
|
||||
],
|
||||
}
|
||||
@@ -2,13 +2,13 @@ import gulp from 'gulp';
|
||||
import clean from 'rimraf';
|
||||
import apidoc from 'apidoc';
|
||||
|
||||
const APIDOC_DEST_PATH = './apidoc_build';
|
||||
const APIDOC_DEST_PATH = './website/build/apidoc';
|
||||
const APIDOC_SRC_PATH = './website/server';
|
||||
gulp.task('apidoc:clean', (done) => {
|
||||
clean(APIDOC_DEST_PATH, done);
|
||||
});
|
||||
|
||||
gulp.task('apidoc', gulp.series('apidoc:clean', (done) => {
|
||||
gulp.task('apidoc', ['apidoc:clean'], (done) => {
|
||||
let result = apidoc.createDoc({
|
||||
src: APIDOC_SRC_PATH,
|
||||
dest: APIDOC_DEST_PATH,
|
||||
@@ -19,8 +19,8 @@ gulp.task('apidoc', gulp.series('apidoc:clean', (done) => {
|
||||
} else {
|
||||
done();
|
||||
}
|
||||
}));
|
||||
});
|
||||
|
||||
gulp.task('apidoc:watch', gulp.series('apidoc', (done) => {
|
||||
return gulp.watch(`${APIDOC_SRC_PATH}/**/*.js`, gulp.series('apidoc', done));
|
||||
}));
|
||||
gulp.task('apidoc:watch', ['apidoc'], () => {
|
||||
return gulp.watch(APIDOC_SRC_PATH + '/**/*.js', ['apidoc']);
|
||||
});
|
||||
|
||||
31
gulp/gulp-babelify.js
Normal file
31
gulp/gulp-babelify.js
Normal file
@@ -0,0 +1,31 @@
|
||||
import gulp from 'gulp';
|
||||
import browserify from 'browserify';
|
||||
import source from 'vinyl-source-stream';
|
||||
import buffer from 'vinyl-buffer';
|
||||
import uglify from 'gulp-uglify';
|
||||
import sourcemaps from 'gulp-sourcemaps';
|
||||
import babel from 'babelify';
|
||||
|
||||
gulp.task('browserify', function () {
|
||||
let bundler = browserify({
|
||||
entries: './website/common/browserify.js',
|
||||
debug: true,
|
||||
transform: [[babel, { compact: false }]],
|
||||
});
|
||||
|
||||
return bundler.bundle()
|
||||
.pipe(source('habitrpg-shared.js'))
|
||||
.pipe(buffer())
|
||||
.pipe(sourcemaps.init({loadMaps: true}))
|
||||
.pipe(uglify())
|
||||
.on('error', function (err) {
|
||||
console.error(err);
|
||||
this.emit('end');
|
||||
})
|
||||
.pipe(sourcemaps.write('./'))
|
||||
.pipe(gulp.dest('./website/client-old/js/'));
|
||||
});
|
||||
|
||||
gulp.task('browserify:watch', () => {
|
||||
gulp.watch('./website/common/script/**/*.js', ['browserify']);
|
||||
});
|
||||
36
gulp/gulp-bootstrap.js
Normal file
36
gulp/gulp-bootstrap.js
Normal file
@@ -0,0 +1,36 @@
|
||||
import gulp from 'gulp';
|
||||
import fs from 'fs';
|
||||
|
||||
// Copy Bootstrap 4 config variables from /website /node_modules so we can check
|
||||
// them into Git
|
||||
|
||||
const BOOSTRAP_NEW_CONFIG_PATH = 'website/client/assets/scss/bootstrap_config.scss';
|
||||
const BOOTSTRAP_ORIGINAL_CONFIG_PATH = 'node_modules/bootstrap/scss/_custom.scss';
|
||||
|
||||
// https://stackoverflow.com/a/14387791/969528
|
||||
function copyFile(source, target, cb) {
|
||||
let cbCalled = false;
|
||||
|
||||
function done(err) {
|
||||
if (!cbCalled) {
|
||||
cb(err);
|
||||
cbCalled = true;
|
||||
}
|
||||
}
|
||||
|
||||
let rd = fs.createReadStream(source);
|
||||
rd.on('error', done);
|
||||
let wr = fs.createWriteStream(target);
|
||||
wr.on('error', done);
|
||||
wr.on('close', () => done());
|
||||
rd.pipe(wr);
|
||||
}
|
||||
|
||||
gulp.task('bootstrap', (done) => {
|
||||
// use new config
|
||||
copyFile(
|
||||
BOOSTRAP_NEW_CONFIG_PATH,
|
||||
BOOTSTRAP_ORIGINAL_CONFIG_PATH,
|
||||
done,
|
||||
);
|
||||
});
|
||||
@@ -1,6 +1,16 @@
|
||||
import gulp from 'gulp';
|
||||
import runSequence from 'run-sequence';
|
||||
import babel from 'gulp-babel';
|
||||
import webpackProductionBuild from '../webpack/build';
|
||||
require('gulp-grunt')(gulp);
|
||||
|
||||
gulp.task('build', () => {
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
gulp.start('build:prod');
|
||||
} else {
|
||||
gulp.start('build:dev');
|
||||
}
|
||||
});
|
||||
|
||||
gulp.task('build:src', () => {
|
||||
return gulp.src('website/server/**/*.js')
|
||||
@@ -14,30 +24,33 @@ gulp.task('build:common', () => {
|
||||
.pipe(gulp.dest('website/common/transpiled-babel/'));
|
||||
});
|
||||
|
||||
gulp.task('build:server', gulp.series('build:src', 'build:common', done => done()));
|
||||
gulp.task('build:server', ['build:src', 'build:common']);
|
||||
|
||||
// Client Production Build
|
||||
gulp.task('build:client', (done) => {
|
||||
gulp.task('build:client', ['bootstrap'], (done) => {
|
||||
webpackProductionBuild((err, output) => {
|
||||
if (err) return done(err);
|
||||
console.log(output); // eslint-disable-line no-console
|
||||
done();
|
||||
console.log(output);
|
||||
});
|
||||
});
|
||||
|
||||
gulp.task('build:prod', gulp.series(
|
||||
'build:server',
|
||||
gulp.task('build:dev', ['browserify', 'prepare:staticNewStuff'], (done) => {
|
||||
gulp.start('grunt-build:dev', done);
|
||||
});
|
||||
|
||||
gulp.task('build:dev:watch', ['build:dev'], () => {
|
||||
gulp.watch(['website/client-old/**/*.styl', 'website/common/script/*']);
|
||||
});
|
||||
|
||||
gulp.task('build:prod', [
|
||||
'browserify',
|
||||
'build:server',
|
||||
'prepare:staticNewStuff',
|
||||
'build:client',
|
||||
'apidoc',
|
||||
done => done()
|
||||
));
|
||||
|
||||
let buildArgs = [];
|
||||
|
||||
if (process.env.NODE_ENV === 'production') { // eslint-disable-line no-process-env
|
||||
buildArgs.push('build:prod');
|
||||
}
|
||||
|
||||
gulp.task('build', gulp.series(buildArgs, (done) => {
|
||||
done();
|
||||
}));
|
||||
], (done) => {
|
||||
runSequence(
|
||||
'grunt-build:prod',
|
||||
'apidoc',
|
||||
done
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import mongoose from 'mongoose';
|
||||
import autoinc from 'mongoose-id-autoinc';
|
||||
import logger from '../website/server/libs/logger';
|
||||
import nconf from 'nconf';
|
||||
import repl from 'repl';
|
||||
@@ -6,11 +7,10 @@ import gulp from 'gulp';
|
||||
|
||||
// Add additional properties to the repl's context
|
||||
let improveRepl = (context) => {
|
||||
|
||||
// Let "exit" and "quit" terminate the console
|
||||
['exit', 'quit'].forEach((term) => {
|
||||
Object.defineProperty(context, term, { get () {
|
||||
process.exit();
|
||||
}});
|
||||
Object.defineProperty(context, term, { get () { process.exit(); }});
|
||||
});
|
||||
|
||||
// "clear" clears the screen
|
||||
@@ -18,28 +18,30 @@ let improveRepl = (context) => {
|
||||
process.stdout.write('\u001B[2J\u001B[0;0f');
|
||||
}});
|
||||
|
||||
context.Challenge = require('../website/server/models/challenge').model; // eslint-disable-line global-require
|
||||
context.Group = require('../website/server/models/group').model; // eslint-disable-line global-require
|
||||
context.User = require('../website/server/models/user').model; // eslint-disable-line global-require
|
||||
context.Challenge = require('../website/server/models/challenge').model;
|
||||
context.Group = require('../website/server/models/group').model;
|
||||
context.User = require('../website/server/models/user').model;
|
||||
|
||||
const isProd = nconf.get('NODE_ENV') === 'production';
|
||||
const mongooseOptions = !isProd ? {} : {
|
||||
keepAlive: 1,
|
||||
connectTimeoutMS: 30000,
|
||||
var isProd = nconf.get('NODE_ENV') === 'production';
|
||||
var mongooseOptions = !isProd ? {} : {
|
||||
replset: { socketOptions: { keepAlive: 1, connectTimeoutMS: 30000 } },
|
||||
server: { socketOptions: { keepAlive: 1, connectTimeoutMS: 30000 } },
|
||||
};
|
||||
mongoose.connect(
|
||||
nconf.get('NODE_DB_URI'),
|
||||
mongooseOptions,
|
||||
(err) => {
|
||||
if (err) throw err;
|
||||
logger.info('Connected with Mongoose');
|
||||
}
|
||||
autoinc.init(
|
||||
mongoose.connect(
|
||||
nconf.get('NODE_DB_URI'),
|
||||
mongooseOptions,
|
||||
function (err) {
|
||||
if (err) throw err;
|
||||
logger.info('Connected with Mongoose');
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
};
|
||||
|
||||
gulp.task('console', (done) => {
|
||||
gulp.task('console', (cb) => {
|
||||
improveRepl(repl.start({
|
||||
prompt: 'Habitica > ',
|
||||
}).context);
|
||||
done();
|
||||
});
|
||||
|
||||
10
gulp/gulp-newstuff.js
Normal file
10
gulp/gulp-newstuff.js
Normal file
@@ -0,0 +1,10 @@
|
||||
import gulp from 'gulp';
|
||||
import jade from 'jade';
|
||||
import {writeFileSync} from 'fs';
|
||||
|
||||
gulp.task('prepare:staticNewStuff', () => {
|
||||
writeFileSync(
|
||||
'./website/client-old/new-stuff.html',
|
||||
jade.compileFile('./website/views/shared/new-stuff.jade')()
|
||||
);
|
||||
});
|
||||
@@ -7,43 +7,85 @@ import mergeStream from 'merge-stream';
|
||||
import {basename} from 'path';
|
||||
import {sync} from 'glob';
|
||||
import {each} from 'lodash';
|
||||
import vinylBuffer from 'vinyl-buffer';
|
||||
|
||||
// https://github.com/Ensighten/grunt-spritesmith/issues/67#issuecomment-34786248
|
||||
const MAX_SPRITESHEET_SIZE = 1024 * 1024 * 3;
|
||||
const DIST_PATH = 'website/assets/sprites/dist/';
|
||||
|
||||
const IMG_DIST_PATH = 'website/client/assets/images/sprites/';
|
||||
const CSS_DIST_PATH = 'website/client/assets/css/sprites/';
|
||||
const IMG_DIST_PATH_NEW_CLIENT = 'website/static/sprites/';
|
||||
const CSS_DIST_PATH_NEW_CLIENT = 'website/client/assets/css/sprites/';
|
||||
|
||||
function checkForSpecialTreatment (name) {
|
||||
let regex = /^hair|skin|beard|mustach|shirt|flower|^headAccessory_special_\w+Ears|^eyewear_special_\w+TopFrame/;
|
||||
return name.match(regex) || name === 'head_0';
|
||||
}
|
||||
gulp.task('sprites:compile', ['sprites:clean', 'sprites:main', 'sprites:largeSprites', 'sprites:checkCompiledDimensions']);
|
||||
|
||||
function calculateImgDimensions (img, addPadding) {
|
||||
let dims = sizeOf(img);
|
||||
gulp.task('sprites:main', () => {
|
||||
let mainSrc = sync('website/assets/sprites/spritesmith/**/*.png');
|
||||
return createSpritesStream('main', mainSrc);
|
||||
});
|
||||
|
||||
let requiresSpecialTreatment = checkForSpecialTreatment(img);
|
||||
if (requiresSpecialTreatment) {
|
||||
let newWidth = dims.width < 90 ? 90 : dims.width;
|
||||
let newHeight = dims.height < 90 ? 90 : dims.height;
|
||||
dims = {
|
||||
width: newWidth,
|
||||
height: newHeight,
|
||||
};
|
||||
gulp.task('sprites:largeSprites', () => {
|
||||
let largeSrc = sync('website/assets/sprites/spritesmith_large/**/*.png');
|
||||
return createSpritesStream('largeSprites', largeSrc);
|
||||
});
|
||||
|
||||
gulp.task('sprites:clean', (done) => {
|
||||
clean(`{${DIST_PATH}spritesmith*,${IMG_DIST_PATH_NEW_CLIENT}spritesmith*,${CSS_DIST_PATH_NEW_CLIENT}spritesmith*}`, done);
|
||||
});
|
||||
|
||||
gulp.task('sprites:checkCompiledDimensions', ['sprites:main', 'sprites:largeSprites'], () => {
|
||||
console.log('Verifiying that images do not exceed max dimensions');
|
||||
|
||||
let numberOfSheetsThatAreTooBig = 0;
|
||||
|
||||
let distSpritesheets = sync(`${DIST_PATH}*.png`);
|
||||
|
||||
each(distSpritesheets, (img, index) => {
|
||||
let spriteSize = calculateImgDimensions(img);
|
||||
|
||||
if (spriteSize > MAX_SPRITESHEET_SIZE) {
|
||||
numberOfSheetsThatAreTooBig++;
|
||||
let name = basename(img, '.png');
|
||||
console.error(`WARNING: ${name} might be too big - ${spriteSize} > ${MAX_SPRITESHEET_SIZE}`);
|
||||
}
|
||||
});
|
||||
|
||||
if (numberOfSheetsThatAreTooBig > 0) {
|
||||
console.error(`${numberOfSheetsThatAreTooBig} sheets might too big for mobile Safari to be able to handle them, but there is a margin of error in these calculations so it is probably okay. Mention this to an admin so they can test a staging site on mobile Safari after your PR is merged.`); // https://github.com/HabitRPG/habitica/pull/6683#issuecomment-185462180
|
||||
} else {
|
||||
console.log('All images are within the correct dimensions');
|
||||
}
|
||||
});
|
||||
|
||||
let padding = 0;
|
||||
function createSpritesStream (name, src) {
|
||||
let spritesheetSliceIndicies = calculateSpritesheetsSrcIndicies(src);
|
||||
let stream = mergeStream();
|
||||
|
||||
if (addPadding) {
|
||||
padding = dims.width * 8 + dims.height * 8;
|
||||
}
|
||||
each(spritesheetSliceIndicies, (start, index) => {
|
||||
let slicedSrc = src.slice(start, spritesheetSliceIndicies[index + 1]);
|
||||
|
||||
if (!dims.width || !dims.height) console.error('MISSING DIMENSIONS:', dims); // eslint-disable-line no-console
|
||||
let spriteData = gulp.src(slicedSrc)
|
||||
.pipe(spritesmith({
|
||||
imgName: `spritesmith-${name}-${index}.png`,
|
||||
cssName: `spritesmith-${name}-${index}.css`,
|
||||
algorithm: 'binary-tree',
|
||||
padding: 1,
|
||||
cssTemplate: 'website/assets/sprites/css/css.template.handlebars',
|
||||
cssVarMap: cssVarMap,
|
||||
}));
|
||||
|
||||
let totalPixelSize = dims.width * dims.height + padding;
|
||||
let imgStream = spriteData.img
|
||||
.pipe(imagemin())
|
||||
.pipe(gulp.dest(IMG_DIST_PATH_NEW_CLIENT))
|
||||
.pipe(gulp.dest(DIST_PATH));
|
||||
|
||||
return totalPixelSize;
|
||||
let cssStream = spriteData.css
|
||||
.pipe(gulp.dest(CSS_DIST_PATH_NEW_CLIENT))
|
||||
.pipe(gulp.dest(DIST_PATH));
|
||||
|
||||
stream.add(imgStream);
|
||||
stream.add(cssStream);
|
||||
});
|
||||
|
||||
return stream;
|
||||
}
|
||||
|
||||
function calculateSpritesheetsSrcIndicies (src) {
|
||||
@@ -63,6 +105,37 @@ function calculateSpritesheetsSrcIndicies (src) {
|
||||
return slices;
|
||||
}
|
||||
|
||||
function calculateImgDimensions (img, addPadding) {
|
||||
let dims = sizeOf(img);
|
||||
|
||||
let requiresSpecialTreatment = checkForSpecialTreatment(img);
|
||||
if (requiresSpecialTreatment) {
|
||||
let newWidth = dims.width < 90 ? 90 : dims.width;
|
||||
let newHeight = dims.height < 90 ? 90 : dims.height;
|
||||
dims = {
|
||||
width: newWidth,
|
||||
height: newHeight,
|
||||
};
|
||||
}
|
||||
|
||||
let padding = 0;
|
||||
|
||||
if (addPadding) {
|
||||
padding = (dims.width * 8) + (dims.height * 8);
|
||||
}
|
||||
|
||||
if (!dims.width || !dims.height) console.error('MISSING DIMENSIONS:', dims);
|
||||
|
||||
let totalPixelSize = (dims.width * dims.height) + padding;
|
||||
|
||||
return totalPixelSize;
|
||||
}
|
||||
|
||||
function checkForSpecialTreatment (name) {
|
||||
let regex = /^hair|skin|beard|mustach|shirt|flower|^headAccessory_special_\w+Ears|^eyewear_special_\w+TopFrame/;
|
||||
return name.match(regex) || name === 'head_0';
|
||||
}
|
||||
|
||||
function cssVarMap (sprite) {
|
||||
// For hair, skins, beards, etc. we want to output a '.customize-options.WHATEVER' class, which works as a
|
||||
// 60x60 image pointing at the proper part of the 90x90 sprite.
|
||||
@@ -71,95 +144,18 @@ function cssVarMap (sprite) {
|
||||
if (requiresSpecialTreatment) {
|
||||
sprite.custom = {
|
||||
px: {
|
||||
offsetX: `-${ sprite.x + 25 }px`,
|
||||
offsetY: `-${ sprite.y + 15 }px`,
|
||||
offset_x: `-${ sprite.x + 25 }px`,
|
||||
offset_y: `-${ sprite.y + 15 }px`,
|
||||
width: '60px',
|
||||
height: '60px',
|
||||
},
|
||||
};
|
||||
}
|
||||
if (sprite.name.indexOf('shirt') !== -1)
|
||||
sprite.custom.px.offsetY = `-${ sprite.y + 35 }px`; // even more for shirts
|
||||
if (sprite.name.indexOf('hair_base') !== -1) {
|
||||
let styleArray = sprite.name.split('_').slice(2, 3);
|
||||
if (~sprite.name.indexOf('shirt'))
|
||||
sprite.custom.px.offset_y = `-${ sprite.y + 30 }px`; // even more for shirts
|
||||
if (~sprite.name.indexOf('hair_base')) {
|
||||
let styleArray = sprite.name.split('_').slice(2,3);
|
||||
if (Number(styleArray[0]) > 14)
|
||||
sprite.custom.px.offsetY = `-${ sprite.y }px`; // don't crop updos
|
||||
sprite.custom.px.offset_y = `-${ sprite.y }px`; // don't crop updos
|
||||
}
|
||||
}
|
||||
|
||||
function createSpritesStream (name, src) {
|
||||
let spritesheetSliceIndicies = calculateSpritesheetsSrcIndicies(src);
|
||||
let stream = mergeStream();
|
||||
|
||||
each(spritesheetSliceIndicies, (start, index) => {
|
||||
let slicedSrc = src.slice(start, spritesheetSliceIndicies[index + 1]);
|
||||
|
||||
let spriteData = gulp.src(slicedSrc)
|
||||
.pipe(spritesmith({
|
||||
imgName: `spritesmith-${name}-${index}.png`,
|
||||
cssName: `spritesmith-${name}-${index}.css`,
|
||||
algorithm: 'binary-tree',
|
||||
padding: 1,
|
||||
cssTemplate: 'website/raw_sprites/css/css.template.handlebars',
|
||||
cssVarMap,
|
||||
}));
|
||||
|
||||
let imgStream = spriteData.img
|
||||
.pipe(vinylBuffer())
|
||||
.pipe(imagemin())
|
||||
.pipe(gulp.dest(IMG_DIST_PATH));
|
||||
|
||||
let cssStream = spriteData.css
|
||||
.pipe(gulp.dest(CSS_DIST_PATH));
|
||||
|
||||
stream.add(imgStream);
|
||||
stream.add(cssStream);
|
||||
});
|
||||
|
||||
return stream;
|
||||
}
|
||||
|
||||
gulp.task('sprites:main', () => {
|
||||
let mainSrc = sync('website/raw_sprites/spritesmith/**/*.png');
|
||||
return createSpritesStream('main', mainSrc);
|
||||
});
|
||||
|
||||
gulp.task('sprites:largeSprites', () => {
|
||||
let largeSrc = sync('website/raw_sprites/spritesmith_large/**/*.png');
|
||||
return createSpritesStream('largeSprites', largeSrc);
|
||||
});
|
||||
|
||||
gulp.task('sprites:clean', (done) => {
|
||||
clean(`${IMG_DIST_PATH}spritesmith*,${CSS_DIST_PATH}spritesmith*}`, done);
|
||||
});
|
||||
|
||||
gulp.task('sprites:checkCompiledDimensions', gulp.series('sprites:main', 'sprites:largeSprites', (done) => {
|
||||
console.log('Verifiying that images do not exceed max dimensions'); // eslint-disable-line no-console
|
||||
|
||||
let numberOfSheetsThatAreTooBig = 0;
|
||||
|
||||
let distSpritesheets = sync(`${IMG_DIST_PATH}*.png`);
|
||||
|
||||
each(distSpritesheets, (img) => {
|
||||
let spriteSize = calculateImgDimensions(img);
|
||||
|
||||
if (spriteSize > MAX_SPRITESHEET_SIZE) {
|
||||
numberOfSheetsThatAreTooBig++;
|
||||
let name = basename(img, '.png');
|
||||
console.error(`WARNING: ${name} might be too big - ${spriteSize} > ${MAX_SPRITESHEET_SIZE}`); // eslint-disable-line no-console
|
||||
}
|
||||
});
|
||||
|
||||
if (numberOfSheetsThatAreTooBig > 0) {
|
||||
// https://github.com/HabitRPG/habitica/pull/6683#issuecomment-185462180
|
||||
console.error( // eslint-disable-line no-console
|
||||
`${numberOfSheetsThatAreTooBig} sheets might too big for mobile Safari to be able to handle
|
||||
them, but there is a margin of error in these calculations so it is probably okay. Mention
|
||||
this to an admin so they can test a staging site on mobile Safari after your PR is merged.`);
|
||||
} else {
|
||||
console.log('All images are within the correct dimensions'); // eslint-disable-line no-console
|
||||
}
|
||||
done();
|
||||
}));
|
||||
|
||||
gulp.task('sprites:compile', gulp.series('sprites:clean', 'sprites:main', 'sprites:largeSprites', 'sprites:checkCompiledDimensions', done => done()));
|
||||
|
||||
@@ -3,7 +3,9 @@ import nodemon from 'gulp-nodemon';
|
||||
|
||||
let pkg = require('../package.json');
|
||||
|
||||
gulp.task('nodemon', (done) => {
|
||||
gulp.task('run:dev', ['nodemon', 'build:dev:watch']);
|
||||
|
||||
gulp.task('nodemon', () => {
|
||||
nodemon({
|
||||
script: pkg.main,
|
||||
ignore: [
|
||||
@@ -12,5 +14,4 @@ gulp.task('nodemon', (done) => {
|
||||
'common/dist/script/content/*',
|
||||
],
|
||||
});
|
||||
done();
|
||||
});
|
||||
|
||||
@@ -1,11 +1,21 @@
|
||||
import {
|
||||
pipe,
|
||||
awaitPort,
|
||||
kill,
|
||||
runMochaTests,
|
||||
} from './taskHelper';
|
||||
import { server as karma } from 'karma';
import mongoose from 'mongoose';
import { exec } from 'child_process';
import psTree from 'ps-tree';
import gulp from 'gulp';
import Bluebird from 'bluebird';
import runSequence from 'run-sequence';
import os from 'os';
import nconf from 'nconf';
import fs from 'fs';

const i18n = require('../website/server/libs/i18n');

// TODO rewrite

@@ -14,23 +24,25 @@ let server;

const TEST_DB_URI = nconf.get('TEST_DB_URI');

const API_V3_TEST_COMMAND = 'npm run test:api-v3';
const SANITY_TEST_COMMAND = 'npm run test:sanity';
const COMMON_TEST_COMMAND = 'npm run test:common';
const CONTENT_TEST_COMMAND = 'npm run test:content';
const CONTENT_OPTIONS = {maxBuffer: 1024 * 500};
const KARMA_TEST_COMMAND = 'npm run test:karma';

/* Helper methods for reporting test summary */
let testResults = [];
let testCount = (stdout, regexp) => {
let match = stdout.match(regexp);
return parseInt(match && match[1] || 0, 10);
return parseInt(match && match[1] || 0);
};

let testBin = (string, additionalEnvVariables = '') => {
if (os.platform() === 'win32') {
if (additionalEnvVariables !== '') {
if (additionalEnvVariables != '') {
additionalEnvVariables = additionalEnvVariables.split(' ').join('&&set ');
additionalEnvVariables = `set ${additionalEnvVariables}&&`;
additionalEnvVariables = 'set ' + additionalEnvVariables + '&&';
}
return `set NODE_ENV=test&&${additionalEnvVariables}${string}`;
} else {
@@ -38,48 +50,56 @@ let testBin = (string, additionalEnvVariables = '') => {
}
};

gulp.task('test:nodemon', gulp.series(function setupNodemon (done) {
process.env.PORT = TEST_SERVER_PORT; // eslint-disable-line no-process-env
process.env.NODE_DB_URI = TEST_DB_URI; // eslint-disable-line no-process-env
done();
}, 'nodemon'));
gulp.task('test:nodemon', (done) => {
process.env.PORT = TEST_SERVER_PORT;
process.env.NODE_DB_URI = TEST_DB_URI;

runSequence('nodemon');
});

gulp.task('test:prepare:mongo', (cb) => {
mongoose.connect(TEST_DB_URI, (err) => {
if (err) return cb(`Unable to connect to mongo database. Are you sure it's running? \n\n${err}`);
mongoose.connection.dropDatabase((err2) => {
if (err2) return cb(err2);
mongoose.connection.close(cb);
});
mongoose.connection.db.dropDatabase();
mongoose.connection.close();
cb();
});
});

gulp.task('test:prepare:server', gulp.series('test:prepare:mongo', (done) => {
gulp.task('test:prepare:server', ['test:prepare:mongo'], () => {
if (!server) {
server = exec(testBin('node ./website/server/index.js', `NODE_DB_URI=${TEST_DB_URI} PORT=${TEST_SERVER_PORT}`), (error, stdout, stderr) => {
if (error) {
throw new Error(`Problem with the server: ${error}`);
}
if (stderr) {
console.error(stderr); // eslint-disable-line no-console
}
done();
if (error) { throw `Problem with the server: ${error}`; }
if (stderr) { console.error(stderr); }
});
}
}));
});

gulp.task('test:prepare:build', gulp.series('build', done => done()));
gulp.task('test:prepare:translations', (cb) => {
fs.writeFile(
'test/client-old/spec/mocks/translations.js',
`if(!window.env) window.env = {};
window.env.translations = ${JSON.stringify(i18n.translations['en'])};`, cb);

gulp.task('test:prepare', gulp.series(
});

gulp.task('test:prepare:build', ['build', 'test:prepare:translations']);
// exec(testBin('grunt build:test'), cb);

gulp.task('test:prepare:webdriver', (cb) => {
exec('npm run test:prepare:webdriver', cb);
});

gulp.task('test:prepare', [
'test:prepare:build',
'test:prepare:mongo',
done => done()
));
'test:prepare:webdriver',
]);

gulp.task('test:sanity', (cb) => {
let runner = exec(
testBin(SANITY_TEST_COMMAND),
(err) => {
(err, stdout, stderr) => {
if (err) {
process.exit(1);
}
@@ -89,10 +109,10 @@ gulp.task('test:sanity', (cb) => {
pipe(runner);
});

gulp.task('test:common', gulp.series('test:prepare:build', (cb) => {
gulp.task('test:common', ['test:prepare:build'], (cb) => {
let runner = exec(
testBin(COMMON_TEST_COMMAND),
(err) => {
(err, stdout, stderr) => {
if (err) {
process.exit(1);
}
@@ -100,20 +120,20 @@ gulp.task('test:common', gulp.series('test:prepare:build', (cb) => {
}
);
pipe(runner);
}));
});

gulp.task('test:common:clean', (cb) => {
pipe(exec(testBin(COMMON_TEST_COMMAND), () => cb()));
});

gulp.task('test:common:watch', gulp.series('test:common:clean', () => {
return gulp.watch(['common/script/**/*', 'test/common/**/*'], gulp.series('test:common:clean', done => done()));
}));
gulp.task('test:common:watch', ['test:common:clean'], () => {
gulp.watch(['common/script/**/*', 'test/common/**/*'], ['test:common:clean']);
});

gulp.task('test:common:safe', gulp.series('test:prepare:build', (cb) => {
gulp.task('test:common:safe', ['test:prepare:build'], (cb) => {
let runner = exec(
testBin(COMMON_TEST_COMMAND),
(err, stdout) => { // eslint-disable-line handle-callback-err
(err, stdout, stderr) => {
testResults.push({
suite: 'Common Specs\t',
pass: testCount(stdout, /(\d+) passing/),
@@ -124,13 +144,13 @@ gulp.task('test:common:safe', gulp.series('test:prepare:build', (cb) => {
}
);
pipe(runner);
}));
});

gulp.task('test:content', gulp.series('test:prepare:build', (cb) => {
gulp.task('test:content', ['test:prepare:build'], (cb) => {
let runner = exec(
testBin(CONTENT_TEST_COMMAND),
CONTENT_OPTIONS,
(err) => {
(err, stdout, stderr) => {
if (err) {
process.exit(1);
}
@@ -138,21 +158,21 @@ gulp.task('test:content', gulp.series('test:prepare:build', (cb) => {
}
);
pipe(runner);
}));
});

gulp.task('test:content:clean', (cb) => {
pipe(exec(testBin(CONTENT_TEST_COMMAND), CONTENT_OPTIONS, () => cb()));
});

gulp.task('test:content:watch', gulp.series('test:content:clean', () => {
return gulp.watch(['common/script/content/**', 'test/**'], gulp.series('test:content:clean', done => done()));
}));
gulp.task('test:content:watch', ['test:content:clean'], () => {
gulp.watch(['common/script/content/**', 'test/**'], ['test:content:clean']);
});

gulp.task('test:content:safe', gulp.series('test:prepare:build', (cb) => {
gulp.task('test:content:safe', ['test:prepare:build'], (cb) => {
let runner = exec(
testBin(CONTENT_TEST_COMMAND),
CONTENT_OPTIONS,
(err, stdout) => { // eslint-disable-line handle-callback-err
(err, stdout, stderr) => {
testResults.push({
suite: 'Content Specs\t',
pass: testCount(stdout, /(\d+) passing/),
@@ -163,12 +183,105 @@ gulp.task('test:content:safe', gulp.series('test:prepare:build', (cb) => {
}
);
pipe(runner);
}));
});

gulp.task('test:api:unit', (done) => {
gulp.task('test:karma', ['test:prepare:build'], (cb) => {
let runner = exec(
testBin('istanbul cover --dir coverage/api-unit node_modules/mocha/bin/_mocha -- test/api/unit --recursive --require ./test/helpers/start-server'),
(err) => {
testBin(KARMA_TEST_COMMAND),
(err, stdout) => {
if (err) {
process.exit(1);
}
cb();
}
);
pipe(runner);
});

gulp.task('test:karma:watch', ['test:prepare:build'], (cb) => {
let runner = exec(
testBin(`${KARMA_TEST_COMMAND}:watch`),
(err, stdout) => {
cb(err);
}
);
pipe(runner);
});

gulp.task('test:karma:safe', ['test:prepare:build'], (cb) => {
let runner = exec(
testBin(KARMA_TEST_COMMAND),
(err, stdout) => {
testResults.push({
suite: 'Karma Specs\t',
pass: testCount(stdout, /(\d+) tests? completed/),
fail: testCount(stdout, /(\d+) tests? failed/),
pend: testCount(stdout, /(\d+) tests? skipped/),
});
cb();
}
);
pipe(runner);
});

gulp.task('test:e2e', ['test:prepare', 'test:prepare:server'], (cb) => {
let support = [
'Xvfb :99 -screen 0 1024x768x24 -extension RANDR',
testBin('npm run test:e2e:webdriver', 'DISPLAY=:99'),
].map(exec);
support.push(server);

Bluebird.all([
awaitPort(TEST_SERVER_PORT),
awaitPort(4444),
]).then(() => {
let runner = exec(
'npm run test:e2e',
(err, stdout, stderr) => {
support.forEach(kill);
if (err) {
process.exit(1);
}
cb();
}
);
pipe(runner);
});
});

gulp.task('test:e2e:safe', ['test:prepare', 'test:prepare:server'], (cb) => {
let support = [
'Xvfb :99 -screen 0 1024x768x24 -extension RANDR',
'npm run test:e2e:webdriver',
].map(exec);

Bluebird.all([
awaitPort(TEST_SERVER_PORT),
awaitPort(4444),
]).then(() => {
let runner = exec(
'npm run test:e2e',
(err, stdout, stderr) => {
let match = stdout.match(/(\d+) tests?.*(\d) failures?/);

testResults.push({
suite: 'End-to-End Specs\t',
pass: testCount(stdout, /(\d+) passing/),
fail: testCount(stdout, /(\d+) failing/),
pend: testCount(stdout, /(\d+) pending/),
});
support.forEach(kill);
cb();
}
);
pipe(runner);
});
});

gulp.task('test:api-v3:unit', (done) => {
let runner = exec(
testBin('node_modules/.bin/istanbul cover --dir coverage/api-v3-unit --report lcovonly node_modules/mocha/bin/_mocha -- test/api/v3/unit --recursive --require ./test/helpers/start-server'),
(err, stdout, stderr) => {
if (err) {
process.exit(1);
}
@@ -179,15 +292,15 @@ gulp.task('test:api:unit', (done) => {
pipe(runner);
});

gulp.task('test:api:unit:watch', () => {
return gulp.watch(['website/server/libs/*', 'test/api/unit/**/*', 'website/server/controllers/**/*'], gulp.series('test:api:unit', done => done()));
gulp.task('test:api-v3:unit:watch', () => {
gulp.watch(['website/server/libs/*', 'test/api/v3/unit/**/*', 'website/server/controllers/**/*'], ['test:api-v3:unit']);
});

gulp.task('test:api-v3:integration', (done) => {
let runner = exec(
testBin('istanbul cover --dir coverage/api-v3-integration --report lcovonly node_modules/mocha/bin/_mocha -- test/api/v3/integration --recursive --require ./test/helpers/start-server'),
testBin('node_modules/.bin/istanbul cover --dir coverage/api-v3-integration --report lcovonly node_modules/mocha/bin/_mocha -- test/api/v3/integration --recursive --require ./test/helpers/start-server'),
{maxBuffer: 500 * 1024},
(err) => {
(err, stdout, stderr) => {
if (err) {
process.exit(1);
}
@@ -199,59 +312,36 @@ gulp.task('test:api-v3:integration', (done) => {
});

gulp.task('test:api-v3:integration:watch', () => {
return gulp.watch([
'website/server/controllers/api-v3/**/*', 'common/script/ops/*', 'website/server/libs/*.js',
'test/api/v3/integration/**/*',
], gulp.series('test:api-v3:integration', done => done()));
gulp.watch(['website/server/controllers/api-v3/**/*', 'common/script/ops/*', 'website/server/libs/*.js',
'test/api/v3/integration/**/*'], ['test:api-v3:integration']);
});

gulp.task('test:api-v3:integration:separate-server', (done) => {
let runner = exec(
testBin('mocha test/api/v3/integration --recursive --require ./test/helpers/start-server', 'LOAD_SERVER=0'),
{maxBuffer: 500 * 1024},
(err) => done(err)
(err, stdout, stderr) => done(err)
);

pipe(runner);
});

gulp.task('test:api-v4:integration', (done) => {
let runner = exec(
testBin('istanbul cover --dir coverage/api-v4-integration --report lcovonly node_modules/mocha/bin/_mocha -- test/api/v4 --recursive --require ./test/helpers/start-server'),
{maxBuffer: 500 * 1024},
(err) => {
if (err) {
process.exit(1);
}
done();
}
gulp.task('test', (done) => {
runSequence(
'test:sanity',
'test:content',
'test:common',
'test:karma',
'test:api-v3:unit',
'test:api-v3:integration',
done
);

pipe(runner);
});

gulp.task('test:api-v4:integration:separate-server', (done) => {
let runner = exec(
testBin('mocha test/api/v4 --recursive --require ./test/helpers/start-server', 'LOAD_SERVER=0'),
{maxBuffer: 500 * 1024},
(err) => done(err)
gulp.task('test:api-v3', (done) => {
runSequence(
'test:api-v3:unit',
'test:api-v3:integration',
done
);

pipe(runner);
});

gulp.task('test', gulp.series(
'test:sanity',
'test:content',
'test:common',
'test:api:unit',
'test:api-v3:integration',
'test:api-v4:integration',
done => done()
));

gulp.task('test:api-v3', gulp.series(
'test:api:unit',
'test:api-v3:integration',
done => done()
));

@@ -1,5 +1,6 @@
import fs from 'fs';
import _ from 'lodash';
import nconf from 'nconf';
import gulp from 'gulp';
import { postToSlack, conf } from './taskHelper';

@@ -11,89 +12,18 @@ const SLACK_CONFIG = {

const LOCALES = './website/common/locales/';
const ENGLISH_LOCALE = `${LOCALES}en/`;


function getArrayOfLanguages () {
let languages = fs.readdirSync(LOCALES);
languages.shift(); // Remove README.md from array of languages

return languages;
}

const ALL_LANGUAGES = getArrayOfLanguages();

function stripOutNonJsonFiles (collection) {
let onlyJson = _.filter(collection, (file) => {
return file.match(/[a-zA-Z]*\.json/);
});

return onlyJson;
}

function eachTranslationFile (languages, cb) {
let jsonFiles = stripOutNonJsonFiles(fs.readdirSync(ENGLISH_LOCALE));

_.each(languages, (lang) => {
_.each(jsonFiles, (filename) => {
let parsedTranslationFile;
try {
const translationFile = fs.readFileSync(`${LOCALES}${lang}/${filename}`);
parsedTranslationFile = JSON.parse(translationFile);
} catch (err) {
return cb(err);
}

let englishFile = fs.readFileSync(ENGLISH_LOCALE + filename);
let parsedEnglishFile = JSON.parse(englishFile);

cb(null, lang, filename, parsedEnglishFile, parsedTranslationFile);
});
});
}

function eachTranslationString (languages, cb) {
eachTranslationFile(languages, (error, language, filename, englishJSON, translationJSON) => {
if (error) return;
_.each(englishJSON, (string, key) => {
const translationString = translationJSON[key];
cb(language, filename, key, string, translationString);
});
});
}

function formatMessageForPosting (msg, items) {
let body = `*Warning:* ${msg}`;
body += '\n\n```\n';
body += items.join('\n');
body += '\n```';

return body;
}

function getStringsWith (json, interpolationRegex) {
let strings = {};

_.each(json, (fileName) => {
const rawFile = fs.readFileSync(ENGLISH_LOCALE + fileName);
const parsedJson = JSON.parse(rawFile);

strings[fileName] = {};
_.each(parsedJson, (value, key) => {
const match = value.match(interpolationRegex);
if (match) strings[fileName][key] = match;
});
});

return strings;
}

const malformedStringExceptions = {
messageDropFood: true,
armoireFood: true,
feedPet: true,
};

gulp.task('transifex:missingFiles', (done) => {
gulp.task('transifex', ['transifex:missingFiles', 'transifex:missingStrings', 'transifex:malformedStrings']);

gulp.task('transifex:missingFiles', () => {

let missingStrings = [];

eachTranslationFile(ALL_LANGUAGES, (error) => {
@@ -107,10 +37,10 @@ gulp.task('transifex:missingFiles', (done) => {
let formattedMessage = formatMessageForPosting(message, missingStrings);
postToSlack(formattedMessage, SLACK_CONFIG);
}
done();
});

gulp.task('transifex:missingStrings', (done) => {
gulp.task('transifex:missingStrings', () => {

let missingStrings = [];

eachTranslationString(ALL_LANGUAGES, (language, filename, key, englishString, translationString) => {
@@ -125,10 +55,10 @@ gulp.task('transifex:missingStrings', (done) => {
let formattedMessage = formatMessageForPosting(message, missingStrings);
postToSlack(formattedMessage, SLACK_CONFIG);
}
done();
});

gulp.task('transifex:malformedStrings', (done) => {
gulp.task('transifex:malformedStrings', () => {

let jsonFiles = stripOutNonJsonFiles(fs.readdirSync(ENGLISH_LOCALE));
let interpolationRegex = /<%= [a-zA-Z]* %>/g;
let stringsToLookFor = getStringsWith(jsonFiles, interpolationRegex);
@@ -136,23 +66,25 @@ gulp.task('transifex:malformedStrings', (done) => {
let stringsWithMalformedInterpolations = [];
let stringsWithIncorrectNumberOfInterpolations = [];

_.each(ALL_LANGUAGES, (lang) => {
_.each(stringsToLookFor, (strings, filename) => {
let translationFile = fs.readFileSync(`${LOCALES}${lang}/${filename}`);
let count = 0;
_.each(ALL_LANGUAGES, function (lang) {

_.each(stringsToLookFor, function (strings, file) {
let translationFile = fs.readFileSync(LOCALES + lang + '/' + file);
let parsedTranslationFile = JSON.parse(translationFile);

_.each(strings, (value, key) => { // eslint-disable-line max-nested-callbacks
_.each(strings, function (value, key) {
let translationString = parsedTranslationFile[key];
if (!translationString) return;

let englishOccurences = stringsToLookFor[filename][key];
let englishOccurences = stringsToLookFor[file][key];
let translationOccurences = translationString.match(interpolationRegex);

if (!translationOccurences) {
let malformedString = `${lang} - ${filename} - ${key} - ${translationString}`;
let malformedString = `${lang} - ${file} - ${key} - ${translationString}`;
stringsWithMalformedInterpolations.push(malformedString);
} else if (englishOccurences.length !== translationOccurences.length && !malformedStringExceptions[key]) {
let missingInterpolationString = `${lang} - ${filename} - ${key} - ${translationString}`;
let missingInterpolationString = `${lang} - ${file} - ${key} - ${translationString}`;
stringsWithIncorrectNumberOfInterpolations.push(missingInterpolationString);
}
});
@@ -170,11 +102,75 @@ gulp.task('transifex:malformedStrings', (done) => {
let formattedMessage = formatMessageForPosting(message, stringsWithIncorrectNumberOfInterpolations);
postToSlack(formattedMessage, SLACK_CONFIG);
}
done();
});

gulp.task(
'transifex',
gulp.series('transifex:missingFiles', 'transifex:missingStrings', 'transifex:malformedStrings'),
(done) => done()
);
function getArrayOfLanguages () {
let languages = fs.readdirSync(LOCALES);
languages.shift(); // Remove README.md from array of languages

return languages;
}

function eachTranslationFile (languages, cb) {
let jsonFiles = stripOutNonJsonFiles(fs.readdirSync(ENGLISH_LOCALE));

_.each(languages, (lang) => {
_.each(jsonFiles, (filename) => {
try {
var translationFile = fs.readFileSync(LOCALES + lang + '/' + filename);
var parsedTranslationFile = JSON.parse(translationFile);
} catch (err) {
return cb(err);
}

let englishFile = fs.readFileSync(ENGLISH_LOCALE + filename);
let parsedEnglishFile = JSON.parse(englishFile);

cb(null, lang, filename, parsedEnglishFile, parsedTranslationFile);
});
});
}

function eachTranslationString (languages, cb) {
eachTranslationFile(languages, (error, language, filename, englishJSON, translationJSON) => {
if (error) return;
_.each(englishJSON, (string, key) => {
var translationString = translationJSON[key];
cb(language, filename, key, string, translationString);
});
});
}

function formatMessageForPosting (msg, items) {
let body = `*Warning:* ${msg}`;
body += '\n\n```\n';
body += items.join('\n');
body += '\n```';

return body;
}

function getStringsWith (json, interpolationRegex) {
var strings = {};

_.each(json, function (file_name) {
var raw_file = fs.readFileSync(ENGLISH_LOCALE + file_name);
var parsed_json = JSON.parse(raw_file);

strings[file_name] = {};
_.each(parsed_json, function (value, key) {
var match = value.match(interpolationRegex);
if (match) strings[file_name][key] = match;
});
});

return strings;
}

function stripOutNonJsonFiles (collection) {
let onlyJson = _.filter(collection, (file) => {
return file.match(/[a-zA-Z]*\.json/);
});

return onlyJson;
}

@@ -2,6 +2,7 @@ import { exec } from 'child_process';
import psTree from 'ps-tree';
import nconf from 'nconf';
import net from 'net';
import Bluebird from 'bluebird';
import { post } from 'superagent';
import { sync as glob } from 'glob';
import Mocha from 'mocha';
@@ -11,7 +12,7 @@ import { resolve } from 'path';
* Get access to configruable values
*/
nconf.argv().env().file({ file: 'config.json' });
export const conf = nconf;
export var conf = nconf;

/*
* Kill a child process and any sub-children that process may have spawned.
@@ -25,12 +26,11 @@ export function kill (proc) {
pids.forEach(kill); return;
}
try {
exec(/^win/.test(process.platform) ?
`taskkill /PID ${pid} /T /F` :
`kill -9 ${pid}`);
} catch (e) {
console.log(e); // eslint-disable-line no-console
exec(/^win/.test(process.platform)
? `taskkill /PID ${pid} /T /F`
: `kill -9 ${pid}`);
}
catch (e) { console.log(e); }
});
};

@@ -44,25 +44,21 @@ export function kill (proc) {
* before failing.
*/
export function awaitPort (port, max = 60) {
return new Promise((rej, res) => {
let socket;
let timeout;
let interval;
return new Bluebird((reject, resolve) => {
let socket, timeout, interval;

timeout = setTimeout(() => {
clearInterval(interval);
rej(`Timed out after ${max} seconds`);
reject(`Timed out after ${max} seconds`);
}, max * 1000);

interval = setInterval(() => {
socket = net.connect({port}, () => {
socket = net.connect({port: port}, () => {
clearInterval(interval);
clearTimeout(timeout);
socket.destroy();
res();
}).on('error', () => {
socket.destroy();
});
resolve();
}).on('error', () => { socket.destroy; });
}, 1000);
});
}
@@ -71,12 +67,8 @@ export function awaitPort (port, max = 60) {
* Pipe the child's stdin and stderr to the parent process.
*/
export function pipe (child) {
child.stdout.on('data', (data) => {
process.stdout.write(data);
});
child.stderr.on('data', (data) => {
process.stderr.write(data);
});
child.stdout.on('data', (data) => { process.stdout.write(data); });
child.stderr.on('data', (data) => { process.stderr.write(data); });
}

/*
@@ -86,8 +78,8 @@ export function postToSlack (msg, config = {}) {
let slackUrl = nconf.get('SLACK_URL');

if (!slackUrl) {
console.error('No slack post url specified. Your message was:'); // eslint-disable-line no-console
console.log(msg); // eslint-disable-line no-console
console.error('No slack post url specified. Your message was:');
console.log(msg);

return;
}
@@ -97,15 +89,15 @@ export function postToSlack (msg, config = {}) {
channel: `#${config.channel || '#general'}`,
username: config.username || 'gulp task',
text: msg,
icon_emoji: `:${config.emoji || 'gulp'}:`, // eslint-disable-line camelcase
icon_emoji: `:${config.emoji || 'gulp'}:`,
})
.end((err) => {
if (err) console.error('Unable to post to slack', err); // eslint-disable-line no-console
.end((err, res) => {
if (err) console.error('Unable to post to slack', err);
});
}

export function runMochaTests (files, server, cb) {
require('../test/helpers/globals.helper'); // eslint-disable-line global-require
require('../test/helpers/globals.helper');

let mocha = new Mocha({reporter: 'spec'});
let tests = glob(files);
@@ -116,7 +108,7 @@ export function runMochaTests (files, server, cb) {
});

mocha.run((numberOfFailures) => {
if (!process.env.RUN_INTEGRATION_TEST_FOREVER) { // eslint-disable-line no-process-env
if (!process.env.RUN_INTEGRATION_TEST_FOREVER) {
if (server) kill(server);
process.exit(numberOfFailures);
}

15 gulpfile.js
@@ -8,12 +8,13 @@

require('babel-register');

const gulp = require('gulp');

if (process.env.NODE_ENV === 'production') { // eslint-disable-line no-process-env
require('./gulp/gulp-apidoc'); // eslint-disable-line global-require
require('./gulp/gulp-build'); // eslint-disable-line global-require
if (process.env.NODE_ENV === 'production') {
require('./gulp/gulp-apidoc');
require('./gulp/gulp-newstuff');
require('./gulp/gulp-build');
require('./gulp/gulp-babelify');
require('./gulp/gulp-bootstrap');
} else {
require('glob').sync('./gulp/gulp-*').forEach(require); // eslint-disable-line global-require
require('gulp').task('default', gulp.series('test')); // eslint-disable-line global-require
require('glob').sync('./gulp/gulp-*').forEach(require);
require('gulp').task('default', ['test']);
}

@@ -1,7 +0,0 @@
{
"root": false,
"rules": {
"no-console": 0,
"no-use-before-define": ["error", { "functions": false }]
}
}
5 migrations/20130128_add_missing_crons.js Normal file
@@ -0,0 +1,5 @@
db.users.update(
{ lastCron: { $exists: false} },
{ $set: { lastCron: +new Date } },
{ multi: true }
);
15 migrations/20130128_merge_completed_todo_ids.js Normal file
@@ -0,0 +1,15 @@
db.users.find({ completedIds: { $exists: true } }).forEach(function(user) {
var newTodoIds = user.todoIds;
user.completedIds.forEach(function(value) {
if (newTodoIds.indexOf(value) === -1) {
newTodoIds.push(value)
}
});
db.users.update(
{ _id: user._id },
{
$set: { todoIds: newTodoIds },
$unset: { completedIds: 1 }
}
);
});
5 migrations/20130129_add_missing_preferences.js Normal file
@@ -0,0 +1,5 @@
db.users.update(
{preferences:{$exists:false}},
{$set:{preferences:{gender: 'm', armorSet: 'v1'}}},
{multi:true}
)
20 migrations/20130204_count_habits.js Normal file
@@ -0,0 +1,20 @@
// %mongo server:27017/dbname underscore.js my_commands.js
// %mongo server:27017/dbname underscore.js --shell

// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

var habits = 0,
dailies = 0,
todos = 0,
registered = { $or: [ { 'auth.local': { $exists: true } }, { 'auth.facebook': { $exists: true} } ]};

db.user.find(registered).forEach(function(u){
//TODO this isn't working??
habits += _.where(u.tasks, {type:'habit'}).length;
dailies += _.where(u.tasks, {type:'daily'}).length;
todos += _.where(u.tasks, {type:'todo'}).length;
})
102 migrations/20130204_user_public_private_paths.js Normal file
@@ -0,0 +1,102 @@
// %mongo server:27017/dbname underscore.js my_commands.js
// %mongo server:27017/dbname underscore.js --shell

//db.users.find({'auth.facebook.email': 'tylerrenelle@gmail.com'}).forEach(function(user){
db.users.find().forEach(function(user){

if (!user._id) {
print("User has null _id");
return; // need to figure out how to delete these buggers if they don't have an id to delete from
}

if (!!user.idLists) {
print("User " + user._id + " has already been migrated")
return
}

if (user._id.indexOf("$") === 0) {
print("User id starts with $ (" + user._id + ")")
return;
}

// even though we're clobbering user later, sometimes these are undefined and crash the script
// this saves us some ternaries
user.stats = user.stats || {};
user.items = user.items || {};
user.preferences = user.preferences || {};
user.notifications = user.notifications || {};
user.flags = user.flags || {};
user.habitIds = user.habitIds || [];
user.dailyIds = user.dailyIds || [];
user.todoIds = user.todoIds || [];
user.rewardIds = user.rewardIds|| [];

_.each(user.tasks, function(task, key){
if (!task.type) {
delete user.tasks[key];
// idList will take care of itself on page-load
return
}
if (key == '$spec') {
print("$spec was found: " + user._id);
return
}
if (key.indexOf("$_") === 0) {
var newKey = key.replace("$_", ''),
index = user[task.type + "Ids"].indexOf(key)
user[task.type + "Ids"][index] = newKey;
task.id = newKey
user.tasks[newKey] = task
// TODO make sure this is ok, that we're not deleting the original
// Otherwise use lodash.cloneDeep
delete user.tasks[key]
}
});

// New user schema has public and private paths, so we can setup proper access control with racer
// Note 'public' and 'private' are reserved words
var newUser = {
auth: user.auth, // we need this top-level due to derby-auth
apiToken: user.preferences.api_token || null, // set on update, we need derby.uuid()
preferences: {
armorSet: user.preferences.armorSet || 'v1',
gender: user.preferences.gender || 'm'
},
balance: user.balance || 2,
lastCron: user.lastCron || +new Date,
history: user.history || [],
stats: {
gp: user.stats.money || 0,
hp: user.stats.hp || 50,
exp: user.stats.exp || 0,
lvl: user.stats.lvl || 1
},
items: {
armor: user.items.armor || 0,
weapon: user.items.weapon || 0
},
tasks: user.tasks || {},
idLists: {
habit: user.habitIds || [],
daily: user.dailyIds || [],
todo: user.todoIds || [],
reward: user.rewardIds || []
},
flags: {
partyEnabled: false,
itemsEnabled: user.items.itemsEnabled || false,
kickstarter: user.notifications.kickstarter || 'show',
ads: user.flags.ads || null // null because it's set on registration
},
party: {
current: null,
invitation: null
}
};

try {
db.users.update({_id:user._id}, newUser);
} catch(e) {
print(e);
}
})
19 migrations/20130208_idLists_to_typeIds.js Normal file
@@ -0,0 +1,19 @@
// move idList back to root-level, is what's causing the sort bug - see https://github.com/codeparty/racer/pull/73

// We could just delete user.idLists, since it's re-created on refresh. However, users's first refresh will scare them
// since everything will dissappear - second refresh will bring everything back.
db.users.find().forEach(function(user){
if (!user.idLists) return;
db.users.update(
{_id:user._id},
{
$set:{
'habitIds':user.idLists.habit,
'dailyIds':user.idLists.daily,
'todoIds':user.idLists.todo,
'rewardIds':user.idLists.reward
}
//$unset:{idLists:true} // run this after the code has been pushed
}
)
})
20 migrations/20130208_user_customizations.js Normal file
@@ -0,0 +1,20 @@
db.users.update(
{items:{$exists:0}},
{$set:{items:{weapon: 0, armor: 0, head: 0, shield: 0 }}},
{multi:true}
);

db.users.find().forEach(function(user){

var updates = {
// I'm not racist, these were just the defaults before ;)
'preferences.skin': 'white',
'preferences.hair': 'blond',

'items.head': user.items.armor,
'items.shield': user.items.armor,
}

db.users.update({_id:user._id}, {$set:updates});

})
39 migrations/20130307_exp_overflow.js Normal file
@@ -0,0 +1,39 @@
// mongo habitrpg ./node_modules/underscore/underscore.js ./migrations/20130307_normalize_algo_values.js

/**
* Make sure people aren't overflowing their exp with the new system
*/
db.users.find().forEach(function(user){
function oldTnl(level) {
return (Math.pow(level,2)*10)+(level*10)+80
}

function newTnl(level) {
var value = 0;
if (level >= 100) {
value = 0
} else {
value = Math.round(((Math.pow(level,2)*0.25)+(10 * level) + 139.75)/10)*10; // round to nearest 10
}
return value
}

var newTnl = newTnl(user.stats.lvl);
if (user.stats.exp > newTnl) {
var percent = user.stats.exp / oldTnl(user.stats.lvl);
percent = (percent>1) ? 1 : percent;
user.stats.exp = newTnl * percent;

try {
db.users.update(
{_id:user._id},
{$set: {'stats.exp': user.stats.exp}},
{multi:true}
);
} catch(e) {
print(e);
}

}

})
47 migrations/20130307_normalize_algo_values.js Normal file
@@ -0,0 +1,47 @@
// mongo habitrpg ./node_modules/underscore/underscore.js ./migrations/20130307_normalize_algo_values.js

/**
* Users were experiencing a lot of extreme Exp multiplication (https://github.com/lefnire/habitrpg/issues/594).
* This sets things straight, and in preparation for another algorithm overhaul
*/
db.users.find().forEach(function(user){
if (user.stats.exp >= 3580) {
user.stats.exp = 0;
}

if (user.stats.lvl > 100) {
user.stats.lvl = 100;
}

_.each(user.tasks, function(task, key){
// remove corrupt tasks
if (!task) {
delete user.tasks[key];
return;
}

// Fix busted values
if (task.value > 21.27) {
task.value = 21.27;
}
else if (task.value < -47.27) {
task.value = -47.27;
}
});

try {
db.users.update(
{_id:user._id},
{$set:
{
'stats.lvl': user.stats.lvl,
'stats.exp': user.stats.exp,
'tasks' : user.tasks
}
},
{multi:true}
);
} catch(e) {
print(e);
}
})
28 migrations/20130307_remove_duff_histories.js Normal file
@@ -0,0 +1,28 @@
/**
* Remove duff histories for dailies
*/
// mongo habitrpg ./node_modules/underscore/underscore.js ./migrations/20130307_remove_duff_histories.js
db.users.find().forEach(function(user){


_.each(user.tasks, function(task, key){
if (task.type === "daily") {
// remove busted history entries
task.history = _.filter(task.history, function(h){return !!h.value})
}
});

try {
db.users.update(
{_id:user._id},
{$set:
{
'tasks' : user.tasks
}
},
{multi:true}
);
} catch(e) {
print(e);
}
})
98 migrations/20130326_migrate_pets.js Normal file
@@ -0,0 +1,98 @@
/**
* Migrate old pets to new system
*/
// mongo habitrpg ./node_modules/underscore/underscore.js ./migrations/20130326_migrate_pets.js

// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

var mapping = {
bearcub: {name:'BearCub', modifier: 'Base'},
cactus: {name:'Cactus', modifier:'Base'},
dragon: {name:'Dragon', modifier:'Base'},
flyingpig: {name:'FlyingPig', modifier:'Base'},
fox: {name:'Fox', modifier:'Base'},
lioncub: {name:'LionCub', modifier:'Base'},
pandacub: {name:'PandaCub', modifier:'Base'},
tigercub: {name:'TigerCub', modifier:'Base'},
wolfBorder: {name:'Wolf', modifier:'Base'},
wolfDesert: {name:'Wolf', modifier:'Desert'},
wolfGolden: {name:'Wolf', modifier:'Golden'},
wolfRed: {name:'Wolf', modifier:'Red'},
wolfShade: {name:'Wolf', modifier:'Shade'},
wolfSkeleton: {name:'Wolf', modifier:'Skeleton'},
wolfVeteran: {name:'Wolf', modifier:'Veteran'},
wolfWhite: {name:'Wolf', modifier:'White'},
wolfZombie: {name:'Wolf', modifier:'Zombie'}
}

/**
== Old Style ==
pet: Object
icon: "Pet-Wolf-White.png"
index: 14
name: "wolfWhite"
text: "White Wolf"
value: 3
pets: Object
bearcub: true
cactus: true

== New Style ==
currentPet: Object
modifier: "Red"
name: "Wolf"
notes: "Find some Hatching Powder to sprinkle on this egg, and one day it will hatch into a loyal pet."
str: "Wolf-Red"
text: "Wolf"
value: 3
pets: Array
0: "PandaCub-Base"
1: "Wolf-Base"
*/


db.users.find().forEach(function(user){
if (!user.items || (!user.items.pets && !user.items.pet)) return;

// migrate items.pet to items.currentPet
if (!!user.items.pet) {
var mapped = mapping[user.items.pet.name];
delete user.items.pet;
user.items.currentPet = {
modifier: mapped.modifier,
name: mapped.name,
str: mapped.name + "-" + mapped.modifier,
text: '' // FIXME?
}
}

// migrate items.pets
if (!!user.items.pets) {
var newPets = [];
_.each(user.items.pets, function(val, key){
if (_.isNumber(key)) {
newPets.push(val)
//FIXME why is this happening? seems the user gets migrated already...
//throw "Error: User appears already migrated, this shouldn't be happening!"
} else {
newPets.push(mapping[key].name + "-" + mapping[key].modifier);
}
});
user.items.pets = newPets;
}

try {
db.users.update(
{_id:user._id},
{$set:
{ 'items' : user.items }
}
);
} catch(e) {
print(e);
}
})
110 migrations/20130327_apply_tokens.js Normal file
@@ -0,0 +1,110 @@
/**
* Applies backer tokens & items (this file will be updated periodically
*/

// mongo habitrpg ./node_modules/underscore/underscore.js migrations/20130327_apply_tokens.js

// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

var mapping = [
{
tier: 1,
tokens: 0,
users: []
},
{
tier: 5,
tokens: 20,
users: []
},
{
tier: 10,
tokens: 50,
users: []
},
{
tier: 15,
tokens: 100,
users: []
},
{
tier: 30,
tokens: 150,
users: []
},
{
tier: 45,
tokens: 170,
users: []
},
{
tier: 60,
tokens: 200,
users: []
},
{
tier: 70,
tokens: 240,
users: []
},
{
tier: 80,
tokens: 240,
users: []
},
{
tier: 90,
tokens: 280,
users: []
},
{
tier: 300,
tokens: 500,
users: []
},
{
tier: 800,
tokens: 500,
users: []
}
];

db.users.find().forEach(function(user){
if (!user._id) return;

var possibleUserIds = [user._id];
if (!!user.local) {
if (!!user.local.username) possibleUserIds.push(user.local.username);
if (!!user.local.email) possibleUserIds.push(user.local.email);
}

_.each(mapping, function(tier){
var userInTier = !_.isEmpty(_.intersection(tier.users, possibleUserIds));
if (userInTier) {
var tokenInc = 0,
backer = user.backer || {};
if (!backer.tokensApplied) {
tokenInc = tier.tokens;
backer.tokensApplied = true;
}
backer.tier = tier.tier;

try {
db.users.update(
{_id:user._id},
{
$set: { backer: backer, 'flags.ads': 'hide' },
$inc: { balance: (tokenInc/4) }
}
);
} catch(e) {
print(e);
}
}
})

})
22 migrations/20130503_max_gear_achievement.js Normal file
@@ -0,0 +1,22 @@
/**
* For users who already have max gear, they earned the achievement
*/
// mongo habitrpg ./node_modules/underscore/underscore.js ./migrations/20130503_max_gear_achievement.js
db.users.find().forEach(function(user){
var items = user.items;
if (!items) { return; }
if ( parseInt(items.armor) == 5 &&
parseInt(items.head) == 5 &&
parseInt(items.shield) == 5 &&
parseInt(items.weapon) == 6) {

try {
db.users.update(
{_id:user._id},
{$set: {'achievements.ultimateGear':true}}
);
} catch(e) {
print(e);
}
}
})
12 migrations/20130507_fix_broken_tags.js Normal file
@@ -0,0 +1,12 @@
/**
* users getting broken tags when they try to edit the first blank tag on accident
*
* mongo habitrpg ./node_modules/underscore/underscore.js ./migrations/20130507_fix_broken_tags.js
*/

db.users.find().forEach(function(user){
if(!_.isArray(user.tags)) {
db.users.update({_id:user._id}, {$set:{tags:[]}});
}

})
1 migrations/20130508_add_backer_pets.js Normal file
@@ -0,0 +1 @@
db.users.update({'backer.tier':{$gte:80}}, {$push:{'items.pets':'Wolf-Cerberus'}}, {multi:true});
41 migrations/20130508_fix_duff_party_subscriptions.js Normal file
@@ -0,0 +1,41 @@
/**
* 745612d and fedc5b6 added a db-subscription optimization to the initial subscribe.
* However, it requires the user only be to one party. That should be the case anyway, but user.party.current was letting
* us look past the fact that a user was erroneously subscribed to multiple parties. This fixes
*
* mongo habitrpg ./node_modules/underscore/underscore.js ./migrations/20130508_fix_duff_party_subscriptions.js
*/

// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

// since our primary subscription will first hit parties now, we *definitely* need an index there
db.parties.ensureIndex( { 'members': 1}, {background: true} );

db.parties.find().forEach(function(party){

if(!party.members) {
return db.parties.remove({_id:party._id});
}

// Find all members
db.users.find( {_id: {$in:party.members} }, {_id:1,party:1} ).forEach(function(user){
// user somehow is subscribed to this party in the background, but they're it's not their primary party
if (user.party && user.party.current !== party._id) {
var i = party.members.indexOf(user._id);
party.members.splice(i, 1);
}

// if after we remove the user, the party is empty - delete this party
if (_.isEmpty(party.members)) {
db.parties.remove({_id:party._id});

// else just set it
} else {
db.parties.update({_id:party._id}, {$set:{members:party.members}});
}
})
})
48 migrations/20130518_setup_groups.js Normal file
@@ -0,0 +1,48 @@
/**
* In adding the Guilds feature (which supports the Challenges feature), we are consolidating parties and guilds
* into one collection: groups, with group.type either 'party' or 'guild'. We are also creating the 'habitrpg' guild,
* which everyone is auto-subscribed to, and moving tavern chat into that guild
*
* mongo habitrpg ./node_modules/lodash/lodash.js ./migrations/20130518_setup_groups.js
*/

/**
* TODO
* 1) rename collection parties => groups
* 2) add group.type = 'party' for each current group
* 3) create habitrpg group, .type='guild'
* 4) move tavern.chat.chat into habitrpg guild
* 5) subscribe everyone to habitrpg (be sure to set that for default user too!)
*/

db.parties.renameCollection('groups',true);
//db.parties.dropCollection(); // doesn't seem to do this step during rename...
//db.parties.ensureIndex( { 'members': 1, 'background': 1} );

db.groups.update({}, {$set:{type:'party'}}, {multi:true});

//migrate invitation mechanisms
db.users.update(
{},
{
$remove:{party:1},
$set:{invitations:{party:null,guilds:[]}}
},
{multi:1}
);

tavern = db.tavern.findOne();
db.tavern.drop();

//TODO make as a callback of previous, or make sure group.type is still 'guild' for habitrpg in the end
db.groups.insert({
_id: "habitrpg",
leader: '9',
type: 'guild',
name: "HabitRPG",
chat: tavern.messages,
info: {
blurb: '',
websites: []
}
});
31 migrations/20130602_survey_rewards.js Normal file
@@ -0,0 +1,31 @@
//mongo habitrpg ./node_modules/lodash/lodash.js migrations/20130602_survey_rewards.js

// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

var members = []
members = _.uniq(members);

var query = {
_id: {$exists:1},
$or:[
{_id: {$in: members}},
//{'profile.name': {$in: members}},
{'auth.facebook.name': {$in: members}},
{'auth.local.username': {$in: members}},
{'auth.local.email': {$in: members}}
]
};

print(db.users.count(query));

db.users.update(query,
{
$set: { 'achievements.helpedHabit': true },
$inc: { balance: 2.5 }
},
{multi:true}
)
9 migrations/20130612_survey_rewards_individual.js Normal file
@@ -0,0 +1,9 @@
//mongo habitrpg migrations/20130612_survey_rewards_individual.js

var query = {_id: ""};

db.users.update(query,
{
$set: { 'achievements.helpedHabit': true },
$inc: { balance: 2.5 }
})
4 migrations/20130615_add_extra_indexes.js Normal file
@@ -0,0 +1,4 @@
db.users.ensureIndex( { _id: 1, apiToken: 1 }, {background: true} )
db.groups.ensureIndex( { members: 1 }, {background: true} )
db.groups.ensureIndex( { type: 1 }, {background: true} )
db.groups.ensureIndex( { type: 1, privacy: 1 }, {background: true} )
16 migrations/20130908_cleanup_corrupt_tags.js Normal file
@@ -0,0 +1,16 @@
//mongo habitrpg ./node_modules/lodash/lodash.js migrations/20130908_cleanup_corrupt_tags.js

// Racer was notorious for adding duplicates, randomly deleting documents, etc. Once we pull the plug on old.habit,
// run this migration to cleanup all the corruption

db.users.find().forEach(function(user){
user.tags = _.filter(user.tags, (function(t) {
return !!t ? t.id : false;
}));

try {
db.users.update({_id:user._id}, {$set:{tags:user.tags}});
} catch(e) {
print(e);
}
})
57 migrations/20130908_cleanup_derby_corruption.js Normal file
@@ -0,0 +1,57 @@
//mongo habitrpg ./node_modules/lodash/lodash.js migrations/20130908_cleanup_derby_corruption.js

// Racer was notorious for adding duplicates, randomly deleting documents, etc. Once we pull the plug on old.habit,
// run this migration to cleanup all the corruption

// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

db.users.find().forEach(function(user){

// remove corrupt tasks, which will either be null-value or no id
user.tasks = _.reduce(user.tasks, function(m,task,k) {
if (!task || !task.id) return m;
if (isNaN(+task.value)) task.value = 0;
m[k] = task;
return m;
}, {});

// fix NaN stats
_.each(user.stats, function(v,k) {
if (!v || isNaN(+v)) user.stats[k] = 0;
return true;
});

// remove duplicates, restore ghost tasks
['habit', 'daily', 'todo', 'reward'].forEach(function(type) {
var idList = user[type + "Ids"];
var taskIds = _.pluck(_.where(user.tasks, {type: type}), 'id');
var union = _.union(idList, taskIds);
var preened = _.filter(union, function(id) {
return id && _.contains(taskIds, id);
});
if (!_.isEqual(idList, preened)) {
user[type + "Ids"] = preened;
}
});

// temporarily remove broken eggs. we'll need to write a migration script to grant gems for and remove these instead
if (user.items && user.items.eggs) {
user.items.eggs = _.filter(user.items.eggs,function(egg){
if (_.isString(egg)) {
user.balance += 0.75; // give them 3 gems for each broken egg
} else {
return true;
}
})
}

try {
db.users.update({_id:user._id}, user);
} catch(e) {
print(e);
}
})
@@ -8,7 +8,7 @@
* If we experience any troubles with removed staging users, come back to a snapshot and restore accounts. This will
* give a peak into possible conflict accounts:
*/
/* db.users.count({
/*db.users.count({
"auth.local": {$exists: false},
"auth.facebook": {$exists: false},
"history.exp.5": {$exists: 1},
@@ -22,9 +22,9 @@
* in we'll be using localStorage anyway instead of creating a new database record
*/
db.users.remove({
// Un-registered users
'auth.local': {$exists: false},
'auth.facebook': {$exists: false},
// Un-registered users
"auth.local": {$exists: false},
"auth.facebook": {$exists: false}
});

/**
@@ -32,6 +32,6 @@ db.users.remove({
* Another vestige of Racer. Empty parties shouldn't be being created anymore in the new site
*/
db.groups.remove({
type: 'party',
$where: 'return this.members.length === 0',
'type': 'party',
$where: "return this.members.length === 0"
});
5 migrations/20131022_purchased_and_newStuff.js Normal file
@@ -0,0 +1,5 @@
db.users.find().forEach(function(user){
if (!user.purchased) user.purchased = {hair: {}, skin: {}};
user.purchased.ads = user.flags && !!user.flags.ads;
db.users.update({_id:user._id}, {$set:{'purchased': user.purchased, 'flags.newStuff': true}, $unset: {'flags.ads':1}});
});
12 migrations/20131022_restore_ads.js Normal file
@@ -0,0 +1,12 @@
// node .migrations/20131022_restore_ads.js
var mongo = require('mongoskin');
var _ = require('lodash');
var dbBackup = mongo.db('localhost:27017/habitrpg?auto_reconnect');
var dbLive = mongo.db('localhost:27017/habitrpg2?auto_reconnect');
var count = 89474;
dbBackup.collection('users').findEach({$or: [{'flags.ads':'show'}, {'flags.ads': null}]}, {batchSize:10}, function(err, item) {
if (err) return console.error({err:err});
if (!item || !item._id) return console.error('blank user');
dbLive.collection('users').update({_id:item._id}, {$set:{'purchased.ads':false}, $unset: {'flags.ads': 1}});
if (--count <= 0) console.log("DONE!");
});
132 migrations/20131028_task_subdocs_tags_invites.js Normal file
@@ -0,0 +1,132 @@
// mongo habitrpg ./node_modules/lodash/lodash.js ./migrations/20131028_task_subdocs_tags_invites.js

// TODO it might be better we just find() and save() all user objects using mongoose, and rely on our defined pre('save')
// and default values to "migrate" users. This way we can make sure those parts are working properly too
// @see http://stackoverflow.com/questions/14867697/mongoose-full-collection-scan
//Also, what do we think of a Mongoose Migration module? something like https://github.com/madhums/mongoose-migrate

// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

db.users.find().forEach(function(user){

  // Add invites to groups
  // -------------------------
  if(user.invitations){
    if(user.invitations.party){
      db.groups.update({_id: user.invitations.party.id}, {$addToSet:{invites:user._id}});
    }

    if(user.invitations.guilds){
      _.each(user.invitations.guilds, function(guild){
        db.groups.update({_id: guild.id}, {$addToSet:{invites:user._id}});
      });
    }
  }

  // Cleanup broken tags
  // -------------------------
  _.each(user.tasks, function(task){
    _.each(task.tags, function(val, key){
      _.each(user.tags, function(tag){
        if(key == tag.id) delete task.tags[key];
      });
    });
  });

  // Fix corrupt dates
  // -------------------------
  user.lastCron = new Date(user.lastCron);
  if (user.lastCron == 'Invalid Date') user.lastCron = new Date();
  if (user.auth) { // what to do with !auth?
    _.defaults(user.auth, {timestamps: {created:undefined, loggedin: undefined}});
    _.defaults(user.auth.timestamps, {created: new Date(user.lastCron), loggedin: new Date(user.lastCron)});
  }

  // Fix missing history
  // -------------------------
  _.defaults(user, {history:{}});
  _.defaults(user.history,{exp:[], todos:[]});

  // Add username
  // -------------------------
  if (!user.profile) user.profile = {name:undefined};
  if (_.isEmpty(user.profile.name) && user.auth) {
    var fb = user.auth.facebook;
    user.profile.name =
      (user.auth.local && user.auth.local.username) ||
      (fb && (fb.displayName || fb.name || fb.username || (fb.first_name && fb.first_name + ' ' + fb.last_name))) ||
      'Anonymous';
  }

  // Migrate to TaskSchema Sub-Docs!
  // -------------------------
  if (!user.tasks) {
    // So evidentaly users before 02/2013 were ALREADY setup based on habits[], dailys[], etcs... I don't remember our schema
    // ever being that way... Anyway, print busted users here (they don't have tasks, but also don't have the right schema)
    if (!user.habits || !user.dailys || !user.todos || !user.rewards) {
      print(user._id);
    }
  } else {
    _.each(['habit', 'daily', 'todo', 'reward'], function(type) {
      // we use _.transform instead of a simple _.where in order to maintain sort-order
      user[type + "s"] = _.reduce(user[type + "Ids"], function(m, tid) {
        var task = user.tasks[tid],
          newTask = {};
        if (!task) return m; // remove null tasks

        // Cleanup tasks for TaskSchema
        newTask._id = newTask.id = task.id;
        newTask.text = (_.isString(task.text)) ? task.text : '';
        if (_.isString(task.notes)) newTask.notes = task.notes;
        newTask.tags = (_.isObject(task.tags)) ? task.tags : {};
        newTask.type = (_.isString(task.type)) ? task.type : 'habit';
        newTask.value = (_.isNumber(task.value)) ? task.value : 0;
        newTask.priority = (_.isString(task.priority)) ? task.priority : '!';
        switch (newTask.type) {
          case 'habit':
            newTask.up = (_.isBoolean(task.up)) ? task.up : true;
            newTask.down = (_.isBoolean(task.down)) ? task.down : true;
            newTask.history = (_.isArray(task.history)) ? task.history : [];
            break;
          case 'daily':
            newTask.repeat = (_.isObject(task.repeat)) ? task.repeat : {m:1, t:1, w:1, th:1, f:1, s:1, su:1};
            newTask.streak = (_.isNumber(task.streak)) ? task.streak : 0;
            newTask.completed = (_.isBoolean(task.completed)) ? task.completed : false;
            newTask.history = (_.isArray(task.history)) ? task.history : [];
            break;
          case 'todo':
            newTask.completed = (_.isBoolean(task.completed)) ? task.completed : false;
            break;
        }

        m.push(newTask);
        return m;
      }, []);
      delete user[type + 'Ids'];
    });
    delete user.tasks;
  }

  try {
    db.users.update({_id:user._id}, user);
  } catch(e) {
    print(e);
  }
});

// Remove old groups.*.challenges, they're not compatible with the new system, set member counts
// -------------------------
db.groups.find().forEach(function(group){
  db.groups.update({_id:group._id}, {
    $set:{memberCount: _.size(group.members)},
    $pull:{challenges:1}
  })
});

// HabitRPG => Tavern
// -------------------------
db.groups.update({_id:'habitrpg'}, {$set:{name:'Tavern'}});
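An aside, not part of the diff above: the TODO at the top of this migration floats a mongoose-based alternative, loading each user through the model so that pre('save') hooks and schema defaults do the migrating. A minimal sketch of that idea follows; the model path, connection string, and use of the mongoose 3.x query-stream API are assumptions, not code from this repository.

// Hypothetical sketch of the find()/save() approach suggested by the TODO above; not part of the diff.
var mongoose = require('mongoose');
require('./website/src/models/user'); // hypothetical path; registers the User schema with mongoose
var User = mongoose.model('User');

mongoose.connect('mongodb://localhost/habitrpg', function(err){
  if (err) throw err;
  var stream = User.find().stream(); // mongoose 3.x query stream
  stream.on('data', function(user){
    stream.pause();
    user.save(function(saveErr){ // pre('save') hooks and schema defaults perform the "migration"
      if (saveErr) console.error(user._id, saveErr);
      stream.resume();
    });
  });
  stream.on('close', function(){ mongoose.disconnect(); });
});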
25 migrations/20131102_restore_task_ids.js Normal file
@@ -0,0 +1,25 @@
// mongo habitrpg ./node_modules/lodash/lodash.js ./migrations/20131028_task_subdocs_tags_invites.js

db.challenges.find().forEach(function(chal){
  _.each(chal.habits.concat(chal.dailys).concat(chal.todos).concat(chal.rewards), function(task){
    task.id = task.id || task._id;
  })
  try {
    db.challenges.update({_id:chal._id}, chal);
    db.groups.update({_id:chal.group}, {$addToSet:{challenges:chal._id}})
  } catch(e) {
    print(e);
  }
});

db.users.find().forEach(function(user){
  _.each(user.habits.concat(user.dailys).concat(user.todos).concat(user.rewards), function(task){
    task.id = task.id || task._id;
  })
  try {
    db.users.update({_id:user._id}, user);
  } catch(e) {
    print(e);
  }
});
7 migrations/20131104_remove_invalid_dues.js Normal file
@@ -0,0 +1,7 @@
db.users.find({},{todos:1}).forEach(function(user){
  _.each(user.todos, function(task){
    if (moment(task.date).toDate() == 'Invalid Date')
      task.date = moment().format('MM/DD/YYYY');
  })
  db.users.update({_id:user._id}, {$set:{todos: user.todos}});
});
60 migrations/20131104_restore_lost_task_data.js Normal file
@@ -0,0 +1,60 @@
// node .migrations/20131104_restore_lost_task_data.js

/**
 * After the great challenges migration, quite a few things got inadvertently dropped from tasks since their
 * schemas became more strict. See conversation at https://github.com/HabitRPG/habitrpg/issues/1712 ,
 * this restores task tags, streaks, due-dates, values
 */
var mongo = require('mongoskin');
var _ = require('lodash');

// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

var backupUsers = mongo.db('localhost:27017/habitrpg_old?auto_reconnect').collection('users');
var liveUsers = mongo.db('localhost:27017/habitrpg_new?auto_reconnect').collection('users');

backupUsers.count(function(err, count){
  if (err) return console.error(err);
  backupUsers.findEach({}, {batchSize:250}, function(err, before){
    if (err) return console.error(err);
    if (!before) return console.log('!before');
    liveUsers.findById(before._id, function(err, after){
      if (err) return console.error(err);
      if (!after) {
        count--;
        return console.log(before._id + ' deleted?');
      }
      if (before._id == '9') console.log('lefnire processed');
      _.each(before.tasks, function(tBefore){
        var tAfter = _.find(after[tBefore.type+'s'], {id:tBefore.id});
        if (!tAfter) return; // task has been deleted since launch

        // Restore deleted tags
        if (!_.isEmpty(tBefore.tags) && _.isEmpty(tAfter.tags))
          tAfter.tags = tBefore.tags;
        // Except tags which are no longer available on the updated user
        _.each(tAfter.tags, function(v,k){ //value is true, key is tag.id
          if (!_.find(after.tags,{id:k})) delete tAfter.tags[k];
        })

        // Restore deleted streaks
        if (+tBefore.streak > tAfter.streak)
          tAfter.streak = +tBefore.streak;

        if (!!tBefore.date && !tAfter.date)
          tAfter.date = tBefore.date;

        // Restore deleted values
        if (+tBefore.value != 0 && tAfter.value == 0)
          tAfter.value = +tBefore.value;
      })
      after._v++;
      liveUsers.update({_id:after._id}, after);
      if (--count <= 0) console.log("DONE!");
    })
  });
});
21 migrations/20131105_remove_history_ids.js Normal file
@@ -0,0 +1,21 @@
function deleteId(h){
  delete h._id;
}

db.users.find({},{habits:1,dailys:1,history:1}).forEach(function(user){
  if (user.history) {
    _.each(['todos','exp'], function(type){
      if (user.history[type]) {
        _.each(user.history.exp, deleteId);
      }
    })
  } else {
    user.history = {exp:[],todos:[]};
  }

  _.each(['habits', 'dailys'], function(type){
    _.each(user[type].history, deleteId);
  });

  db.users.update({_id:user._id}, {$set:{history: user.history, habits: user.habits, dailys: user.dailys}});
});
18 migrations/20131107_from_backer_to_contributor.js Normal file
@@ -0,0 +1,18 @@
db.users.find({
  $or: [
    {'backer.admin':{$exists:1}},
    {'backer.contributor':{$exists:1}}
  ]
},{backer:1}).forEach(function(user){
  user.contributor = {};
  user.contributor.admin = user.backer.admin;
  delete user.backer.admin;

  // this isnt' the proper storage format, but I'm going to be going through the admin utility manually and setting things properly
  if (user.backer.contributor) {
    user.contributor.text = user.backer.contributor;
    delete user.backer.contributor;
  }

  db.users.update({_id:user._id}, {$set:{backer:user.backer, contributor:user.contributor}});
});
4 migrations/20131108_add_gems_for_contribs.js Normal file
@@ -0,0 +1,4 @@
// Increase everyone's gems per their contribution level
db.users.find({'contributor.level':{$gt:0}},{contributor:1, balance:1}).forEach(function(user){
  db.users.update({_id:user._id}, {$inc: {balance: (user.contributor.level * .5)} });
});
39 migrations/20131109_refactor_pets.js Normal file
@@ -0,0 +1,39 @@
db.users.find(
  {$where: "Array.isArray(this.items.pets) || Array.isArray(this.items.eggs) || Array.isArray(this.items.hatchingPotions)"},
  {backer: 1, items:1}
).forEach(function(user){

  if (_.isArray(user.items.pets)) {
    user.items.pets = _.reduce(user.items.pets, function(m,v){ m[v] = 5; return m;}, {});
  }

  if (!_.isString(user.items.currentPet)) {
    user.items.currentPet = user.items.currentPet ? user.items.currentPet.str : '';
  }

  if (_.isArray(user.items.eggs)) {
    user.items.eggs = _.reduce(user.items.eggs, function(m,v){
      if (!m[v.name]) m[v.name] = 0;
      m[v.name]++;
      return m;
    }, {});
  }

  if (_.isArray(user.items.hatchingPotions)) {
    user.items.hatchingPotions = _.reduce(user.items.hatchingPotions, function(m,v){
      if (!m[v]) m[v] = 0;
      m[v]++;
      return m;
    }, {});
  }

  user.items.food = {};
  user.items.mounts = {};
  user.items.currentMount = '';

  if (user.backer && user.backer.tier && user.backer.tier >= 90) {
    user.items.mounts['LionCub-Ethereal'] = true;
  }

  db.users.update({_id:user._id}, {$set:{items:user.items}});
});
15 migrations/20131111_task_NaN.js Normal file
@@ -0,0 +1,15 @@
// This migration has already been run in the past. It's vital to fix these users presently, but we need to find
// out why task values are ever getting in as NaN. My guess is API PUT /tasks/:tid routes
db.users.find({},{habits:1,dailys:1,todos:1,rewards:1}).forEach(function(user){
  _.each(['habits','dailys','todos','rewards'], function(type){
    _.each(user[type], function(task){
      task.value = +task.value;
      if (_.isNaN(task.value)) {
        task.value = 0;
        print(user._id);
      }
    })
  })

  db.users.update({_id:user._id}, {$set:{habits: user.habits, dailys: user.dailys, todos: user.todos, rewards: user.rewards}});
});
14 migrations/20131114_migrate_websites_to_blurb.js Normal file
@@ -0,0 +1,14 @@
// Migrate all users websites to the profile blurb field
db.users.find({'profile.websites':{$exists: true}}).forEach(function(user){
  db.users.update({_id: user._id}, {
    $set: {"profile.blurb": user.profile.blurb + '\n * ' + user.profile.websites.join('\n * ')},
    $unset: {'profile.websites': 1}
  })
})

db.groups.find({'websites.0':{$exists: true}}).forEach(function(group){
  db.groups.update({_id: group._id}, {
    $set: {"description": group.description + '\n * ' + group.websites.join('\n * ')},
    $unset: {websites: 1}
  })
})
10 migrations/20131115_update_gear_preferences.js Normal file
@@ -0,0 +1,10 @@
//Add defaults to show gears in all users
db.users.update(
  {},
  {$set:{
    'preferences.showWeapon': true,
    'preferences.showShield': true,
    'preferences.showArmor': true,
  }},
  {multi:true}
)
18 migrations/20131117_fix_task_types.js Normal file
@@ -0,0 +1,18 @@
// TODO figure out why this is happening in the first place

db.users.find({},{habits:1, dailys:1, todos:1, rewards:1}).forEach(function(user){
  _.each(user.habits, function(task){
    task.type = 'habit';
  })
  _.each(user.dailys, function(task){
    task.type = 'daily';
  })
  _.each(user.todos, function(task){
    task.type = 'todo';
  })
  _.each(user.rewards, function(task){
    task.type = 'reward';
  })

  db.users.update({_id:user._id}, {$set:{habits: user.habits, dailys: user.dailys, todos: user.todos, rewards: user.rewards}});
});
12 migrations/20131117_remove_undefined_pets.js Normal file
@@ -0,0 +1,12 @@
// once and for all!

db.users.find({'items.pets':{$exists:1}},{'items.pets':1}).forEach(function(user){
  _.reduce(user.items.pets, function(m,v,k){
    if (!k.indexOf('undefined')) m.push(k);
    return m;
  }, []).forEach(function(key){
    delete user.items.pets[key];
  })

  db.users.update({_id:user._id}, { $set:{'items.pets':user.items.pets} });
});
13 migrations/20131122_deleted_tags.js Normal file
@@ -0,0 +1,13 @@
// Cleanup broken tags
// -------------------------
db.users.find().forEach(function(user){
  var tasks = user.habits.concat(user.dailys).concat(user.todos).concat(user.rewards);

  _.each(tasks, function(task){
    _.each(task.tags, function(value, key){ //value is true, key is tag.id
      if (!_.find(user.tags,{id:key})) delete task.tags[key];
    });
  });

  db.users.update({_id:user._id}, user);
});
8 migrations/20131123_set_default_party_order.js Normal file
@@ -0,0 +1,8 @@
//Add default to randomize party members list
db.users.update(
  {},
  {$set:{
    'party.order': 'random',
  }},
  {multi:true}
)
5 migrations/20131126_clean_dayStart.js Normal file
@@ -0,0 +1,5 @@
db.users.find({'preferences.dayStart':{$exists:1}},{'preferences.dayStart':1}).forEach(function(user){
  var dayStart = +user.preferences.dayStart;
  dayStart = (_.isNaN(dayStart) || dayStart < 0 || dayStart > 24) ? 0 : dayStart;
  db.users.update({_id:user._id}, {$set:{'preferences.dayStart':dayStart}});
});
1 migrations/20131126_turkey_pet.js Normal file
@@ -0,0 +1 @@
db.users.update({},{$set:{'items.pets.Turkey-Base':5, 'flags.newStuff':true}}, {multi:true});
38 migrations/20131127_restore_dayStart.js Normal file
@@ -0,0 +1,38 @@
// node .migrations/20131127_restore_dayStart.js

// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

var mongo = require('mongoskin');
var _ = require('lodash');

var backupUsers = mongo.db('localhost:27017/habitrpg_old?auto_reconnect').collection('users');
var liveUsers = mongo.db('localhost:27017/habitrpg_new?auto_reconnect').collection('users');

var query = {'preferences.dayStart':{$exists:1,$ne:0}};
var select = {'preferences.dayStart': 1};

backupUsers.count(query, function(err, count){
  if (err) return console.error(err);
  backupUsers.findEach(query, select, {batchSize:20}, function(err, before){
    if (err) return console.error(err);
    if (!before) { count--; return console.log('!before'); }
    liveUsers.findById(before._id, function(err, after){
      if (err) return console.error(err);
      if (!after) { count--; return console.log(before._id + ' deleted?'); }

      var dayStart = +before.preferences.dayStart;
      if (after.preferences.dayStart == 0 && dayStart != 0){
        dayStart = (_.isNaN(dayStart) || dayStart < 0 || dayStart > 24) ? 0 : dayStart;
      } else {
        dayStart = after.preferences.dayStart;
      }

      liveUsers.update({_id:after._id}, {$inc:{_v:1}, $set:{'preferences.dayStart':dayStart}});
      if (--count <= 0) console.log("DONE!");
    })
  });
});
20 migrations/20131217_unearned_backer_gear.js Normal file
@@ -0,0 +1,20 @@
var query = {
  '$or': [
    {'items.gear.owned.weapon_special_0': true},
    {'items.gear.owned.armor_special_0': true},
    {'items.gear.owned.head_special_0': true},
    {'items.gear.owned.shield_special_0': true}
  ]
};

db.users.find(query, {'items.gear.owned':1,backer:1}).forEach(function(user){
  var owned = user.items.gear.owned;
  var tier = (user.backer && user.backer.tier) || 0;
  if (tier < 70) delete owned.weapon_special_0;
  if (tier < 45) delete owned.armor_special_0;
  if (tier < 45) delete owned.head_special_0;
  if (tier < 45) delete owned.shield_special_0;

  db.users.update({_id:user._id}, {$set:{'items.gear.owned':owned}});
});
51 migrations/20131221_restore_NaN_history.js Normal file
@@ -0,0 +1,51 @@
// node .migrations/20131221_restore_NaN_history.js

// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

/**
 * After the classes migration, users lost some history entries
 */
var mongo = require('mongoskin');
var _ = require('lodash');

var backupUsers = mongo.db('localhost:27017/habitrpg_old?auto_reconnect').collection('users');
var liveUsers = mongo.db('localhost:27017/habitrpg?auto_reconnect').collection('users');

function filterNaNs(h) {
  return h && _.isNumber(+h.value) && !_.isNaN(+h.value);
}

var fields = {history:1,habits:1,dailys:1,migration:1};
var count = 0;
liveUsers.findEach({migration: {$ne:'20131221_restore_NaN_history'}}, fields, {batchSize:500}, function(err, after){
  if (!after) err = '!after';
  if (err) {count++;return console.error(err);}

  backupUsers.findById(after._id, fields, function(err, before){
    if (err) {count++;return console.error(err);}

    _.each(['todos','exp'],function(type){
      if (!_.isEmpty(after.history[type]))
        after.history[type] = _.filter(after.history[type], filterNaNs);
      if (before && !_.isEmpty(before.history[type]))
        after.history[type] = before.history[type].concat(after.history[type]);
    })

    _.each(['habits','dailys'], function(type){
      _.each(after[type], function(t){
        t.history = _.filter(t.history, filterNaNs);
        var found = before && _.find(before[type],{id:t.id});
        if (found && found.history) t.history = found.history.concat(t.history);
      })
    })

    liveUsers.update({_id:after._id}, {$set:{history:after.history, dailys:after.dailys, habits:after.habits, migration:'20131221_restore_NaN_history'}, $inc:{_v:1}});
    //if (--count <= 0) console.log("DONE! " + after._id);
    if (++count%1000 == 0) console.log(count);
    if (after._id == '9') console.log('lefnire processed');
  })
});
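An aside, not part of the diff above: the node-run restore scripts here share a resumability pattern worth noting. Each processed document is stamped with the migration's name, and the query skips anything already stamped, so an interrupted run can simply be restarted. A stripped-down sketch of that pattern, with placeholder names and connection string:

// Minimal sketch of the resumable-migration pattern used above; names and DB are placeholders.
var mongo = require('mongoskin');
var users = mongo.db('localhost:27017/habitrpg?auto_reconnect').collection('users');
var migrationName = 'example_migration_name';

users.findEach({migration: {$ne: migrationName}}, {batchSize: 500}, function(err, user){
  if (err || !user) return; // the real scripts log errors and the end-of-cursor case
  // ...repair the user document here...
  users.update({_id: user._id}, {$set: {migration: migrationName}, $inc: {_v: 1}});
});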
38 migrations/20131225_restore_streaks.js Normal file
@@ -0,0 +1,38 @@
// node .migrations/20131225_restore_streaks.js

// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

/**
 * After the classes migration, users lost some history entries
 */
var mongo = require('mongoskin');
var _ = require('lodash');

var backupUsers = mongo.db('localhost:27017/habitrpg_old?auto_reconnect').collection('users');
var liveUsers = mongo.db('lefnire:mAdn3s5s@charlotte.mongohq.com:10015/habitrpg_large?auto_reconnect').collection('users');

var fields = {dailys:1,migration:1};
var count = 0;
liveUsers.findEach({migration: {$ne:'20131225_restore_streaks'}}, fields, {batchSize:250}, function(err, after){
  if (!after) err = '!after';
  if (err) {count++;return console.error(err);}

  backupUsers.findById(after._id, fields, function(err, before){
    if (!before) err = '!before';
    if (err) {count++;return console.error(err);}

    _.each(before.dailys,function(d){
      var found = _.find(after.dailys,{id: d.id});
      if (found && !found.streak) found.streak = d.streak;
    })

    liveUsers.update({_id:after._id}, {$set:{dailys:after.dailys, migration:'20131225_restore_streaks'}, $inc:{_v:1}});
    //if (--count <= 0) console.log("DONE! " + after._id);
    if (++count%1000 == 0) console.log(count);
    if (after._id == '9') console.log('lefnire processed');
  })
});
8 migrations/20140119_task_creation_completion_dates.js Normal file
@@ -0,0 +1,8 @@
db.users.find({},{todos:1,dailys:1,rewards:1,habits:1}).forEach(function(user){
  _.each(user.habits.concat(user.dailys).concat(user.todos).concat(user.rewards), function(t){
    t.dateCreated = t.created || new Date;
    delete t.created;
    if (t.type == 'todo' && t.completed) t.dateCompleted = new Date;
  })
  db.users.update({_id:user._id}, {$set:{habits:user.habits,dailys:user.dailys,todos:user.todos,rewards:user.rewards}});
});
1 migrations/20140130_birthdayEnd.js Normal file
@@ -0,0 +1 @@
db.users.update({},{$set:{'achievements.habitBirthday':true}},{multi:1})
12 migrations/20140130_birthdayStart.js Normal file
@@ -0,0 +1,12 @@
db.users.update({},{$set:{
  'items.food.Cake_Skeleton':1,
  'items.food.Cake_Base':1,
  'items.food.Cake_CottonCandyBlue':1,
  'items.food.Cake_CottonCandyPink':1,
  'items.food.Cake_Shade':1,
  'items.food.Cake_White':1,
  'items.food.Cake_Golden':1,
  'items.food.Cake_Zombie':1,
  'items.food.Cake_Desert':1,
  'items.food.Cake_Red':1
}},{multi:1})
3 migrations/20140220_challenge_memberCount.js Normal file
@@ -0,0 +1,3 @@
db.challenges.find({},{members:1}).forEach(function(chal){
  db.challenges.update({_id:chal._id}, {$set:{memberCount:chal.members.length}});
});
14 migrations/20140301_missing_mysteries.js Normal file
@@ -0,0 +1,14 @@
db.users.update(
  {
    'purchased.plan.dateCreated':{$gte:new Date('2014-02-22'),$lt:new Date('2014-02-29')},
    'items.gear.owned.armor_mystery_201402':null,
    'items.gear.owned.head_mystery_201402': null,
    'items.gear.owned.back_mystery_201402': null,
    'purchased.plan.mysteryItems':{$nin:['armor_mystery_201402','head_mystery_201402','back_mystery_201402']}
  },
  //{_id:1,'purchased.plan':1,'items.gear.owned':1}
  {$push: {'purchased.plan.mysteryItems':{$each:['armor_mystery_201402','head_mystery_201402','back_mystery_201402']}}},
  {multi:true}
)/*.forEach(function(user){
  printjson(user);
});*/
1 migrations/20140610_missing_backer_mount.js Normal file
@@ -0,0 +1 @@
db.users.update({'backer.tier':{$gt:69}},{$set:{'items.mounts.LionCub-Ethereal':true}},{multi:1})
11 migrations/20140712_wiped_quest_membership.js Normal file
@@ -0,0 +1,11 @@
//mongo habitrpg node_modules/lodash/lodash.js ./migrations/20140712_wiped_quest_membership.js
db.groups.find({type:'party','quest.key':{$ne:null},'quest.active':true},{quest:1}).forEach(function(group){
  var activeMembers = _.reduce(group.quest.members, function(m,v,k){
    if (v===true) m.push(k); return m;
  },[]);
  db.users.update(
    {_id:{$in: activeMembers}},
    {$set:{'party.quest.key':group.quest.key,'party.quest.completed':null}},
    {multi:true}
  );
});
13 migrations/20140803_remove_undefined_notifications.js Normal file
@@ -0,0 +1,13 @@
var _ = require('lodash');

db.users.find({}).forEach(function(user){
  var newNewMessages = {};

  _.each(user.newMessages, function(val, key){
    if(key != "undefined"){
      newNewMessages[key] = val;
    };
  });

  db.users.update({_id: user._id}, {$set: {'newMessages': newNewMessages}});
});
@@ -0,0 +1,51 @@
// node .migrations/20140823_remove_undefined_and_false_notifications.js

var migrationName = '20140823_remove_undefined_and_false_notifications';

var authorName = 'Alys'; // in case script author needs to know when their ...
var authorUuid = 'd904bd62-da08-416b-a816-ba797c9ee265'; //... own data is done

// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

/**
 * https://github.com/HabitRPG/habitrpg/pull/3907
 */

var mongo = require('mongoskin');
var _ = require('lodash');

// XXX @lefnire, choose wisely:
// var liveUsers = mongo.db('lefnire:mAdn3s5s@charlotte.mongohq.com:10015/habitrpg_large?auto_reconnect').collection('users');
// var liveUsers = mongo.db('localhost:27017/habitrpg_old?auto_reconnect').collection('users');

// For local testing by script author:
// var liveUsers = mongo.db('localhost:27017/habitrpg?auto_reconnect').collection('users');


var fields = {migration:1,newMessages:1};
var progressCount = 1000;
// var progressCount = 1;
var count = 0;
liveUsers.findEach({migration: {$ne:migrationName}}, fields, {batchSize:250}, function(err, user){
  count++;
  if (!user) err = '!user';
  if (err) {return console.error(err);}

  var newNewMessages = {};
  _.each(user.newMessages,function(val,key){
    // console.log(key + " " + val.name);
    if(key != "undefined" && val['value']){
      newNewMessages[key] = val;
    }
  })

  liveUsers.update({_id:user._id}, {$set:{newMessages:newNewMessages, migration:migrationName}, $inc:{_v:1}});

  if (count%progressCount == 0) console.log(count + ' ' + user._id);
  if (user._id == '9') console.log('lefnire processed');
  if (user._id == authorUuid) console.log(authorName + ' processed');
});
81 migrations/20140829_change_headAccessory_to_eyewear.js Normal file
@@ -0,0 +1,81 @@
// node .migrations/20140829_change_headAccessory_to_eyewear.js

var migrationName = '20140829_change_headAccessory_to_eyewear';
var authorName = 'Alys'; // in case script author needs to know when their ...
var authorUuid = 'd904bd62-da08-416b-a816-ba797c9ee265'; //... own data is done

// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

/**
 * https://github.com/HabitRPG/habitrpg/issues/3645
 */
var mongo = require('mongoskin');
var _ = require('lodash');
var liveUsers = mongo.db('localhost:27017/habitrpg2?auto_reconnect').collection('users');

var fields = {'migration':1,
  'items.gear.costume.headAccessory':1,
  'items.gear.equipped.headAccessory':1,
  'items.gear.owned.headAccessory_special_wondercon_black':1,
  'items.gear.owned.headAccessory_special_wondercon_red':1,
  'items.gear.owned.headAccessory_special_summerRogue':1,
  'items.gear.owned.headAccessory_special_summerWarrior':1
};

var progressCount = 1000;
var count = 0;
liveUsers.findEach({ $and: [
  { migration: {$ne:migrationName} },
  { $or: [
    {'items.gear.owned.headAccessory_special_summerRogue': {'$exists':true}},
    {'items.gear.owned.headAccessory_special_summerWarrior':{'$exists':true}},
    {'items.gear.owned.headAccessory_special_wondercon_red':{'$exists':true}},
    {'items.gear.owned.headAccessory_special_wondercon_black':{'$exists':true}}
  ]}
]}, fields, {batchSize:250}, function(err, user){
  count++;
  if (!user) err = '!user';
  if (err) {return console.error(err);}

  var set = {'migration': migrationName};
  var unset = {};

  var oldToNew = {
    'headAccessory_special_summerRogue': 'eyewear_special_summerRogue',
    'headAccessory_special_summerWarrior': 'eyewear_special_summerWarrior',
    'headAccessory_special_wondercon_red': 'eyewear_special_wondercon_red',
    'headAccessory_special_wondercon_black':'eyewear_special_wondercon_black'
  };

  // items.gear.costume, items.gear.equipped:
  _.each(['costume','equipped'],function(type){
    _.each(oldToNew,function(newName,oldName){
      if (user.items.gear[type].headAccessory === oldName) {
        unset['items.gear.'+type+'.headAccessory'] = "";
        set['items.gear.'+type+'.eyewear'] = newName;
      }
    });
  });

  // items.gear.owned:
  _.each(oldToNew,function(newName,oldName){
    if (oldName in user.items.gear.owned) {
      unset['items.gear.owned.'+oldName] = "";
      set['items.gear.owned.'+newName] = user.items.gear.owned[oldName];
    }
  });

  //console.log(JSON.stringify(user, null, " "));
  //console.log("set: " + JSON.stringify(set, null, " "));
  //console.log("unset: " + JSON.stringify(unset, null, " "));

  liveUsers.update({_id:user._id}, {$set:set, $unset:unset, $inc:{_v:1}});

  if (count%progressCount == 0) console.log(count + ' ' + user._id);
  if (user._id == '9') console.log('lefnire processed');
  if (user._id == authorUuid) console.log(authorName + ' processed');
});
131 migrations/20140831_increase_gems_for_previous_contributions.js Normal file
@@ -0,0 +1,131 @@
// IMPORTANT:
//
// run like this to capture all output:
//
// node 20140831_increase_gems_for_previous_contributions.js > 20140831_increase_gems_for_previous_contributions_output.txt

// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

var migrationName = '20140831_increase_gems_for_previous_contributions';

/**
 * https://github.com/HabitRPG/habitrpg/issues/3933
 * Increase Number of Gems for Contributors
 * author: Alys (d904bd62-da08-416b-a816-ba797c9ee265)
 *
 * Increase everyone's gems per their contribution level.
 * Originally they were given 2 gems per tier.
 * Now they are given 3 gems per tier for tiers 1,2,3
 * and 4 gems per tier for tiers 4,5,6,7
 * So that means an EXTRA 1 for tier 1,
 * 2 for tier 2,
 * 3 for tier 3,
 * 5 for tier 4,
 * 7 for tier 5,
 * 9 for tier 6,
 * 11 for tier 7,
 * 11 for tier 8 (moderators = tier 7 + admin privileges),
 * none for tier 9 (staff)
 */

var mongo = require('mongoskin');
var _ = require('lodash');


var dbUsers = mongo.db('localhost:27017/habitrpg?auto_reconnect').collection('users');


var query = {
  'contributor.level': {$gt: 0, $lt: 9},
  'migration': {$ne: migrationName}
};

var fields = {
  'migration':1,
  'contributor.level':1,
  'balance':1
};

var userResults = {}; // each key is a UUID, each value is a string
                      // describing what changed for that user

console.warn('Updating users...');
var progressCount = 50;
var count = 0;
dbUsers.findEach(query, fields, function(err, user) {
  if (err) { return exiting(1, 'ERROR! ' + err); }
  if (!user) {
    console.warn('All users found. Fetching final balances...');
    return fetchFinalBalances();
  }
  count++;

  var set = {'migration': migrationName};

  var tier = user.contributor.level;
  var extraGems = tier; // tiers 1,2,3
  if (tier > 3) { extraGems = 3 + (tier - 3) * 2; }
  if (tier == 8) { extraGems = 11; }
  extraBalance = extraGems / 4;
  set['balance'] = user.balance + extraBalance;

  // Capture current state of user:
  userResults[user._id] =
    user._id + ' ' + ':\n' +
    ' contrib tier : ' + tier + '\n' +
    ' balance before : ' + user.balance + '\n' +
    ' balance (gems) added : ' + extraBalance + ' (' +
    extraGems + ')' + '\n' +
    ' expected balance after: ' + (user.balance + extraBalance) + '\n';

  // Update user:
  dbUsers.update({_id:user._id}, {$set:set, $inc:{_v:1}});
  if (count%progressCount == 0) console.warn(count + ' ' + user._id);
});


function fetchFinalBalances() {
  var query = {_id: {$in: Object.keys(userResults)}};
  var fields = {
    'balance':1,
  };

  var count1 = 0;
  dbUsers.findEach(query, fields, function(err, user) {
    if (err) { return exiting(1, 'ERROR! ' + err); }
    if (!user) {
      console.warn('All final balances found.');
      return displayData();
    }
    count1++;
    userResults[user._id] = userResults[user._id] +
      user._id + ' ' + ':\n' +
      ' actual balance after : ' + user.balance + '\n';
    if (count1%progressCount == 0) console.warn(count1 + ' ' + user._id);
  });
}


function displayData() {
  _.each(userResults, function(text, uuid) {
    console.log(text); // text contains uuid
  });
  console.log('\n' + count +
    ' users processed (should be roughly 335 according to the Hall)\n');
  return exiting(0);
}


function exiting(code, msg) {
  code = code || 0; // 0 = success
  if (code && !msg) { msg = 'ERROR!'; }
  if (msg) {
    if (code) { console.error(msg); }
    else { console.log( msg); }
  }
  process.exit(code);
}
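An aside, not part of the diff above: the tier-to-gems arithmetic in this script, pulled out as a worked example. Balance is stored in blocks of four gems, which is why the script divides the extra gems by 4 before adding to user.balance.

// Worked example of the extra-gem formula above (editor sketch, not in the repository).
function extraGemsFor(tier) {
  var extraGems = tier;                             // tiers 1,2,3 get 1,2,3 extra gems
  if (tier > 3) { extraGems = 3 + (tier - 3) * 2; } // tiers 4..7 get 5,7,9,11
  if (tier == 8) { extraGems = 11; }                // tier 8 (moderators) matches tier 7
  return extraGems;
}

console.log(extraGemsFor(5));     // 7 extra gems for a tier-5 contributor
console.log(extraGemsFor(5) / 4); // 1.75 added to user.balance (balance = gems / 4)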
79 migrations/20140914_upgrade_admin_contrib_tiers.js Normal file
@@ -0,0 +1,79 @@
var migrationName = '20140914_upgrade_admin_contrib_tiers';
var authorName = 'Alys'; // in case script author needs to know when their ...
var authorUuid = 'd904bd62-da08-416b-a816-ba797c9ee265'; //... own data is done

/**
 * https://github.com/HabitRPG/habitrpg/issues/3801
 * Convert Tier 8 contributors to Tier 9 (staff) (all current Tier 8s are admins).
 * Convert Tier 7 contributors with admin flag to Tier 8 (moderators).
 */

// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

var mongo = require('mongoskin');
var _ = require('lodash');

var dbUsers = mongo.db('localhost:27017/habitrpg?auto_reconnect').collection('users');

var query =
  { 'contributor.level':{$gte:7}, 'contributor.admin':true, 'migration': {$ne: migrationName} };

var fields = {'migration':1,
  'contributor.admin':1,
  'contributor.level':1,
  'auth.local.username':1,
  'profile.name':1,
};

var userResults = {}; // each key is a UUID, each value is a username;
                      // contains only the users changed

console.warn('Updating users...');
var progressCount = 1000;
var count = 0;
dbUsers.findEach(query, fields, {batchSize:250}, function(err, user) {
  if (err) { return exiting(1, 'ERROR! ' + err); }
  if (!user) {
    console.warn('All appropriate users found and modified.');
    return displayData();
  }
  count++;

  var set = {'migration': migrationName};
  var inc = {'contributor.level':1, _v:1};

  userResults[user._id] = user.profile.name;

  dbUsers.update({_id:user._id}, {$set:set, $inc:inc});

  if (count%progressCount == 0) console.warn(count + ' ' + user._id);
  if (user._id == authorUuid) console.warn(authorName + ' processed');
  if (user._id == '9' ) console.warn('lefnire' + ' processed');
});


function displayData() {
  console.log('users modified:');
  _.each(userResults, function(name, uuid) {
    console.log(name);
  });
  console.warn('\n' + count +
    ' users processed (should be 11 according to the Hall)\n');
  return exiting(0);
}


function exiting(code, msg) {
  code = code || 0; // 0 = success
  if (code && !msg) { msg = 'ERROR!'; }
  if (msg) {
    if (code) { console.error(msg); }
    else { console.log( msg); }
  }
  process.exit(code);
}
18 migrations/20140922_free_candy.js Normal file
@@ -0,0 +1,18 @@
db.users.update(
  {},
  {
    $inc: {
      'items.food.Candy_Base':1,
      'items.food.Candy_CottonCandyBlue':1,
      'items.food.Candy_CottonCandyPink':1,
      'items.food.Candy_Desert':1,
      'items.food.Candy_Golden':1,
      'items.food.Candy_Red':1,
      'items.food.Candy_Shade':1,
      'items.food.Candy_Skeleton':1,
      'items.food.Candy_White':1,
      'items.food.Candy_Zombie':1
    }
  },
  {multi:1}
);
1 migrations/20141006_jackolantern_pet.js Normal file
@@ -0,0 +1 @@
db.users.update({_id:'9'},{$set:{'items.pets.JackOLantern-Base':5, 'flags.newStuff':true}}, {multi:true});
20 migrations/20141117_consecutive_months.js Normal file
@@ -0,0 +1,20 @@
// IMPORTANT NOTE: this migration was written when we were using version 3 of lodash.
// We've now upgraded to lodash v4 but the code used in this migration has not been
// adapted to work with it. Before this migration is used again any lodash method should
// be checked for compatibility against the v4 changelog and changed if necessary.
// https://github.com/lodash/lodash/wiki/Changelog#v400

// require moment, lodash
db.users.find(
  {'purchased.plan.customerId':{$ne:null}},
  {_id:1, 'purchased.plan':1}
).forEach(function(user){
  var p = user.purchased.plan
    , latestMonth = p.dateTerminated || new Date() // their last sub date, or on-going (now)
    , count = moment(latestMonth).diff(p.dateCreated, 'months');
  db.users.update({_id: user._id}, {$set: {
    'purchased.plan.consecutive.count': count,
    'purchased.plan.consecutive.gemCapExtra': _.min([ Math.floor(count/3)*5, 25 ]),
    'purchased.plan.consecutive.trinkets': Math.floor(count/3)
  }});
});
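An aside, not part of the diff above: a worked example of the consecutive-subscription arithmetic in this script. The month count is the difference between the subscription's start date and its termination date (or now), and every full block of three months earns one trinket and five gems of extra gem cap, with the cap bonus limited to 25.

// Editor sketch of the consecutive-months math above, for a hypothetical 14-month subscription.
var count = 14;                                            // months subscribed
var trinkets = Math.floor(count / 3);                      // 4
var gemCapExtra = Math.min(Math.floor(count / 3) * 5, 25); // 20 (capped at 25)
console.log(count, trinkets, gemCapExtra);                 // 14 4 20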
Some files were not shown because too many files have changed in this diff.