diff --git a/Dockerfile b/Dockerfile index 2f617adbf..ededbc988 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,18 +4,19 @@ FROM mhart/alpine-node:5.10.1 RUN apk update && apk add git curl wget bash # Java -# RUN apk update && apk add openjdk8=8.77.03-r0 +# as of 2016-06-12, running on alpine openjdk causes an UnsatisfiedLinkError crash in Java +#RUN apk update && apk add openjdk8=8.92.14-r1 # Here we install GNU libc (aka glibc) and set C.UTF-8 locale as default. -RUN ALPINE_GLIBC_BASE_URL="https://github.com/andyshinn/alpine-pkg-glibc/releases/download" && \ - ALPINE_GLIBC_PACKAGE_VERSION="2.23-r1" && \ +RUN ALPINE_GLIBC_BASE_URL="https://github.com/sgerrand/alpine-pkg-glibc/releases/download" && \ + ALPINE_GLIBC_PACKAGE_VERSION="2.23-r2" && \ ALPINE_GLIBC_BASE_PACKAGE_FILENAME="glibc-$ALPINE_GLIBC_PACKAGE_VERSION.apk" && \ ALPINE_GLIBC_BIN_PACKAGE_FILENAME="glibc-bin-$ALPINE_GLIBC_PACKAGE_VERSION.apk" && \ ALPINE_GLIBC_I18N_PACKAGE_FILENAME="glibc-i18n-$ALPINE_GLIBC_PACKAGE_VERSION.apk" && \ apk add --no-cache --virtual=build-dependencies wget ca-certificates && \ wget \ - "https://raw.githubusercontent.com/andyshinn/alpine-pkg-glibc/master/andyshinn.rsa.pub" \ - -O "/etc/apk/keys/andyshinn.rsa.pub" && \ + "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/$ALPINE_GLIBC_PACKAGE_VERSION/sgerrand.rsa.pub" \ + -O "/etc/apk/keys/sgerrand.rsa.pub" && \ wget \ "$ALPINE_GLIBC_BASE_URL/$ALPINE_GLIBC_PACKAGE_VERSION/$ALPINE_GLIBC_BASE_PACKAGE_FILENAME" \ "$ALPINE_GLIBC_BASE_URL/$ALPINE_GLIBC_PACKAGE_VERSION/$ALPINE_GLIBC_BIN_PACKAGE_FILENAME" \ @@ -25,7 +26,7 @@ RUN ALPINE_GLIBC_BASE_URL="https://github.com/andyshinn/alpine-pkg-glibc/release "$ALPINE_GLIBC_BIN_PACKAGE_FILENAME" \ "$ALPINE_GLIBC_I18N_PACKAGE_FILENAME" && \ \ - rm "/etc/apk/keys/andyshinn.rsa.pub" && \ + rm "/etc/apk/keys/sgerrand.rsa.pub" && \ /usr/glibc-compat/bin/localedef --force --inputfile POSIX --charmap UTF-8 C.UTF-8 || true && \ echo "export LANG=C.UTF-8" > /etc/profile.d/locale.sh && \ \ @@ -91,7 +92,8 @@ RUN apk add --no-cache --virtual=build-dependencies wget ca-certificates && \ rm "/tmp/"* # Maven -# RUN apk update && apk add maven=3.3.9-r0 +# as of 2016-06-12 the maven apk is still in the edge branch and not accessible by default +#RUN apk update && apk add maven=3.3.9-r0 ENV MAVEN_HOME="/usr/share/maven" ENV MAVEN_VERSION="3.3.9" RUN cd / && \ @@ -115,4 +117,4 @@ RUN npm run webpack ENV PATH /usr/src/yasp/node_modules/pm2/bin:$PATH -CMD [ "node", "deploy.js" ] \ No newline at end of file +CMD [ "npm", "start" ] \ No newline at end of file diff --git a/README.md b/README.md index 67a44209d..20c511d5e 100644 --- a/README.md +++ b/README.md @@ -20,15 +20,12 @@ Tech Stack * Storage: PostgreSQL/Redis/Cassandra * Parser: Java (powered by [clarity](https://github.com/skadistats/clarity)) -Quickstart +Quickstart (Docker) ---- -* Using Docker will let you run the code in a container with all dependencies properly installed. * Install Docker: `curl -sSL https://get.docker.com/ | sh` * Clone the repo: `git clone https://github.com/yasp-dota/yasp` * Go into the directory: `cd yasp` -* Create .env file with required config values in KEY=VALUE format (see config.js for a full listing of options) `cp .env_example .env` - * `STEAM_API_KEY` You need this in order to access the Steam Web API. - * `STEAM_USER, STEAM_PASS` The retriever requires a Steam account in order to fetch replay salts. We recommend creating a new account for this purpose (you won't be able to log into the account while the retriever is using it). 
If you don't care about getting replay salts/downloading replays then you can skip this step. +* Build the Docker container: `sudo docker build -t yasp/yasp .` * Start a new container running the image, and map your local directory into the container: `sudo docker run -v $(pwd):/usr/src/yasp -di --name yasp --net=host yasp/yasp:latest` * Start the external dependencies in separate containers. * `sudo docker run -d --name postgres --net=host postgres:9.5` @@ -37,20 +34,17 @@ Quickstart * Initialize Postgres: `sudo docker exec -i postgres psql -U postgres < sql/init.sql` * Create tables: `sudo docker exec -i postgres psql -U postgres yasp < sql/create_tables.sql` * Set up Cassandra (optional): `sudo docker exec -i cassandra cqlsh < sql/cassandra.cql` +* Create .env file with required config values in KEY=VALUE format (see config.js for a full listing of options) `cp .env_example .env` + * `STEAM_API_KEY` You need this in order to access the Steam Web API. + * `STEAM_USER, STEAM_PASS` The retriever requires a Steam account in order to fetch replay salts. We recommend creating a new account for this purpose (you won't be able to log into the account while the retriever is using it). If you don't care about getting replay salts/downloading replays then you can skip this step. * Get a terminal into the running container: `sudo docker exec -it yasp bash` -* Build inside the container: `npm run build` * Start the services you want to run: - * `pm2 start profiles/basic.json` This starts all the basic services to be able to open the site in a browser and request parses by ID (which is a useful end-to-end test). Use `profiles/everything.json` to start everything. - * `pm2 start svc/web.js --watch` This starts a specific service and watches it for changes. - * `pm2 logs web` You can use this command to inspect the output of a service. - * `pm2 delete all` Stop and remove all the services. -* Alternatively, if you have Docker Compose [installed](https://docs.docker.com/compose/install/) you can just run `docker-compose up`. - * 3 containers will be built and launched - one with postgres database, one with redis and one with web service. - * Database is inited and tables are created automatically. - * By default, minimal configuration necessairy to open the site in a browser and request parses by ID is started. This can be overridden via `docker-compose.override.yml`. - * `sudo docker exec -it yasp_web_1 bash` will give you a terminal into the running web container. + * `pm2 start profiles/basic.json` This starts all the basic services to be able to read the API and request parses (which is a useful end-to-end test). Use `profiles/everything.json` to start everything. + * Useful PM2 commands: + * `pm2 start svc/web.js --watch` This starts a specific service and watches it for changes. + * `pm2 logs web` You can use this command to inspect the output of a service. + * `pm2 delete all` Stop and remove all the services. * Useful commands - * `npm run watch`: If you want to make changes to client side JS, you will want to run the watch script in order to automatically rebuild after making changes. * `npm test` runs the full test suite. Use `mocha` for more fine-grained control over the tests you want to run. * `node tasks/updateconstants` pulls latest constants data and saves to `json` directory. * Get some starter data @@ -61,6 +55,14 @@ Quickstart * Submit a pull request. Wait for it to be reviewed and merged. * Congratulations! You're a contributor. 
+Docker Compose +---- +* Alternatively, if you have Docker Compose [installed](https://docs.docker.com/compose/install/) you can just run `docker-compose up`. + * Three containers will be built and launched: one with the postgres database, one with redis, and one with the web service. + * The database is initialized and tables are created automatically. + * By default, the minimal configuration necessary to open the site in a browser and request parses by ID is started. This can be overridden via `docker-compose.override.yml`. + * `sudo docker exec -it yasp_web_1 bash` will give you a terminal into the running web container. + Getting Help ---- * Feel free to open a new issue to ask questions/get help! diff --git a/config.js b/config.js index 658b0ab0a..beccd9af9 100644 --- a/config.js +++ b/config.js @@ -39,10 +39,13 @@ var defaults = { "PROXY_URLS": "", //comma separated list of proxy urls to use "STEAM_API_HOST": "api.steampowered.com", //the list of hosts to fetch Steam API data from "ROLE": "", //for specifying the file that should be run when deploy.js is invoked + "GROUP": "", //for specifying the group of apps that should be run when deploy.js is invoked "MMSTATS_DATA_INTERVAL": 3, //minutes between requests for MMStats data "DEFAULT_DELAY": 1000, // delay between API requests (default: 1000) "SCANNER_DELAY": 300, //delay for scanner API requests (more time-sensitive) "SCANNER_PARALLELISM": 1, //Number of simultaneous API requests to make in scanner + "MMR_PARALLELISM": 15, //number of simultaneous MMR jobs to process per retriever + "PARSER_PARALLELISM": 1, //number of simultaneous parse jobs to process "PLAYER_MATCH_LIMIT": 50000, //max results to return from player matches "BENCHMARK_RETENTION_HOURS": 1, //hours in block to retain benchmark data for percentile "MATCH_RATING_RETENTION_HOURS": 12, //hours in block to retain match rating data for percentile @@ -51,7 +54,6 @@ var defaults = { "UI_HOST": "", //The host of the UI, redirect traffic from / and /return here "ENABLE_RECAPTCHA": "", //set to enable the recaptcha on the Request page "ENABLE_ADS": "", //set to turn on ads - "ENABLE_PRO_PARSING": "", // set to parse pro matches from sequential API "ENABLE_MATCH_CACHE": "", // set to enable caching matches (Redis) "ENABLE_PLAYER_CACHE": "", // set to enable caching players (Cassandra) "ENABLE_INSERT_ALL_MATCHES": "", //set to enable inserting all matches diff --git a/deploy.js b/deploy.js index 47f0df51a..37e4de4f6 100644 --- a/deploy.js +++ b/deploy.js @@ -2,6 +2,7 @@ * Deployment entry point for the application. 
**/ var args = process.argv.slice(2); +var group = args[0] || process.env.GROUP; var cp = require('child_process'); if (process.env.PROVIDER === "gce") { @@ -12,17 +13,16 @@ if (process.env.ROLE) //if role variable is set just run that script require('./svc/' + process.env.ROLE + ".js"); } -else if (args[0]) +else if (group) { var pm2 = require('pm2'); var async = require('async'); var manifest = require('./profiles/everything.json').apps; - //if argument supplied use pm2 to run processes in that group pm2.connect(function() { async.each(manifest, function start(app, cb) { - if (args[0] === app.group) + if (group === app.group) { console.log(app.script, app.instances); pm2.start(app.script, diff --git a/java_parser/src/main/java/yasp/Main.java b/java_parser/src/main/java/yasp/Main.java index f507f938f..55dcdcca6 100644 --- a/java_parser/src/main/java/yasp/Main.java +++ b/java_parser/src/main/java/yasp/Main.java @@ -10,6 +10,7 @@ import skadistats.clarity.model.s1.GameRulesStateType; import skadistats.clarity.processor.entities.Entities; import skadistats.clarity.processor.entities.OnEntityEntered; +import skadistats.clarity.processor.entities.OnEntityLeft; import skadistats.clarity.processor.entities.UsesEntities; import skadistats.clarity.processor.gameevents.CombatLog; import skadistats.clarity.processor.gameevents.OnCombatLogEntry; @@ -70,14 +71,15 @@ private class Entry { public Integer gold_reason; public Integer xp_reason; public String valuename; - public Float stun_duration; - public Float slow_duration; + //public Float stun_duration; + //public Float slow_duration; //entity fields public Integer gold; public Integer lh; public Integer xp; public Integer x; public Integer y; + public Integer z; public Float stuns; public Integer hero_id; public Integer life_state; @@ -87,7 +89,9 @@ private class Entry { public Integer assists; public Integer denies; //public Boolean hasPredictedVictory; - + public Boolean entityleft; + public Integer ehandle; + public Entry() { } @@ -234,8 +238,8 @@ public void onCombatLogEntry(Context ctx, CombatLogEntry cle) { combatLogEntry.attackerillusion = cle.isAttackerIllusion(); combatLogEntry.targetillusion = cle.isTargetIllusion(); combatLogEntry.value = cle.getValue(); - combatLogEntry.stun_duration = cle.getStunDuration(); - combatLogEntry.slow_duration = cle.getSlowDuration(); + //combatLogEntry.stun_duration = cle.getStunDuration(); + //combatLogEntry.slow_duration = cle.getSlowDuration(); //value may be out of bounds in string table, we can only get valuename if a purchase (type 11) if (cle.getType() == DOTA_COMBATLOG_TYPES.DOTA_COMBATLOG_PURCHASE) { combatLogEntry.valuename = cle.getValueName(); @@ -255,28 +259,12 @@ public void onCombatLogEntry(Context ctx, CombatLogEntry cle) { @OnEntityEntered public void onEntityEntered(Context ctx, Entity e) { - //CDOTA_NPC_Observer_Ward - //CDOTA_NPC_Observer_Ward_TrueSight - //s1 "DT_DOTA_NPC_Observer_Ward" - //s1 "DT_DOTA_NPC_Observer_Ward_TrueSight" - boolean isObserver = e.getDtClass().getDtName().equals("CDOTA_NPC_Observer_Ward"); - boolean isSentry = e.getDtClass().getDtName().equals("CDOTA_NPC_Observer_Ward_TrueSight"); - if (isObserver || isSentry) { - //System.err.println(e); - Entry entry = new Entry(time); - Integer x = getEntityProperty(e, "CBodyComponent.m_cellX", null); - Integer y = getEntityProperty(e, "CBodyComponent.m_cellY", null); - Integer[] pos = {x, y}; - entry.type = isObserver ? 
"obs" : "sen"; - entry.key = Arrays.toString(pos); - //System.err.println(entry.key); - Integer owner = getEntityProperty(e, "m_hOwnerEntity", null); - Entity ownerEntity = ctx.getProcessor(Entities.class).getByHandle(owner); - entry.slot = ownerEntity != null ? (Integer) getEntityProperty(ownerEntity, "m_iPlayerID", null) : null; - //2/3 radiant/dire - //entry.team = e.getProperty("m_iTeamNum"); - output(entry); - } + processWardEntity(ctx, e, false); + } + + @OnEntityLeft + public void onEntityLeft(Context ctx, Entity e) { + processWardEntity(ctx, e, true); } @UsesEntities @@ -448,6 +436,45 @@ public T getEntityProperty(Entity e, String property, Integer idx) { FieldPath fp = e.getDtClass().getFieldPathForName(property); return e.getPropertyForFieldPath(fp); } + + public void processWardEntity(Context ctx, Entity e, boolean entityLeft) + { + //CDOTA_NPC_Observer_Ward + //CDOTA_NPC_Observer_Ward_TrueSight + //s1 "DT_DOTA_NPC_Observer_Ward" + //s1 "DT_DOTA_NPC_Observer_Ward_TrueSight" + boolean isObserver = e.getDtClass().getDtName().equals("CDOTA_NPC_Observer_Ward"); + boolean isSentry = e.getDtClass().getDtName().equals("CDOTA_NPC_Observer_Ward_TrueSight"); + if (isObserver || isSentry) { + //System.err.println(e); + Entry entry = new Entry(time); + Integer x = getEntityProperty(e, "CBodyComponent.m_cellX", null); + Integer y = getEntityProperty(e, "CBodyComponent.m_cellY", null); + Integer z = getEntityProperty(e, "CBodyComponent.m_cellZ", null); + Integer[] pos = {x, y}; + entry.x = x; + entry.y = y; + entry.z = z; + if (entityLeft) + { + entry.type = isObserver ? "obs_left" : "sen_left"; + } + else + { + entry.type = isObserver ? "obs" : "sen"; + } + entry.key = Arrays.toString(pos); + entry.entityleft = entityLeft; + entry.ehandle = e.getHandle(); + //System.err.println(entry.key); + Integer owner = getEntityProperty(e, "m_hOwnerEntity", null); + Entity ownerEntity = ctx.getProcessor(Entities.class).getByHandle(owner); + entry.slot = ownerEntity != null ? (Integer) getEntityProperty(ownerEntity, "m_iPlayerID", null) : null; + //2/3 radiant/dire + //entry.team = e.getProperty("m_iTeamNum"); + output(entry); + } + } public void run(String[] args) throws Exception { long tStart = System.currentTimeMillis(); diff --git a/json/navbar_pages.json b/json/navbar_pages.json index 6ddb4efd9..d57cb79fa 100644 --- a/json/navbar_pages.json +++ b/json/navbar_pages.json @@ -9,10 +9,6 @@ "benchmarks": { "name": "Benchmarks" }, - "top": { - "name": "Top", - "hide": true - }, "distributions": { "name": "Distributions" }, diff --git a/json/patch.json b/json/patch.json index 0d84d0909..29f97b544 100644 --- a/json/patch.json +++ b/json/patch.json @@ -70,5 +70,9 @@ { "name": "6.87", "date": "2016-04-26T01:00:00Z" +}, +{ + "name": "6.88", + "date": "2016-06-12T08:00:00Z" } ] diff --git a/package.json b/package.json index 80cf85db5..011261da3 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,8 @@ "kube": "kubectl get rc -o name --selector tier=backend | cut -d '/' -f2 | xargs -n1 kubectl rolling-update --image=yasp/yasp:latest", "rediskeys": "redis-cli keys '*' | cut -d':' -f1 | sort | uniq -c", "resetpicks": "redis-cli keys 'picks_*' | xargs redis-cli del", - "updateconstants": "node tasks/updateconstants" + "updateconstants": "node tasks/updateconstants", + "dockerbuild": "sudo docker build -t yasp/yasp ." 
}, "repository": { "type": "git", @@ -52,13 +53,11 @@ "font-awesome": "^4.5.0", "heatmap.js": "2.0.0", "http-proxy": "^1.13.1", - "isomorphic-fetch": "^2.2.1", "jade": "^1.11.0", "jquery": "^2.2.0", - "knex": "^0.10.0", + "knex": "^0.11.7", "moment": "^2.11.2", "multer": "^1.1.0", - "ndjson": "^1.4.3", "numeral": "^1.5.3", "passport": "0.3.2", "passport-steam": "1.0.1", @@ -92,9 +91,5 @@ "supertest": "^1.1.0", "url-loader": "^0.5.7", "webpack": "^1.12.12" - }, - "engines": { - "node": "6.0.0", - "npm": "3.8.8" } } diff --git a/processors/populate.js b/processors/populate.js index 39d61bc47..35581c816 100644 --- a/processors/populate.js +++ b/processors/populate.js @@ -89,5 +89,4 @@ function populate(e, container) break; } } - module.exports = populate; \ No newline at end of file diff --git a/processors/processExpand.js b/processors/processExpand.js index c91c15ca3..ea92cd0b5 100644 --- a/processors/processExpand.js +++ b/processors/processExpand.js @@ -226,7 +226,8 @@ function processExpand(entries, meta) //multikill e.unit = e.attackername; //add the "minimum value", as of 2016-02-06 - e.key = e.value + 2; + //remove the "minimum value", as of 2016-06-23 + e.key = e.value; e.value = 1; e.type = "multi_kills"; expand(e); @@ -236,7 +237,8 @@ function processExpand(entries, meta) //killstreak e.unit = e.attackername; //add the "minimum value", as of 2016-02-06 - e.key = e.value + 3; + //remove the "minimum value", as of 2016-06-23 + e.key = e.value; e.value = 1; e.type = "kill_streaks"; expand(e); @@ -425,25 +427,25 @@ function processExpand(entries, meta) }, "obs": function(e) { - //key is a JSON array of position data - e.key = JSON.parse(e.key); - e.posData = true; - expand(e); var e2 = JSON.parse(JSON.stringify(e)); - e2.posData = false; e2.type = "obs_log"; expand(e2); + var e3 = JSON.parse(JSON.stringify(e)); + //key is a JSON array of position data + e3.key = JSON.parse(e3.key); + e3.posData = true; + expand(e3); }, "sen": function(e) { - e.key = JSON.parse(e.key); - e.posData = true; - expand(e); var e2 = JSON.parse(JSON.stringify(e)); - e2.posData = false; e2.type = "sen_log"; expand(e2); - } + var e3 = JSON.parse(JSON.stringify(e)); + e3.key = JSON.parse(e3.key); + e3.posData = true; + expand(e3); + }, }; //define the types we want to put into each array //null means all types diff --git a/processors/processReduce.js b/processors/processReduce.js index 41493a8fa..682bf2a38 100644 --- a/processors/processReduce.js +++ b/processors/processReduce.js @@ -1,22 +1,85 @@ /** - * A processor to reduce the event stream by grouping similar events. - * NOT CURRENTLY IN PRODUCTION USE + * A processor to reduce the event stream to only logs we want to persist **/ -function processReduce(entries) +function processReduce(entries, match, meta) { - var reduceMap = {}; - //group by player_slot, type, targethero, targetillusion - for (var i = 0; i < entries.length; i++) + //for now, disable log parsing for regular matches + if (!match.doLogParse) { - //group big categories: actions, combat log damage - var e = entries[i]; - var identifier = [e.player_slot, e.type, e.key].join(":"); - e.value = e.value || 1; - //var identifier = e.type; - //e.value = 1; - reduceMap[identifier] = reduceMap[identifier] ? 
reduceMap[identifier] + e.value : e.value || 1; + return; } - //var fs = require('fs'); - //fs.writeFileSync('./output3.json', JSON.stringify(reduceMap, null , 2)); + var basicLogTypes = { + "obs": 1, + "sen": 1, + "obs_left": 1, + "sen_left": 1, + }; + var result = entries.filter(function(e) + { + if (!match.doLogParse) + { + return (e.type in basicLogTypes); + } + else + { + if (e.type === "actions") + { + return false; + } + if (e.type === "DOTA_COMBATLOG_MODIFIER_REMOVE") + { + return false; + } + if (e.type === "DOTA_COMBATLOG_DAMAGE" || e.type === "DOTA_COMBATLOG_MODIFIER_ADD" || e.type === "DOTA_COMBATLOG_HEAL") + { + if (!e.targethero || e.targetillusion) + { + return false; + } + } + if (e.type === "interval" && e.time % 60 !== 0) + { + return false; + } + if (!e.time) + { + return false; + } + return true; + } + }).map(function(e) + { + var e2 = Object.assign( + {}, e, + { + match_id: match.match_id, + attackername_slot: meta.hero_to_slot[e.attackername], + targetname_slot: meta.hero_to_slot[e.targetname], + sourcename_slot: meta.hero_to_slot[e.sourcename], + targetsourcename_slot: meta.hero_to_slot[e.targetname], + player1_slot: meta.slot_to_playerslot[e.player1], + player_slot: e.player_slot || meta.slot_to_playerslot[e.slot], + inflictor: translate(e.inflictor), + }); + delete e2.attackername; + delete e2.targetname; + delete e2.sourcename; + delete e2.targetsourcename; + return e2; + }); + /* + var count = {}; + result.forEach(function(r) + { + count[r.type] = (count[r.type] || 0) + 1; + }); + console.log(count); + */ + return result; +} + +function translate(s) +{ + return s === "dota_unknown" ? null : s; } module.exports = processReduce; \ No newline at end of file diff --git a/routes/api.js b/routes/api.js index 5c24912d1..e901914fd 100644 --- a/routes/api.js +++ b/routes/api.js @@ -134,6 +134,93 @@ module.exports = function(db, redis, cassandra) }); }); */ + /* + api.get('/match_logs/:match_id', function(req, res, cb) + { + db.raw(`SELECT * FROM match_logs WHERE match_id = ? 
ORDER BY time ASC`, [req.params.match_id]).asCallback(function(err, result) + { + if (err) + { + return cb(err); + } + res.json(result.rows); + }); + }); + */ + api.get('/pro_matches', function(req, res, cb) + { + db.raw(` + SELECT match_id, start_time, duration, ma.leagueid, name + FROM matches ma + JOIN leagues le + ON ma.leagueid = le.leagueid + WHERE ma.leagueid > 0 + ORDER BY match_id DESC + `).asCallback(function(err, result) + { + if (err) + { + return cb(err); + } + res.json(result.rows); + }); + }); + api.get('/pro_players', function(req, res, cb) + { + queries.getProPlayers(db, redis, function(err, result) + { + if (err) + { + return cb(err); + } + res.json(result); + }); + }); + api.get('/drafts', function(req, res, cb) + { + db.raw(` + SELECT pb.hero_id, + sum(case when ((pm.player_slot < 128) = m.radiant_win) then 1 else 0 end) wins, + sum(case when is_pick is true then 1 else 0 end) picks, + sum(case when is_pick is false then 1 else 0 end) bans + FROM picks_bans pb + LEFT JOIN matches m + ON pb.match_id = m.match_id + LEFT JOIN player_matches pm + ON pb.hero_id = pm.hero_id + AND pm.match_id = m.match_id + GROUP BY pb.hero_id; + `).asCallback(function(err, result) + { + if (err) + { + return cb(err); + } + res.json(result.rows); + }); + }); + api.get('/pick_order', function(req, res, cb) + { + db.raw(`SELECT hero_id, ord, count( * ) FROM picks_bans WHERE is_pick is true GROUP BY hero_id, ord;`).asCallback(function(err, result) + { + if (err) + { + return cb(err); + } + res.json(result.rows); + }); + }); + api.get('/leagues', function(req, res, cb) + { + db.raw(`SELECT * FROM leagues ORDER BY leagueid DESC`).asCallback(function(err, result) + { + if (err) + { + return cb(err); + } + res.json(result.rows); + }); + }); api.get('/distributions', function(req, res, cb) { queries.getDistributions(redis, function(err, result) @@ -145,9 +232,9 @@ module.exports = function(db, redis, cassandra) res.json(result); }); }); - api.get('/rankings/:hero_id', function(req, res, cb) + api.get('/rankings', function(req, res, cb) { - queries.getHeroRankings(db, redis, req.params.hero_id, + queries.getHeroRankings(db, redis, req.query.hero_id, {}, function(err, result) { if (err) @@ -157,11 +244,11 @@ module.exports = function(db, redis, cassandra) res.json(result); }); }); - api.get('/benchmarks/:hero_id', function(req, res, cb) + api.get('/benchmarks', function(req, res, cb) { queries.getBenchmarks(db, redis, { - hero_id: req.params.hero_id + hero_id: req.query.hero_id }, function(err, result) { if (err) diff --git a/scripts/deploy.sh b/scripts/deploy.sh index 5ef923192..a511a5d14 100644 --- a/scripts/deploy.sh +++ b/scripts/deploy.sh @@ -1,29 +1,2 @@ #!/bin/bash -#deploy -DATETIME=$(date +%s) - -if [ "$1" = "parser" ] || [[ $# -eq 0 ]]; then -gcloud compute instance-templates create parser-$DATETIME --machine-type n1-highcpu-2 --image container-vm --preemptible --boot-disk-size 10GB --boot-disk-type pd-ssd --metadata startup-script='#!/bin/bash -sudo docker run -d --name=parser --restart=always -e PROVIDER=gce -e ROLE=parser yasp/yasp:latest sh -c "node deploy.js" -sudo docker start parser -' -gcloud alpha compute rolling-updates start --group parser-group-1 --template parser-$DATETIME -fi - -if [ "$1" = "backend" ] || [[ $# -eq 0 ]]; then -gcloud compute instance-templates create backend-$DATETIME --machine-type n1-highcpu-4 --image container-vm --preemptible --boot-disk-size 10GB --boot-disk-type pd-ssd --tags "http-server" --metadata startup-script='#!/bin/bash -sudo docker run -d --name 
yasp --restart=always --net=host -e PROVIDER=gce yasp/yasp:latest sh -c "node deploy.js core" -sudo docker start yasp -' -gcloud alpha compute rolling-updates start --group backend-group-1 --template backend-$DATETIME -fi - -if [ "$1" = "web" ] || [[ $# -eq 0 ]]; then -gcloud compute instance-templates create web-$DATETIME --machine-type g1-small --image container-vm --preemptible --boot-disk-size 10GB --boot-disk-type pd-ssd --tags "http-server" --metadata startup-script='#!/bin/bash -sudo docker run -d --name=web --restart=always --net=host -e FRONTEND_PORT=80 -e PROVIDER=gce -e ROLE=web yasp/yasp:latest sh -c "node deploy.js" -sudo docker start web -' -gcloud alpha compute rolling-updates start --group web-group-1 --template web-$DATETIME --min-instance-update-time 180 -fi - -gcloud alpha compute rolling-updates list \ No newline at end of file +gcloud compute instance-groups managed list-instances $1-group-1 --format "value(NAME)" | xargs -n1 gcloud compute instance-groups managed recreate-instances $1-group-1 --instances \ No newline at end of file diff --git a/scripts/gce.sh b/scripts/gce.sh index 9e5e36d43..4c16a1d65 100644 --- a/scripts/gce.sh +++ b/scripts/gce.sh @@ -26,7 +26,7 @@ gcloud compute http-health-checks delete -q lb-check gcloud compute instance-groups managed delete -q web-group-1 gcloud compute instance-templates delete -q web-1 gcloud compute instance-templates create web-1 --machine-type g1-small --image container-vm --preemptible --boot-disk-size 10GB --boot-disk-type pd-ssd --tags "http-server" --metadata startup-script='#!/bin/bash -sudo docker run -d --name=web --restart=always --net=host -e FRONTEND_PORT=80 -e PROVIDER=gce -e ROLE=web yasp/yasp:latest sh -c "node deploy.js" +sudo docker run -d --name=web --restart=always --net=host -e FRONTEND_PORT=80 -e PROVIDER=gce -e ROLE=web yasp/yasp:latest sh -c "npm start" sudo docker start web ' gcloud compute instance-groups managed create "web-group-1" --base-instance-name "web-group-1" --template "web-1" --size "0" @@ -40,7 +40,7 @@ gcloud compute instance-groups managed set-autoscaling "web-group-1" --cool-down gcloud compute instance-groups managed delete -q backend-group-1 gcloud compute instance-templates delete -q backend-1 gcloud compute instance-templates create backend-1 --machine-type n1-highcpu-4 --image container-vm --preemptible --boot-disk-size 10GB --boot-disk-type pd-ssd --tags "http-server" --metadata startup-script='#!/bin/bash -sudo docker run -d --name yasp --restart=always --net=host -e PROVIDER=gce yasp/yasp:latest sh -c "node deploy.js core" +sudo docker run -d --name yasp --restart=always --net=host -e PROVIDER=gce -e GROUP=core yasp/yasp:latest sh -c "npm start" sudo docker start yasp ' gcloud compute instance-groups managed create "backend-group-1" --base-instance-name "backend-group-1" --template "backend-1" --size "1" @@ -49,7 +49,7 @@ gcloud compute instance-groups managed create "backend-group-1" --base-instance- gcloud compute instance-groups managed delete -q parser-group-1 gcloud compute instance-templates delete -q parser-1 gcloud compute instance-templates create parser-1 --machine-type n1-highcpu-2 --image container-vm --preemptible --boot-disk-size 10GB --boot-disk-type pd-ssd --metadata startup-script='#!/bin/bash - sudo docker run -d --name=parser --restart=always -e PROVIDER=gce -e ROLE=parser yasp/yasp:latest sh -c "node deploy.js" + sudo docker run -d --name=parser --restart=always -e PROVIDER=gce -e ROLE=parser yasp/yasp:latest sh -c "npm start" sudo docker start parser ' 
gcloud compute instance-groups managed create "parser-group-1" --base-instance-name "parser-group-1" --template "parser-1" --size "1" diff --git a/sql/cassandra.cql b/sql/cassandra.cql index 4c1572049..5b075cbf6 100644 --- a/sql/cassandra.cql +++ b/sql/cassandra.cql @@ -25,7 +25,7 @@ CREATE TABLE matches ( radiant_gold_adv text, radiant_xp_adv text, teamfights text, - version int, + version int ); CREATE TABLE player_matches ( diff --git a/sql/create_tables.sql b/sql/create_tables.sql index 134bda3f7..6093d6c3b 100644 --- a/sql/create_tables.sql +++ b/sql/create_tables.sql @@ -21,17 +21,16 @@ CREATE TABLE matches ( game_mode integer, engine integer, picks_bans json[], - --radiant_team_name varchar(255), - --dire_team_name varchar(255), - --radiant_captain integer, - --dire_captain integer, - --radiant_logo integer - --dire_logo integer, - --radiant_team_complete integer, - --dire_team_complete integer, - --radiant_team_id integer, - --dire_team_id integer, - --parsed data below + radiant_team_id integer, + dire_team_id integer, + radiant_team_name varchar(255), + dire_team_name varchar(255), + radiant_team_complete smallint, + dire_team_complete smallint, + radiant_captain bigint, + dire_captain bigint, + radiant_logo bigint, + dire_logo bigint, chat json[], objectives json[], radiant_gold_adv integer[], @@ -39,6 +38,7 @@ CREATE TABLE matches ( teamfights json[], version integer ); +CREATE INDEX on matches(leagueid) WHERE leagueid > 0; CREATE TABLE player_matches ( PRIMARY KEY(match_id, player_slot), @@ -101,9 +101,6 @@ CREATE TABLE player_matches ( multi_kills json, life_state json, damage_inflictor_received json - --disabled due to incompatibility - --kill_streaks_log json[][], --an array of kill streak values - --multi_kill_id_vals integer[] --an array of multi kill values (the length of each multi kill) ); CREATE INDEX on player_matches(account_id) WHERE account_id IS NOT NULL; @@ -174,3 +171,70 @@ CREATE TABLE notable_players ( is_pro boolean, locked_until integer ); + +CREATE TABLE match_logs ( + match_id bigint REFERENCES matches(match_id) ON DELETE CASCADE, + time int, + type varchar(100), + team smallint, + unit varchar(100), + key varchar(1000), + value int, + slot smallint, + player_slot smallint, + player1 int, + player2 int, + attackerhero boolean, + targethero boolean, + attackerillusion boolean, + targetillusion boolean, + inflictor varchar(100), + gold_reason smallint, + xp_reason smallint, + valuename varchar(100), + gold int, + lh int, + xp int, + x smallint, + y smallint, + z smallint, + entityleft boolean, + ehandle int, + stuns real, + hero_id smallint, + life_state smallint, + level smallint, + kills smallint, + deaths smallint, + assists smallint, + denies smallint, + attackername_slot smallint, + targetname_slot smallint, + sourcename_slot smallint, + targetsourcename_slot smallint, + player1_slot smallint +); +CREATE INDEX ON match_logs(match_id); +CREATE INDEX ON match_logs(match_id, player_slot) WHERE player_slot IS NOT NULL; +CREATE INDEX ON match_logs(match_id, player1_slot) WHERE player1_slot IS NOT NULL; +CREATE INDEX ON match_logs(match_id, attackername_slot) WHERE attackername_slot IS NOT NULL; +CREATE INDEX ON match_logs(match_id, targetname_slot) WHERE targetname_slot IS NOT NULL; +CREATE INDEX ON match_logs(match_id, sourcename_slot) WHERE sourcename_slot IS NOT NULL; +CREATE INDEX ON match_logs(match_id, targetsourcename_slot) WHERE targetsourcename_slot IS NOT NULL; + +CREATE TABLE picks_bans( + match_id bigint REFERENCES matches(match_id) ON DELETE 
CASCADE, + is_pick boolean, + hero_id int, + team smallint, + ord smallint, + PRIMARY KEY (match_id, ord) +); + +CREATE TABLE leagues( + leagueid bigint PRIMARY KEY, + ticket varchar(255), + banner varchar(255), + tier varchar(255), + name varchar(255) +); diff --git a/store/buildPlayer.js b/store/buildPlayer.js index 84606c068..05d0b5157 100644 --- a/store/buildPlayer.js +++ b/store/buildPlayer.js @@ -199,12 +199,16 @@ function buildPlayer(options, cb) { for (var id in aggData.hero_id.counts) { - heroes_list.push( + //exclude invalid hero_ids + if (Number(id)) { - hero_id: id, - games: aggData.hero_id.counts[id], - win: aggData.hero_id.win_counts[id] - }); + heroes_list.push( + { + hero_id: id, + games: aggData.hero_id.counts[id], + win: aggData.hero_id.win_counts[id] + }); + } } } else if (aggData.heroes) diff --git a/store/buildSets.js b/store/buildSets.js index fed99d429..2c3794d77 100644 --- a/store/buildSets.js +++ b/store/buildSets.js @@ -18,20 +18,6 @@ module.exports = function buildSets(db, redis, cb) { cb(err, t); }); }, - //users in this set have their matches added - "userPlayers": function(cb) { - db.select(['account_id']).from('players').whereNotNull('last_login').asCallback(function(err, docs) { - if (err) { - return cb(err); - } - var t = {}; - docs.forEach(function(player) { - t[player.account_id] = true; - }); - //console.log(t); - cb(err, t); - }); - }, //users in this set are added to the trackedPlayers set "donators": function(cb) { db.select(['account_id']).from('players').where('cheese', '>', 0).asCallback(function(err, docs) { diff --git a/store/queries.js b/store/queries.js index c4952d6ed..91f31c436 100644 --- a/store/queries.js +++ b/store/queries.js @@ -32,13 +32,6 @@ function getSets(redis, cb) cb(err, JSON.parse(tps || "{}")); }); }, - "userPlayers": function(cb) - { - redis.get("userPlayers", function(err, ups) - { - cb(err, JSON.parse(ups || "{}")); - }); - }, "donators": function(cb) { redis.get("donators", function(err, ds) @@ -131,7 +124,7 @@ function upsert(db, table, row, conflict, cb) return util.format("%s=%s", key, "EXCLUDED." 
+ key); }); var query = util.format("INSERT INTO %s (%s) VALUES (%s) ON CONFLICT (%s) DO UPDATE SET %s", table, Object.keys(row).join(','), values, Object.keys(conflict).join(','), update.join(',')); - //require('fs').writeFileSync('output.json', query); + //console.log(query.toString()); db.raw(query, Object.keys(row).map(function(key) { return row[key]; @@ -168,7 +161,7 @@ function insertMatch(db, redis, match, options, cb) players.forEach(function(p, i) { match.pgroup[p.player_slot] = { - account_id: p.account_id, + account_id: p.account_id || null, hero_id: p.hero_id, player_slot: p.player_slot }; @@ -191,6 +184,7 @@ function insertMatch(db, redis, match, options, cb) //we want to insert into matches, then insert into player_matches for each entry in players async.series( { + "dlp": decideLogParse, "u": upsertMatch, "uc": upsertMatchCassandra, "upc": updatePlayerCaches, @@ -198,29 +192,55 @@ function insertMatch(db, redis, match, options, cb) "cmc": clearMatchCache, "t": telemetry, "dm": decideMmr, + "dpro": decideProfile, "dp": decideParse, }, function(err, results) { return cb(err, results.dp); }); + function decideLogParse(cb) + { + if (match.leagueid) + { + redis.sismember('pro_leagueids', match.leagueid, function(err, result) + { + options.doLogParse = options.doLogParse || Boolean(Number(result)); + cb(err); + }); + } + else + { + cb(); + } + } + function upsertMatch(cb) { - if (!config.ENABLE_POSTGRES_MATCH_STORE_WRITE) + if (!config.ENABLE_POSTGRES_MATCH_STORE_WRITE && !options.doLogParse) { return cb(); } db.transaction(function(trx) { - upsert(trx, 'matches', match, + async.series( { - match_id: match.match_id - }, function(err) + "m": upsertMatch, + "pm": upsertPlayerMatches, + "pb": upsertPicksBans, + "l": upsertMatchLogs, + }, exit); + + function upsertMatch(cb) { - if (err) + upsert(trx, 'matches', match, { - return exit(err); - } + match_id: match.match_id + }, cb); + } + + function upsertPlayerMatches(cb) + { async.each(players || [], function(pm, cb) { pm.match_id = match.match_id; @@ -229,22 +249,59 @@ function insertMatch(db, redis, match, options, cb) match_id: pm.match_id, player_slot: pm.player_slot }, cb); - }, exit); + }, cb); + } - function exit(err) + function upsertPicksBans(cb) + { + async.each(match.picks_bans || [], function(p, cb) { - if (err) + //order is a reserved keyword + p.ord = p.order; + p.match_id = match.match_id; + upsert(trx, 'picks_bans', p, { - console.error(err); - trx.rollback(err); - } - else + match_id: p.match_id, + ord: p.ord + }, cb); + }, cb); + } + + function upsertMatchLogs(cb) + { + if (!match.logs) + { + return cb(); + } + else + { + trx.raw(`DELETE FROM match_logs WHERE match_id = ?`, [match.match_id]).asCallback(function(err) { - trx.commit(); - } - cb(err); + if (err) + { + return cb(err); + } + async.eachLimit(match.logs, 10000, function(e, cb) + { + trx('match_logs').insert(e).asCallback(cb); + }, cb); + }); } - }); + } + + function exit(err) + { + if (err) + { + console.error(err); + trx.rollback(err); + } + else + { + trx.commit(); + } + cb(err); + } }); } @@ -362,7 +419,7 @@ function insertMatch(db, redis, match, options, cb) { async.each(match.players, function(p, cb) { - if (options.origin === "scanner" && match.lobby_type === 7 && p.account_id !== constants.anonymous_account_id && (p.account_id in options.userPlayers || (config.ENABLE_RANDOM_MMR_UPDATE && match.match_id % 3 === 0))) + if (options.origin === "scanner" && match.lobby_type === 7 && p.account_id && p.account_id !== constants.anonymous_account_id && 
config.ENABLE_RANDOM_MMR_UPDATE) { addToQueue(mQueue, { @@ -381,9 +438,22 @@ function insertMatch(db, redis, match, options, cb) }, cb); } + function decideProfile(cb) + { + async.each(match.players, function(p, cb) + { + if (options.origin === "scanner" && p.account_id && p.account_id !== constants.anonymous_account_id) + { + redis.lpush('profilerQueue', p.account_id); + redis.ltrim('profilerQueue', 0, 99); + } + cb(); + }, cb); + } + function decideParse(cb) { - if (match.parse_status !== 0) + if (options.skipParse) { //not parsing this match //this isn't a error, although we want to report that we refused to parse back to user if it was a request @@ -400,6 +470,7 @@ function insertMatch(db, redis, match, options, cb) duration: match.duration, replay_blob_key: match.replay_blob_key, pgroup: match.pgroup, + doLogParse: options.doLogParse, }, { lifo: options.lifo, @@ -653,7 +724,7 @@ function expectedWin(rates) return 1 - rates.reduce((prev, curr) => (100 - curr * 100) * prev, 1) / (Math.pow(50, rates.length - 1) * 100); } -function getTop(db, redis, cb) +function getProPlayers(db, redis, cb) { db.raw(` SELECT * from notable_players @@ -663,14 +734,7 @@ function getTop(db, redis, cb) { return cb(err); } - getLeaderboard(db, redis, 'solo_competitive_rank', 500, function(err, result2) - { - return cb(err, - { - notables: result.rows, - leaderboard: result2 - }); - }); + return cb(err, result.rows); }); } @@ -892,7 +956,7 @@ module.exports = { insertMatchSkill, getDistributions, getPicks, - getTop, + getProPlayers, getHeroRankings, getBenchmarks, benchmarkMatch, diff --git a/store/queue.js b/store/queue.js index 1909fcaca..1598f73e4 100644 --- a/store/queue.js +++ b/store/queue.js @@ -6,6 +6,7 @@ var config = require('../config'); var bull = require('bull'); var url = require('url'); var async = require('async'); +var types = ["request", "mmr", "parse", "cache", "fullhistory"]; // parse the url var conn_info = url.parse(config.REDIS_URL, true /* parse query string */ ); if (conn_info.protocol !== 'redis:') @@ -50,7 +51,6 @@ function addToQueue(queue, payload, options, cb) function getCounts(redis, cb) { - var types = ["request", "mmr", "parse", "cache"]; async.map(types, getQueueCounts, function(err, result) { var obj = {}; @@ -91,7 +91,6 @@ function getCounts(redis, cb) function cleanup(redis, cb) { - var types = ["request", "mmr", "parse", "cache"]; async.each(types, function(key, cb) { var queue = getQueue(key); @@ -111,4 +110,4 @@ module.exports = { addToQueue: addToQueue, getCounts: getCounts, cleanup: cleanup -}; \ No newline at end of file +}; diff --git a/svc/cacher.js b/svc/cacher.js index ffb8e45d9..08844f0fd 100644 --- a/svc/cacher.js +++ b/svc/cacher.js @@ -248,7 +248,7 @@ function updateMatchRating(match, cb) { //push into list, limit elements redis.lpush('mmr_estimates:' + player.account_id, avg); - redis.ltrim('mmr_estimates:' + player.account_id, 0, 24); + redis.ltrim('mmr_estimates:' + player.account_id, 0, 14); } }); cb(); @@ -258,4 +258,4 @@ function updateMatchRating(match, cb) return cb(err); } }); -} \ No newline at end of file +} diff --git a/svc/fullhistory.js b/svc/fullhistory.js index 312034a51..d5707ac4a 100644 --- a/svc/fullhistory.js +++ b/svc/fullhistory.js @@ -90,6 +90,7 @@ function processFullHistory(job, cb) type: "api", cassandra: cassandra, skipAbilityUpgrades: true, + skipParse: true, }, cb); }); }, function(err) diff --git a/svc/mmr.js b/svc/mmr.js index 8071b283c..68916d074 100644 --- a/svc/mmr.js +++ b/svc/mmr.js @@ -10,7 +10,7 @@ var config = 
require('../config'); var mQueue = queue.getQueue('mmr'); var getData = utility.getData; var retrieverArr = config.RETRIEVER_HOST.split(","); -mQueue.process(retrieverArr.length * 10, processMmr); +mQueue.process(retrieverArr.length * config.MMR_PARALLELISM, processMmr); mQueue.on('completed', function(job) { job.remove(); diff --git a/svc/parser.js b/svc/parser.js index 7843e0609..51d2b0151 100644 --- a/svc/parser.js +++ b/svc/parser.js @@ -27,7 +27,7 @@ var progress = require('request-progress'); var stream = require('stream'); var pQueue = queue.getQueue('parse'); var async = require('async'); -var ndjson = require('ndjson'); +const readline = require('readline'); var spawn = cp.spawn; var insertMatch = queries.insertMatch; var benchmarkMatch = queries.benchmarkMatch; @@ -59,7 +59,7 @@ app.get('/redis/:key', function(req, res, cb) }); app.listen(config.PARSER_PORT); //END EXPRESS -pQueue.process(1, function(job, cb) +pQueue.process(config.PARSER_PARALLELISM, function(job, cb) { console.log("parse job: %s", job.jobId); var match = job.data.payload; @@ -91,7 +91,7 @@ pQueue.process(1, function(job, cb) parsed_data.start_time = match.start_time; parsed_data.duration = match.duration; parsed_data.replay_blob_key = match.replay_blob_key; - parsed_data.parse_status = 2; + parsed_data.doLogParse = match.doLogParse; if (match.replay_blob_key) { insertUploadedParse(parsed_data, cb); @@ -158,6 +158,8 @@ function insertStandardParse(match, cb) { type: "parsed", cassandra: cassandra, + skipParse: true, + doLogParse: match.doLogParse, }, cb); } @@ -176,7 +178,8 @@ function runParse(match, job, cb) // Streams var inStream = progress(request( { - url: url + url: url, + encoding: null, })); inStream.on('progress', function(state) { @@ -211,10 +214,11 @@ function runParse(match, job, cb) } bz.stdin.on('error', exit); bz.stdout.on('error', exit); + inStream.pipe(bz.stdin); var parser = spawn("java", [ "-jar", - "-Xmx64m", - "./java_parser/target/stats-0.1.0.jar" + "-Xmx128m", + "./java_parser/target/stats-0.1.0.jar", ], { stdio: ['pipe', 'pipe', 'pipe'], @@ -226,22 +230,24 @@ function runParse(match, job, cb) { console.log(data.toString()); }); - var parseStream = ndjson.parse(); - parseStream.on('data', function handleStream(e) + bz.stdout.pipe(parser.stdin); + const parseStream = readline.createInterface( + { + input: parser.stdout + }); + parseStream.on('line', function handleStream(e) { + e = JSON.parse(e); if (e.type === 'epilogue') { console.log('received epilogue'); incomplete = false; + parseStream.close(); + exit(); } entries.push(e); }); - parseStream.on('end', exit); - parseStream.on('error', exit); - // Pipe together the streams - inStream.pipe(bz.stdin); - bz.stdout.pipe(parser.stdin); - parser.stdout.pipe(parseStream); + request.debug = true; function exit(err) { @@ -263,6 +269,7 @@ function runParse(match, job, cb) var message = "time spent on post-processing match "; console.time(message); var meta = processMetadata(entries); + var logs = processReduce(entries, match, meta); var res = processExpand(entries, meta); var parsed_data = processParsedData(res.parsed_data); var teamfights = processTeamfights(res.tf_data, meta); @@ -272,8 +279,8 @@ function runParse(match, job, cb) parsed_data.radiant_gold_adv = ap.radiant_gold_adv; parsed_data.radiant_xp_adv = ap.radiant_xp_adv; parsed_data.upload = upload; + parsed_data.logs = logs; //processMultiKillStreaks(); - //processReduce(res.expanded); console.timeEnd(message); return cb(err, parsed_data); } diff --git a/svc/profiler.js 
b/svc/profiler.js index 68582f244..cfd2d4aae 100644 --- a/svc/profiler.js +++ b/svc/profiler.js @@ -2,8 +2,10 @@ * Worker to fetch updated player profiles **/ var constants = require('../constants'); +var config = require('../config'); var queries = require('../store/queries'); var db = require('../store/db'); +var redis = require('../store/redis'); var utility = require('../util/utility'); var insertPlayer = queries.insertPlayer; var getData = utility.getData; @@ -24,25 +26,22 @@ function start() function getSummaries(cb) { - db.raw(` - SELECT account_id - FROM players - TABLESAMPLE SYSTEM_ROWS(100) - `).asCallback(function(err, results) + redis.lrange('profilerQueue', 0, -1, function(err, results) { if (err) { return cb(err); } - if (results.rows.length === 0) + console.log('players sampled: %s', results.length); + results = results.map(function(account_id) { - console.log('No account_ids found...'); - return cb(); - } - console.log('players sampled: %s', results.rows.length); + return { + account_id: account_id + }; + }); var container = utility.generateJob("api_summaries", { - players: results.rows + players: results }); getData(container.url, function(err, body) { diff --git a/svc/requests.js b/svc/requests.js index c6007a733..08901defb 100644 --- a/svc/requests.js +++ b/svc/requests.js @@ -29,13 +29,13 @@ function processRequest(job, cb) } //match details response var match = body.result; - match.parse_status = 0; insertMatch(db, redis, match, { type: "api", attempts: 1, lifo: true, cassandra: cassandra, + skipParse: false, }, waitParse); }); } diff --git a/svc/scanner.js b/svc/scanner.js index 00d6e3215..819db762d 100644 --- a/svc/scanner.js +++ b/svc/scanner.js @@ -13,7 +13,6 @@ var getData = utility.getData; var generateJob = utility.generateJob; var async = require('async'); var trackedPlayers; -var userPlayers; var parallelism = config.SCANNER_PARALLELISM; var PAGE_SIZE = 100; buildSets(db, redis, function(err) @@ -72,7 +71,6 @@ function start() } //set local vars trackedPlayers = result.trackedPlayers; - userPlayers = result.userPlayers; var arr = []; var matchBuffer = {}; var completePages = {}; @@ -112,26 +110,19 @@ function start() function processMatch(match, cb) { - if (config.ENABLE_PRO_PARSING && match.leagueid) - { - //parse tournament games - match.parse_status = 0; - } - else if (match.players.some(function(p) + var insert = false; + var skipParse = true; + if (match.players.some(function(p) { return (p.account_id in trackedPlayers); })) { - //queued - match.parse_status = 0; + insert = true; + skipParse = false; } - else if (match.players.some(function(p) - { - return (config.ENABLE_INSERT_ALL_MATCHES || p.account_id in userPlayers); - })) + else if (config.ENABLE_INSERT_ALL_MATCHES) { - //skipped - match.parse_status = 3; + insert = true; } //check if match was previously processed redis.get('scanner_insert:' + match.match_id, function(err, result) @@ -142,7 +133,7 @@ function start() } //don't insert this match if we already processed it recently //deduplicate matches in this page set - if ((match.parse_status === 0 || match.parse_status === 3) && !result && !matchBuffer[match.match_id]) + if (insert && !result && !matchBuffer[match.match_id]) { matchBuffer[match.match_id] = 1; insertMatch(db, redis, match, @@ -150,7 +141,7 @@ function start() type: "api", origin: "scanner", cassandra: cassandra, - userPlayers: userPlayers, + skipParse: skipParse, }, function(err) { if (!err) diff --git a/svc/web.js b/svc/web.js index d78e2b004..50ee45663 100644 --- a/svc/web.js 
+++ b/svc/web.js @@ -80,6 +80,7 @@ app.locals.getAggs = utility.getAggs; app.use(compression()); app.use("/apps/dota2/images/:group_name/:image_name", function(req, res) { + res.header('Cache-Control', 'max-age=604800, public'); request("http://cdn.dota2.com/apps/dota2/images/" + req.params.group_name + "/" + req.params.image_name).pipe(res); }); app.use("/public", express.static(path.join(__dirname, '/../public'))); @@ -97,7 +98,7 @@ app.use(function rateLimit(req, res, cb) var ip = req.headers['x-forwarded-for'] || req.connection.remoteAddress || ""; ip = ip.replace(/^.*:/, '').split(',')[0]; var key = 'rate_limit:' + ip; - console.log("%s visit %s, ip %s", req.user ? req.user.account_id : "anonymous", req.path, ip); + console.log("%s visit %s, ip %s", req.user ? req.user.account_id : "anonymous", req.originalUrl, ip); redis.multi().incr(key).expire(key, 1).exec(function(err, resp) { if (err) @@ -120,13 +121,9 @@ app.use(function rateLimit(req, res, cb) app.use(function telemetry(req, res, cb) { var timeStart = new Date(); - if (req.path.indexOf('/names') === 0) + if (req.originalUrl.indexOf('/api') === 0) { - redis.zadd("alias_hits", moment().format('X'), moment().valueOf() + req.path); - } - if (req.path.indexOf('/api') === 0) - { - redis.zadd("api_hits", moment().format('X'), moment().valueOf() + req.path); + redis.zadd("api_hits", moment().format('X'), req.originalUrl); } if (req.user) { @@ -135,14 +132,13 @@ app.use(function telemetry(req, res, cb) res.once('finish', function() { var timeEnd = new Date(); - /* - var obj = JSON.stringify({ - path: req.path, - time: timeEnd - timeStart - }; - */ - redis.lpush("load_times", timeEnd - timeStart); - redis.ltrim("load_times", 0, 10000); + var elapsed = timeEnd - timeStart; + if (elapsed > 1000) + { + console.log("[SLOWLOG] %s, %s", req.originalUrl, elapsed); + } + redis.lpush("load_times", elapsed); + redis.ltrim("load_times", 0, 9999); }); cb(); }); @@ -288,17 +284,6 @@ app.get('/picks/:n?', function(req, res, cb) }); }); }); -app.get('/top', function(req, res, cb) -{ - queries.getTop(db, redis, function(err, result) - { - if (err) - { - return cb(err); - } - res.render('top', result); - }); -}); app.get('/rankings/:hero_id?', function(req, res, cb) { if (!req.params.hero_id) @@ -388,6 +373,7 @@ app.use(function(req, res, next) { if (config.UI_HOST) { + //route not found, redirect to SPA return res.redirect(config.UI_HOST + req.url); } var err = new Error("Not Found"); @@ -398,7 +384,7 @@ app.use(function(err, req, res, next) { res.status(err.status || 500); console.log(err); - redis.zadd("error_500", moment().format('X'), req.path); + redis.zadd("error_500", moment().format('X'), req.originalUrl); if (config.NODE_ENV !== "development") { return res.render('error/' + (err.status === 404 ? 
'404' : '500'), diff --git a/svc/worker.js b/svc/worker.js index 4c492994e..68fec5d89 100644 --- a/svc/worker.js +++ b/svc/worker.js @@ -60,7 +60,7 @@ invokeInterval(function buildDistributions(cb) }); results.rows = results.rows.map(function(r, i) { - r.cumulative_sum = results.rows.slice(0, i).reduce(function(prev, current) + r.cumulative_sum = results.rows.slice(0, i + 1).reduce(function(prev, current) { return { count: prev.count + current.count @@ -124,6 +124,43 @@ invokeInterval(function notablePlayers(cb) }, cb); }); }, 10 * 60 * 1000); +invokeInterval(function leagues(cb) +{ + var container = utility.generateJob("api_leagues", + {}); + utility.getData(container.url, function(err, api_leagues) + { + if (err) + { + return cb(err); + } + utility.getData('https://raw.githubusercontent.com/dotabuff/d2vpkr/master/dota/scripts/items/leagues.json', function(err, leagues) + { + if (err) + { + return cb(err); + } + async.each(api_leagues.result.leagues, function(l, cb) + { + if (leagues[l.leagueid]) + { + l.tier = leagues[l.leagueid].tier; + l.ticket = leagues[l.leagueid].ticket; + l.banner = leagues[l.leagueid].banner; + } + l.name = l.description.substring("#DOTA_Item_Desc_".length).split('_').join(' '); + if (l.tier === "professional" || l.tier === "premium") + { + redis.sadd('pro_leagueids', l.leagueid); + } + queries.upsert(db, 'leagues', l, + { + leagueid: l.league_id + }, cb); + }, cb); + }); + }); +}, 10 * 60 * 1000); function invokeInterval(func, delay) { diff --git a/tasks/getMatches.js b/tasks/getMatches.js new file mode 100644 index 000000000..0362a1b95 --- /dev/null +++ b/tasks/getMatches.js @@ -0,0 +1,50 @@ +var async = require('async'); +var utility = require('../util/utility'); +var generateJob = utility.generateJob; +var getData = utility.getData; +var db = require('../store/db'); +var redis = require('../store/redis'); +var cassandra = require('../store/cassandra'); +var queries = require('../store/queries'); +var insertMatch = queries.insertMatch; +var args = process.argv.slice(2); +var match_id = Number(args[0]); +var delay = 1000; +var job = generateJob("api_details", +{ + match_id: match_id +}); +var url = job.url; +getData( +{ + url: url, + delay: delay +}, function(err, body) +{ + if (err) + { + throw err; + } + if (body.result) + { + var match = body.result; + insertMatch(db, redis, match, + { + skipCounts: true, + skipAbilityUpgrades: true, + skipParse: false, + cassandra: cassandra, + }, function(err) + { + if (err) + { + throw err; + } + process.exit(0); + }); + } + else + { + throw body; + } +}); \ No newline at end of file diff --git a/dev/allMatches.js b/tasks/getMatchesSeq.js similarity index 98% rename from dev/allMatches.js rename to tasks/getMatchesSeq.js index ebd445e36..331c5bb1c 100644 --- a/dev/allMatches.js +++ b/tasks/getMatchesSeq.js @@ -86,6 +86,7 @@ function getPage(match_seq_num, bucket) { skipCounts: true, skipAbilityUpgrades: true, + skipParse: true, cassandra: cassandra, }, cb); }, function(err) diff --git a/test/test.js b/test/test.js index 73af32d1d..e6e4cebd7 100644 --- a/test/test.js +++ b/test/test.js @@ -128,7 +128,8 @@ before(function(done) { queries.insertMatch(db, redis, m, { - type: "api" + type: "api", + skipParse: true, }, cb); }, cb); }, diff --git a/util/analysis.js b/util/analysis.js index 8f9f2f163..6aca06a63 100644 --- a/util/analysis.js +++ b/util/analysis.js @@ -4,6 +4,7 @@ **/ var util = require('util'); var constants = require('../constants.js'); + function generatePlayerAnalysis(match, pm) { //define condition check for 
each advice point @@ -150,7 +151,8 @@ function generatePlayerAnalysis(match, pm) { var flying_available = 180; var time; - if (pm.purchase && pm.first_purchase_time && pm.first_purchase_time.flying_courier) { + if (pm.purchase && pm.first_purchase_time && pm.first_purchase_time.flying_courier) + { time = pm.first_purchase_time.flying_courier; } return { @@ -172,7 +174,7 @@ function generatePlayerAnalysis(match, pm) wards: function(m, pm) { var ward_cooldown = 60 * 7; - var wards = pm.obs_log ? pm.obs_log.length : 0; + var wards = getObsWardsPlaced(pm); //divide game length by ward cooldown //2 wards respawn every interval //split responsibility between 2 supports @@ -304,7 +306,19 @@ function generatePlayerAnalysis(match, pm) function isSupport(pm) { - return pm.obs_log && pm.obs_log.length >= 2 && pm.lh_t && pm.lh_t[10] < 20; + return getObsWardsPlaced(pm) >= 2 && pm.lh_t && pm.lh_t[10] < 20; + } + + function getObsWardsPlaced(pm) + { + if (!pm.obs_log) + { + return 0; + } + return pm.obs_log.filter(function(l) + { + return !l.entityleft; + }).length; } function isRoshHero(pm) diff --git a/util/benchmarks.js b/util/benchmarks.js index 1797a93cc..f8b5fa0a3 100644 --- a/util/benchmarks.js +++ b/util/benchmarks.js @@ -19,25 +19,13 @@ var benchmarks = { { return (p.hero_damage / m.duration * 60); }, - "kills": function(m, p) + "hero_healing_per_min": function(m, p) { - return p.kills; + return (p.hero_healing / m.duration * 60); }, - "last_hits": function(m, p) + "tower_damage_per_min": function(m, p) { - return p.last_hits; - }, - "hero_damage": function(m, p) - { - return p.hero_damage; - }, - "tower_damage": function(m, p) - { - return p.tower_damage; - }, - "hero_healing": function(m, p) - { - return p.hero_healing; + return (p.tower_damage / m.duration * 60); }, }; -module.exports = benchmarks; \ No newline at end of file +module.exports = benchmarks; diff --git a/views/match/match.jade b/views/match/match.jade index 9b080ccde..9e777c93f 100644 --- a/views/match/match.jade +++ b/views/match/match.jade @@ -32,7 +32,6 @@ block content th Percentile tbody tr - //td= constants.parse_status[match.parse_status] td= constants.game_mode[match.game_mode] ? constants.game_mode[match.game_mode].name : match.game_mode td= constants.region[match.region] ? constants.region[match.region] : match.region td= moment().startOf('day').seconds(match.duration).format("H:mm:ss") diff --git a/views/match/match_performances.jade b/views/match/match_performances.jade index b78029092..2edb420df 100644 --- a/views/match/match_performances.jade +++ b/views/match/match_performances.jade @@ -9,8 +9,6 @@ block match_content tr th: abbr(title=tooltips.hero_id) Hero th: abbr(title=tooltips.lane) Lane - //th: abbr(title=tooltips.used_ward_observer) Observer - //th: abbr(title=tooltips.used_ward_sentry) Sentry th: abbr(title=tooltips.lane_efficiency) EFF@10 th: abbr(title=tooltips.lhten) LH@10 th: abbr(title=tooltips.stuns) Stuns @@ -19,8 +17,6 @@ block match_content tr.activate(data-index=i, data-type="lane_pos", class = player.isRadiant ? "radiant" : "dire") +hero_td(player) td #{constants.lane_ids[player.lane] || "-"} (#{constants.lane_role[player.lane_role] || "-"}) - //td.activate(data-index=i, data-type="obs") #{player.obs_log.length || "-"} - //td.activate(data-index=i, data-type="sen") #{player.sen_log.length || "-"} td.rankable #{(Number(player.lane_efficiency)*100).toFixed(2)} td.rankable #{player.lh_t && player.lh_t[10] ? 
player.lh_t[10] : "-"} td.rankable #{Number(Math.max(player.stuns, 0)).toFixed(2)} diff --git a/views/mixins/hero_table.jade b/views/mixins/hero_table.jade index a5d76a6a6..eaa19f786 100644 --- a/views/mixins/hero_table.jade +++ b/views/mixins/hero_table.jade @@ -19,8 +19,9 @@ mixin hero_table(rows, short) - var hero = constants.heroes[h.hero_id] tr td.text-left.small - img.img-sm(src=hero.img, title=hero.localized_name) - =hero.localized_name + if hero + img.img-sm(src=hero.img, title=hero.localized_name) + =hero.localized_name td.rankable=h.games td.rankable +progress_bar(h.win, h.games)
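
Note on the parse-control flags this diff introduces: the old `match.parse_status` sentinel (0 meant "queue a parse", 3 meant "skipped") is replaced by explicit `insertMatch` options, `skipParse` plus the new `doLogParse` flag that `decideLogParse` sets for pro-league matches. The sketch below is a minimal illustration of the resulting call shape, not a definitive usage; it uses only names that appear in this diff (`tasks/getMatches.js` above is the complete version of this pattern, with `skipParse: false` to force a parse) and assumes it runs from the repository root with a `match` object already fetched from the Steam Web API.

```js
var db = require('./store/db');
var redis = require('./store/redis');
var cassandra = require('./store/cassandra');
var queries = require('./store/queries');

// "match" is assumed to be a match-details payload from the Steam Web API
// (body.result), as in tasks/getMatches.js above.
queries.insertMatch(db, redis, match, {
  type: "api",          // the data came from the Web API, not a parsed replay
  origin: "scanner",    // update counts/caches as for scanner-discovered matches
  cassandra: cassandra, // optional Cassandra player cache
  skipParse: true,      // replaces the old match.parse_status = 3 ("skipped")
}, function(err) {
  if (err) {
    throw err;
  }
});
```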
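
A second usage note: `routes/api.js` moves `hero_id` for the rankings and benchmarks endpoints from a path parameter into the query string. A hedged sketch of calling the new form, assuming a locally running web service; the host and port here are placeholders and not part of this diff, while the route shapes come from `routes/api.js` above.

```js
var request = require('request'); // already used elsewhere in this codebase

// Old route shape: GET /api/rankings/:hero_id
// New route shape: GET /api/rankings?hero_id=N (same change for /api/benchmarks)
request('http://localhost:5000/api/rankings?hero_id=14', function(err, resp, body) {
  if (err) {
    throw err;
  }
  console.log(JSON.parse(body));
});
```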