+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/404/fernetjs-invaders.png b/404/fernetjs-invaders.png
new file mode 100644
index 0000000..ea93398
Binary files /dev/null and b/404/fernetjs-invaders.png differ
diff --git a/404/game.min.js b/404/game.min.js
new file mode 100644
index 0000000..b4c791a
--- /dev/null
+++ b/404/game.min.js
@@ -0,0 +1 @@
+function Controls(){throw"Controls class is Static."}function Keyboard(){throw"KeyboardCode class is Static."}function ImageMapper(){throw"ImageMapper class is Static."}function ImageCreator(){throw"ImageCreator class is Static."}(function(){for(var t=0,i=["ms","moz","webkit","o"],e=0;i.length>e&&!window.requestAnimationFrame;++e)window.requestAnimationFrame=window[i[e]+"RequestAnimationFrame"],window.cancelAnimationFrame=window[i[e]+"CancelAnimationFrame"]||window[i[e]+"CancelRequestAnimationFrame"];window.requestAnimationFrame||(window.requestAnimationFrame=function(i){var e=(new Date).getTime(),s=Math.max(0,17-(e-t)),n=window.setTimeout(function(){i(e+s)},s);return t=e+s,n}),window.cancelAnimationFrame||(window.cancelAnimationFrame=function(t){window.clearTimeout(t)})})(),window.gameTime={lastTime:Date.now(),frameTime:0,typicalFrameTime:20,minFrameTime:12,time:0},window.gameTime.tick=function(){var t=Date.now(),i=t-this.lastTime;return this.minFrameTime>i?!1:(this.frameTime=i>2*this.typicalFrameTime?this.typicalFrameTime:i,this.time+=this.frameTime,this.lastTime=t,!0)},window.camera=function(){function t(t,i){return Math.floor(Math.random()*i+t)}function i(){return Math.round(Math.random())?1:-1}var e=.1,s=0,n=[0,0];return{pos:function(){return[n[0],n[1]]},shake:function(i){i=i||3,s=e,n=[t(-i,i),t(-i,i)]},update:function(t){t/=1e3,s-=t,0>s?n=[0,0]:(n[0]*=i(),n[1]*=i())}}}(),window.particles=function(){function t(t,i){return Math.floor(Math.random()*i+t)}function i(){return Math.round(Math.random())?1:-1}function e(t){function i(t){return"#"==t.charAt(0)?t.substring(1,7):t}function e(t){return parseInt(i(t).substring(0,2),16)}function s(t){return parseInt(i(t).substring(2,4),16)}function n(t){return parseInt(i(t).substring(4,6),16)}return-1===t.indexOf("#")?t:[e(t),s(t),n(t),1]}var s,n,o=[],h=[2,10];return{init:function(t,i){s=t,n=i,o=[]},create:function(s,n,h){for(var r=e(h),a=0;n>a;a++){var l=[t(10,30)*i(),-1*t(10,30)];o.push({pos:[s[0]+t(1,3)*i(),s[1]+t(1,3)*i()],vel:l,c:r,t:2})}},update:function(t){t/=500;for(var i=0;o.length>i;i++){var e=o[i];e.t-=t,e.vel[0]+=h[0]*t,e.vel[1]+=h[1]*t,e.pos[0]+=e.vel[0]*t,e.pos[1]+=e.vel[1]*t,e.pos[1]>n.h||0>e.t?o.splice(i,1):e.c[3]=e.t.toFixed(2)}},draw:function(){for(var t=0;o.length>t;t++){var i=o[t];s.save(),s.fillStyle="rgba("+i.c[0]+","+i.c[1]+","+i.c[2]+","+i.c[3]+")",s.fillRect(i.pos[0],i.pos[1],3,3),s.restore()}}}}(),function(){var t=!1,i=/xyz/.test(function(){})?/\b_super\b/:/.*/;this.Class=function(){},Class.extend=function(e){function s(){!t&&this.init&&this.init.apply(this,arguments)}var n=this.prototype;t=!0;var o=new this;t=!1;for(var h in e)o[h]="function"==typeof e[h]&&"function"==typeof n[h]&&i.test(e[h])?function(t,i){return function(){var e=this._super;this._super=n[t];var s=i.apply(this,arguments);return this._super=e,s}}(h,e[h]):e[h];return 
s.prototype=o,s.prototype.constructor=s,s.extend=arguments.callee,s}}(),Controls.Left="Left",Controls.Right="Right",Controls.Shoot="Shoot",Keyboard.Left=37,Keyboard.Right=39,Keyboard.Up=38,Keyboard.Down=40,Keyboard.Space=32,ImageMapper.Ship=function(){return[[0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0],[0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0],[0,0,0,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0],[0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0],[0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0],[0,1,1,1,0,1,1,0,1,1,0,1,1,0,1,1,1,0],[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],[0,0,1,1,1,0,0,0,0,0,0,0,0,1,1,1,0,0],[0,0,1,1,1,0,0,0,0,0,0,0,0,1,1,1,0,0],[0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0]]},ImageMapper.ShipShoot=function(){return[[1],[1],[1],[1],[1],[1],[1]]},ImageMapper.Invasion=function(){return[[2,2,2,2,2,2,2,2,2,2,2,2,2],[2,2,2,1,2,1,1,1,2,2,2,1,2],[2,2,1,1,2,1,2,1,2,2,1,1,2],[2,1,2,1,2,1,2,1,2,1,2,1,2],[2,1,1,1,2,1,2,1,2,1,1,1,2],[2,2,2,1,2,1,1,1,2,2,2,1,2],[2,2,2,2,2,2,2,2,2,2,2,2,2]]},ImageMapper.AlienCrab=function(){return[[0,0,1,0,0,0,0,0,1,0,0],[3,0,0,1,0,0,0,1,0,0,3],[3,0,0,1,0,0,0,1,0,0,3],[3,0,1,1,1,1,1,1,1,0,3],[3,0,1,0,1,1,1,0,1,0,3],[3,1,1,1,1,1,1,1,1,1,3],[2,1,1,1,1,1,1,1,1,1,2],[2,0,1,1,1,1,1,1,1,0,2],[2,0,1,1,1,1,1,1,1,0,2],[2,0,1,0,0,0,0,0,1,0,2],[2,0,1,0,0,0,0,0,1,0,2],[0,3,0,2,2,0,2,2,0,3,0]]},ImageMapper.AlienSquid=function(){return[[0,0,0,0,0,1,0,0,0,0,0],[0,0,0,0,1,1,1,0,0,0,0],[0,0,0,1,1,1,1,1,0,0,0],[0,0,1,1,1,1,1,1,1,0,0],[0,1,1,0,1,1,1,0,1,1,0],[1,1,1,1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1,1,1,1],[0,0,1,0,0,0,0,0,1,0,0],[0,0,1,0,0,0,0,0,1,0,0],[0,1,0,3,0,0,0,3,0,1,0],[3,0,1,0,3,0,3,0,1,0,3]]},ImageMapper.DeadAlien=function(){return[[1,0,0,0,0,0,0,0,0,0,1],[0,1,0,0,0,1,0,0,0,1,0],[0,0,1,0,0,1,0,0,1,0,0],[0,0,0,1,0,1,0,1,0,0,0],[0,0,0,0,0,0,0,0,0,0,0],[1,1,1,1,0,0,0,1,1,1,1],[0,0,0,0,0,0,0,0,0,0,0],[0,0,0,1,0,1,0,1,0,0,0],[0,0,1,0,0,1,0,0,1,0,0],[0,1,0,0,0,1,0,0,0,1,0],[1,0,0,0,0,1,0,0,0,0,1]]},ImageMapper.AlienShoot=function(){return[[0,1,0],[1,0,0],[0,1,0],[0,0,1],[0,1,0],[1,0,0],[0,1,0]]},ImageMapper.Shield=function(){return[[1,0,0,1,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,1,0,1,0,1,0,1,0,0,1,0,1,1,0],[1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,1,0,1,0,1,1,0,1,0,1,0,1],[1,1,1,1,0,1,0,1,0,0,1,0,0,0,0,0,1,1,0,0,1,0,1,0,1,0,1,0,1,1,1,1,0,1,0,1],[1,0,1,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,1,0,1,0,1,0,1,1,0,1,0,1],[1,0,0,1,0,1,1,1,0,0,1,0,0,0,0,0,1,0,0,0,1,1,1,0,1,1,1,0,1,0,0,1,0,1,1,0]]},ImageMapper.ShieldBrick=function(){return[[[1,1,1,1,1,1],[1,1,1,1,1,1],[1,1,1,1,1,1],[1,1,1,1,1,1],[1,1,1,1,1,1],[1,1,1,1,1,1]],[[0,1,1,1,0,1],[1,1,1,0,0,0],[1,1,0,1,1,0],[0,0,1,0,1,1],[1,0,0,1,0,1],[1,1,0,0,1,1]],[[0,0,0,1,0,1],[0,0,0,0,0,0],[1,0,0,1,0,0],[0,0,1,0,1,1],[1,0,0,1,0,1],[1,1,0,0,0,0]]]},ImageCreator.getImages=function(t){function i(){for(var t=o.length,i=0;t>i;i++)for(var e=o[i].length,s=0;e>s;s++){var h=o[i][s];if(h){var r=new Brick({ctx:d,x:s*l,y:i*l,width:l,height:l,color:u,value:h});n.push(r)}}}function e(t){d.clearRect(0,0,h,r);for(var i=n.length,e=0;i>e;e++)(1===n[e].value||n[e].value===t)&&n[e].draw();var o=c.toDataURL("image/png"),a=new Image;a.src=o,s.push(a)}var s=[],n=[],o=t.mapper||[],h=t.width||100,r=t.height||100,a=t.states||[],l=t.brickSize||5,u=t.color||"#000",c=document.createElement("canvas");c.width=h,c.height=r;var d=c.getContext("2d");i();for(var f=0;a.length>f;f++)e(a[f]);var f=n.length-1;do n[f]=null;while(f--);return s};var 
DrawableElement=Class.extend({init:function(t){this.ctx=t.ctx?t.ctx:null,this.size={width:t.width||0,height:t.height||0},this.position={x:t.x||0,y:t.y||0},this.brickSize=t.brickSize||1,this.color=t.color||"#000",this.bricks=[],this.onDestroy=t.onDestroy||function(){}},build:function(){},update:function(){},draw:function(t){null!=this.ctx&&this.ctx.drawImage(t,this.position.x+window.camera.pos()[0],this.position.y+window.camera.pos()[1])},destroy:function(){this.ctx=null,null!=this.size&&(this.size.width=null,this.size.height=null,this.size=null),null!=this.position&&(this.position.x=null,this.position.y=null,this.position=null),this.brickSize=null,this.color=null;var t=this.bricks;if(null!=t){for(var i=t.length,e=0;i>e;e++)t[e]=null;this.bricks=null}}}),Shoot=DrawableElement.extend({init:function(t){this._super(t),this.MOVE_FACTOR=5,this.dir=t.dir,this.shootImage=t.shootImage,this.collateBricks=t.collateBricks,this.collateAliens=t.collateAliens,this.timer=null},build:function(){},update:function(){var t=this.dir,i=this.MOVE_FACTOR;return this.position.y+=i*t,this.hasCollision()?(this.collided(),void 0):void 0},draw:function(){this._super(this.shootImage)},collided:function(){this.destroy()},destroy:function(){clearInterval(this.timer),this.collateBricks=null,this.collateAliens=null,this.onDestroy(this),this._super()},hasCollision:function(){function t(t){if(!t)return!1;for(var s=t,n=s.length,o=0;n>o;o++){var h=s[o],r=h.position.x,a=h.position.y,l=r+h.size.width,u=a+h.size.height;if(i>=r&&l>=i&&e>=a&&u>=e&&!h.destroyed)return t[o].collided(),!0}return!1}var i=this.position.x,e=this.position.y;return 0>e||e>400?!0:t(this.collateBricks)?!0:this.collateAliens&&t(this.collateAliens)?!0:void 0}}),Ship=DrawableElement.extend({init:function(t){this._super(t),this.maxMove={left:t.maxMoveLeft,right:t.maxMoveRight},this.onShipHit=t.onShipHit||function(){},this.MOVE_FACTOR=.2,this.SHOOT_TIME=200,this.brickSize=2,this.shootImage=null,this.shoots=[],this.lastShoot=0,this.imgs=[];var i=ImageMapper.Ship();this.size={width:this.brickSize*i[0].length,height:this.brickSize*i.length},this.build(),this.shield=t.shield,this.invasion={}},build:function(){this.buildShootImage();var t={width:this.size.width,height:this.size.height,states:[1],brickSize:this.brickSize,mapper:ImageMapper.Ship(),color:this.color};this.imgs=ImageCreator.getImages(t)},update:function(t,i){var e=this.MOVE_FACTOR;t.indexOf(Controls.Left)>-1?this.position.x>this.maxMove.left&&(this.position.x-=e*i):t.indexOf(Controls.Right)>-1&&this.position.x-1&&0>=this.lastShoot&&(this.lastShoot=this.SHOOT_TIME,t.splice(s,1),this.makeShoot());for(var n=this.shoots,o=n.length,h=0;o>h;h++)n[h]&&n[h].update(i)},draw:function(){this._super(this.imgs[0]);for(var t=this.shoots,i=t.length,e=0;i>e;e++)t[e]&&t[e].draw()},collided:function(){this.onShipHit()},destroy:function(){this.onShipHit=null,this.shootImage=null;for(var t=0;this.shoots.length>t;t++)this.shoots[t].destroy();this.shoots=[],this.imgs=[],this.shield=null,this.invasion=null,this._super()},makeShoot:function(){var t=this,i=new Shoot({ctx:this.ctx,x:this.position.x+this.size.width/2,y:this.position.y,dir:-1,shootImage:this.shootImage,onDestroy:function(i){for(var e=0;t.shoots.length>e;e++)if(t.shoots[e]===i){t.shoots.splice(e,1);break}},collateBricks:this.shield.bricks,collateAliens:this.invasion.aliens});this.shoots.push(i)},buildShootImage:function(){var 
t=ImageMapper.ShipShoot(),i=2,e=i*t[0].length,s=i*t.length,n={width:e,height:s,states:[1],brickSize:i,mapper:t,color:this.color};this.shootImage=ImageCreator.getImages(n)[0]}}),Invasion=DrawableElement.extend({init:function(t){this._super(t),this.colors={crab:"#FF2727",squid:"#F8FF41"},this.size={width:390,height:210},this.shield=t.shield,this.ship=t.ship,this.MOVE_FACTOR=10,this.DOWN_FACTOR=12,this.CURR_VEL=600,this.VEL_FACTOR=50,this.MOVE_TIME=500,this.lastMove=0,this.dir=1,this.lastDir=1,this.lastPer=100,this.state=0,this.alienSize=30,this.aliens=[],this.crabImages=[],this.squidImages=[],this.deadAlienImgs=[],this.shootImage=null,this.shoots=[],this.build(),this.aliensAmm=this.aliens.length,this.hadAlienCollision=!1,this.onAliensClean=t.onAliensClean||function(){},this.timer=null},build:function(){var t=this;this.buildShootImage(),this.buildAliensImages();for(var i=this.alienSize,e=this.position.x,s=this.position.y,n=this.ctx,o=ImageMapper.Invasion(),h=o.length,r=0;h>r;r++)for(var a=o[r].length,l=0;a>l;l++)if(o[r][l]){var u,c={ctx:n,x:l*i+e,y:r*i+s,width:i,height:i,destroyedImg:this.deadAlienImgs,shield:this.shield,ship:this.ship,onDestroy:function(i){for(var e=0;t.aliens.length>e;e++)if(t.aliens[e]===i){t.aliens.splice(e,1);break}},onWallCollision:function(){t.hadAlienCollision=!0}};switch(o[r][l]){case 1:c.stateImgs=this.crabImages,c.color=this.colors.crab;break;case 2:c.stateImgs=this.squidImages,c.color=this.colors.squid}u=new Alien(c),this.aliens.push(u)}},loop:function(){this.state=!this.state;var t=this.MOVE_FACTOR,i=0,e=0,s=this.aliens,n=s.length;0===n&&this.onAliensClean(),this.hadAlienCollision&&(this.dir*=-1,this.hadAlienCollision=!1,e=this.DOWN_FACTOR,this.lastDir=this.dir),i=t*this.dir,this.position.x+=i,this.position.y+=e;var o=!1;if(this.state&&Math.floor(2*Math.random())){o=!0,shooterIdx=[];for(var h=0;2>h;h++)shooterIdx.push(Math.floor(Math.random()*n))}for(var h=0;n>h;h++)s[h].position.x+=i,s[h].position.y+=e,o&&shooterIdx.indexOf(h)>-1&&this.makeShoot(s[h]);this.vMove>0&&(this.vMove=0);var r=100*n/this.aliensAmm;return this.lastPer-r>9?(this.CURR_VEL-=this.VEL_FACTOR,this.MOVE_TIME-=this.VEL_FACTOR,200>this.MOVE_TIME&&(this.MOVE_TIME=200),this.lastPer=r,void 0):void 0},update:function(t){if(this.lastMove-=t,0>=this.lastMove){this.loop(),this.lastMove=this.MOVE_TIME,this.state;for(var i=this.aliens,e=i.length,s=0;e>s;s++)void 0!==i[s]&&i[s].update(t)}for(var n=this.shoots,o=n.length,s=0;o>s;s++)n[s]&&n[s].update(t)},draw:function(){for(var t=this.state,i=this.aliens,e=i.length,s=0;e>s;s++)void 0!==i[s]&&i[s].draw(t);for(var n=this.shoots,o=n.length,s=0;o>s;s++)n[s].draw()},destroy:function(){clearInterval(this.timer),this.shield=null,this.ship=null;for(var t=0;this.shoots.length>t;t++)this.shoots[t].destroy();this.shoots=[],this._super()},makeShoot:function(t){var i=this.shield,e=this.ship,s=this,n=new Shoot({ctx:this.ctx,x:t.position.x+t.size.width/2,y:t.position.y,dir:1,shootImage:this.shootImage,onDestroy:function(t){for(var i=0;s.shoots.length>i;i++)if(s.shoots[i]===t){s.shoots.splice(i,1);break}},collateBricks:i.bricks,collateAliens:[e]});this.shoots.push(n)},buildShootImage:function(){var t=ImageMapper.AlienShoot(),i=2,e=i*t[0].length,s=i*t.length,n={width:e,height:s,states:[1],brickSize:i,mapper:t,color:"yellow"};this.shootImage=ImageCreator.getImages(n)[0]},buildAliensImages:function(){var 
t={width:30,height:30,states:[1],brickSize:2};t.mapper=ImageMapper.DeadAlien(),t.color="white",this.deadAlienImgs=ImageCreator.getImages(t),t.states=[2,3],t.mapper=ImageMapper.AlienCrab(),t.color=this.colors.crab,this.crabImages=ImageCreator.getImages(t),t.mapper=ImageMapper.AlienSquid(),t.color=this.colors.squid,this.squidImages=ImageCreator.getImages(t)}}),Alien=DrawableElement.extend({init:function(t){this._super(t),this.images=t.stateImgs||[],this.destroyedImg=t.destroyedImg||[],this.onWallCollision=t.onWallCollision||[],this.shield=t.shield||null,this.ship=t.ship||null,this.destroyed=!1,this.shoots=[]},build:function(){},update:function(){this.hasCollision();var t=this.position.x;(20>t||t>590-this.size.width)&&this.onWallCollision();var i=this.position.y+this.size.height;0>i&&this.ship.collided()},draw:function(t){if(this.destroyed)this._super(this.destroyedImg[0]),this.destroy(),this.onDestroy(this);else{var i=t?0:1;this._super(this.images[i])}},hasCollision:function(){function t(t){if(!t)return!1;for(var s=t,n=s.length,o=0;n>o;o++){var h=s[o],r=h.position.x,a=h.position.y,l=r+h.size.width,u=a+h.size.height;if(i>=r&&l>=i&&e>=a&&u>=e&&!h.destroyed)return t[o].collided(!0),!0}return!1}var i=this.position.x+this.size.width/2,e=this.position.y+.8*this.size.height;return t(this.shield.bricks)?!0:t([this.ship])?!0:void 0},collided:function(){this.destroyed=!0,window.camera.shake(3),window.particles.create([this.position.x+this.size.width/2,this.position.y+this.size.height/2],10,this.color)},destroy:function(){this._super()}}),Brick=DrawableElement.extend({init:function(t){this._super(t),this.destroyed=!1,this.value=t.value||1},build:function(){},update:function(){},draw:function(){this.destroyed||(this.ctx.beginPath(),this.ctx.rect(this.position.x,this.position.y,this.size.width,this.size.height),this.ctx.fillStyle=this.color,this.ctx.fill())},destroy:function(){this.destroyed=!0}}),ShieldBrick=DrawableElement.extend({init:function(t){this._super(t),this.state=0,this.imgsState=t.imgsState,this.destroyed=!1},build:function(){},update:function(){},draw:function(){this.destroyed||this._super(this.imgsState[this.state])},collided:function(t){window.camera.shake(1),window.particles.create([this.position.x+this.size.width/2,this.position.y+this.size.height/2],4,this.color),t?this.state=Math.floor(3*Math.random()+2):this.state++,this.state>1&&(this.destroyed=!0)},destroy:function(){this._super()}}),Shield=DrawableElement.extend({init:function(t){this._super(t),this.imgs=[],this.build()},build:function(){this.createImagesStateBricks();for(var t=this.brickSize,i=this.position.x,e=this.position.y,s=this.ctx,n=this.color,o=ImageMapper.Shield(),h=o.length,r=0;h>r;r++)for(var a=o[r].length,l=0;a>l;l++)if(o[r][l]){var u=new ShieldBrick({ctx:s,x:l*t+i,y:r*t+e,width:t,height:t,color:n,imgsState:this.imgs});this.bricks.push(u)}},update:function(t){for(var i=this.bricks,e=i.length,s=0;e>s;s++)i[s]&&i[s].update(t)},draw:function(){var t=this.bricks;if(t)for(var i=t.length,e=0;i>e;e++)t[e]&&t[e].draw()},destroy:function(){for(var t=this.bricks,i=t.length,e=0;i>e;e++)t[e].destroy();this.bricks=[],this._super()},createImagesStateBricks:function(){for(var 
t={width:this.brickSize,height:this.brickSize,states:[1],brickSize:2,color:this.color},i=ImageMapper.ShieldBrick(),e=0;i.length>e;e++)t.mapper=i[e],this.imgs.push(ImageCreator.getImages(t)[0])}}),Invaders404=Class.extend({init:function(t){this.canvas=null,this.ctx=null,this.loopInterval=10,this.currentDir=[],this.shield={},this.ship={},this.invasion={},this.initCanvas(t.canvasId),this.onLoose=t.onLoose||function(){},this.onWin=t.onWin||function(){},this.isOnGame=!1,this.boundGameRun=this.gameRun.bind(this),this.fps=0,this.now=null,this.lastUpdate=1*new Date-1,this.fpsFilter=this.loopInterval;var i=this,e=document.getElementById("fps");setInterval(function(){e.innerHTML=i.fps.toFixed(1)+"fps"},1e3)},initCanvas:function(t){this.canvas=document.getElementById(t||"canvas"),this.ctx=this.canvas.getContext("2d"),window.particles.init(this.ctx,{w:this.canvas.width,h:this.canvas.height})},start:function(){this.build(),this.gameRun()},gameRun:function(){window.gameTime.tick()&&this.loop(),this.tLoop=window.requestAnimationFrame(this.boundGameRun)},build:function(){var t=this;this.shield=new Shield({ctx:this.ctx,x:70,y:290,brickSize:12,color:"#ffffff"});var i=this.canvas.width;this.ship=new Ship({ctx:this.ctx,shield:this.shield,maxMoveLeft:5,maxMoveRight:i-10,x:(i-10)/2,y:370,color:"#1be400",onShipHit:function(){t.stop(),t.onLoose()}}),this.invasion=new Invasion({ctx:this.ctx,x:60,y:10,shield:this.shield,ship:this.ship,onAliensClean:function(){t.stop(),t.onWin()}}),this.ship.invasion=this.invasion,this.currentDir=[],this.isOnGame=!0,this.bindControls()},loop:function(){this.isOnGame&&(this.update(window.gameTime.frameTime),this.draw())},update:function(t){window.camera.update(t),this.shield.update(t),this.ship.update(this.currentDir,t),this.invasion.update(t),window.particles.update(t)},draw:function(){this.ctx.clearRect(0,0,this.canvas.width,this.canvas.height),this.shield.draw(),this.ship.draw(),this.invasion.draw(),window.particles.draw();var t=1e3/((this.now=new Date)-this.lastUpdate);this.fps+=(t-this.fps)/this.fpsFilter,this.lastUpdate=this.now},bindControls:function(){function t(t){switch(t){case Keyboard.Space:return Controls.Shoot;case Keyboard.Left:return Controls.Left;case Keyboard.Right:return Controls.Right}return null}var i=this,e=[Keyboard.Space,Keyboard.Left,Keyboard.Right];document.addEventListener("keydown",function(s){if(i.isOnGame){var n=s.keyCode;if(e.indexOf(n)>-1){var o=t(n);return-1===i.currentDir.indexOf(o)&&i.currentDir.push(o),s.stopPropagation(),s.preventDefault(),!1}}}),document.addEventListener("keyup",function(e){if(i.isOnGame){var s=e.keyCode,n=t(s),o=i.currentDir.indexOf(n);o>-1&&i.currentDir.splice(o,1)}})},unbindControls:function(){document.removeEventListener("keydown",function(){}),document.removeEventListener("keyup",function(){})},destroy:function(){this.shield.destroy(),this.invasion.destroy(),this.ship.destroy()},stop:function(){this.isOnGame=!1;for(var t=0;this.currentDir.length>t;t++)this.currentDir[t]=null;this.currentDir=[],this.destroy()},drawSplash:function(t){function i(t,i){var e=20,o=t*e-e;s.save(),s.fillStyle=i,s.fillRect(o,0,e,n),s.restore()}function e(){for(var s=0;5>s;s++)i(h+s,"rgba(240,219,79,"+(s?s/10:1)+")");h++,o/10>h?setTimeout(e,r):t()}var s=this.ctx,n=this.canvas.height,o=this.canvas.width,h=0,r=2*this.loopInterval;e()}});(function(){function t(){for(var t in l)if(-1!==navigator.platform.indexOf(l[t]))return l[t];return"Unknown"}function i(t,i,e,s,n){return s+(n-s)*((t-i)/(e-i))}function 
e(t,i,e,s){Object.defineProperty(e,s,{enumerable:!0,get:function(){return t.axes[i.axes[s]]}})}function s(t,i,e,s){Object.defineProperty(e,s,{enumerable:!0,get:function(){return 0}})}function n(t,i,e,s){Object.defineProperty(e,s,{enumerable:!0,get:function(){return t.buttons[i.buttons[s]]}})}function o(t,e,s,n){var o=e.axes[n]instanceof Array;Object.defineProperty(s,n,{enumerable:!0,get:function(){return o?i(t.axes[e.axes[n][0]],e.axes[n][1],e.axes[n][2],0,1):t.axes[e.axes[n]]}})}function h(t,i){Object.defineProperty(t,i,{enumerable:!0,get:function(){return 0}})}var r={"45e":{"28e":{Mac:{axes:{Left_Stick_X:0,Left_Stick_Y:1,Right_Stick_X:2,Right_Stick_Y:3,Left_Trigger_2:[4,-1,1],Right_Trigger_2:[5,-1,1]},buttons:{A_Button:0,B_Button:1,X_Button:2,Y_Button:3,Left_Trigger_1:4,Right_Trigger_1:5,Left_Stick_Button:6,Right_Stick_Button:7,Start_Button:8,Back_Button:9,Home_Button:10,Pad_Up:11,Pad_Down:12,Pad_Left:13,Pad_Right:14}},Win:{axes:{Left_Stick_X:0,Left_Stick_Y:1,Right_Stick_X:3,Right_Stick_Y:4,Pad_Left:[5,0,-1],Pad_Right:[5,0,1],Pad_Up:[6,0,-1],Pad_Down:[6,0,1],Left_Trigger_2:[2,0,1],Right_Trigger_2:[2,0,-1]},buttons:{A_Button:0,B_Button:1,X_Button:2,Y_Button:3,Left_Trigger_1:4,Right_Trigger_1:5,Back_Button:6,Start_Button:7,Left_Stick_Button:8,Right_Stick_Button:9}}}},"54c":{268:{Mac:{axes:{Left_Stick_X:0,Left_Stick_Y:1,Right_Stick_X:2,Right_Stick_Y:3},buttons:{Back_Button:0,Left_Stick_Button:1,Right_Stick_Button:2,Start_Button:3,Pad_Up:4,Pad_Down:6,Pad_Right:5,Pad_Left:7,Left_Trigger_2:8,Right_Trigger_2:9,Left_Trigger_1:10,Right_Trigger_1:11,Y_Button:12,B_Button:13,A_Button:14,X_Button:15,Home_Button:16}}}},"46d":{c242:{Win:{axes:{Left_Stick_X:0,Left_Stick_Y:1,Right_Stick_Y:4,Right_Stick_X:3,Left_Trigger_2:[2,0,1],Right_Trigger_2:[2,-1,0],Pad_Left:[5,-1,0],Pad_Right:[5,0,1],Pad_Up:[6,-1,0],Pad_Down:[6,0,1]},buttons:{A_Button:0,X_Button:2,B_Button:1,Y_Button:3,Left_Trigger_1:4,Right_Trigger_1:5,Back_Button:6,Start_Button:7,Left_Stick_Button:8,Right_Stick_Button:9}}},c216:{Mac:{axes:{Left_Stick_X:1,Left_Stick_Y:2,Right_Stick_X:3,Right_Stick_Y:4,Pad_Left:[1,0,-1],Pad_Right:[1,0,1],Pad_Up:[2,0,-1],Pad_Down:[2,0,1]},buttons:{X_Button:0,A_Button:1,B_Button:2,Y_Button:3,Left_Trigger_1:4,Right_Trigger_1:5,Left_Trigger_2:6,Right_Trigger_2:7,Back_Button:8,Start_Button:9,Left_Stick_Button:10,Right_Stick_Button:11}}}},"40b":{6533:{Mac:{axes:{Pad_Left:[0,0,-1],Pad_Right:[0,0,1],Pad_Up:[1,0,-1],Pad_Down:[1,0,1]},buttons:{A_Button:0,B_Button:1,X_Button:2,Y_Button:3}}}},Firefox:{"Fake Gamepad":{Mac:{axes:{},buttons:{A_Button:0,B_Button:1,X_Button:2,Y_Button:3,Pad_Up:4,Pad_Down:5,Pad_Left:6,Pad_Right:7}}}}},a={axes:["Left_Stick_X","Left_Stick_Y","Right_Stick_X","Right_Stick_Y"],buttons:["A_Button","B_Button","X_Button","Y_Button","Left_Stick_Button","Right_Stick_Button","Start_Button","Back_Button","Home_Button","Pad_Up","Pad_Down","Pad_Left","Pad_Right","Left_Trigger_1","Right_Trigger_1","Left_Trigger_2","Right_Trigger_2"]},l=["Win","Mac","Linux"],u=window.Input={};u.Device=function(i){if(!i)throw"You didn't pass a valid gamepad to the constructor";var l=i,u=i.id.split("-")[0],c=i.id.split("-")[1],d=t(),f=r,g=this.axes={},p=this.buttons={};if(!(f&&f[u]&&f[u][c]&&f[u][c][d]))throw"A physical device layout for "+u+"-"+c+"-"+d+" isn't available";f=f[u][c][d];for(var m in a.axes)void 0!==f.axes[a.axes[m]]?e(l,f,g,a.axes[m]):void 0!==f.buttons[a.axes[m]]?s(l,f,g,a.axes[m]):h(g,a.axes[m]);for(var _ in a.buttons)void 0!==f.buttons[a.buttons[_]]?n(l,f,p,a.buttons[_]):void 
0!==f.axes[a.buttons[_]]?o(l,f,p,a.buttons[_]):h(p,a.buttons[_]);Object.defineProperty(this,"connected",{enumerable:!0,get:function(){return l.connected}}),Object.defineProperty(this,"id",{enumerable:!0,get:function(){return l.id}}),Object.defineProperty(this,"index",{enumerable:!0,get:function(){return l.index}})}})();
\ No newline at end of file
diff --git a/404/start.js b/404/start.js
new file mode 100644
index 0000000..069934f
--- /dev/null
+++ b/404/start.js
@@ -0,0 +1,49 @@
+var invaders,
+    gamepad;
+
+window.addEventListener("MozGamepadConnected", function(e) {
+    gamepad = new Input.Device(e.gamepad);
+});
+
+window.addEventListener('load', function(){
+    initInvaders404();
+});
+
+function play(){
+    var splash = document.getElementById('splash');
+    splash.style.display = "none";
+    splash.style.opacity = 0;
+
+    invaders.start();
+}
+
+function showSplash(){
+    invaders.drawSplash(function(){
+        var splash = document.getElementById('splash');
+        splash.style.display = "block";
+
+        // fade the splash in, and stop the timer once it is fully opaque
+        var fade = setInterval(function(){
+            var opa = parseFloat(splash.style.opacity) || 0;
+            if (opa < 1){
+                splash.style.opacity = opa + 0.2;
+            } else {
+                clearInterval(fade);
+            }
+        }, 200);
+    });
+}
+
+function initInvaders404(){
+    invaders = new Invaders404({
+        canvasId: "game-canvas",
+        onLoose: function(){ // "onLoose" is the spelling the game API expects
+            showSplash();
+        },
+        onWin: function(){
+            showSplash();
+        }
+    });
+
+    invaders.start();
+}
\ No newline at end of file
diff --git a/POSTS/README.txt b/POSTS/README.txt
deleted file mode 100644
index 5df6228..0000000
--- a/POSTS/README.txt
+++ /dev/null
@@ -1 +0,0 @@
-I intend this space, ~/workspace_1/compumetrika/, to become my main space for my personal/professional webpage. I may change the items that reside here (particularly the style of the webpage and the static site generator), but I also intend to maintain my major "thoughts and ideas" here.
diff --git a/POSTS/WHAT_I_DO.md b/POSTS/WHAT_I_DO.md
deleted file mode 100644
index 461eca4..0000000
--- a/POSTS/WHAT_I_DO.md
+++ /dev/null
@@ -1,34 +0,0 @@
-Things I work on:
-
-
-* Simulation-based inference: indirect inference and the bootstrap
- * a "direct" method of indirect inference: simulated method of moments
- * model-based indirect inference using a *detailed model* and *auxiliary model*
- * model-based ("parametric") bootstrap
- * non-parametric bootstrap
-
-* Welfare ranking for arbitrary policy functions: heuristic policies, social policies
- * employs simulation-based value estimation
- * provides estimates of the true welfare cost of social learning (exploitation vs. exploration tradeoffs)
- * provides
-
-* First-principles approach to learning solutions to dynamic optimization problems with arbitrary model uncertainty
-
- * Draws from an extensive and established literature: approximate dynamic programming, reinforcement learning, stochastic approximation
- * Asynchronous dynamic programming as a foundational baseline
- * establishes efficiency (or inefficiency) of finding optimal solutions under learning-from-experience
- * and conveniently, a conceptual bridge from dynamic programming
- * intuitive explanation for *why* agents may 'optimally choose to approximate' (spoiler: the signal process is too slow for finding the complete dynamic solution to be optimal, once we account for the exploration/exploitation tradeoff. Welfare loss from the explore/exploit tradeoff implies there is actually an optimal level of approximation, which we can establish in concrete ways, using traditional Policy Iteration as the overarching framework.)
- * basic version does not require knowledge about shock process
- * key elements:
- * exploiting knowledge about the shape of the optimal solution
- * approximation of
- * learning from "regret"
- * widely applicable to nearly any policy setting
- * also suggests straightforward ways in which agents may "go off the rails" and learn poorly.
- * approach from first principles implies a straightforward framework/taxonomy for "boundedly rational" mistakes on the part of agents.
- * eg. conditional on claiming
- * intuitive exp
- * Note: it may be the case that some of the population is modeled *best* as learning to optimize, some modeled best as purely replication-dynamic learners, and some simply modeled as "confused," not making discernible optimizing choices (NOTE that this may go one of two ways -- optimal policy plus noise, or simply noise within what is feasible.)
- * GREAT EXPERIMENTAL REFERENCE: Houser et al. (200_), Bayesian estimation of number of types of learners.
- * THIS FINDS EVIDENCE for N distinct types of learners! Which at first blush appear to fit the framework above very nicely! That is, "near-optimal" learners, "mistakes in a particular way" learners, and "pure confusion" (non)learners. Very, very cool result.
diff --git a/POSTS/see_email_to_alex_kaufman_for_musing_on_abm.txt b/POSTS/see_email_to_alex_kaufman_for_musing_on_abm.txt
deleted file mode 100644
index 49d2ba3..0000000
--- a/POSTS/see_email_to_alex_kaufman_for_musing_on_abm.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-It's funny -- the ABM guys originally introduced a lot of "heuristic" behavior half because of principle ("people don't really optimize!"), but also half because, if you want to build a complicated enough structural model (for example, a mildly realistic range of mortgage options, or large numbers of assets, or the need to save for retirement as well as "for a rainy day" as well as for a car and a house and education, and maybe job choice, and maybe add marriage choice and number-of-kids choice) -- it quickly becomes purely a *practical* issue that you need heuristics just to make the solution computationally tractable.
-
-And really -- a huge aspect to all of this is that it usually takes someone a couple years to write down, run, get the bugs out, and estimate a new model.
diff --git a/REREFENCE_CONTENT/MR_opaque_intelligence_alex_tabarrok/The Rise of Opaque Intelligence.html b/REREFENCE_CONTENT/MR_opaque_intelligence_alex_tabarrok/The Rise of Opaque Intelligence.html
deleted file mode 100644
index 12b69d6..0000000
--- a/REREFENCE_CONTENT/MR_opaque_intelligence_alex_tabarrok/The Rise of Opaque Intelligence.html
+++ /dev/null
@@ -1,1109 +0,0 @@
-
-The Rise of Opaque Intelligence
-
Many years ago I had a job picking up and delivering packages in Toronto. Once the boss told me to deliver package A then C then B when A and B were closer together and delivering ACB would lengthen the trip. I delivered ABC and when the boss found out he wasn’t happy because C needed their package a lot sooner than B and distance wasn’t the only variable to be optimized. I recall (probably inaccurately) the boss yelling:
-
Listen college boy, I’m not paying you to think. I’m paying you to do what I tell you to do.
-
It isn’t easy suppressing my judgment in favor of someone else’s judgment even if the other person has better judgment (ask my wife), but once it was explained to me I at least understood why my boss’s judgment made sense. More and more, however, we are being asked to suppress our judgment in favor of that of an artificial intelligence, a theme in Tyler’s Average is Over. As Tyler notes:
-
…there will be Luddites of a sort. “Here are all these new devices telling me what to do—but screw them; I’m a human being! I’m still going to buy bread every week and throw two-thirds of it out all the time.” It will be alienating in some ways. We won’t feel that comfortable with it. We’ll get a lot of better results, but it won’t feel like utopia.
-
I put this slightly differently: the problem isn’t artificial intelligence but opaque intelligence. Algorithms have now become so sophisticated that we humans can’t really understand why they are telling us what they are telling us. The WSJ writes about drivers using UPS’s super algorithm, Orion, to plan their delivery routes:
-
Driver reaction to Orion is mixed. The experience can be frustrating for some who might not want to give up a degree of autonomy, or who might not follow Orion’s logic. For example, some drivers don’t understand why it makes sense to deliver a package in one neighborhood in the morning, and come back to the same area later in the day for another delivery. But Orion often can see a payoff, measured in small amounts of time and money that the average person might not see.
-
One driver, who declined to speak for attribution, said he has been on Orion since mid-2014 and dislikes it, because it strikes him as illogical.
-
Human drivers think Orion is illogical because they can’t grok Orion’s super-logic. Perhaps any sufficiently advanced logic is indistinguishable from stupidity.
-
-
-dan in Philly, February 20, 2015 at 8:00 am:
-
Exactly my problem with what I’ve learned about big data analysis, especially neural networks. It works, but I don’t understand how to out-think it, which is how I tend to get comfortable with things I do and systems I have to follow. So I feel like a blind man steering a bus down Broad Street with only my GPS to help me. I just can’t seem to get comfortable no matter how long we go without a crash.
I’ve been feeding Amazon data for more than a decade and its recommendations still lag Barnes and Noble’s in-store employee picks. When I used Netflix I was also unimpressed with their recommendations.
Perhaps the employee was picking up on the human’s emotional state of the moment as well as their long-term trends. The former, Amazon and Netflix cannot yet do, although I suspect they are much better than humans at the latter.
-
If the employee giving me recommendations was attractive I might also have reasons of affiliation to like their pick more than a pick made by algorithm.
To clarify – there is a section of Barnes & Noble titled “Employee Picks” – I always find several interesting books there while it takes a half hour of browsing on Amazon to find a similar number of books. I’m not interacting with any humans at Barnes & Noble.
The example is silly. The employee was not given the information which formed the basis of the employer’s irrational preferences, given the failure to disclose what is being optimized.
-
Now, if you don’t know nuclear physics, and you are told by your boss at the nuclear power plant to flood the reactor if the bell sounds, you are more likely to follow his advice, given that you know that you don’t know and that he does. Or she does.
“Human drivers think Orion is illogical because they can’t grok Orion’s super-logic. Perhaps any sufficiently advanced logic is indistinguishable from stupidity.”
-
Or maybe the humans actively doing the work are more attuned to the variables than the humans who wrote the code. This seems to be consistently missed. It isn’t computers vs. humans, it is coders vs. others.
-
It may get to the point that the collective code is superior, but at this moment in time it is not always true. Code is like a book of regulations being dictated by someone in an air-conditioned office to the guy in the dirt being shot at. Conflict as to the right course of action is fundamentally assured.
-
-
-Adrian Ratnapala, February 20, 2015 at 8:43 am:
-
It isn’t even coders vs. the rest. It is more like central planners vs. the market: the code can only take into account certain things, and the driver might well know some other factor that isn’t included. Now I hope UPS has measured the effectiveness of Orion compared to the old-fashioned way, and it could turn out that on average its errors are better than the ones that drivers make.
-
But it’s pretty hard on a driver who, from everything he can see, is being told to do something irrational. And likely to cause the driver to disobey. Like Alex’s boss, these systems should try to explain their rationale — although such code is easier to request than to write.
That’s a good point. Ideally, they should also incorporate some sort of feedback mechanism, so that drivers, and others closer to the real world conditions, can add information and variables to the model that the coders or planners didn’t account for.
-
-
-mbutuomalley, February 20, 2015 at 9:20 am:
-
I ran into a similar issue when talking about code for routing aircraft at a large airport. I pointed out that despite the rigorous QA process, some defects will not be obvious unless a comparison is done against other possible solutions after the fact. If nothing else, the calculated result needs to be compared to the actual result to see how much variation there is, and perhaps introduce logic that corrects for that difference over time (or at least triggers additional analysis of it). The same issue exists in complicated pricing systems too.
Are we really sure that Orion’s schedule is more efficient?
-
I wonder how much this is like (Hayek’s?) information problem in central planning. Does it really have more information than the local drivers about the idiosyncrasies of particular routes?
-
I suspect the drivers and Orion are trying to optimize different things, and that the Postal System’s problem is that its drivers’ incentives do not line up as closely with USPS objectives as does the formula Orion is trying to optimize.
Good point. Maybe the driver wants to hit up a particular sushi joint for lunch but the computer puts them on the other side of town. Trivial example, but the loss of control over small personal decisions would really bother me.
Part of it for me is recognizing the weaknesses in an imperfect system. If I can pinpoint the potential shortfalls or blind spots of a method or system (which all systems inevitably have), then I know what to plan for. If the algorithm is smarter than I am, I just have to trust blindly. I think that’s one of the limits that driverless cars face. They may be demonstrably safer than human drivers, but I’m giving up all autonomy to trust it.
Alex has come upon the impetus behind the growing research area of interpretable machine learning algorithms. UPS may be able to force its drivers to follow an algorithmic master, but doctors aren’t going to pay attention to the recommendations of an AI system (even if it can beat Ken Jennings in Jeopardy) unless the system can provide an “explanation” for its answer.
When the algorithms are able to demonstrably provide better treatment outcomes, we had better learn to follow them whether we understand the explanation or not. I will take an algorithm that has a 90% chance of ameliorating my illness over a human doc with a 70% chance.
There will be software to assist you in a self-diagnosis, so maybe you won’t need to worry about what the doctor thinks or doesn’t think. You’ll just need him to do the surgery/write the prescription.
If I’m playing high-stakes chess and I’m given access to Deep Blue (or whatever is best now) I’m going to follow its recommendations blindly without caring why… especially since I know the “why” consists of an n-ply-deep evaluation of positions using some opaque function.
The problem with these algorithms is that they are based on aggregate information. The further the driver’s route and particulars are from the mean, the less effective the algorithm will be for his (or her) particular route, and the more frustration the driver will feel.
-
I suspect this will also change the way the drivers approach their job. For example, when I play Scrabble, I am ransacking my mind for words that might fit the board, and the others are unlikely to challenge or win a challenge with. But when I play Words With Friends, it doesn’t matter what words I know; it only matters what WWF knows. So when the going gets tough I just randomly switch out tiles until something works. It’s opaque, less satisfying, and only rarely do I bother looking up the so-called word I just played. If UPS drivers adopt that mentality, they’ll be less able to deal with the unexpected when it arises.
Of course, if judgment is inferior to data, counterproductive even, then what’s the value of experience; and if experience lacks value, if experience is counterproductive, then why hire anyone (other than quants) with an IQ above functional.
It used to be the FedEx truck could be found idling in the municipal park lot at around 3:30 each day when I took my dog for a walk. Now the truck careens through the neighborhood back and forth like some kind of possessed Christine all the way up until about 5:30.
Ahhhh, the intelligent guy thinks he’s more efficient than any route-planning algorithm, and he may be right. However, he’s not intelligent enough to imagine himself in the manager’s shoes. If the company could hire only brilliant and cooperative guys like him, no algorithm would be needed, ever. But reality is different: Orion is not designed to help intelligent people but the majority of people. If the majority improves, the average result is better and managers get savings. Intelligent people can adapt or go home.
-
-
-Bruce Cleaver, February 20, 2015 at 9:40 am:
-
This sort of thing has been recognized in computer chess for years after the advent of N-man databases. The following link shows a ‘mate in 549’ which is completely opaque – there appears to be no progress whatsoever for the first 548 moves…
For many people, “opaque intelligence” includes the intelligence of human experts. My Facebook feed is full of low-grade propaganda graphics denying expert consensus.
-
-
-bellisaurius, February 20, 2015 at 9:46 am:
-
One step closer to becoming space hippies and joining The Culture.
-
Although, as a control engineer who gets to sometimes interact/override these things, I’d make the list as:
-
“Person who knows system intimately and has knowledge of what goes on ‘in the black box'” + computer > “computer where coders have half a clue”> guy on his own with half a clue> pretty much anything else.
For all the many strengths of Python, people often express frustration in finding, building, and installing third party packages.
-This pain can be especially acute with scientific and analytical libraries, which are often a mix of Python with compiled, platform-dependent C, C++, or Fortran code.
-One day, tools like PyPy and Numba may rescue us from this state of affairs, but data scientists working today need solutions—today.
-When we began building Wakari, a cloud-based platform for shareable, reproducible analytics, we also experienced this pain.
-Our users need to work with different versions of Python, NumPy, SciPy, and a variety of other packages.
-Moreover, they must be able to easily share live, runnable versions of their work, including all supporting packages, with their colleagues or the general public.
-
We created the conda package and environment management system to solve these problems.
-It allows users to install multiple versions of binary packages (and any required libraries) appropriate for their platform and easily switch between them, as well as easily download updates from an upstream repository.
-Continuum hosts a number of repository channels that provide many free open source packages (as well as commercial channels for distributing commercial packages).
-It’s also possible for conda users to host their own channels, so that they may pull their own packages easily into conda environments.
-Think of it as git branching for site-packages, combined with yum for Python packages.
-Because we found conda to be so useful for managing packages in Anaconda and Wakari, we have open-sourced it, so that others might benefit from it as well.
-
Having been involved in the python world for so long, we are all aware of pip, easy_install, and virtualenv, but these tools did not meet all of our specific requirements.
-The main problem is that they are focused around Python, neglecting non-Python library dependencies, such as HDF5, MKL, LLVM, etc., which do not have a setup.py in their source code and also do not install files into Python’s site-packages directory.
-
Under the hood, we have created a concept of environments which are conceptually similar to virtualenvs, but which use filesystem-level
-hard links to create entirely self-contained Python runtime layouts.
-By using the ‘conda’ command line tool, users can easily switch between environments, create environments, and install different versions of libraries and modules into them.
-
The conda documentation is available online, and contains a complete command reference as well as a number of examples.
-With the release of conda 1.3, we’d like to show off some of conda’s more important features and provide a helpful “Getting Started” guide as a blog post as well.
-Let’s take a look at some common scenarios!
-
Downloading and Installing Packages
-
The primary use case of conda is for managing packages and their dependencies in a platform independent fashion.
-Let’s see how conda can help us find and install packages we are interested in.
-First, let’s look at some information about our conda setup:
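The captured output of this step is not reproduced here, but the command is simply:

$ conda info

Among other things, it reports the conda version, the platform, and the channel URLs conda is configured to search.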
If the package is in one of the repository channels we have configured, then installing a package is as simple as using the conda install command.
-We can install packages into different environments, but if you don’t otherwise specify, conda will install packages into the default Anaconda environment.
-We’ll visit creating new environments a little later, but let’s start with some examples with the default environment.
-Let’s say there is a different version of matplotlib we wish to try out:
-
$ conda install matplotlib=1.2
-
-Package plan for installation in environment /Users/bryan/anaconda13:
-
-The following packages will be DE-activated:
-
- package | build
- ------------------------- | ---------------
- matplotlib-1.1.1 | np17py27_2
-
-The following packages will be activated:
-
- package | build
- ------------------------- | ---------------
- matplotlib-1.2.0 | np17py27_0
-
-Proceed (y/n)?
-
-
-
Of course, if we just want to update to the latest version of a package that is compatible with the other currently installed packages, we can often just use the conda update command:
-
$ conda update matplotlib
-Updating Anaconda environment at /Users/bryan/anaconda13
-
-The following packages will be DE-activated:
-
- package | build
- ------------------------- | ---------------
- matplotlib-1.1.1 | np17py27_2
-
-The following packages will be activated:
-
- package | build
- ------------------------- | ---------------
- matplotlib-1.2.0 | np17py27_0
-
-Proceed (y/n)?
-
-
-
If there are packages that we want that are in other additional channels, we can add those channels in our condarc file.
-Let’s look at the default condarc file:
-
# channel locations. These override conda defaults, i.e., conda will
-# search only the channels listed here, in the order given.
- channels:
-# - http://repo.continuum.io/pkgs/dev
-# - http://repo.continuum.io/pkgs/gpl
-# - http://repo.continuum.io/pkgs/pro
- - http://repo.continuum.io/pkgs/free
-
-If we would like to allow GPL licensed packages to be installed into our Anaconda environments, we can simply uncomment the line with the “gpl” channel.
-Afterwards, this new channel will show up in our conda info output:
-
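The output is again omitted here, but rerunning the info command at this point shows the gpl channel among the configured channel URLs:

$ conda info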
Now we can install, for instance, the GPL licensed rope library:
-
$ conda install rope
-
-Package plan for installation in environment /Users/bryan/anaconda:
-
-The following packages will be downloaded:
-
- rope-0.9.4-py27_g0.tar.bz2 [http://repo.continuum.io/pkgs/gpl/osx-64/]
-
-The following packages will be activated:
-
- package | build
- ------------------------- | ---------------
- rope-0.9.4 | py27_g0
-
-Proceed (y/n)?
-
-
-
It’s also possible to explicitly supply a package file to install.
-
$ conda install ~/redis-py-2.7.2-py27_0.tar.bz2
-redis-py-2.7.2-py27_0:
- already available - removing
- making available
- activating
-
-
-
This is a bit lower level, but can be useful if you have your own package files to install (we will talk about creating your own packages a bit later).
-
Creating and Using Environments
-
Let’s look a bit into creating new Anaconda environments.
-At the core, Anaconda environments are simply directories that contain particular versions of packages.
-These can be located anywhere, but if they are within the Anaconda installation directory, conda will know about them.
-Let’s take a look:
-
$ conda info -e
- Known Anaconda environments:
-
- /Users/maggie/anaconda
-
-
-
-On a fresh install, there is just the default environment.
-Now we’d like to create some new environments.
-Maybe we have some existing libraries that perform some interesting analysis, and we’d like to test and compare our library with NumPy 1.6 and also the upcoming NumPy 1.7 release.
-Let’s see what versions of NumPy are available on our known package channels:
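The version listing itself is not preserved here, but the query is a one-liner:

$ conda search numpy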
We see there are packages for both versions of NumPy.
-Let’s keep things simple and create environments with the anaconda meta-package (which will install lots of packages in one go), but simply specify the version of NumPy we want in each.
-Let’s create an environment with NumPy 1.6, and another with NumPy 1.7:
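A sketch of the two commands, matching the np1.6 and np1.7 environment names that appear in the listing below:

$ conda create -n np1.6 anaconda numpy=1.6
$ conda create -n np1.7 anaconda numpy=1.7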
We can list these new environments using the conda ‘info’ command:
-
$ conda info -e
- Known Anaconda environments:
-
- /Users/maggie/anaconda
- /Users/maggie/anaconda/envs/np1.6
- /Users/maggie/anaconda/envs/np1.7
-
-
-
To use the python version together with all the packages installed in a given environment, simply run the python executable from that environment.
-From a bash shell:
-
$ ~/anaconda/envs/myenv/bin/python
-Python 2.7.3 |AnacondaCE 1.3.0 (x86_64)| (default, Jan 10 2013, 12:10:41)
-[GCC 4.0.1 (Apple Inc. build 5493)] on darwin
-Type "help", "copyright", "credits" or "license" for more information.
-
-
-
From a Windows command window:
-
> C:\Anaconda\envs\myenv\python.exe
-C:\Windows\system32>c:\Anaconda\envs\test\python.exe
-Python 2.7.3 |Continuum Analytics, Inc.| (default, Jan 7 2013, 09:47:12) [MSC
- v.1500 64 bit (AMD64)] on win32
-Type "help", "copyright", "credits" or "license" for more information.
->>>
-
-
-
If we’d like to make one of these environments the “default”, we simply need to set our PATH appropriately.
-From a bash shell:
-
$ export PATH=~/anaconda/envs/myenv/bin:$PATH
-
-
-
From a Windows command window:
-
> set PATH=C:\Anaconda\envs\myenv\Scripts;%PATH%
-
-
-
Sometimes we don’t want to create environments with all the packages that the anaconda meta-package brings in.
-Maybe we want to do some testing in a minimal environment, and conda lets us create those, too.
-Let’s say we want to create an environment with scikit-learn and its dependencies, but nothing else.
-First, let’s see what versions of scikit-learn are available:
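As above, the listing itself was lost in capture; the query is:

$ conda search scikit-learn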
There are quite a few! By default, conda will install the latest compatible version, so we will just do that.
-But before we create an environment, let’s take a look at what the dependencies of scikit-learn are:
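The dependency listing is also missing here. Early conda releases exposed this query on the command line; the invocation below is an assumption and may differ in your release:

$ conda depends scikit-learn   # assumed command name; check conda --help for your version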
-
-Building your own packages
-
-The content of this section is outdated. If you are interested in creating your own packages, you should read this newer conda blog post.
-
-
Conda allows you to create your own packages, i.e. packages which can be installed using the conda command and added to a conda package repository.
-As an example, we demonstrate how to build the pyephem package, create a repository for it, and install it into an existing Anaconda installation on a different system.
-
Whenever a conda package is installed, the information about which files belong to the package is also stored (as part of the conda install metadata in <sys.prefix>/conda-meta/).
-Therefore, it is possible to determine which files have been installed into a prefix manually (not using the conda command).
-A fresh installation of Anaconda reveals that no such files exist:
We can now use the package command to bundle up the untracked files into a conda package:
-
$ conda package --pkg-name=pyephem --pkg-version=3.7.5.1
-prefix: /home/ilan/a13
-Number of files: 76
-pyephem-3.7.5.1-py27_0.tar.bz2 created successfully
-
-
Note that conda is not limited to creating Python packages; you can install basically any type of package into the prefix and bundle it into a conda package, e.g. using ./configure --prefix=/home/ilan/a13; make; make install.
-
All of the above (including retrieving the pyephem source code) can be done using conda pip pyephem, which basically calls out to pip to do the source installation and then creates the conda package.
-
Creating your own package repository
-
Having successfully created a conda package, we now want to create a repository, such that others can easily install pyephem into their Anaconda installation.
-A conda repository is simply a directory of conda packages plus a conda index file.
-So, we create a new directory with the newly created conda package, and run:
-
$ conda index
-updating index in: /home/ilan/conda-repo/linux-64
-updating: pyephem-3.7.5.1-py27_0.tar.bz2
-$ ls -l
-total 896
--rw-r--r-- 1 ilan users 908801 Jan 24 17:24 pyephem-3.7.5.1-py27_0.tar.bz2
--rw-r--r-- 1 ilan users 313 Jan 24 17:37 repodata.json
--rw-r--r-- 1 ilan users 230 Jan 24 17:37 repodata.json.bz2
-
-
-
The file repodata.json.bz2 is used by the conda install command to detect which packages are available in a given conda repository.
-We now make this repository available over HTTP, and tell people who wish to access the repository to add it to their ~/.condarc file.
-When we serve the above directory on http://localhost/conda-repo/linux-64/, the following URL needs to be added to the ~/.condarc file:
-
channels:
- - http://localhost/conda-repo
-
-
-
Note that the channel URL does not include the platform specific sub-directory (this way the same configuration file may be shared across platforms).
-Now we can install the pyephem package into another Anaconda system:
-
$ conda install pyephem
-Package plan for installation in environment /home/ilan/a121:
-The following packages will be downloaded:
-
- pyephem-3.7.5.1-py27_0.tar.bz2 [http://localhost/conda-repo/linux-64/]
-
-The following packages will be activated:
-
- package | build
- ------------------------- | ---------------
- pyephem-3.7.5.1 | py27_0
-
-Proceed (y/n)? y
-...
-$ python
-Python 2.7.3 |Anaconda 1.3.0 (64-bit)| (default, Jan 22 2013, 14:14:25)
-[GCC 4.1.2 20080704 (Red Hat 4.1.2-52)] on linux2
-Type "help", "copyright", "credits" or "license" for more information.
->>> import ephem
->>> mars = ephem.Mars()
->>> mars.compute('2008/1/1')
->>> print mars.ra, mars.dec
-5:59:27.35 26:56:27.4
-
-
-
It installed the package, and it appears to be working.
-We should mention that this package was built for 64-bit Linux and will not work on other systems, such as 32-bit Linux, MacOSX or Windows, as it contains platform specific C extensions which are linked (during import time) to the Python process.
-
Future directions
-
A standard refrain regarding package management in Python is that it is “an active topic.”
-In fact, there are some recent enhancement proposals covering related areas, including: Package Metadata (PEP 345), Package Databases (PEP 376), Standardized Package Version Numbers (PEP 386), and the Wheel PEP (PEP 427).
-As it happens, some of the ideas in these PEPs are already reflected within conda.
-We intend to watch the evolution and development of these proposals to make conda compatible and interoperable with whatever standard comes out of the enhancement process.
-
-We should also note that conda was originally created to solve problems we had on Linux backend platforms.
-However, we quickly realized that it could be valuable on Windows platforms as well.
-Conda already works well in Windows, but there are still a few areas where it could behave more like native Windows applications.
-Improving Windows integration is another priority for us.
-
You can check out conda in action at Wakari, or by installing Anaconda. You can also follow and contribute to conda development at the conda GitHub page.
--
-
-
-dan in Philly
-February 20, 2015 at 8:00 am
-
-
- -
-
-
-
- -
-
-
-Max Factor
-February 20, 2015 at 8:13 am
-
-
- -
-
-
-
- -
-
-
-Bill
-February 20, 2015 at 8:16 am
-
-
- -
-
-
-
- -
-
-
-NPW
-February 20, 2015 at 8:38 am
-
-
- -
-
-
-
- -
-
-
-harryh
-February 20, 2015 at 8:39 am
-
-
- -
-
-
-
- -
-
-
-Kevin
-February 20, 2015 at 8:42 am
-
-
- -
-
-
-
- -
-
-
-Alex
-February 20, 2015 at 8:49 am
-
-
- -
-
-
-
- -
-
-
-J
-February 20, 2015 at 8:52 am
-
-
- -
-
-
-
- -
-
-
-gamma
-February 20, 2015 at 9:03 am
-
-
- -
-
-
-
- -
-
-
-rayward
-February 20, 2015 at 9:11 am
-
-
- -
-
-
-
- -
-
-
-moo cow
-February 20, 2015 at 9:20 am
-
-
- -
-
-
-
- -
-
-
-dearieme
-February 20, 2015 at 9:24 am
-
-
- -
-
-
-
- -
-
-
-Axa
-February 20, 2015 at 9:27 am
-
-
- -
-
-
-
- -
-
-
-Bruce Cleaver
-February 20, 2015 at 9:40 am
-
-
- -
-
-
-
- -
-
-
-Btone
-February 20, 2015 at 9:43 am
-
-
- -
-
-
-
- -
-
-
-Bob Knaus
-February 20, 2015 at 9:44 am
-
-
- -
-
-
-
- -
-
-
-bellisaurius
-February 20, 2015 at 9:46 am
-
-
- -
-
-
-
- -
-
-
-derek
-February 20, 2015 at 10:02 am
-
-
- -
-
-
-
-
Exactly my problem with what I’ve learned about big data analysis, especially neural networks. It works, but I don’t understand how to out-think it, which is how I tend to get comfortable with things I do and systems I have to follow. So I feel like a blind man steering a bus down Broad Street with only my GPS to help me. I just can’t seem to get comfortable, no matter how long we go without a crash.

I’ve been feeding Amazon data for more than a decade and its recommendations still lag Barnes and Noble’s in-store employee picks. When I used Netflix I was also unimpressed with its recommendations.

Max Factor
February 20, 2015 at 9:27 am
+1. The Bayesian algorithm needed more data from other users, but the employee knew you.
Who is smarter, man or machine?

Urstoff
February 20, 2015 at 9:12 am
Perhaps the employee was picking up on the human’s emotional state of the moment as well as their long-term trends. The former, Amazon and Netflix cannot yet do, although I suspect they are much better than humans at the latter.
If the employee giving me recommendations was attractive, I might also have reasons of affiliation to like their pick more than a pick made by an algorithm.

cyborg

To clarify – there is a section of Barnes & Noble titled “Employee Picks” – I always find several interesting books there, while it takes half an hour of browsing on Amazon to find a similar number of books. I’m not interacting with any humans at Barnes & Noble.

Pshrnk
February 20, 2015 at 9:33 am
Thanks. I wonder how well the employee picks would work as you go to bookstores farther from your home community.

The example is silly.
The employee was not given the information that formed the basis of the employer’s irrational preferences, given the failure to disclose what is being optimized.
Now, if you don’t know nuclear physics, and you are told by your boss at the nuclear power plant to flood the reactor if the bell sounds, you are more likely to follow his advice, given that you know that you don’t know and that he does. Or she does.

“Human drivers think Orion is illogical because they can’t grok Orion’s super-logic. Perhaps any sufficiently advanced logic is indistinguishable from stupidity.”
Or maybe the humans actively doing the work are more attuned to the variables than the humans who wrote the code. This seems to be consistently missed. It isn’t computers vs. humans, it is coders vs. others.
It may get to the point that the collective code is superior, but at this moment in time that is not always true. Code is like a book of regulations dictated by someone in an air-conditioned office to the guy in the dirt being shot at. Conflict as to the right course of action is fundamentally assured.

mbutuomalley
February 20, 2015 at 9:20 am
It isn’t even coders vs. the rest. It is more like central planners vs. the market: the code can only take certain things into account, and the driver might well know some other factor that isn’t included. Now, I hope UPS has measured the effectiveness of Orion compared to the old-fashioned way, and it could turn out that on average its errors are better than the ones that drivers make.
But it’s pretty hard on a driver who, from everything he can see, is being told to do something irrational. And likely to cause the driver to disobey. Like Alex’s boss, these systems should try to explain their rationale – although such code is easier to request than to write.

Matt
February 20, 2015 at 9:15 am
That’s a good point. Ideally, they should also incorporate some sort of feedback mechanism, so that drivers, and others closer to real-world conditions, can add information and variables to the model that the coders or planners didn’t account for.

I ran into a similar issue when talking about code for routing aircraft at a large airport. I pointed out that despite the rigorous QA process, some defects will not be obvious unless a post-hoc comparison is done against other possible solutions. If nothing else, the calculated result needs to be compared to the actual result to see how much variation there is, and perhaps logic should be introduced that corrects for that difference over time (or at least triggers additional analysis of it). The same issue exists in complicated pricing systems too. A sketch of that correction idea follows.
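A minimal, hypothetical sketch of that feedback idea (the class name, numbers, and update rule are illustrative, not from any real routing or pricing system): keep a slowly adapting estimate of the gap between calculated and actual results, and fold it back into future estimates.

# Hypothetical sketch: track calculated-vs-actual drift and fold a
# slowly adapting correction back into future estimates.
class BiasCorrectedEstimator(object):
    def __init__(self, alpha=0.1):
        self.alpha = alpha  # how quickly new evidence is trusted
        self.bias = 0.0     # running (actual - calculated) correction

    def corrected(self, calculated):
        # The planner's estimate, adjusted by observed drift.
        return calculated + self.bias

    def observe(self, calculated, actual):
        # After the fact, update the correction term.
        error = actual - calculated
        self.bias += self.alpha * (error - self.bias)

est = BiasCorrectedEstimator()
for calculated, actual in [(60, 70), (55, 66), (62, 74)]:
    est.observe(calculated, actual)
print(est.corrected(58))  # 61.0: nudged toward what actually happened

A persistently large gap would be exactly the trigger for the “additional analysis” mentioned above.
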
Surprised you didn’t mention the obvious anti-vaxxer tie-in to this concept.

Pshrnk
February 20, 2015 at 9:12 am

Obvious?

Are we really sure that Orion’s schedule is more efficient?
I wonder how much this is like (Hayek’s?) information problem in central planning. Does it really have more information than the local drivers about the idiosyncrasies of particular routes?
I suspect the drivers and Orion are trying to optimize different things, and that the Postal System’s problem is that its drivers’ incentives do not line up as closely with USPS objectives as does the formula Orion is trying to optimize.

Ross
February 20, 2015 at 8:52 am
Good point. Maybe the driver wants to hit up a particular sushi joint for lunch but the computer puts them on the other side of town. A trivial example, but the loss of control over small personal decisions would really bother me.

Part of it for me is recognizing the weaknesses in an imperfect system. If I can pinpoint the potential shortfalls or blind spots of a method or system (which all systems inevitably have), then I know what to plan for. If the algorithm is smarter than I am, I just have to trust it blindly. I think that’s one of the limits that driverless cars face. They may be demonstrably safer than human drivers, but I’m giving up all autonomy to trust them.

Alex has come upon the impetus behind the growing research area of interpretable machine learning algorithms. UPS may be able to force its drivers to follow an algorithmic master, but doctors aren’t going to pay attention to the recommendations of an AI system (even if it can beat Ken Jennings at Jeopardy) unless the system can provide an “explanation” for its answer. The sketch below shows one small example of what such an explanation can look like.
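A minimal sketch of the “explanation” idea (assuming scikit-learn is available; the dataset, tree depth, and framing are arbitrary stand-ins, not anything UPS or a medical AI actually uses): a shallow decision tree can be printed back as human-auditable rules.

# Sketch: a shallow decision tree as an interpretable stand-in for a
# black-box recommender; its rules can be read back as a rationale.
from sklearn.datasets import load_breast_cancer
from sklearn.tree import DecisionTreeClassifier, export_text

data = load_breast_cancer()
clf = DecisionTreeClassifier(max_depth=3, random_state=0)
clf.fit(data.data, data.target)

# The printed if/else rules are the kind of justification a doctor
# could actually review before accepting a recommendation.
print(export_text(clf, feature_names=list(data.feature_names)))

Whether such simplified surrogate explanations are faithful to the underlying model is, of course, the hard open question in that research area.
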
Jonathan
February 20, 2015 at 9:42 am
When the algorithms are able to demonstrably provide better treatment outcomes, we had better learn to follow them whether we understand the explanation or not. I will take an algorithm that has a 90% chance of ameliorating my illness over a human doc with a 70% chance.

Jeff R.
February 20, 2015 at 10:05 am
There will be software to assist you in a self-diagnosis, so maybe you won’t need to worry about what the doctor thinks or doesn’t think. You’ll just need him to do the surgery or write the prescription.

If I’m playing high-stakes chess and I’m given access to Deep Blue (or whatever is best now), I’m going to follow its recommendations blindly without caring why… especially since I know the “why” consists of an n-ply-deep evaluation of positions using some opaque function.

The problem with these algorithms is that they are based on aggregate information. The further the driver’s route and particulars are from the mean, the less effective the algorithm will be for his (or her) particular route, and the more frustration the driver will feel.
I suspect this will also change the way the drivers approach their job. For example, when I play Scrabble, I am ransacking my mind for words that might fit the board and that the others are unlikely to challenge, or to win a challenge against. But when I play Words With Friends, it doesn’t matter what words I know; it only matters what WWF knows. So when the going gets tough I just randomly switch out tiles until something works. It’s opaque, less satisfying, and only rarely do I bother looking up the so-called word I just played. If UPS drivers adopt that mentality, they’ll be less able to deal with the unexpected when it arises.

Of course, if judgment is inferior to data, counterproductive even, then what’s the value of experience? And if experience lacks value, if experience is counterproductive, then why hire anyone (other than quants) with an IQ above functional?

Pshrnk
February 20, 2015 at 9:17 am
If judgment is not based on data, then what is the experience that judgment is based on?

It used to be that the FedEx truck could be found idling in the municipal park lot at around 3:30 each day when I took my dog for a walk. Now the truck careens through the neighborhood, back and forth like some kind of possessed Christine, all the way up until about 5:30.

To assume that the code is necessarily doing a better job than an experienced driver is simply begging the question.

Ahhhh, the intelligent guy thinks he’s more efficient than any route-planning algorithm, and he may be right. However, he’s not intelligent enough to imagine himself in the manager’s shoes. If the company could hire only brilliant and cooperative guys like him, no algorithm would be needed, ever. But reality is different: Orion is not designed to help intelligent people but the majority of people. If the majority improves, the average result is better and managers get savings. Intelligent people can adapt or go home.

Pshrnk
February 20, 2015 at 9:35 am
So AVERAGE IS NOT OVER.

This sort of thing has been recognized in computer chess for years, since the advent of N-man endgame databases. The following link shows a ‘mate in 549’ which is completely opaque – there appears to be no progress whatsoever for the first 548 moves:
http://timkr.home.xs4all.nl/chess2/diary.htm (see #393)

I seem to recall that in Asimov, the “higher” logic eventually comes to different conclusions about ends as well as means.

Pshrnk
February 20, 2015 at 10:14 am
Yep. I don’t always take the most efficient route for my commute to and from work. I sometimes vary it for aesthetic reasons.

For many people, “opaque intelligence” includes the intelligence of human experts. My Facebook feed is full of low-grade propaganda graphics denying expert consensus.

One step closer to becoming space hippies and joining The Culture.
Although, as a control engineer who sometimes gets to interact with and override these things, I’d rank it as:
“person who knows the system intimately and knows what goes on ‘in the black box’” + computer > “computer where the coders have half a clue” > “guy on his own with half a clue” > pretty much anything else.

And when 20% of your drivers don’t show up on time in the morning, nothing gets delivered.