Node.js uses an event-driven, non-blocking I/O model
We've been writing code that scales badly
// Traditional synchronous style: each statement blocks the thread.
echo("hello");
sleep(1);   // the whole thread stalls for 1 second — nothing else can run
echo("world");
// Node.js asynchronous style: setTimeout only *registers* a callback and
// returns immediately, so execution continues to the next statement.
// Output: "hello" prints first, then "world" ~1 second later.
setTimeout(function() {
console.log("world");   // runs later, from the event loop
}, 1000);
console.log("hello");   // runs now — the timer above did not block
// Pseudocode: a single-process, sequential server.
// Each connection is handled to completion before accept() runs again,
// so one slow client blocks every other client.
void main() {
s = new Socket("127.0.0.1", 80);
while(conn = s.accept()) {   // blocks until a client connects
in = conn.read();            // blocks until the request arrives
/* STUFF */                  // request handling happens here
conn.write("200 OK");
/* STUFF++ */                // more handling / response body
conn.close();
}
s.close()
}
// Pseudocode: one process per connection via fork() — the classic
// pre-fork (Apache-style) concurrency model the slides compare against.
// NOTE(review): the child never exits after handling its request, so it
// falls back into the accept() loop alongside the parent — a real
// implementation would exit() after conn.close().
// NOTE(review): the parent keeps its own copy of conn open and never
// closes it; the fd leaks in the parent.
void main() {
s = new Socket("127.0.0.1", 80);
conns = []
while(conn = s.accept()) {
conns.push(conn);
if (fork() == 0) {           // fork() returns 0 in the child process
in = conn.read();
/* STUFF */
conn.write("200 OK");
/* STUFF++ */
conns.remove(conn);
conn.close();
}
}
s.close();
}
code++
code++
code++
In reality, Apache pre-forks and maintains a pool of worker processes
The premise is the same: 1 process per request (by default)
// Node's take on the same server: one process, one thread.
// The handler runs once per incoming request; Node's event loop does
// all the waiting, so no request blocks another.
var http = require("http");

var server = http.createServer(function (request, response) {
  var headers = { "Content-Type": "text/plain" };
  response.writeHead(200, headers);
  response.end("response body");
});

server.listen(8765);
We map events to behaviour
Event checking is delegated to Node
Event handling is implemented in JS
Our code is only called when something has to be done
Potentially more testable?
Async is increasingly important
Doing more with 1 thread
More with 1 thread = more with 1 core
More with 1 core = linear scaling to 4, 8, 32 and beyond