nodejs introduction

callback / async

1
2
3
4
5
6
var fs = require("fs");
// Read input.txt asynchronously; the callback receives (err, data).
fs.readFile("input.txt", function (err, data) {
    if (err) {
        return console.error(err);
    }
    console.log(data.toString());
});

An async function takes the callback function as its last parameter.

event loop

1
2
3
4
5
6
7
8
9
10
11
var events = require('events');
var emitter = new events.EventEmitter();

// Handler for 'connection': log, then raise 'data_received' in turn.
var connectH = function connected() {
    console.log("connected");
    emitter.emit('data_received'); // trig 'data_received'
};

emitter.on('connection', connectH);

emitter.on('data_received', function () {
    console.log('data received');
});

emitter.emit('connection'); // trig 'connection'

event emitter

when async IO is done, an event is sent to the event queue. e.g. when fs.createReadStream() opens a file, an event is triggered, etc.

1
2
3
addListener(event, listener)
on(event, listener) #listening
emit(event, [arg1], ...) #trig

file system

1
2
3
4
5
6
7
8
9
10
11
12
13
14
var fs = require("fs");

// Open a file for reading and writing; the callback receives (err, fd).
fs.open("input.log", "r+", function(err, fd){});

// BUG FIX: the API is fs.stat, not fs.state; stats_info is an fs.Stats object.
fs.stat("input.log", function(err, stats_info){});

// Read the whole file; data is a Buffer.
fs.readFile("input.log", function(err, data){
if(err){
return console.error(err);
}
console.log(data.toString());
});

// BUG FIX: fs.writeFile requires the data argument before the callback;
// the original omitted it, so the callback was treated as the data.
fs.writeFile("output.log", "some content", function(err){
if(err){ return console.error(err);}
console.log("write successfully")
});

// fs.read(fd, buffer, offset, length, position, callback) reads a binary stream.
// (Shown as a comment: the original line was pseudo-code, not valid JS.)

buffer

as the js language natively handles only text/string data, Buffer was introduced to deal with binary data

1
2
3
4
5
Buffer.alloc(size)
Buffer.from(buffer||array||string)
buffer.write(string) #write to buffer (instance method, not Buffer.write)
buffer.toString() #read from buffer
buffer.toJSON()

stream

1
2
3
4
5
6
var fs = require("fs");

// BUG FIX: the factory is fs.createReadStream (capital S), not fs.createReadstream.
var readerStream = fs.createReadStream("input.file");
// 'data' fires once per chunk; the listener signature is (chunk) — errors
// arrive on the separate 'error' event, not as a second argument.
readerStream.on('data', function(chunk){});

var writeStream = fs.createWriteStream("output.file");
// BUG FIX: the writable-stream completion event is 'finish', not 'finished'.
writeStream.on('finish', function(){});

// Pipe from a reader stream to a writer stream.
readerStream.pipe(writeStream);

module system

to enable different nodejs files can use each other, there is a module system, the module can be a nodejs file, or JSON, or compiled C/C++ code.

nodejs has exports and require used to export modules’ APIs to external usage, or access external APIs.

1
2
// Replaces the export object: require() of this module returns this function itself.
module.exports = function(){}
// Adds a single named method to the default export object.
exports.method = function(){}

The first way exports the object itself; the second way exports only the named method.

Global Object

1
2
// Writes to stdout.
console.log()
// Writes to stderr.
console.error()

common modules

  • path
1
2
3
var path = require("path");
// Join segments with the platform separator -> "/user/test1".
path.join("/user/", "test1");
// Directory portion of a path; p_ is assumed defined elsewhere — placeholder in these notes.
path.dirname(p_);
  • http server
1
2
3
4
5
6
7
8
9
10
11
12
13
14
var http = require("http");

http.createServer(function(request, response){
var url_path = request.url;
// `server` is a placeholder routing/lookup function defined elsewhere in the
// notes; it maps a URL path to (err, data).
server(url_path, function(err, data){
if(err){
console.log(err);
response.writeHead(404, "xx");
}else{
// BUG FIX: the method is writeHead, not writeHeead.
response.writeHead(200, "yy");
response.write(data.toString());
}
response.end();
});
}).listen(8080);
  • http client
1
2
3
4
5
6
7
8
9
10
11
12
var http = require('http');

var url = "http://localhost:8080/index.html";

// BUG FIX: define the callback BEFORE passing it to http.request.
// In the original, `var callback` was hoisted but still undefined at the
// point of the http.request call, so no response handler was ever attached.
var callback = function(response){
var body = '';
response.on('data', function(data){
body += data;
});
response.on('end', function(){
console.log(body);
});
};

var req = http.request(url, callback);
// BUG FIX: the request is not actually sent until req.end() is called.
req.end();

Express

Express has request and response objects to handle requests and responses. express.static can serve static resources, e.g. images, css, etc.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
var express = require("express"); // BUG FIX: the package is "express", not "repress"
var fs = require("fs");           // needed by the file-upload handler below
var app = express();

// Serve static assets (images, css, ...) under /public.
app.use('/public', express.static('public'));

app.get('/index', function(req, res){})

// GET /user?name=..&id=.. echoes the query-string parameters as JSON.
app.get('/user', function(req, res){
var response = {
"name": req.query.name,
"id": req.query.id
};
res.send(JSON.stringify(response));
});

// POST /user echoes body fields (requires body-parsing middleware for req.body).
app.post('/user', function(req, res){
var response = {
"name" : req.body.name,  // BUG FIX: object properties are separated by ',' not ';'
"id" : req.body.id
};
res.send(JSON.stringify(response));
});

// BUG FIX: route paths must start with '/' ('file_upload' -> '/file_upload').
app.post('/file_upload', function(req, res){
var des_file = __dirname + "/" + req.files[0].originalname;
fs.readFile(req.files[0].path, function(err, data){
fs.writeFile(des_file, data, function(err){
var response;
if(err){
console.error(err);
// BUG FIX: the original sent an undefined `response` on the error path.
response = { message: "file upload failed" };
}else{
response = {
message: "file uploaded successfully",
filename: req.files[0].originalname
};
}
res.send(JSON.stringify(response));
});
});
})

var server = app.listen(8080, function(){})

res is what the server sends to the client, for both GET and POST methods. The req object represents the HTTP request and has properties for the request query string, parameters, body, HTTP headers, etc.

  • req.body

contains key-value pairs of data submitted in the request body. by default, it’s undefined, and is populated when using body-parsing middleware. e.g. body-parser

  • req.cookies

when using cookie-parser middleware, this property is an object that contains the cookies sent by the request

  • req.path

contains the path part of the request url

  • req.query

an object containing a property for each query string parameter in the route

  • req.route

the currently matched route, a string

data access object(DAO)

The DAO pattern is used to separate low-level data-access APIs or operations from high-level business services. Usually there are three parts:

  • DAO interface, which defines the standard operations to be performed on a model object

  • DAO class, the class that implement DAO interfaces, this class is responsible to get data from database, or other storage mechanism

  • model object, a simple POJO containing get/set methods to store data retrieved using DAO class

o/r mapping (orm) is widely used to map database items to a dedicated class, and it is easy to use, but a small drawback of orm is that it assumes the database is well normalized. DAO is a middle layer that does direct SQL mapping: it maps SQL query results to the output class.

separating models, logic and daos

  • routes.js, where to put routes, usually referenced as controllers

  • models.js, where to put functions talk to database, usually referenced as dao layer

  • views.js

these three components can put under app; all static data usually put under public folder; the Express package.json and index.js are at the same level as app.

refer

nodejs at runoob.com

nodejs & mysql

bearcat-dao introduction

koa

chokidar