# Node.js
Node.js imagines JavaScript as a systems language: one that can manipulate memory buffers, processes and streams, files and sockets.
- Command-line tools (in shell script style): `$ node` or `$ node script.js`
- Process control functions to oversee child processes.
- `Buffer` object to deal with binary data.
- TCP or UDP sockets with comprehensive event-driven callbacks.
- DNS lookup.
- HTTP and HTTPS client/server.
- Filesystem access.
- Built-in rudimentary unit testing support through assertions (see the sketch after this list).
- No DOM, nor any other browser capability.
- Asynchronous event-driven model (via `EventEmitter`) - non-blocking.
- A single execution thread (less overhead than thread-based architectures - no waiting on I/O or context switching).
- Callback-based, simpler concurrency model.
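The built-in `assert` module covers that rudimentary testing. A minimal sketch (the asserted values are illustrative):

```js
const assert = require('assert');

// each call throws an AssertionError when the check fails
assert.strictEqual(1 + 1, 2);
assert.deepStrictEqual({ a: 1 }, { a: 1 });
assert.throws(() => { throw new Error('boom'); }, Error);

console.log('all assertions passed');
```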
The global object `process` holds information about the environment:

- `process.argv[N]` - program arguments.
- `process.env.ENV_VAR` - environment variables.
```js
const EventEmitter = require('events').EventEmitter;

class Dog extends EventEmitter {};

let santasLittleHelper = new Dog();

// keep a reference to the listener so it can be removed later
const barkListener = subject => {
  console.log(`Santa's Little Helper is barking at ${subject}`);
};
santasLittleHelper.on('bark', barkListener);

santasLittleHelper.emit('bark', 'Ned Flanders');
// prints: "Santa's Little Helper is barking at Ned Flanders"

santasLittleHelper.removeListener('bark', barkListener);
```
```js
process.on('SIGTERM', function() {
  // graceful shutdown...
  process.exit(0);
});
```
The `process.env` property returns an object containing the user environment. The `process.argv` property is an array containing the command-line arguments: the first element is the path to the `node` executable, the second is the path of the executed JavaScript file, and the remaining elements are any additional command-line arguments.
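A quick sketch (the script name `args.js` and the arguments are illustrative):

```js
// args.js - run with: node args.js foo bar
const [nodePath, scriptPath, ...args] = process.argv;

console.log(`node executable: ${nodePath}`);
console.log(`script: ${scriptPath}`);
console.log(`arguments: ${args.join(', ')}`); // foo, bar

// an environment variable (assuming HOME is set on the system)
console.log(`HOME: ${process.env.HOME}`);
```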
```js
process.on('uncaughtException', function (err) {
  console.log(err);  // some logging mechanism...
  process.exit(1);   // terminates the process
});
```
Core modules are a part of Node.js and don't have to be declared as dependencies:

`assert`, `buffer`, C/C++ addons, `child_process`, `cluster`, `console`, `crypto`, deprecated APIs, `dns`, `domain`, `events`, `fs`, `http`, `https`, `http2`, `module`, `net`, `os`, `path`, `punycode`, `querystring`, `readline`, `repl`, `stream`, `string_decoder`
The REPL (started by running `node` without arguments) supports these special commands:

- `.editor` - Enter editor mode (Ctrl+D to finish, Ctrl+C to cancel).
- `.exit` - Close the I/O stream, causing the REPL to exit.
- `.save` - Save the current REPL session to a file: `> .save ./file/to/save.js`
- `.load` - Load a file into the current REPL session: `> .load ./file/to/load.js`
- `.break` - When in the process of inputting a multi-line expression, entering the `.break` command (or pressing Ctrl+C) will abort further input or processing of that expression.
- `.clear` - Resets the REPL context to an empty object and clears any multi-line expression currently being input.
- `.help` - Show this list of special commands.
There are many stream objects provided by Node.js. For instance, a request to an HTTP server and `process.stdout` are both stream instances. Streams can be readable, writable, or both. All streams are instances of `EventEmitter`.

Readable streams effectively operate in one of two modes: flowing and paused. In flowing mode, data is read from the underlying system automatically and provided to the application as quickly as possible using events via the `EventEmitter` interface. In paused mode, the `stream.read()` method must be called explicitly to read chunks of data from the stream.

All Readable streams begin in paused mode but can be switched to flowing mode in one of the following ways:
- Adding a `'data'` event handler.
- Calling the `stream.resume()` method.
- Calling the `stream.pipe()` method to send the data to a Writable.
```js
const readable = getReadableStreamSomehow();
readable.on('data', (chunk) => {
  console.log(`Received ${chunk.length} bytes of data.`);
});
```
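Paused mode can be consumed explicitly via the `'readable'` event and `stream.read()`. A minimal sketch, reusing the hypothetical `getReadableStreamSomehow()` from above:

```js
const readable = getReadableStreamSomehow();

readable.on('readable', () => {
  let chunk;
  // read() returns null once no more data is currently available
  while ((chunk = readable.read()) !== null) {
    console.log(`Read ${chunk.length} bytes of data.`);
  }
});
```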
```js
const myStream = getWritableStreamSomehow();
myStream.write('some data');
myStream.write('some more data');
myStream.end('done writing data');
```
Duplex streams are streams that implement both the `Readable` and `Writable` interfaces.

- `stream.Duplex`

Transform streams are `Duplex` streams where the output is in some way related to the input.

- `stream.Transform`
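For illustration, a minimal sketch of a custom `Transform` stream that upper-cases whatever flows through it (not part of the original notes):

```js
const { Transform } = require('stream');

const upperCase = new Transform({
  transform(chunk, encoding, callback) {
    // pass the transformed chunk downstream
    callback(null, chunk.toString().toUpperCase());
  }
});

// echo everything typed on stdin back in upper case
process.stdin.pipe(upperCase).pipe(process.stdout);
```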
```js
const fs = require('fs');
```

All the methods have asynchronous and synchronous variants.
```js
fs.readFile('/tmp/hello.txt', { encoding: 'utf8' }, (err, content) => {
  if (err) return console.error(err);
  console.log(content);
});
```
```js
try {
  let binaryContent = fs.readFileSync('binary.dat');
  console.log(binaryContent.toString('hex'));
} catch (err) {
  console.error(err);
}
```
```js
fs.writeFile('/tmp/hello.txt', 'Hello world!', function(err) {
  if (err) return console.error(err);
  // Successfully wrote to the file!
});
```
```js
// Buffer.from replaces the deprecated new Buffer(...) constructor
var buffer = Buffer.from([0x48, 0x65, 0x6c, 0x6c, 0x6f]);
try {
  fs.writeFileSync('binary.txt', buffer);
} catch (err) {
  console.error(err);
}
```
`__dirname` and `__filename` hold the absolute paths of the directory and the file of the currently executing module.
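A tiny sketch (the printed paths are illustrative):

```js
console.log(__dirname);   // e.g. /home/user/project
console.log(__filename);  // e.g. /home/user/project/script.js
```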
```js
fs.readdir('/usr/local/bin', (err, files) => {
  if (err) return console.error(err);
  console.log(files.join(' '));
});
```
```js
var readable = fs.createReadStream(__dirname + '/node.txt', { encoding: 'utf8', highWaterMark: 16 * 1024 });
var writable = fs.createWriteStream(__dirname + '/nodeCopy.txt');

readable.once('error', (err) => console.error(err));
readable.once('end', () => console.log("EOF reached."));
readable.on('data', (chunk) => writable.write(chunk));
```
```js
var readable = fs.createReadStream(__dirname + '/node.txt', { encoding: 'utf8', highWaterMark: 16 * 1024 });
var writable = fs.createWriteStream(__dirname + '/nodePipe.txt');

// use pipe to copy readable to writable
readable.pipe(writable);
```
```js
const fs = require('fs');
const readline = require('readline');

const rl = readline.createInterface({
  input: fs.createReadStream('text.txt')
});

// Each new line emits an event - every time the stream receives \r, \n, or \r\n
rl.on('line', (line) => {
  console.log(line);
});
rl.on('close', () => {
  console.log('Done reading file');
});
```
```js
const readline = require('readline');

const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout
});

rl.question('What is your name?', (name) => {
  console.log(`Hello ${name}!`);
  rl.close();
});
```
```js
var http = require('http');

var httpPort = 8080;
http.createServer(handler).listen(httpPort, start_callback);

function handler(req, res) {
  var clientIP = req.connection.remoteAddress;
  var connectUsing = req.connection.encrypted ? 'SSL' : 'HTTP';
  console.log('Request received: ' + connectUsing + ' ' + req.method + ' ' + req.url);
  console.log('Client IP: ' + clientIP);
  res.writeHead(200, "OK", {'Content-Type': 'text/plain'});
  res.write("OK");
  res.end();
}

function start_callback() {
  console.log('Start HTTP on port ' + httpPort);
}
```
```js
var http = require('http');

var options = {
  hostname: '127.0.0.1',
  port: 80,
  path: '/',
  method: 'GET'
};

var req = http.request(options, function(res) {
  console.log('STATUS: ' + res.statusCode);
  console.log('HEADERS: ' + JSON.stringify(res.headers));
  res.setEncoding('utf8');
  res.on('data', function (chunk) {
    console.log('Response: ' + chunk);
  });
  res.on('end', function () {
    console.log('Response ENDED');
  });
});
req.on('error', function(e) {
  console.log('problem with request: ' + e.message);
});
req.end();
```
```js
const https = require('https');
const fs = require('fs');

const httpsOptions = {
  key: fs.readFileSync('path/to/server-key.pem'),
  cert: fs.readFileSync('path/to/server-crt.pem')
};

const app = function (req, res) {
  res.writeHead(200);
  res.end("hello world\n");
};

https.createServer(httpsOptions, app).listen(4433);
```
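The HTTPS client side mirrors the `http` client shown above. A minimal sketch using `https.get` (the host `example.com` is just illustrative):

```js
const https = require('https');

https.get('https://example.com/', (res) => {
  console.log('STATUS: ' + res.statusCode);
  res.setEncoding('utf8');
  res.on('data', (chunk) => process.stdout.write(chunk));
  res.on('end', () => console.log('Response ENDED'));
}).on('error', (e) => {
  console.log('problem with request: ' + e.message);
});
```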
```js
const http2 = require('http2');

const server = http2.createServer();
server.on('stream', (stream, requestHeaders) => {
  stream.respond({ ':status': 200, 'content-type': 'text/plain' });
  stream.write('hello ');
  stream.end('world');
});
server.listen(8080);
```
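A matching HTTP/2 client can be sketched with `http2.connect` (assuming the unencrypted server above is listening on port 8080):

```js
const http2 = require('http2');

const client = http2.connect('http://localhost:8080');
const req = client.request({ ':path': '/' });

req.setEncoding('utf8');
let data = '';
req.on('data', (chunk) => { data += chunk; });
req.on('end', () => {
  console.log(data);   // hello world
  client.close();
});
req.end();
```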
The `url` module breaks a URL string into the following components:

```
┌─────────────────────────────────────────────────────────────────────────────────────────────┐
│                                            href                                              │
├──────────┬──┬─────────────────────┬─────────────────────┬───────────────────────────┬───────┤
│ protocol │  │        auth         │        host         │           path            │ hash  │
│          │  │                     ├──────────────┬──────┼──────────┬────────────────┤       │
│          │  │                     │   hostname   │ port │ pathname │     search     │       │
│          │  │                     │              │      │          ├─┬──────────────┤       │
│          │  │                     │              │      │          │ │    query     │       │
"  https:   //    user   :   pass   @ sub.host.com : 8080   /p/a/t/h  ?  query=string   #hash "
│          │  │          │          │   hostname   │ port │          │                │       │
│          │  │          │          ├──────────────┴──────┤          │                │       │
│ protocol │  │ username │ password │        host         │          │                │       │
├──────────┴──┼──────────┴──────────┼─────────────────────┤          │                │       │
│   origin    │                     │        origin       │ pathname │     search     │ hash  │
├─────────────┴─────────────────────┴─────────────────────┴──────────┴────────────────┴───────┤
│                                            href                                              │
└─────────────────────────────────────────────────────────────────────────────────────────────┘
```
```js
const { URL } = require('url');

const myURL = new URL('https://user:[email protected]:8080/p/a/t/h?query=string#hash');
```
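A few of the parsed properties, continuing the example above (the commented values follow from the URL string):

```js
console.log(myURL.protocol);  // 'https:'
console.log(myURL.username);  // 'user'
console.log(myURL.hostname);  // 'sub.host.com'
console.log(myURL.port);      // '8080'
console.log(myURL.pathname);  // '/p/a/t/h'
console.log(myURL.search);    // '?query=string'
console.log(myURL.hash);      // '#hash'
```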
```js
const spawn = require('child_process').spawn;

const ls = spawn('ls', ['-lh', '/usr']);

ls.stdout.on('data', (data) => {
  console.log(`stdout: ${data}`);
});
ls.stderr.on('data', (data) => {
  console.log(`stderr: ${data}`);
});
ls.on('close', (code) => {
  console.log(`child process exited with code ${code}`);
});
```
```js
const exec = require('child_process').exec;

exec('cat *.js file | wc -l', (err, stdout, stderr) => {
  if (err) {
    return console.error(`exec error: ${err}`);
  }
  console.log(`stdout: ${stdout}`);
  console.log(`stderr: ${stderr}`);
});
```
```js
const execFile = require('child_process').execFile;

const child = execFile('node', ['--version'], (err, stdout, stderr) => {
  if (err) throw err;
  console.log(stdout);
});
```
A module encapsulates related code into a single unit of code. Node provides the `module.exports` interface to expose functions and variables to other files.

- Every module is loaded only once; the result of the first `require` is cached.
- Modules can be required without using relative paths by putting them in `node_modules`.
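Because of that caching, two `require` calls for the same module share one instance. A minimal sketch (the module `counter.js` is hypothetical):

```js
// counter.js (hypothetical) - module-level state
let count = 0;
module.exports = {
  increment: () => ++count
};

// main.js - both requires resolve to the same cached instance
const a = require('./counter.js');
const b = require('./counter.js');

a.increment();
console.log(b.increment()); // 2 - the state is shared
```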
```js
// hello-world.js - export only one object
module.exports = function(subject) {
  console.log('Hello ' + subject);
};

// load it
var hw = require('./hello-world.js');
hw('World!');
```
```js
// hello-world.js - the function is defined separately, then added to the exports
function hello(subject) {
  console.log('Hello ' + subject);
}
module.exports = {
  hello: hello
};

// load it
var hw = require('./hello-world.js');
hw.hello('World!');
```
```js
// hello-world.js - the function definition is put directly into the exported object
module.exports = {
  hello: function(subject) {
    console.log('Hello ' + subject);
  }
};

// load it
var hw = require('./hello-world.js');
hw.hello('World!');
```
```js
// hello-world.js - the function is declared directly on exports
exports.hello = function(subject) { // exports is a short version of module.exports
  console.log('Hello ' + subject);
};

// load it
var hw = require('./hello-world.js');
hw.hello('World!');
```
ES modules use the `export` keyword instead:

```js
export function printHelloWorld() {
  console.log("Hello World!");
}
```
- Online repositories for Node.js packages/modules.
- Command line utility to install Node.js packages.
```sh
npm config set proxy http://<username>:<password>@<proxy-server-url>:<port>
npm config set https-proxy http://<username>:<password>@<proxy-server-url>:<port>
```
Node.js package configuration is contained in a file called `package.json` that you can find at the root of each project.

```sh
npm init
npm init --yes   # with default values
```

https://docs.npmjs.com/files/package.json
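For orientation, a minimal `package.json` might look like this (all values are illustrative):

```json
{
  "name": "my-app",
  "version": "1.0.0",
  "description": "An example application",
  "main": "index.js",
  "scripts": {
    "test": "node test.js"
  },
  "dependencies": {},
  "devDependencies": {}
}
```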
npm supports semantic versioning; this means there are patch, minor and major releases.

```sh
npm version patch   # 1.0.0 => 1.0.1
npm version minor   # 1.0.1 => 1.1.0
npm version major   # 1.1.0 => 2.0.0
npm version 3.1.4   # 2.0.0 => 3.1.4
```
```sh
npm install <package-name>[,<package-name>,...]
# or
npm i ...

# packages available locally
npm install <local path>

# packages available as a git repository
npm install <git remote url>

# packages from a specific repository
npm install --registry=http://myreg.mycompany.com <package name>
```
If you already have a `package.json` file in your current working directory and dependencies are defined in it, then `npm install` will automatically resolve and install all dependencies listed in the file.
```sh
npm install <package-name>@<version>
npm install <package-name>@<version range>
# e.g.: npm install lodash@">=4.10.1 <4.11.1"
npm install <name>@latest
```
```sh
npm install --save <name>      # install dependencies
# or
npm install -S <name>          # shortcut version of --save
# or
npm i -S <name>
```

```sh
npm install --save-dev <name>  # install dependencies for development purposes
# or
npm install -D <name>          # shortcut version of --save-dev
# or
npm i -D <name>
```

```sh
npm install --global <name>
# or
npm install -g <name>
# or
npm i -g <name>
```
```sh
npm list
npm list <package-name>

npm uninstall <package-name>
```
```json
{
  "scripts": {
    "pretest": "scripts/pretest.js",
    "test": "scripts/test.js",
    "posttest": "scripts/posttest.js"
  }
}
```

```sh
npm run <command-name>
# or
npm run-script <command-name>
```
| Pre-defined scripts | Description |
|---|---|
| `prepublish` | Run before the package is published. |
| `publish`, `postpublish` | Run after the package is published. |
| `preinstall` | Run before the package is installed. |
| `install`, `postinstall` | Run after the package is installed. |
| `preuninstall`, `uninstall` | Run before the package is uninstalled. |
| `postuninstall` | Run after the package is uninstalled. |
| `preversion`, `version` | Run before the package version is bumped. |
| `postversion` | Run after the package version is bumped. |
| `pretest`, `test`, `posttest` | Run by the `npm test` command. |
| `prestop`, `stop`, `poststop` | Run by the `npm stop` command. |
| `prestart`, `start`, `poststart` | Run by the `npm start` command. |
| `prerestart`, `restart`, `postrestart` | Run by the `npm restart` command. |
```json
{
  "scripts": {
    "preci": "scripts/preci.js",
    "ci": "scripts/ci.js",
    "postci": "scripts/postci.js"
  }
}
```

```sh
npm run ci
```
```sh
# install a remote version
nvm install <version>

# list available local versions of node
nvm ls

# switch to a version
nvm use <version>
```
```js
if (process.env.NODE_ENV === 'production') {
  // We are running in production mode
} else {
  // We are running in development mode
}
```

```sh
npm install --production
# or set NODE_ENV=production
```
- David Herron: Node.JS Web Development
- Sandro Pasquali: Mastering Node.js
- https://nodejs.org/dist/latest/docs/api
- https://medium.freecodecamp.org/node-js-streams-everything-you-need-to-know-c9141306be93
- https://medium.freecodecamp.org/node-js-child-processes-everything-you-need-to-know-e69498fe970a
- https://blog.risingstack.com/getting-node-js-testing-and-tdd-right-node-js-at-scale
- https://blog.codeship.com/advanced-node-js-project-structure-tutorial
- https://medium.freecodecamp.org/scaling-node-js-applications-8492bd8afadc