
Commit 68dd181

Dan McGhan committed
Added streaming files example
1 parent f106f8f commit 68dd181

File tree

14 files changed: +1264 -0 lines changed
Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
module.exports = {
  hrPool: {
    user: process.env.HR_USER,
    password: process.env.HR_PASSWORD,
    connectString: process.env.HR_CONNECTIONSTRING,
    poolMin: 10,
    poolMax: 10,
    poolIncrement: 0
  }
};
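For context, a minimal sketch of how this pool configuration might be consumed, assuming the file above is saved as config/database.js and a separate database module starts the pool with node-oracledb (the file locations and function names are assumptions, not part of this commit):

const oracledb = require('oracledb');
const dbConfig = require('../config/database.js');

async function initialize() {
  // Create the default connection pool from the hrPool settings above.
  await oracledb.createPool(dbConfig.hrPool);
}

module.exports.initialize = initialize;

async function close() {
  // Close the default pool, giving in-flight work up to 10 seconds to finish.
  await oracledb.getPool().close(10);
}

module.exports.close = close;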
Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
module.exports = {
  port: process.env.HTTP_PORT || 3000
};
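Similarly, a hedged sketch of an Express entry point that reads this port setting, assuming the file above is config/web-server.js (the Express setup itself is not part of this diff):

const express = require('express');
const webServerConfig = require('./config/web-server.js');

const app = express();

// Parse JSON request bodies for the employee endpoints.
app.use(express.json());

app.listen(webServerConfig.port, () => {
  console.log(`Web server listening on port ${webServerConfig.port}`);
});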
Lines changed: 100 additions & 0 deletions
@@ -0,0 +1,100 @@
const employees = require('../db_apis/employees.js');

async function get(req, res, next) {
  try {
    const context = {};

    context.id = parseInt(req.params.id, 10);
    context.skip = parseInt(req.query.skip, 10);
    context.limit = parseInt(req.query.limit, 10);
    context.sort = req.query.sort;
    context.department_id = parseInt(req.query.department_id, 10);
    context.manager_id = parseInt(req.query.manager_id, 10);

    const rows = await employees.find(context);

    if (req.params.id) {
      if (rows.length === 1) {
        res.status(200).json(rows[0]);
      } else {
        res.status(404).end();
      }
    } else {
      res.status(200).json(rows);
    }
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;

function getEmployeeFromRec(req) {
  const employee = {
    first_name: req.body.first_name,
    last_name: req.body.last_name,
    email: req.body.email,
    phone_number: req.body.phone_number,
    hire_date: req.body.hire_date,
    job_id: req.body.job_id,
    salary: req.body.salary,
    commission_pct: req.body.commission_pct,
    manager_id: req.body.manager_id,
    department_id: req.body.department_id
  };

  return employee;
}

async function post(req, res, next) {
  try {
    let employee = getEmployeeFromRec(req);

    employee = await employees.create(employee);

    res.status(201).json(employee);
  } catch (err) {
    next(err);
  }
}

module.exports.post = post;

async function put(req, res, next) {
  try {
    let employee = getEmployeeFromRec(req);

    employee.employee_id = parseInt(req.params.id, 10);

    employee = await employees.update(employee);

    if (employee !== null) {
      res.status(200).json(employee);
    } else {
      res.status(404).end();
    }
  } catch (err) {
    next(err);
  }
}

module.exports.put = put;

async function del(req, res, next) {
  try {
    const id = parseInt(req.params.id, 10);

    const success = await employees.delete(id);

    if (success) {
      res.status(204).end();
    } else {
      res.status(404).end();
    }
  } catch (err) {
    next(err);
  }
}

module.exports.delete = del;
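These handlers only become reachable once they are attached to routes; a minimal sketch of how that wiring might look with an Express Router (the route path and router file are assumptions, not part of this diff):

const express = require('express');
const router = express.Router();
const employees = require('./controllers/employees.js');

// The optional :id parameter lets one handler serve both collection
// and single-row requests.
router.route('/employees/:id?')
  .get(employees.get)
  .post(employees.post)
  .put(employees.put)
  .delete(employees.delete);

module.exports = router;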
Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
const fileDetails = require('../db_apis/file_details.js');

async function get(req, res, next) {
  try {
    const context = {};

    context.id = parseInt(req.params.id, 10);

    const rows = await fileDetails.find(context);

    if (req.params.id) {
      if (rows.length === 1) {
        res.status(200).json(rows[0]);
      } else {
        res.status(404).end();
      }
    } else {
      res.status(200).json(rows);
    }
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;
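Together with the files controller added later in this commit, this handler might be mounted along these lines, mirroring the employees sketch above (route paths and file locations are again assumptions):

const router = require('express').Router();
const fileDetails = require('./controllers/file_details.js');
const files = require('./controllers/files.js');

// Read-only file detail lookups via the file_details controller.
router.route('/file_details/:id?')
  .get(fileDetails.get);

// Binary upload, download, and removal via the files controller.
router.route('/files/:id?')
  .get(files.get)
  .post(files.post)
  .delete(files.delete);

module.exports = router;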
Lines changed: 150 additions & 0 deletions
@@ -0,0 +1,150 @@
const files = require('../db_apis/files.js');
const { Transform } = require('stream');

// Create a new transform stream class that can validate files.
class FileValidator extends Transform {
  constructor(options) {
    super(options.streamOptions);

    this.maxFileSize = options.maxFileSize;
    this.totalBytesInBuffer = 0;
  }

  _transform(chunk, encoding, callback) {
    this.totalBytesInBuffer += chunk.length;

    // Look to see if the file size is too large.
    if (this.totalBytesInBuffer > this.maxFileSize) {
      const err = new Error(`The file size exceeded the limit of ${this.maxFileSize} bytes`);
      err.code = 'MAXFILESIZEEXCEEDED';
      callback(err);
      return;
    }

    this.push(chunk);

    callback(null);
  }

  _flush(done) {
    done();
  }
}

async function post(req, res, next) {
  try {
    // Get a new instance of the transform stream class.
    const fileValidator = new FileValidator({
      maxFileSize: 1024 * 1024 * 50 // 50 MB
    });
    const contentType = req.headers['content-type'] || 'application/octet-stream';
    const fileName = req.headers['x-file-name'];

    if (!fileName) {
      res.status(400).json({error: 'The file name must be passed in via the x-file-name header'});
      return;
    }

    // Pipe the request stream into the transform stream.
    req.pipe(fileValidator);

    // Could happen if the client cancels the upload. Forward upstream as an error.
    req.on('aborted', function() {
      fileValidator.emit('error', new Error('Upload aborted.'));
    });

    try {
      const fileId = await files.create(fileName, contentType, fileValidator);

      res.status(201).json({fileId: fileId});
    } catch (err) {
      console.error(err);

      res.header('Connection', 'close');

      if (err.code === 'MAXFILESIZEEXCEEDED') {
        res.status(413).json({error: err.message});
      } else {
        res.status(500).json({error: 'Oops, something broke!'});
      }

      req.connection.destroy();
    }
  } catch (err) {
    next(err);
  }
}

module.exports.post = post;

async function get(req, res, next) {
  try {
    let aborted = false;
    let row;
    const id = parseInt(req.params.id, 10);

    if (isNaN(id)) {
      res.status(400).json({error: 'Missing or invalid file id'});
      return;
    }

    // Could happen if the client cancels the download. Forward upstream as an error.
    req.on('aborted', function() {
      aborted = true;

      if (row) {
        row.blob_data.emit('error', new Error('Download aborted.'));
      }
    });

    row = await files.get(id);

    // It's possible the aborted event happened before the readable stream was
    // obtained. Reemit the event to handle the error.
    if (aborted && row) {
      row.blob_data.emit('aborted');
    }

    if (row) {
      res.status(200);

      res.set({
        'Cache-Control': 'no-cache',
        'Content-Type': row.content_type,
        'Content-Length': row.file_length,
        'Content-Disposition': 'attachment; filename=' + row.file_name
      });

      row.blob_data.pipe(res);
    } else {
      res.status(404).end();
    }
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;

async function del(req, res, next) {
  try {
    const id = parseInt(req.params.id, 10);

    if (isNaN(id)) {
      res.status(400).json({error: 'Missing or invalid file id'});
      return;
    }

    const success = await files.delete(id);

    if (success) {
      res.status(204).end();
    } else {
      res.status(404).end();
    }
  } catch (err) {
    next(err);
  }
}

module.exports.delete = del;
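As a usage sketch, a client could stream an upload to the post handler above without buffering the whole file in memory; the host, port, and /files path below are assumptions for illustration:

const fs = require('fs');
const http = require('http');

// Stream a local file to the upload endpoint; the FileValidator above will
// reject it with a 413 if it exceeds the configured size limit.
const request = http.request(
  {
    method: 'POST',
    host: 'localhost',
    port: 3000,
    path: '/files',
    headers: {
      'Content-Type': 'image/jpeg',
      'X-File-Name': 'photo.jpg'
    }
  },
  (response) => {
    let body = '';
    response.on('data', (chunk) => body += chunk);
    response.on('end', () => console.log(response.statusCode, body));
  }
);

fs.createReadStream('photo.jpg').pipe(request);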

0 commit comments