// index.js (forked from Coggle/s3-streamlogger; this repository was archived
// by the owner on Feb 13, 2020 and is now read-only)
var stream = require('stream');
var util = require('util');
var strftime = require('strftime');
var aws = require('aws-sdk');
var branch = require('git-branch');
var os = require('os');

// Constants
var SERVER_SIDE_ENCRYPTION = "AES256";

// Public API
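// S3StreamLogger is a writable stream that buffers log data in memory and
// periodically rewrites the whole buffer to a single S3 object, rotating to a
// new object name when the current one gets too old or too large.
//
// Recognised options:
//   bucket                 - S3 bucket name (required unless BUCKET_NAME is set)
//   folder                 - key prefix for uploaded objects
//   name_format            - strftime pattern for object names; defaults to a
//                            timestamped name including the current git branch and hostname
//   rotate_every           - rotate to a new object after this many ms (default 60 minutes)
//   max_file_size          - rotate once the object exceeds this many bytes (default 200k)
//   upload_every           - upload at most this often, in ms (default 20 seconds)
//   buffer_size            - upload sooner once this many unwritten bytes accumulate (default 10k)
//   server_side_encryption - enable AES256 server-side encryption on uploads
//   date_folders           - nest objects under {folder}/YYYY/MM/DD/
//   access_key_id, secret_access_key, config - AWS SDK credentials and configuration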
function S3StreamLogger(options){
    stream.Writable.call(this, options);

    if(!(options.bucket || process.env.BUCKET_NAME))
        throw new Error("options.bucket or BUCKET_NAME environment variable is required");

    this.bucket         = options.bucket || process.env.BUCKET_NAME;
    this.folder         = options.folder || '';
    this.name_format    = options.name_format;
    this.rotate_every   = options.rotate_every || 60*60*1000; // default to 60 minutes
    this.max_file_size  = options.max_file_size || 200000;    // or 200k, whichever is sooner
    this.upload_every   = options.upload_every || 20*1000;    // default to 20 seconds
    this.buffer_size    = options.buffer_size || 10000;       // or every 10k, whichever is sooner
    this.server_side_encryption = options.server_side_encryption || false;
    // automatically nest objects in a {this.folder}/YYYY/MM/DD/ date structure
    // ahead of the formatted file name; defaults to false
    this.date_folders   = options.date_folders || false;

    // Backwards compatible API changes
    options.config = options.config || {};

    if(options.access_key_id) {
        options.config.accessKeyId = options.access_key_id;
    }
    if(options.secret_access_key) {
        options.config.secretAccessKey = options.secret_access_key;
    }
    if(options.config.sslEnabled === undefined) {
        options.config.sslEnabled = true;
    }

    if(!options.name_format) {
        // Get the current git branch and host name for the default file name
        var _current_branch;
        try {
            _current_branch = branch.sync();
        } catch (e) {
            _current_branch = 'unknown';
        }
        this.name_format = `%Y-%m-%d-%H-%M-%S-%L-${_current_branch}-${os.hostname()}.log`;
    }

    this.s3 = new aws.S3(options.config);

    this.timeout      = null;
    this.object_name  = null;
    this.file_started = null;
    this.last_write   = null;
    this.buffers      = [];
    this.unwritten    = 0;

    this._newFile();
}
util.inherits(S3StreamLogger, stream.Writable);
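
// Because S3StreamLogger inherits from stream.Writable, instances support the
// standard writable-stream interface (write(), end(), piping from a readable
// stream, and the 'error' event).
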
// write anything outstanding to the current file, and start a new one
S3StreamLogger.prototype.flushFile = function(){
    this._upload(true);
};
// Private API
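// _upload concatenates everything buffered so far and rewrites the current S3
// object in full with putObject. The buffer and object name are captured before
// any rotation, so if forceNewFile is set, or the file is older than
// rotate_every, or larger than max_file_size, _newFile starts the next object
// while this final upload still goes to the old one.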
S3StreamLogger.prototype._upload = function(forceNewFile) {
    if(this.timeout){
        clearTimeout(this.timeout);
        this.timeout = null;
    }

    this.last_write = new Date();

    var buffer = Buffer.concat(this.buffers);
    var param = {
        Bucket: this.bucket,
        Key: this.object_name,
        Body: buffer
    };
    if (this.server_side_encryption) {
        param.ServerSideEncryption = SERVER_SIDE_ENCRYPTION;
    }

    this.unwritten = 0;

    var elapsed = (new Date()).getTime() - this.file_started.getTime();
    if( forceNewFile ||
        elapsed > this.rotate_every ||
        buffer.length > this.max_file_size){
        this._newFile();
    }

    // do everything else before calling putObject to avoid the
    // possibility that this._write is called again, losing data.
    this.s3.putObject(param, function(err){
        if(err){
            this.emit('error', err);
        }
    }.bind(this));
};
// _newFile should ONLY be called when there is no un-uploaded data (i.e.
// from _upload or initialization), otherwise data will be lost.
S3StreamLogger.prototype._newFile = function(){
    this.buffers = [];
    this.file_started = new Date();
    this.last_write = this.file_started;

    // create a date object with the UTC version of the date to use with
    // strftime, so that the commonly used formatters return the UTC values.
    // This breaks timezone-converting specifiers (as they will convert against
    // the wrong timezone).
    var date_as_utc = new Date(
        this.file_started.getUTCFullYear(),
        this.file_started.getUTCMonth(),
        this.file_started.getUTCDate(),
        this.file_started.getUTCHours(),
        this.file_started.getUTCMinutes(),
        this.file_started.getUTCSeconds(),
        this.file_started.getUTCMilliseconds()
    );

    if (this.date_folders) {
        // prefix the object name with {this.folder}/YYYY/MM/DD/ so the S3 folder
        // structure is organised into dated folders. Note: getDate() returns the
        // day of the month (getDay() would return the day of the week).
        this.object_name = (this.folder === '' ? '' : this.folder + '/' + this.file_started.getFullYear() + '/' + (this.file_started.getMonth() + 1) + '/' + this.file_started.getDate() + '/') + strftime(this.name_format, date_as_utc);
    } else {
        this.object_name = (this.folder === '' ? '' : this.folder + '/') + strftime(this.name_format, date_as_utc);
    }
};
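
// _write buffers the chunk and throttles uploads: data is pushed to S3 when
// more than upload_every ms have passed since the last upload or more than
// buffer_size unwritten bytes have accumulated; otherwise an upload is
// scheduled for upload_every ms from now. The stream callback is invoked
// straight away (via setImmediate), so upload failures are reported through
// the 'error' event rather than through the callback.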
S3StreamLogger.prototype._write = function(chunk, encoding, cb){
    if(typeof chunk === 'string')
        chunk = Buffer.from(chunk, encoding); // Buffer.from replaces the deprecated new Buffer(...)

    if(chunk){
        this.buffers.push(chunk);
        this.unwritten += chunk.length;
    }

    if(this.timeout){
        clearTimeout(this.timeout);
        this.timeout = null;
    }

    if((new Date()).getTime() - this.last_write.getTime() > this.upload_every ||
        this.unwritten > this.buffer_size){
        this._upload();
    }else{
        this.timeout = setTimeout(function(){
            this._upload();
        }.bind(this), this.upload_every);
    }

    // Call the callback immediately, as we may not actually write for some
    // time. If there is an upload error, we trigger our 'error' event.
    if(cb && typeof cb === 'function')
        setImmediate(cb);
};
module.exports = {
    S3StreamLogger: S3StreamLogger
};
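
// Example usage (a minimal sketch; the bucket name, folder and message below
// are placeholders, not values taken from this repository):
//
//   var S3StreamLogger = require('./index').S3StreamLogger;
//
//   var logger = new S3StreamLogger({
//       bucket: 'my-log-bucket',      // or set the BUCKET_NAME environment variable
//       folder: 'app-logs',
//       upload_every: 30 * 1000       // upload at most every 30 seconds
//   });
//
//   logger.write('something happened\n');
//
//   // force any buffered data to S3 and start a new log object
//   logger.flushFile();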