aboutsummaryrefslogtreecommitdiffstats
path: root/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js
diff options
context:
space:
mode:
Diffstat (limited to 'vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js')
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.bob.json12
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.jshintrc15
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.npmignore7
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.travis.yml8
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/LICENSE13
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/README.md144
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/double-stack.js12
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/example-connect-logger.js46
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/example-socket.js45
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/example.js60
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/flush-on-exit.js27
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/fromreadme.js19
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/hipchat-appender.js54
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/log-rolling.js27
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/logFaces-appender.js24
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/loggly-appender.js24
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/logstashUDP.js39
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/memory-test.js37
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/patternLayout-tokens.js21
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/slack-appender.js24
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/smtp-appender.js43
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/categoryFilter.js20
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/clustered.js153
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/console.js21
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/dateFile.js90
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/file.js119
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/fileSync.js195
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/gelf.js155
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/hipchat.js90
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/logFacesAppender.js71
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/logLevelFilter.js23
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/loggly.js90
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/logstashUDP.js68
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/mailgun.js43
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/multiprocess.js135
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/slack.js44
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/smtp.js152
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/stderr.js21
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/connect-logger.js262
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/date_format.js74
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/debug.js15
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/layouts.js365
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/levels.js66
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/log4js.js504
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/log4js.json7
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/logger.js123
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/BaseRollingFileStream.js94
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/DateRollingFileStream.js91
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/RollingFileStream.js117
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/index.js3
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/README.md54
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/build/build.js209
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/component.json19
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/index.js3
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/package.json86
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/.npmignore5
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/LICENSE18
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/README.md15
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/duplex.js1
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_duplex.js89
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_passthrough.js46
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_readable.js982
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_transform.js210
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_writable.js386
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/package.json112
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/passthrough.js1
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/readable.js11
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/transform.js1
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/writable.js1
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/package.json111
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/categoryFilter-test.js84
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/clusteredAppender-test.js166
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/configuration-test.js149
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/configureNoLevels-test.js173
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/connect-logger-test.js303
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/consoleAppender-test.js33
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/dateFileAppender-test.js223
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/date_format-test.js58
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/debug-test.js72
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/fileAppender-test.js442
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/fileSyncAppender-test.js185
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/gelfAppender-test.js257
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/global-log-level-test.js121
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/hipchatAppender-test.js112
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/layouts-test.js330
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/levels-test.js464
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/log-abspath-test.js77
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/log4js.json16
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logFacesAppender-test.js96
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logLevelFilter-test.js93
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logger-test.js81
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logging-test.js636
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logglyAppender-test.js110
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logstashUDP-test.js126
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/mailgunAppender-test.js190
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/multiprocess-test.js317
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/newLevel-test.js138
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/nolog-test.js297
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/reloadConfiguration-test.js340
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/setLevel-asymmetry-test.js100
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/slackAppender-test.js168
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/smtpAppender-test.js318
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/stderrAppender-test.js35
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/streams/BaseRollingFileStream-test.js93
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/streams/DateRollingFileStream-test.js227
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/streams/rollingFileStream-test.js207
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/subcategories-test.js86
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-categoryFilter.json23
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-dateFile.json17
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-log-rolling.json10
-rw-r--r--vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-logLevelFilter.json41
111 files changed, 13286 insertions, 0 deletions
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.bob.json b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.bob.json
new file mode 100644
index 00000000..c8c1e02c
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.bob.json
@@ -0,0 +1,12 @@
+{
+ "build": "clean lint coverage test",
+ "lint": {
+ "type": "jshint"
+ },
+ "coverage": {
+ "type": "vows"
+ },
+ "test": {
+ "type": "vows"
+ }
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.jshintrc b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.jshintrc
new file mode 100644
index 00000000..a4ef28b2
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.jshintrc
@@ -0,0 +1,15 @@
+{
+ "node": true,
+ "laxcomma": true,
+ "indent": 2,
+ "globalstrict": true,
+ "maxparams": 6,
+ "maxdepth": 3,
+ "maxstatements": 20,
+ "maxcomplexity": 10,
+ "maxlen": 100,
+ "globals": {
+ "describe": true,
+ "it": true
+ }
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.npmignore b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.npmignore
new file mode 100644
index 00000000..12925cdf
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.npmignore
@@ -0,0 +1,7 @@
+*.log*
+build
+node_modules
+.bob/
+test/streams/test-*
+.idea
+.DS_Store
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.travis.yml b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.travis.yml
new file mode 100644
index 00000000..a9eeacc1
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/.travis.yml
@@ -0,0 +1,8 @@
+language: node_js
+sudo: false
+node_js:
+ - "6"
+ - "5"
+ - "4"
+ - "0.12"
+ - "0.10"
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/LICENSE b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/LICENSE
new file mode 100644
index 00000000..e1f47dcf
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2015 Gareth Jones (with contributions from many other people)
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/README.md b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/README.md
new file mode 100644
index 00000000..eaf9851b
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/README.md
@@ -0,0 +1,144 @@
+# log4js-node [![Build Status](https://secure.travis-ci.org/nomiddlename/log4js-node.png?branch=master)](http://travis-ci.org/nomiddlename/log4js-node)
+
+[![NPM](https://nodei.co/npm/log4js.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/log4js/)
+
+This is a conversion of the [log4js](https://github.com/stritti/log4js)
+framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code and tidied up some of the javascript.
+
+Out of the box it supports the following features:
+
+* coloured console logging to stdout or stderr
+* replacement of node's console.log functions (optional)
+* file appender, with log rolling based on file size
+* SMTP appender
+* GELF appender
+* hook.io appender
+* Loggly appender
+* Logstash UDP appender
+* logFaces appender
+* multiprocess appender (useful when you've got worker processes)
+* a logger for connect/express servers
+* configurable log message layout/patterns
+* different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.)
+
+NOTE: from log4js 0.5 onwards you'll need to explicitly enable replacement of node's console.log functions. Do this either by calling `log4js.replaceConsole()` or configuring with an object or json file like this:
+
+```javascript
+{
+ appenders: [
+ { type: "console" }
+ ],
+ replaceConsole: true
+}
+```
+
+## installation
+
+npm install log4js
+
+
+## usage
+
+Minimalist version:
+```javascript
+var log4js = require('log4js');
+var logger = log4js.getLogger();
+logger.debug("Some debug messages");
+```
+By default, log4js outputs to stdout with the coloured layout (thanks to [masylum](http://github.com/masylum)), so for the above you would see:
+```bash
+[2010-01-17 11:43:37.987] [DEBUG] [default] - Some debug messages
+```
+See example.js for a full example, but here's a snippet (also in fromreadme.js):
+```javascript
+var log4js = require('log4js');
+//console log is loaded by default, so you won't normally need to do this
+//log4js.loadAppender('console');
+log4js.loadAppender('file');
+//log4js.addAppender(log4js.appenders.console());
+log4js.addAppender(log4js.appenders.file('logs/cheese.log'), 'cheese');
+
+var logger = log4js.getLogger('cheese');
+logger.setLevel('ERROR');
+
+logger.trace('Entering cheese testing');
+logger.debug('Got cheese.');
+logger.info('Cheese is Gouda.');
+logger.warn('Cheese is quite smelly.');
+logger.error('Cheese is too ripe!');
+logger.fatal('Cheese was breeding ground for listeria.');
+```
+Output:
+```bash
+[2010-01-17 11:43:37.987] [ERROR] cheese - Cheese is too ripe!
+[2010-01-17 11:43:37.990] [FATAL] cheese - Cheese was breeding ground for listeria.
+```
+The first 5 lines of the code above could also be written as:
+```javascript
+var log4js = require('log4js');
+log4js.configure({
+ appenders: [
+ { type: 'console' },
+ { type: 'file', filename: 'logs/cheese.log', category: 'cheese' }
+ ]
+});
+```
+
+## configuration
+
+You can configure the appenders and log levels manually (as above), or provide a
+configuration file (`log4js.configure('path/to/file.json')`), or a configuration object. The
+configuration file location may also be specified via the environment variable
+LOG4JS_CONFIG (`export LOG4JS_CONFIG=path/to/file.json`).
+An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`.
+You can configure log4js to check for configuration file changes at regular intervals, and if changed, reload. This allows changes to logging levels to occur without restarting the application.
+
+To turn it on and specify a period:
+
+```javascript
+log4js.configure('file.json', { reloadSecs: 300 });
+```
+For FileAppender you can also pass the path to the log directory as an option where all your log files would be stored.
+
+```javascript
+log4js.configure('my_log4js_configuration.json', { cwd: '/absolute/path/to/log/dir' });
+```
+If you have already defined an absolute path for one of the FileAppenders in the configuration file, you could add a "absolute": true to the particular FileAppender to override the cwd option passed. Here is an example configuration file:
+
+#### my_log4js_configuration.json ####
+```json
+{
+ "appenders": [
+ {
+ "type": "file",
+ "filename": "relative/path/to/log_file.log",
+ "maxLogSize": 20480,
+ "backups": 3,
+ "category": "relative-logger"
+ },
+ {
+ "type": "file",
+ "absolute": true,
+ "filename": "/absolute/path/to/log_file.log",
+ "maxLogSize": 20480,
+ "backups": 10,
+ "category": "absolute-logger"
+ }
+ ]
+}
+```
+Documentation for most of the core appenders can be found on the [wiki](https://github.com/nomiddlename/log4js-node/wiki/Appenders), otherwise take a look at the tests and the examples.
+
+## Documentation
+See the [wiki](https://github.com/nomiddlename/log4js-node/wiki). Improve the [wiki](https://github.com/nomiddlename/log4js-node/wiki), please.
+
+There's also [an example application](https://github.com/nomiddlename/log4js-example).
+
+## Contributing
+Contributions welcome, but take a look at the [rules](https://github.com/nomiddlename/log4js-node/wiki/Contributing) first.
+
+## License
+
+The original log4js was distributed under the Apache 2.0 License, and so is this. I've tried to
+keep the original copyright and author credits in place, except in sections that I have rewritten
+extensively.
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/double-stack.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/double-stack.js
new file mode 100644
index 00000000..f0817703
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/double-stack.js
@@ -0,0 +1,12 @@
+var log4js = require('./lib/log4js');
+log4js.configure({
+ appenders: [
+ {
+ type: 'console'
+ }
+ ],
+ replaceConsole: true
+});
+var logger = log4js.getLogger();
+
+logger.error(new Error("my error"));
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/example-connect-logger.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/example-connect-logger.js
new file mode 100644
index 00000000..ed7b0133
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/example-connect-logger.js
@@ -0,0 +1,46 @@
+//The connect/express logger was added to log4js by danbell. This allows connect/express servers to log using log4js.
+//https://github.com/nomiddlename/log4js-node/wiki/Connect-Logger
+
+// load modules
+var log4js = require('log4js');
+var express = require("express");
+var app = express();
+
+//config
+log4js.configure({
+ appenders: [
+ { type: 'console' },
+ { type: 'file', filename: 'logs/log4jsconnect.log', category: 'log4jslog' }
+ ]
+});
+
+//define logger
+var logger = log4js.getLogger('log4jslog');
+
+// set at which time msg is logged print like: only on error & above
+// logger.setLevel('ERROR');
+
+//express app
+app.configure(function() {
+ app.use(express.favicon(''));
+ // app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO }));
+ // app.use(log4js.connectLogger(logger, { level: 'auto', format: ':method :url :status' }));
+
+ //### AUTO LEVEL DETECTION
+ //http responses 3xx, level = WARN
+ //http responses 4xx & 5xx, level = ERROR
+ //else.level = INFO
+ app.use(log4js.connectLogger(logger, { level: 'auto' }));
+});
+
+//route
+app.get('/', function(req,res) {
+ res.send('hello world');
+});
+
+//start app
+app.listen(5000);
+
+console.log('server runing at localhost:5000');
+console.log('Simulation of normal response: goto localhost:5000');
+console.log('Simulation of error response: goto localhost:5000/xxx');
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/example-socket.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/example-socket.js
new file mode 100644
index 00000000..31bb5ab2
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/example-socket.js
@@ -0,0 +1,45 @@
+var log4js = require('./lib/log4js')
+, cluster = require('cluster')
+, numCPUs = require('os').cpus().length
+, i = 0;
+
+if (cluster.isMaster) {
+ log4js.configure({
+ appenders: [
+ {
+ type: "multiprocess",
+ mode: "master",
+ appender: {
+ type: "console"
+ }
+ }
+ ]
+ });
+
+ console.info("Master creating %d workers", numCPUs);
+ for (i=0; i < numCPUs; i++) {
+ cluster.fork();
+ }
+
+ cluster.on('death', function(worker) {
+ console.info("Worker %d died.", worker.pid);
+ });
+} else {
+ log4js.configure({
+ appenders: [
+ {
+ type: "multiprocess",
+ mode: "worker"
+ }
+ ]
+ });
+ var logger = log4js.getLogger('example-socket');
+
+ console.info("Worker %d started.", process.pid);
+ for (i=0; i < 1000; i++) {
+ logger.info("Worker %d - logging something %d", process.pid, i);
+ }
+}
+
+
+
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/example.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/example.js
new file mode 100644
index 00000000..d304cc45
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/example.js
@@ -0,0 +1,60 @@
+var log4js = require('../lib/log4js');
+//log the cheese logger messages to a file, and the console ones as well.
+log4js.configure({
+ appenders: [
+ {
+ type: "file",
+ filename: "cheese.log",
+ category: [ 'cheese','console' ]
+ },
+ {
+ type: "console"
+ }
+ ],
+ replaceConsole: true
+});
+
+//to add an appender programmatically, and without clearing other appenders
+//loadAppender is only necessary if you haven't already configured an appender of this type
+log4js.loadAppender('file');
+log4js.addAppender(log4js.appenders.file('pants.log'), 'pants');
+//a custom logger outside of the log4js/lib/appenders directory can be accessed like so
+//log4js.loadAppender('what/you/would/put/in/require');
+//log4js.addAppender(log4js.appenders['what/you/would/put/in/require'](args));
+//or through configure as:
+//log4js.configure({
+// appenders: [ { type: 'what/you/would/put/in/require', otherArgs: 'blah' } ]
+//});
+
+var logger = log4js.getLogger('cheese');
+//only errors and above get logged.
+//you can also set this log level in the config object
+//via the levels field.
+logger.setLevel('ERROR');
+
+//console logging methods have been replaced with log4js ones.
+//so this will get coloured output on console, and appear in cheese.log
+console.error("AAArgh! Something went wrong", { some: "otherObject", useful_for: "debug purposes" });
+console.log("This should appear as info output");
+
+//these will not appear (logging level beneath error)
+logger.trace('Entering cheese testing');
+logger.debug('Got cheese.');
+logger.info('Cheese is Gouda.');
+logger.log('Something funny about cheese.');
+logger.warn('Cheese is quite smelly.');
+//these end up on the console and in cheese.log
+logger.error('Cheese %s is too ripe!', "gouda");
+logger.fatal('Cheese was breeding ground for listeria.');
+
+//these don't end up in cheese.log, but will appear on the console
+var anotherLogger = log4js.getLogger('another');
+anotherLogger.debug("Just checking");
+
+//one for pants.log
+//will also go to console, since that's configured for all categories
+var pantsLog = log4js.getLogger('pants');
+pantsLog.debug("Something for pants");
+
+
+
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/flush-on-exit.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/flush-on-exit.js
new file mode 100644
index 00000000..19c661c4
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/flush-on-exit.js
@@ -0,0 +1,27 @@
+/**
+ * run this, then "ab -c 10 -n 100 localhost:4444/" to test (in
+ * another shell)
+ */
+var log4js = require('../lib/log4js');
+log4js.configure({
+ appenders: [
+ { type: 'file', filename: 'cheese.log', category: 'cheese' },
+ { type: 'console'}
+ ]
+});
+
+var logger = log4js.getLogger('cheese');
+logger.setLevel('INFO');
+
+var http=require('http');
+
+var server = http.createServer(function(request, response){
+ response.writeHead(200, {'Content-Type': 'text/plain'});
+ var rd = Math.random() * 50;
+ logger.info("hello " + rd);
+ response.write('hello ');
+ if (Math.floor(rd) == 30){
+ log4js.shutdown(function() { process.exit(1); });
+ }
+ response.end();
+}).listen(4444);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/fromreadme.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/fromreadme.js
new file mode 100644
index 00000000..71b399ad
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/fromreadme.js
@@ -0,0 +1,19 @@
+//remember to change the require to just 'log4js' if you've npm install'ed it
+var log4js = require('./lib/log4js');
+//by default the console appender is loaded
+//log4js.loadAppender('console');
+//you'd only need to add the console appender if you
+//had previously called log4js.clearAppenders();
+//log4js.addAppender(log4js.appenders.console());
+log4js.loadAppender('file');
+log4js.addAppender(log4js.appenders.file('cheese.log'), 'cheese');
+
+var logger = log4js.getLogger('cheese');
+logger.setLevel('ERROR');
+
+logger.trace('Entering cheese testing');
+logger.debug('Got cheese.');
+logger.info('Cheese is Gouda.');
+logger.warn('Cheese is quite smelly.');
+logger.error('Cheese is too ripe!');
+logger.fatal('Cheese was breeding ground for listeria.');
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/hipchat-appender.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/hipchat-appender.js
new file mode 100644
index 00000000..1cdf674a
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/hipchat-appender.js
@@ -0,0 +1,54 @@
+/**
+ * !!! The hipchat-appender requires `hipchat-notifier` from npm, e.g.
+ * - list as a dependency in your application's package.json ||
+ * - npm install hipchat-notifier
+ */
+
+var log4js = require('../lib/log4js');
+
+log4js.configure({
+ "appenders": [
+ {
+ "type" : "hipchat",
+ "hipchat_token": process.env.HIPCHAT_TOKEN || '< User token with Notification Privileges >',
+ "hipchat_room": process.env.HIPCHAT_ROOM || '< Room ID or Name >'
+ }
+ ]
+});
+
+var logger = log4js.getLogger("hipchat");
+logger.warn("Test Warn message");
+logger.info("Test Info message");
+logger.debug("Test Debug Message");
+logger.trace("Test Trace Message");
+logger.fatal("Test Fatal Message");
+logger.error("Test Error Message");
+
+
+// alternative configuration demonstrating callback + custom layout
+///////////////////////////////////////////////////////////////////
+
+// use a custom layout function (in this case, the provided basicLayout)
+// format: [TIMESTAMP][LEVEL][category] - [message]
+var customLayout = require('../lib/layouts').basicLayout;
+
+log4js.configure({
+ "appenders": [
+ {
+ "type" : "hipchat",
+ "hipchat_token": process.env.HIPCHAT_TOKEN || '< User token with Notification Privileges >',
+ "hipchat_room": process.env.HIPCHAT_ROOM || '< Room ID or Name >',
+ "hipchat_from": "Mr. Semantics",
+ "hipchat_notify": false,
+ "hipchat_response_callback": function(err, response, body){
+ if(err || response.statusCode > 300){
+ throw new Error('hipchat-notifier failed');
+ }
+ console.log('mr semantics callback success');
+ },
+ "layout": customLayout
+ }
+ ]
+});
+
+logger.info("Test customLayout from Mr. Semantics");
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/log-rolling.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/log-rolling.js
new file mode 100644
index 00000000..7519c5f2
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/log-rolling.js
@@ -0,0 +1,27 @@
+var log4js = require('../lib/log4js')
+, log
+, i = 0;
+log4js.configure({
+ "appenders": [
+ {
+ type: "console"
+ , category: "console"
+ },
+ {
+ "type": "file",
+ "filename": "tmp-test.log",
+ "maxLogSize": 1024,
+ "backups": 3,
+ "category": "test"
+ }
+ ]
+});
+log = log4js.getLogger("test");
+
+function doTheLogging(x) {
+ log.info("Logging something %d", x);
+}
+
+for ( ; i < 5000; i++) {
+ doTheLogging(i);
+} \ No newline at end of file
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/logFaces-appender.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/logFaces-appender.js
new file mode 100644
index 00000000..2f398f0c
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/logFaces-appender.js
@@ -0,0 +1,24 @@
+var log4js = require('../lib/log4js');
+
+/*
+ logFaces server configured with UDP receiver, using JSON format,
+ listening on port 55201 will receive the logs from the appender below.
+*/
+
+log4js.configure({
+ "appenders": [
+ {
+ "type": "logFacesAppender", // (mandatory) appender type
+ "application": "MY-NODEJS", // (optional) name of the application (domain)
+ "remoteHost": "localhost", // (optional) logFaces server host or IP address
+ "port": 55201, // (optional) logFaces UDP receiver port (must use JSON format)
+ "layout": { // (optional) the layout to use for messages
+ "type": "pattern",
+ "pattern": "%m"
+ }
+ }
+ ]
+});
+
+var logger = log4js.getLogger("myLogger");
+logger.info("Testing message %s", "arg1");
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/loggly-appender.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/loggly-appender.js
new file mode 100644
index 00000000..1465c922
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/loggly-appender.js
@@ -0,0 +1,24 @@
+//Note that loggly appender needs node-loggly to work.
+//If you haven't got node-loggly installed, you'll get cryptic
+//"cannot find module" errors when using the loggly appender
+var log4js = require('../lib/log4js');
+
+log4js.configure({
+ "appenders": [
+ {
+ type: "console",
+ category: "test"
+ },
+ {
+ "type" : "loggly",
+ "token" : "12345678901234567890",
+ "subdomain": "your-subdomain",
+ "tags" : ["test"],
+ "category" : "loggly"
+ }
+ ]
+});
+
+var logger = log4js.getLogger("loggly");
+logger.info("Test log message");
+//logger.debug("Test log message"); \ No newline at end of file
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/logstashUDP.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/logstashUDP.js
new file mode 100644
index 00000000..871f1570
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/logstashUDP.js
@@ -0,0 +1,39 @@
+var log4js = require('../lib/log4js');
+
+/*
+ Sample logstash config:
+ udp {
+ codec => json
+ port => 10001
+ queue_size => 2
+ workers => 2
+ type => myAppType
+ }
+*/
+
+log4js.configure({
+ "appenders": [
+ {
+ type: "console",
+ category: "myLogger"
+ },
+ {
+ "host": "127.0.0.1",
+ "port": 10001,
+ "type": "logstashUDP",
+ "logType": "myAppType", // Optional, defaults to 'category'
+ "fields": { // Optional, will be added to the 'fields' object in logstash
+ "field1": "value1",
+ "field2": "value2"
+ },
+ "layout": {
+ "type": "pattern",
+ "pattern": "%m"
+ },
+ "category": "myLogger"
+ }
+ ]
+});
+
+var logger = log4js.getLogger("myLogger");
+logger.info("Test log message %s", "arg1", "arg2");
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/memory-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/memory-test.js
new file mode 100644
index 00000000..ac2ae044
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/memory-test.js
@@ -0,0 +1,37 @@
+var log4js = require('./lib/log4js')
+, logger
+, usage
+, i;
+
+log4js.configure(
+ {
+ appenders: [
+ {
+ category: "memory-test"
+ , type: "file"
+ , filename: "memory-test.log"
+ },
+ {
+ type: "console"
+ , category: "memory-usage"
+ },
+ {
+ type: "file"
+ , filename: "memory-usage.log"
+ , category: "memory-usage"
+ , layout: {
+ type: "messagePassThrough"
+ }
+ }
+ ]
+ }
+);
+logger = log4js.getLogger("memory-test");
+usage = log4js.getLogger("memory-usage");
+
+for (i=0; i < 1000000; i++) {
+ if ( (i % 5000) === 0) {
+ usage.info("%d %d", i, process.memoryUsage().rss);
+ }
+ logger.info("Doing something.");
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/patternLayout-tokens.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/patternLayout-tokens.js
new file mode 100644
index 00000000..84b171c4
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/patternLayout-tokens.js
@@ -0,0 +1,21 @@
+var log4js = require('./lib/log4js');
+
+var config = {
+ "appenders": [
+ {
+ "type": "console",
+ "layout": {
+ "type": "pattern",
+ "pattern": "%[%r (%x{pid}) %p %c -%] %m%n",
+ "tokens": {
+ "pid" : function() { return process.pid; }
+ }
+ }
+ }
+ ]
+ };
+
+log4js.configure(config, {});
+
+var logger = log4js.getLogger("app");
+logger.info("Test log message"); \ No newline at end of file
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/slack-appender.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/slack-appender.js
new file mode 100644
index 00000000..eb8d4198
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/slack-appender.js
@@ -0,0 +1,24 @@
+//Note that slack appender needs slack-node package to work.
+var log4js = require('../lib/log4js');
+
+log4js.configure({
+ "appenders": [
+ {
+ "type" : "slack",
+ "token": 'TOKEN',
+ "channel_id": "#CHANNEL",
+ "username": "USERNAME",
+ "format": "text",
+ "category" : "slack",
+ "icon_url" : "ICON_URL"
+ }
+ ]
+});
+
+var logger = log4js.getLogger("slack");
+logger.warn("Test Warn message");
+logger.info("Test Info message");
+logger.debug("Test Debug Message");
+logger.trace("Test Trace Message");
+logger.fatal("Test Fatal Message");
+logger.error("Test Error Message");
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/smtp-appender.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/smtp-appender.js
new file mode 100644
index 00000000..134ce900
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/examples/smtp-appender.js
@@ -0,0 +1,43 @@
+//Note that smtp appender needs nodemailer to work.
+//If you haven't got nodemailer installed, you'll get cryptic
+//"cannot find module" errors when using the smtp appender
+var log4js = require('../lib/log4js')
+, log
+, logmailer
+, i = 0;
+log4js.configure({
+ "appenders": [
+ {
+ type: "console",
+ category: "test"
+ },
+ {
+ "type": "smtp",
+ "recipients": "logfilerecipient@logging.com",
+ "sendInterval": 5,
+ "transport": "SMTP",
+ "SMTP": {
+ "host": "smtp.gmail.com",
+ "secureConnection": true,
+ "port": 465,
+ "auth": {
+ "user": "someone@gmail",
+ "pass": "********************"
+ },
+ "debug": true
+ },
+ "category": "mailer"
+ }
+ ]
+});
+log = log4js.getLogger("test");
+logmailer = log4js.getLogger("mailer");
+
+function doTheLogging(x) {
+ log.info("Logging something %d", x);
+ logmailer.info("Logging something %d", x);
+}
+
+for ( ; i < 500; i++) {
+ doTheLogging(i);
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/categoryFilter.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/categoryFilter.js
new file mode 100644
index 00000000..c51dbfce
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/categoryFilter.js
@@ -0,0 +1,20 @@
+"use strict";
+var log4js = require('../log4js');
+
+function categoryFilter (excludes, appender) {
+ if (typeof(excludes) === 'string') excludes = [excludes];
+ return function(logEvent) {
+ if (excludes.indexOf(logEvent.categoryName) === -1) {
+ appender(logEvent);
+ }
+ };
+}
+
+function configure(config, options) {
+ log4js.loadAppender(config.appender.type);
+ var appender = log4js.appenderMakers[config.appender.type](config.appender, options);
+ return categoryFilter(config.exclude, appender);
+}
+
+exports.appender = categoryFilter;
+exports.configure = configure;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/clustered.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/clustered.js
new file mode 100644
index 00000000..3af1ecac
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/clustered.js
@@ -0,0 +1,153 @@
+"use strict";
+
+var cluster = require('cluster');
+var log4js = require('../log4js');
+
+/**
+ * Takes a loggingEvent object, returns string representation of it.
+ */
+function serializeLoggingEvent(loggingEvent) {
+ // JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
+ // The following allows us to serialize errors correctly.
+ for (var i = 0; i < loggingEvent.data.length; i++) {
+ var item = loggingEvent.data[i];
+ // Validate that we really are in this case
+ if (item && item.stack && JSON.stringify(item) === '{}') {
+ loggingEvent.data[i] = {stack : item.stack};
+ }
+ }
+ return JSON.stringify(loggingEvent);
+}
+
+/**
+ * Takes a string, returns an object with
+ * the correct log properties.
+ *
+ * This method has been "borrowed" from the `multiprocess` appender
+ * by `nomiddlename`
+ * (https://github.com/nomiddlename/log4js-node/blob/master/lib/appenders/multiprocess.js)
+ *
+ * Apparently, node.js serializes everything to strings when using `process.send()`,
+ * so we need smart deserialization that will recreate log date and level for further
+ * processing by log4js internals.
+ */
+function deserializeLoggingEvent(loggingEventString) {
+
+ var loggingEvent;
+
+ try {
+
+ loggingEvent = JSON.parse(loggingEventString);
+ loggingEvent.startTime = new Date(loggingEvent.startTime);
+ loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);
+ // Unwrap serialized errors
+ for (var i = 0; i < loggingEvent.data.length; i++) {
+ var item = loggingEvent.data[i];
+ if (item && item.stack) {
+ loggingEvent.data[i] = item.stack;
+ }
+ }
+
+ } catch (e) {
+
+ // JSON.parse failed, just log the contents probably a naughty.
+ loggingEvent = {
+ startTime: new Date(),
+ categoryName: 'log4js',
+ level: log4js.levels.ERROR,
+ data: [ 'Unable to parse log:', loggingEventString ]
+ };
+ }
+ return loggingEvent;
+}
+
+/**
+ * Creates an appender.
+ *
+ * If the current process is a master (`cluster.isMaster`), then this will be a "master appender".
+ * Otherwise this will be a worker appender, that just sends loggingEvents to the master process.
+ *
+ * If you are using this method directly, make sure to provide it with `config.actualAppenders`
+ * array of actual appender instances.
+ *
+ * Or better use `configure(config, options)`
+ */
+function createAppender(config) {
+
+ if (cluster.isMaster) {
+
+ var masterAppender = function(loggingEvent) {
+
+ if (config.actualAppenders) {
+ var size = config.actualAppenders.length;
+ for(var i = 0; i < size; i++) {
+ if (
+ !config.appenders[i].category ||
+ config.appenders[i].category === loggingEvent.categoryName
+ ) {
+ // Relying on the index is not a good practice but otherwise
+ // the change would have been bigger.
+ config.actualAppenders[i](loggingEvent);
+ }
+ }
+ }
+ };
+
+ // Listen on new workers
+ cluster.on('fork', function(worker) {
+
+ worker.on('message', function(message) {
+ if (message.type && message.type === '::log-message') {
+ var loggingEvent = deserializeLoggingEvent(message.event);
+
+ // Adding PID metadata
+ loggingEvent.pid = worker.process.pid;
+ loggingEvent.cluster = {
+ master: process.pid,
+ worker: worker.process.pid,
+ workerId: worker.id
+ };
+
+ masterAppender(loggingEvent);
+ }
+ });
+
+ });
+
+ return masterAppender;
+
+ } else {
+
+ return function(loggingEvent) {
+ // If inside the worker process, then send the logger event to master.
+ if (cluster.isWorker) {
+ // console.log("worker " + cluster.worker.id + " is sending message");
+ process.send({ type: '::log-message', event: serializeLoggingEvent(loggingEvent)});
+ }
+ };
+ }
+}
+
+function configure(config, options) {
+
+ if (config.appenders && cluster.isMaster) {
+
+ var size = config.appenders.length;
+ config.actualAppenders = new Array(size);
+
+ for(var i = 0; i < size; i++) {
+
+ log4js.loadAppender(config.appenders[i].type);
+ config.actualAppenders[i] = log4js.appenderMakers[config.appenders[i].type](
+ config.appenders[i],
+ options
+ );
+
+ }
+ }
+
+ return createAppender(config);
+}
+
+exports.appender = createAppender;
+exports.configure = configure;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/console.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/console.js
new file mode 100644
index 00000000..20f80b13
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/console.js
@@ -0,0 +1,21 @@
+"use strict";
+var layouts = require('../layouts')
+, consoleLog = console.log.bind(console);
+
+function consoleAppender (layout, timezoneOffset) {
+ layout = layout || layouts.colouredLayout;
+ return function(loggingEvent) {
+ consoleLog(layout(loggingEvent, timezoneOffset));
+ };
+}
+
+function configure(config) {
+ var layout;
+ if (config.layout) {
+ layout = layouts.layout(config.layout.type, config.layout);
+ }
+ return consoleAppender(layout, config.timezoneOffset);
+}
+
+exports.appender = consoleAppender;
+exports.configure = configure;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/dateFile.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/dateFile.js
new file mode 100644
index 00000000..55c8fd81
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/dateFile.js
@@ -0,0 +1,90 @@
+"use strict";
+var streams = require('../streams')
+, layouts = require('../layouts')
+, path = require('path')
+, os = require('os')
+, eol = os.EOL || '\n'
+, openFiles = [];
+
+//close open files on process exit.
+process.on('exit', function() {
+ openFiles.forEach(function (file) {
+ file.end();
+ });
+});
+
+/**
+ * File appender that rolls files according to a date pattern.
+ * @filename base filename.
+ * @pattern the format that will be added to the end of filename when rolling,
+ * also used to check when to roll files - defaults to '.yyyy-MM-dd'
+ * @layout layout function for log messages - defaults to basicLayout
+ * @timezoneOffset optional timezone offset in minutes - defaults to system local
+ */
+function appender(filename, pattern, alwaysIncludePattern, layout, timezoneOffset) {
+ layout = layout || layouts.basicLayout;
+
+ var logFile = new streams.DateRollingFileStream(
+ filename,
+ pattern,
+ { alwaysIncludePattern: alwaysIncludePattern }
+ );
+ openFiles.push(logFile);
+
+ return function(logEvent) {
+ logFile.write(layout(logEvent, timezoneOffset) + eol, "utf8");
+ };
+
+}
+
+function configure(config, options) {
+ var layout;
+
+ if (config.layout) {
+ layout = layouts.layout(config.layout.type, config.layout);
+ }
+
+ if (!config.alwaysIncludePattern) {
+ config.alwaysIncludePattern = false;
+ }
+
+ if (options && options.cwd && !config.absolute) {
+ config.filename = path.join(options.cwd, config.filename);
+ }
+
+ return appender(
+ config.filename,
+ config.pattern,
+ config.alwaysIncludePattern,
+ layout,
+ config.timezoneOffset
+ );
+}
+
+function shutdown(cb) {
+ var completed = 0;
+ var error;
+ var complete = function(err) {
+ error = error || err;
+ completed++;
+ if (completed >= openFiles.length) {
+ cb(error);
+ }
+ };
+ if (!openFiles.length) {
+ return cb();
+ }
+ openFiles.forEach(function(file) {
+ if (!file.write(eol, "utf-8")) {
+ file.once('drain', function() {
+ file.end(complete);
+ });
+ } else {
+ file.end(complete);
+ }
+ });
+}
+
+exports.appender = appender;
+exports.configure = configure;
+exports.shutdown = shutdown;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/file.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/file.js
new file mode 100644
index 00000000..60883777
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/file.js
@@ -0,0 +1,119 @@
+"use strict";
+var layouts = require('../layouts')
+, path = require('path')
+, fs = require('fs')
+, streams = require('../streams')
+, os = require('os')
+, eol = os.EOL || '\n'
+, openFiles = []
+, levels = require('../levels');
+
+//close open files on process exit.
+process.on('exit', function() {
+ openFiles.forEach(function (file) {
+ file.end();
+ });
+});
+
+/**
+ * File Appender writing the logs to a text file. Supports rolling of logs by size.
+ *
+ * @param file file log messages will be written to
+ * @param layout a function that takes a logevent and returns a string
+ * (defaults to basicLayout).
+ * @param logSize - the maximum size (in bytes) for a log file,
+ * if not provided then logs won't be rotated.
+ * @param numBackups - the number of log files to keep after logSize
+ * has been reached (default 5)
+ * @param compress - flag that controls log file compression
+ * @param timezoneOffset - optional timezone offset in minutes (default system local)
+ */
+function fileAppender (file, layout, logSize, numBackups, compress, timezoneOffset) {
+ var bytesWritten = 0;
+ file = path.normalize(file);
+ layout = layout || layouts.basicLayout;
+ numBackups = numBackups === undefined ? 5 : numBackups;
+ //there has to be at least one backup if logSize has been specified
+ numBackups = numBackups === 0 ? 1 : numBackups;
+
+ function openTheStream(file, fileSize, numFiles) {
+ var stream;
+ if (fileSize) {
+ stream = new streams.RollingFileStream(
+ file,
+ fileSize,
+ numFiles,
+ { "compress": compress }
+ );
+ } else {
+ stream = fs.createWriteStream(
+ file,
+ { encoding: "utf8",
+ mode: parseInt('0644', 8),
+ flags: 'a' }
+ );
+ }
+ stream.on("error", function (err) {
+ console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
+ });
+ return stream;
+ }
+
+ var logFile = openTheStream(file, logSize, numBackups);
+
+ // push file to the stack of open handlers
+ openFiles.push(logFile);
+
+ return function(loggingEvent) {
+ logFile.write(layout(loggingEvent, timezoneOffset) + eol, "utf8");
+ };
+
+}
+
+function configure(config, options) {
+ var layout;
+ if (config.layout) {
+ layout = layouts.layout(config.layout.type, config.layout);
+ }
+
+ if (options && options.cwd && !config.absolute) {
+ config.filename = path.join(options.cwd, config.filename);
+ }
+
+ return fileAppender(
+ config.filename,
+ layout,
+ config.maxLogSize,
+ config.backups,
+ config.compress,
+ config.timezoneOffset
+ );
+}
+
+function shutdown(cb) {
+ var completed = 0;
+ var error;
+ var complete = function(err) {
+ error = error || err;
+ completed++;
+ if (completed >= openFiles.length) {
+ cb(error);
+ }
+ };
+ if (!openFiles.length) {
+ return cb();
+ }
+ openFiles.forEach(function(file) {
+ if (!file.write(eol, "utf-8")) {
+ file.once('drain', function() {
+ file.end(complete);
+ });
+ } else {
+ file.end(complete);
+ }
+ });
+}
+
+exports.appender = fileAppender;
+exports.configure = configure;
+exports.shutdown = shutdown;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/fileSync.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/fileSync.js
new file mode 100644
index 00000000..a8befc96
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/fileSync.js
@@ -0,0 +1,195 @@
+"use strict";
+var debug = require('../debug')('fileSync')
+, layouts = require('../layouts')
+, path = require('path')
+, fs = require('fs')
+, streams = require('../streams')
+, os = require('os')
+, eol = os.EOL || '\n'
+;
+
+function RollingFileSync (filename, size, backups, options) {
+ debug("In RollingFileStream");
+
+ function throwErrorIfArgumentsAreNotValid() {
+ if (!filename || !size || size <= 0) {
+ throw new Error("You must specify a filename and file size");
+ }
+ }
+
+ throwErrorIfArgumentsAreNotValid();
+
+ this.filename = filename;
+ this.size = size;
+ this.backups = backups || 1;
+ this.options = options || { encoding: 'utf8', mode: parseInt('0644', 8), flags: 'a' };
+ this.currentSize = 0;
+
+ function currentFileSize(file) {
+ var fileSize = 0;
+ try {
+ fileSize = fs.statSync(file).size;
+ } catch (e) {
+ // file does not exist
+ fs.appendFileSync(filename, '');
+ }
+ return fileSize;
+ }
+
+ this.currentSize = currentFileSize(this.filename);
+}
+
+RollingFileSync.prototype.shouldRoll = function() {
+ debug("should roll with current size %d, and max size %d", this.currentSize, this.size);
+ return this.currentSize >= this.size;
+};
+
+RollingFileSync.prototype.roll = function(filename) {
+ var that = this,
+ nameMatcher = new RegExp('^' + path.basename(filename));
+
+ function justTheseFiles (item) {
+ return nameMatcher.test(item);
+ }
+
+ function index(filename_) {
+ return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
+ }
+
+ function byIndex(a, b) {
+ if (index(a) > index(b)) {
+ return 1;
+ } else if (index(a) < index(b) ) {
+ return -1;
+ } else {
+ return 0;
+ }
+ }
+
+ function increaseFileIndex (fileToRename) {
+ var idx = index(fileToRename);
+ debug('Index of ' + fileToRename + ' is ' + idx);
+ if (idx < that.backups) {
+ //on windows, you can get a EEXIST error if you rename a file to an existing file
+ //so, we'll try to delete the file we're renaming to first
+ try {
+ fs.unlinkSync(filename + '.' + (idx+1));
+ } catch(e) {
+ //ignore err: if we could not delete, it's most likely that it doesn't exist
+ }
+
+ debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
+ fs.renameSync(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1));
+ }
+ }
+
+ function renameTheFiles() {
+ //roll the backups (rename file.n to file.n+1, where n <= numBackups)
+ debug("Renaming the old files");
+
+ var files = fs.readdirSync(path.dirname(filename));
+ files.filter(justTheseFiles).sort(byIndex).reverse().forEach(increaseFileIndex);
+ }
+
+ debug("Rolling, rolling, rolling");
+ renameTheFiles();
+};
+
+RollingFileSync.prototype.write = function(chunk, encoding) {
+ var that = this;
+
+
+ function writeTheChunk() {
+ debug("writing the chunk to the file");
+ that.currentSize += chunk.length;
+ fs.appendFileSync(that.filename, chunk);
+ }
+
+ debug("in write");
+
+
+ if (this.shouldRoll()) {
+ this.currentSize = 0;
+ this.roll(this.filename);
+ }
+
+ writeTheChunk();
+};
+
+
+/**
+ * File Appender writing the logs to a text file. Supports rolling of logs by size.
+ *
+ * @param file file log messages will be written to
+ * @param layout a function that takes a logevent and returns a string
+ * (defaults to basicLayout).
+ * @param logSize - the maximum size (in bytes) for a log file,
+ * if not provided then logs won't be rotated.
+ * @param numBackups - the number of log files to keep after logSize
+ * has been reached (default 5)
+ * @param timezoneOffset - optional timezone offset in minutes
+ * (default system local)
+ */
+function fileAppender (file, layout, logSize, numBackups, timezoneOffset) {
+ debug("fileSync appender created");
+ var bytesWritten = 0;
+ file = path.normalize(file);
+ layout = layout || layouts.basicLayout;
+ numBackups = numBackups === undefined ? 5 : numBackups;
+ //there has to be at least one backup if logSize has been specified
+ numBackups = numBackups === 0 ? 1 : numBackups;
+
+ function openTheStream(file, fileSize, numFiles) {
+ var stream;
+
+ if (fileSize) {
+ stream = new RollingFileSync(
+ file,
+ fileSize,
+ numFiles
+ );
+ } else {
+ stream = (function(f) {
+ // create file if it doesn't exist
+ if (!fs.existsSync(f))
+ fs.appendFileSync(f, '');
+
+ return {
+ write: function(data) {
+ fs.appendFileSync(f, data);
+ }
+ };
+ })(file);
+ }
+
+ return stream;
+ }
+
+ var logFile = openTheStream(file, logSize, numBackups);
+
+ return function(loggingEvent) {
+ logFile.write(layout(loggingEvent, timezoneOffset) + eol);
+ };
+}
+
+function configure(config, options) {
+ var layout;
+ if (config.layout) {
+ layout = layouts.layout(config.layout.type, config.layout);
+ }
+
+ if (options && options.cwd && !config.absolute) {
+ config.filename = path.join(options.cwd, config.filename);
+ }
+
+ return fileAppender(
+ config.filename,
+ layout,
+ config.maxLogSize,
+ config.backups,
+ config.timezoneOffset
+ );
+}
+
+exports.appender = fileAppender;
+exports.configure = configure;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/gelf.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/gelf.js
new file mode 100644
index 00000000..a367ef57
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/gelf.js
@@ -0,0 +1,155 @@
+"use strict";
+var zlib = require('zlib');
+var layouts = require('../layouts');
+var levels = require('../levels');
+var dgram = require('dgram');
+var util = require('util');
+var debug = require('../debug')('GELF Appender');
+
+var LOG_EMERG=0; // system is unusable
+var LOG_ALERT=1; // action must be taken immediately
+var LOG_CRIT=2; // critical conditions
+var LOG_ERR=3; // error conditions
+var LOG_ERROR=3; // because people WILL typo
+var LOG_WARNING=4; // warning conditions
+var LOG_NOTICE=5; // normal, but significant, condition
+var LOG_INFO=6; // informational message
+var LOG_DEBUG=7; // debug-level message
+
+var levelMapping = {};
+levelMapping[levels.ALL] = LOG_DEBUG;
+levelMapping[levels.TRACE] = LOG_DEBUG;
+levelMapping[levels.DEBUG] = LOG_DEBUG;
+levelMapping[levels.INFO] = LOG_INFO;
+levelMapping[levels.WARN] = LOG_WARNING;
+levelMapping[levels.ERROR] = LOG_ERR;
+levelMapping[levels.FATAL] = LOG_CRIT;
+
+var client;
+
+/**
+ * GELF appender that supports sending UDP packets to a GELF compatible server such as Graylog
+ *
+ * @param layout a function that takes a logevent and returns a string (defaults to none).
+ * @param host - host to which to send logs (default:localhost)
+ * @param port - port at which to send logs to (default:12201)
+ * @param hostname - hostname of the current host (default:os hostname)
+ * @param facility - facility to log to (default:nodejs-server)
+ */
+ /* jshint maxstatements:21 */
+function gelfAppender (layout, host, port, hostname, facility) {
+ var config, customFields;
+ if (typeof(host) === 'object') {
+ config = host;
+ host = config.host;
+ port = config.port;
+ hostname = config.hostname;
+ facility = config.facility;
+ customFields = config.customFields;
+ }
+
+ host = host || 'localhost';
+ port = port || 12201;
+ hostname = hostname || require('os').hostname();
+ layout = layout || layouts.messagePassThroughLayout;
+
+ var defaultCustomFields = customFields || {};
+
+ if(facility) {
+ defaultCustomFields._facility = facility;
+ }
+
+ client = dgram.createSocket("udp4");
+
+ process.on('exit', function() {
+ if (client) client.close();
+ });
+
+ /**
+ * Add custom fields (start with underscore )
+ * - if the first object passed to the logger contains 'GELF' field,
+ * copy the underscore fields to the message
+ * @param loggingEvent
+ * @param msg
+ */
+ function addCustomFields(loggingEvent, msg){
+
+ /* append defaultCustomFields firsts */
+ Object.keys(defaultCustomFields).forEach(function(key) {
+ // skip _id field for graylog2, skip keys not starts with UNDERSCORE
+ if (key.match(/^_/) && key !== "_id") {
+ msg[key] = defaultCustomFields[key];
+ }
+ });
+
+ /* append custom fields per message */
+ var data = loggingEvent.data;
+ if (!Array.isArray(data) || data.length === 0) return;
+ var firstData = data[0];
+
+ if (!firstData.GELF) return; // identify with GELF field defined
+ // Remove the GELF key, some gelf supported logging systems drop the message with it
+ delete firstData.GELF;
+ Object.keys(firstData).forEach(function(key) {
+ // skip _id field for graylog2, skip keys not starts with UNDERSCORE
+ if (key.match(/^_/) || key !== "_id") {
+ msg[key] = firstData[key];
+ }
+ });
+
+ /* the custom field object should be removed, so it will not be looged by the later appenders */
+ loggingEvent.data.shift();
+ }
+
+ function preparePacket(loggingEvent) {
+ var msg = {};
+ addCustomFields(loggingEvent, msg);
+ msg.short_message = layout(loggingEvent);
+
+ msg.version="1.1";
+ msg.timestamp = msg.timestamp || new Date().getTime() / 1000; // log should use millisecond
+ msg.host = hostname;
+ msg.level = levelMapping[loggingEvent.level || levels.DEBUG];
+ return msg;
+ }
+
+ function sendPacket(packet) {
+ client.send(packet, 0, packet.length, port, host, function(err) {
+ if (err) { console.error(err); }
+ });
+ }
+
+ return function(loggingEvent) {
+ var message = preparePacket(loggingEvent);
+ zlib.gzip(new Buffer(JSON.stringify(message)), function(err, packet) {
+ if (err) {
+ console.error(err.stack);
+ } else {
+ if (packet.length > 8192) {
+ debug("Message packet length (" + packet.length + ") is larger than 8k. Not sending");
+ } else {
+ sendPacket(packet);
+ }
+ }
+ });
+ };
+}
+
+function configure(config) {
+ var layout;
+ if (config.layout) {
+ layout = layouts.layout(config.layout.type, config.layout);
+ }
+ return gelfAppender(layout, config);
+}
+
+function shutdown(cb) {
+ if (client) {
+ client.close(cb);
+ client = null;
+ }
+}
+
+exports.appender = gelfAppender;
+exports.configure = configure;
+exports.shutdown = shutdown;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/hipchat.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/hipchat.js
new file mode 100644
index 00000000..3fd65df0
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/hipchat.js
@@ -0,0 +1,90 @@
+"use strict";
+
+var hipchat = require('hipchat-notifier');
+var layouts = require('../layouts');
+
+exports.name = 'hipchat';
+exports.appender = hipchatAppender;
+exports.configure = hipchatConfigure;
+
+/**
+ @invoke as
+
+ log4js.configure({
+ "appenders": [
+ {
+ "type" : "hipchat",
+ "hipchat_token": "< User token with Notification Privileges >",
+ "hipchat_room": "< Room ID or Name >",
+ // optionl
+ "hipchat_from": "[ additional from label ]",
+ "hipchat_notify": "[ notify boolean to bug people ]",
+ "hipchat_host" : "api.hipchat.com"
+ }
+ ]
+ });
+
+ var logger = log4js.getLogger("hipchat");
+ logger.warn("Test Warn message");
+
+ @invoke
+ */
+
+function hipchatNotifierResponseCallback(err, response, body){
+ if(err) {
+ throw err;
+ }
+}
+
+function hipchatAppender(config) {
+
+ var notifier = hipchat.make(config.hipchat_room, config.hipchat_token);
+
+ // @lint W074 This function's cyclomatic complexity is too high. (10)
+ return function(loggingEvent){
+
+ var notifierFn;
+
+ notifier.setRoom(config.hipchat_room);
+ notifier.setFrom(config.hipchat_from || '');
+ notifier.setNotify(config.hipchat_notify || false);
+
+ if(config.hipchat_host) {
+ notifier.setHost(config.hipchat_host);
+ }
+
+ switch (loggingEvent.level.toString()) {
+ case "TRACE":
+ case "DEBUG":
+ notifierFn = "info";
+ break;
+ case "WARN":
+ notifierFn = "warning";
+ break;
+ case "ERROR":
+ case "FATAL":
+ notifierFn = "failure";
+ break;
+ default:
+ notifierFn = "success";
+ }
+
+ // @TODO, re-work in timezoneOffset ?
+ var layoutMessage = config.layout(loggingEvent);
+
+ // dispatch hipchat api request, do not return anything
+ // [overide hipchatNotifierResponseCallback]
+ notifier[notifierFn](layoutMessage, config.hipchat_response_callback ||
+ hipchatNotifierResponseCallback);
+ };
+}
+
+function hipchatConfigure(config) {
+ var layout;
+
+ if (!config.layout) {
+ config.layout = layouts.messagePassThroughLayout;
+ }
+
+ return hipchatAppender(config, layout);
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/logFacesAppender.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/logFacesAppender.js
new file mode 100644
index 00000000..851510cc
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/logFacesAppender.js
@@ -0,0 +1,71 @@
+/**
+* logFaces appender sends JSON formatted log events to logFaces server UDP receivers.
+* Events contain the following properties:
+* - application name (taken from configuration)
+* - host name (taken from underlying os)
+* - time stamp
+* - level
+* - logger name (e.g. category)
+* - thread name (current process id)
+* - message text
+*/
+
+"use strict";
+var dgram = require('dgram'),
+ layouts = require('../layouts'),
+ os = require('os'),
+ util = require('util');
+
+try{
+ var process = require('process');
+}
+catch(error){
+ //this module is optional as it may not be available
+ //in older versions of node.js, so ignore if it failes to load
+}
+
+function logFacesAppender (config, layout) {
+ var lfsSock = dgram.createSocket('udp4');
+ var localhost = "";
+
+ if(os && os.hostname())
+ localhost = os.hostname().toString();
+
+ var pid = "";
+ if(process && process.pid)
+ pid = process.pid;
+
+ return function log(loggingEvent) {
+ var lfsEvent = {
+ a: config.application || "", // application name
+ h: localhost, // this host name
+ t: loggingEvent.startTime.getTime(), // time stamp
+ p: loggingEvent.level.levelStr, // level (priority)
+ g: loggingEvent.categoryName, // logger name
+ r: pid, // thread (process id)
+ m: layout(loggingEvent) // message text
+ };
+
+ var buffer = new Buffer(JSON.stringify(lfsEvent));
+ var lfsHost = config.remoteHost || "127.0.0.1";
+ var lfsPort = config.port || 55201;
+ lfsSock.send(buffer, 0, buffer.length, lfsPort, lfsHost, function(err, bytes) {
+ if(err) {
+ console.error("log4js.logFacesAppender send to %s:%d failed, error: %s",
+ config.host, config.port, util.inspect(err));
+ }
+ });
+ };
+}
+
+function configure(config) {
+ var layout;
+ if (config.layout)
+ layout = layouts.layout(config.layout.type, config.layout);
+ else
+ layout = layouts.layout("pattern", {"type": "pattern", "pattern": "%m"});
+ return logFacesAppender(config, layout);
+}
+
+exports.appender = logFacesAppender;
+exports.configure = configure;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/logLevelFilter.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/logLevelFilter.js
new file mode 100644
index 00000000..cdd273f0
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/logLevelFilter.js
@@ -0,0 +1,23 @@
+"use strict";
+var levels = require('../levels')
+, log4js = require('../log4js');
+
/**
 * Wraps another appender so it only receives events whose level falls
 * within [minLevelString, maxLevelString] (max defaults to FATAL).
 */
function logLevelFilter (minLevelString, maxLevelString, appender) {
  var lower = levels.toLevel(minLevelString);
  var upper = levels.toLevel(maxLevelString, levels.FATAL);
  return function(logEvent) {
    var current = logEvent.level;
    var inRange = current.isGreaterThanOrEqualTo(lower) &&
      current.isLessThanOrEqualTo(upper);
    if (inRange) {
      appender(logEvent);
    }
  };
}
+
/**
 * log4js entry point: loads and builds the wrapped appender, then
 * returns it filtered by config.level .. config.maxLevel.
 */
function configure(config, options) {
  var childConfig = config.appender;
  log4js.loadAppender(childConfig.type);
  var wrapped = log4js.appenderMakers[childConfig.type](childConfig, options);
  return logLevelFilter(config.level, config.maxLevel, wrapped);
}

exports.appender = logLevelFilter;
exports.configure = configure;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/loggly.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/loggly.js
new file mode 100644
index 00000000..085f8b3c
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/loggly.js
@@ -0,0 +1,90 @@
+'use strict';
+var layouts = require('../layouts')
+, loggly = require('loggly')
+, os = require('os')
+, passThrough = layouts.messagePassThroughLayout;
+
+
// True for anything `typeof` classifies as object or function, excluding null.
function isAnyObject(value) {
  if (value === null) {
    return false;
  }
  var t = typeof value;
  return t === 'object' || t === 'function';
}
+
// Number of own enumerable keys of `o`.
function numKeys(o) {
  return Object.keys(o).length;
}
+
/**
 * Splits logging arguments into plain data and extra Loggly tags.
 *
 * Any argument of the exact shape `{ tags: [...] }` (one own key) is treated
 * as a tag carrier and its tags are collected; everything else is kept.
 *
 * @param msgListArgs - array (or arguments object) of args for logging.
 * @returns { deTaggedData: [], additionalTags: [] }
 *   (Doc fix: the original comment said `deTaggedMsg`, which is not the
 *   property actually returned.)
 */
function processTags(msgListArgs) {
  var msgList = (msgListArgs.length === 1 ? [msgListArgs[0]] : Array.apply(null, msgListArgs));

  return msgList.reduce(function (accum, element) {
    // Strict equality instead of `==`; both operands are numbers.
    if (isAnyObject(element) && Array.isArray(element.tags) && numKeys(element) === 1) {
      accum.additionalTags = accum.additionalTags.concat(element.tags);
    } else {
      accum.deTaggedData.push(element);
    }
    return accum;
  }, { deTaggedData: [], additionalTags: [] });
}
+
/**
 * Loggly Appender. Sends logging events to Loggly using node-loggly, optionally adding tags.
 *
 * This appender will scan the msg from the logging event, and pull out any argument of the
 * shape `{ tags: [] }` so that it's possible to add tags in a normal logging call.
 *
 * For example:
 *
 * logger.info({ tags: ['my-tag-1', 'my-tag-2'] }, 'Some message', someObj, ...)
 *
 * And then this appender will remove the tags param and append it to the config.tags.
 *
 * @param config object with loggly configuration data
 * {
 *   token: 'your-really-long-input-token',
 *   subdomain: 'your-subdomain',
 *   tags: ['loggly-tag1', 'loggly-tag2', .., 'loggly-tagn']
 * }
 * @param layout a function that takes a logevent and returns a string (defaults to objectLayout).
 */
function logglyAppender(config, layout) {
  var client = loggly.createClient(config);
  if(!layout) layout = passThrough;

  return function(loggingEvent) {
    // Separate tag-carrier arguments from the real log data.
    var result = processTags(loggingEvent.data);
    var deTaggedData = result.deTaggedData;
    var additionalTags = result.additionalTags;

    // Replace the data property with the deTaggedData
    // NOTE(review): this mutates the shared loggingEvent, so appenders that
    // run after this one see the tag arguments stripped — confirm intended.
    loggingEvent.data = deTaggedData;

    var msg = layout(loggingEvent);

    client.log({
      msg: msg,
      level: loggingEvent.level.levelStr,
      category: loggingEvent.categoryName,
      hostname: os.hostname().toString(),
    }, additionalTags);
  };
}
+
/**
 * log4js entry point: builds a Loggly appender; layout is optional and
 * defaults (inside logglyAppender) to messagePassThroughLayout.
 */
function configure(config) {
  var chosenLayout;
  if (config.layout) {
    chosenLayout = layouts.layout(config.layout.type, config.layout);
  }
  return logglyAppender(config, chosenLayout);
}

exports.name = 'loggly';
exports.appender = logglyAppender;
exports.configure = configure;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/logstashUDP.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/logstashUDP.js
new file mode 100644
index 00000000..504f3ee4
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/logstashUDP.js
@@ -0,0 +1,68 @@
+"use strict";
+var layouts = require('../layouts')
+, dgram = require('dgram')
+, util = require('util');
+
/**
 * logstash UDP appender: serialises each event into the logstash JSON
 * schema (https://gist.github.com/jordansissel/2996677) and sends it
 * via UDP to config.host:config.port.
 *
 * @param config {host, port, logType?, category?, fields?} configuration
 * @param layout optional layout; defaults to dummyLayout
 */
function logstashUDP (config, layout) {
  var udp = dgram.createSocket('udp4');
  var type = config.logType ? config.logType : config.category;
  layout = layout || layouts.dummyLayout;
  if(!config.fields) {
    config.fields = {};
  }
  return function log(loggingEvent) {

    /*
      "@timestamp" is the ISO8601 high-precision timestamp for the event.
      "@version" is the version number of this json schema.
      Every other field is valid and fine.
    */

    // Bug fix: build the fields object per event instead of writing the
    // second data argument's keys into the shared config.fields, which made
    // fields from one event leak into every later event.
    var fields = {};
    var k;
    for (k in config.fields) {
      fields[k] = config.fields[k];
    }
    if (loggingEvent.data.length > 1) {
      var secondEvData = loggingEvent.data[1];
      for (k in secondEvData) {
        fields[k] = secondEvData[k];
      }
    }
    fields.level = loggingEvent.level.levelStr;

    var logObject = {
      "@version" : "1",
      "@timestamp" : (new Date(loggingEvent.startTime)).toISOString(),
      "type" : type, // same value the original recomputed inline
      "message" : layout(loggingEvent),
      "fields" : fields
    };
    sendLog(udp, config.host, config.port, logObject);
  };
}
+
/**
 * Serialises logObject to JSON and fires it at host:port over UDP.
 * Send errors are reported to stderr (UDP is fire-and-forget otherwise).
 */
function sendLog(udp, host, port, logObject) {
  // NOTE(review): new Buffer(...) is deprecated on modern Node; kept for
  // compatibility with the old Node versions this library supports.
  var buffer = new Buffer(JSON.stringify(logObject));
  udp.send(buffer, 0, buffer.length, port, host, function(err, bytes) {
    if(err) {
      // Bug fix: "%p" is not a util.format specifier (it printed literally);
      // use %d for the numeric port.
      console.error(
        "log4js.logstashUDP - %s:%d Error: %s", host, port, util.inspect(err)
      );
    }
  });
}
+
/**
 * log4js entry point: builds a logstash UDP appender from configuration.
 */
function configure(config) {
  var chosenLayout;
  if (config.layout) {
    chosenLayout = layouts.layout(config.layout.type, config.layout);
  }
  return logstashUDP(config, chosenLayout);
}

exports.appender = logstashUDP;
exports.configure = configure;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/mailgun.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/mailgun.js
new file mode 100644
index 00000000..c2f3f56f
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/mailgun.js
@@ -0,0 +1,43 @@
+"use strict";
+var layouts = require('../layouts');
+var layout;
+var config;
+var mailgun;
+
/**
 * Mailgun appender: emails each logging event via the module-level
 * `mailgun` client (created in configure()).
 *
 * @param _config {from, to, subject, timezoneOffset?} mail settings
 * @param _layout optional layout; defaults to basicLayout
 */
function mailgunAppender(_config, _layout) {

  config = _config;
  layout = _layout || layouts.basicLayout;

  return function (loggingEvent) {

    var data = {
      from: _config.from,
      to: _config.to,
      subject: _config.subject,
      text: layout(loggingEvent, config.timezoneOffset)
    };

    mailgun.messages().send(data, function (error, body) {
      // Bug fix: the old `error !== null` check also fired when error was
      // undefined; treat any truthy error as a failure.
      if (error) console.error("log4js.mailgunAppender - Error happened", error);
    });
  };
}
+
/**
 * log4js entry point: creates the mailgun-js client from apikey/domain
 * and returns the mail-sending appender.
 */
function configure(_config) {
  config = _config;

  if (_config.layout) {
    layout = layouts.layout(_config.layout.type, _config.layout);
  }

  var createMailgun = require('mailgun-js');
  mailgun = createMailgun({
    apiKey: _config.apikey,
    domain: _config.domain
  });

  return mailgunAppender(_config, layout);
}

exports.appender = mailgunAppender;
exports.configure = configure;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/multiprocess.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/multiprocess.js
new file mode 100644
index 00000000..0f142493
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/multiprocess.js
@@ -0,0 +1,135 @@
+"use strict";
+var log4js = require('../log4js')
+, net = require('net')
+, END_MSG = '__LOG4JS__';
+
/**
 * Creates a server, listening on config.loggerPort, config.loggerHost.
 * Output goes to config.actualAppender (config.appender is used to
 * set up that appender).
 */
function logServer(config) {

  /**
   * Takes a utf-8 string, returns an object with
   * the correct log properties.
   */
  function deserializeLoggingEvent(clientSocket, msg) {
    var loggingEvent;
    try {
      loggingEvent = JSON.parse(msg);
      // Revive the two fields JSON flattened: Date and Level objects.
      loggingEvent.startTime = new Date(loggingEvent.startTime);
      loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);
    } catch (e) {
      // JSON.parse failed, just log the contents probably a naughty.
      loggingEvent = {
        startTime: new Date(),
        categoryName: 'log4js',
        level: log4js.levels.ERROR,
        data: [ 'Unable to parse log:', msg ]
      };
    }

    // Tag the event with where it came from.
    loggingEvent.remoteAddress = clientSocket.remoteAddress;
    loggingEvent.remotePort = clientSocket.remotePort;

    return loggingEvent;
  }

  var actualAppender = config.actualAppender,
  server = net.createServer(function serverCreated(clientSocket) {
    clientSocket.setEncoding('utf8');
    // Accumulates stream chunks until an END_MSG delimiter is seen.
    var logMessage = '';

    function logTheMessage(msg) {
      if (logMessage.length > 0) {
        actualAppender(deserializeLoggingEvent(clientSocket, msg));
      }
    }

    function chunkReceived(chunk) {
      var event;
      logMessage += chunk || '';
      // One TCP chunk may hold zero, one, or several delimited events.
      if (logMessage.indexOf(END_MSG) > -1) {
        event = logMessage.substring(0, logMessage.indexOf(END_MSG));
        logTheMessage(event);
        logMessage = logMessage.substring(event.length + END_MSG.length) || '';
        //check for more, maybe it was a big chunk
        chunkReceived();
      }
    }

    clientSocket.on('data', chunkReceived);
    // 'end' flushes whatever remains buffered (chunk is undefined there).
    clientSocket.on('end', chunkReceived);
  });

  server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost');

  return actualAppender;
}
+
/**
 * Client-side (worker) appender: serialises each logging event and writes
 * it to the master's TCP log server, buffering events while disconnected.
 */
function workerAppender(config) {
  var canWrite = false,
  buffer = [],       // events queued while the socket is not yet connected
  socket;

  createSocket();

  function createSocket() {
    socket = net.createConnection(config.loggerPort || 5000, config.loggerHost || 'localhost');
    socket.on('connect', function() {
      emptyBuffer();
      canWrite = true;
    });
    socket.on('timeout', socket.end.bind(socket));
    //don't bother listening for 'error', 'close' gets called after that anyway
    // NOTE(review): reconnecting immediately on every 'close' can spin if the
    // server stays down — confirm acceptable for this deployment.
    socket.on('close', createSocket);
  }

  // Flush any events queued before the connection was established.
  function emptyBuffer() {
    var evt;
    while ((evt = buffer.shift())) {
      write(evt);
    }
  }

  function write(loggingEvent) {
    // JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
    // The following allows us to serialize errors correctly.
    // Validate that we really are in this case
    if (loggingEvent && loggingEvent.stack && JSON.stringify(loggingEvent) === '{}') {
      loggingEvent = {stack : loggingEvent.stack};
    }
    // END_MSG delimits events so the server can split the TCP stream.
    socket.write(JSON.stringify(loggingEvent), 'utf8');
    socket.write(END_MSG, 'utf8');
  }

  return function log(loggingEvent) {
    if (canWrite) {
      write(loggingEvent);
    } else {
      buffer.push(loggingEvent);
    }
  };
}
+
// Master processes run the TCP log server; everything else becomes a client.
function createAppender(config) {
  return config.mode === 'master' ? logServer(config) : workerAppender(config);
}
+
/**
 * log4js entry point: in master mode, loads and instantiates the real
 * output appender, then wires up the multiprocess server/client.
 */
function configure(config, options) {
  if (config.appender && config.mode === 'master') {
    log4js.loadAppender(config.appender.type);
    config.actualAppender =
      log4js.appenderMakers[config.appender.type](config.appender, options);
  }
  return createAppender(config);
}

exports.appender = createAppender;
exports.configure = configure;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/slack.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/slack.js
new file mode 100644
index 00000000..da8a2c1d
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/slack.js
@@ -0,0 +1,44 @@
+"use strict";
+var Slack = require('slack-node');
+var layouts = require('../layouts');
+var layout;
+
+var slack, config;
+
/**
 * Slack appender: posts each logging event to a Slack channel via the
 * module-level `slack` client (created in configure()).
 *
 * @param _config {channel_id, icon_url?, username?, timezoneOffset?}
 * @param _layout optional layout; defaults to basicLayout
 */
function slackAppender(_config, _layout) {

  layout = _layout || layouts.basicLayout;

  return function (loggingEvent) {

    var data = {
      channel_id: _config.channel_id,
      text: layout(loggingEvent, _config.timezoneOffset),
      icon_url: _config.icon_url,
      username: _config.username
    };

    slack.api('chat.postMessage', {
      channel: data.channel_id,
      text: data.text,
      icon_url: data.icon_url,username: data.username}, function (err, response) {
      if (err) {
        // Bug fix: throwing from an async API callback cannot be caught by
        // the logger's caller and crashes the process; report the failure
        // the same way the other appenders do instead.
        console.error("log4js.slackAppender - Error happened", err);
      }
    });

  };
}
+
/**
 * log4js entry point: creates the Slack client from the configured token
 * and returns the posting appender.
 */
function configure(_config) {
  if (_config.layout) {
    layout = layouts.layout(_config.layout.type, _config.layout);
  }

  slack = new Slack(_config.token);

  return slackAppender(_config, layout);
}

exports.name = 'slack';
exports.appender = slackAppender;
exports.configure = configure;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/smtp.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/smtp.js
new file mode 100644
index 00000000..161e72c5
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/smtp.js
@@ -0,0 +1,152 @@
+"use strict";
+
+var layouts = require("../layouts");
+var mailer = require("nodemailer");
+var os = require('os');
+
// ---- module-level appender state (shared by every smtp appender) ----
var logEventBuffer = [];  // events waiting to be emailed
var subjectLayout;        // layout for the mail subject line
var layout;               // layout for each event in the mail body

var unsentCount = 0;      // events accepted but not yet confirmed sent
var shutdownTimeout;      // ms to wait before force-flushing on shutdown

var sendInterval;         // ms between batched sends (0 = send per event)
var sendTimer;            // pending setTimeout handle for a batched send

var config;               // configuration from configure()/smtpAppender()
+
// Drains logEventBuffer into a single email (body or attachment) and sends
// it through a freshly created nodemailer transport.
function sendBuffer() {
  if (logEventBuffer.length > 0) {

    // NOTE(review): getTransportOptions takes no parameters — the argument
    // here is ignored; it reads the module-level `config` instead.
    var transportOpts = getTransportOptions(config);
    var transport = mailer.createTransport(transportOpts);
    var firstEvent = logEventBuffer[0];
    var body = "";
    var count = logEventBuffer.length;
    // Drain the shared buffer; each event becomes one line of the body.
    while (logEventBuffer.length > 0) {
      body += layout(logEventBuffer.shift(), config.timezoneOffset) + "\n";
    }

    var msg = {
      to: config.recipients,
      // Subject comes from config, or from the first buffered event.
      subject: config.subject || subjectLayout(firstEvent),
      headers: {"Hostname": os.hostname()}
    };

    if (true === config.attachment.enable) {
      // Log lines travel as a text/x-log attachment; the body is a stub.
      msg[config.html ? "html" : "text"] = config.attachment.message;
      msg.attachments = [
        {
          filename: config.attachment.filename,
          contentType: 'text/x-log',
          content: body
        }
      ];
    } else {
      msg[config.html ? "html" : "text"] = body;
    }

    if (config.sender) {
      msg.from = config.sender;
    }
    transport.sendMail(msg, function (error) {
      if (error) {
        console.error("log4js.smtpAppender - Error happened", error);
      }
      transport.close();
      // Decrement even on failure so shutdown() can complete.
      unsentCount -= count;
    });
  }
}
+
/**
 * Resolves nodemailer transport options from the module-level config:
 * either raw SMTP settings, or a named nodemailer-*-transport plugin.
 * Returns null when neither is configured.
 */
function getTransportOptions() {
  if (config.SMTP) {
    return config.SMTP;
  }
  if (config.transport) {
    var pluginName = config.transport.plugin || 'smtp';
    var transportFactory = require('nodemailer-' + pluginName + '-transport');
    return transportFactory(config.transport.options);
  }
  return null;
}
+
// Arms a one-shot timer to flush the buffer after sendInterval ms,
// unless a flush is already pending.
function scheduleSend() {
  if (sendTimer) {
    return;
  }
  sendTimer = setTimeout(function () {
    sendTimer = null;
    sendBuffer();
  }, sendInterval);
}
+
/**
 * SMTP Appender. Sends logging events using SMTP protocol.
 * It can either send an email on each event or group several
 * logging events gathered during specified interval.
 *
 * @param _config appender configuration data
 *   config.sendInterval time between log emails (in seconds), if 0
 *   then every event sends an email
 *   config.shutdownTimeout time to give up remaining emails (in seconds; defaults to 5).
 * @param _layout a function that takes a logevent and returns a string (defaults to basicLayout).
 */
function smtpAppender(_config, _layout) {
  config = _config;

  // Normalise the attachment sub-config so later reads are safe.
  if (!config.attachment) {
    config.attachment = {};
  }

  config.attachment.enable = !!config.attachment.enable;
  config.attachment.message = config.attachment.message || "See logs as attachment";
  config.attachment.filename = config.attachment.filename || "default.log";
  layout = _layout || layouts.basicLayout;
  subjectLayout = layouts.messagePassThroughLayout;
  // Seconds -> milliseconds; 0/undefined means "send on every event".
  sendInterval = config.sendInterval * 1000 || 0;

  shutdownTimeout = ('shutdownTimeout' in config ? config.shutdownTimeout : 5) * 1000;

  return function (loggingEvent) {
    unsentCount++;
    logEventBuffer.push(loggingEvent);
    if (sendInterval > 0) {
      scheduleSend();
    } else {
      sendBuffer();
    }
  };
}
+
/**
 * log4js entry point for the SMTP appender.
 */
function configure(_config) {
  config = _config;
  var layoutConfig = _config.layout;
  if (layoutConfig) {
    layout = layouts.layout(layoutConfig.type, layoutConfig);
  }
  return smtpAppender(_config, layout);
}
+
// Graceful shutdown: force-flush any pending batch after shutdownTimeout,
// then poll until every accepted event has been handed to the transport.
function shutdown(cb) {
  if (shutdownTimeout > 0) {
    setTimeout(function () {
      // Cancel a pending batched send and flush immediately.
      if (sendTimer)
        clearTimeout(sendTimer);
      sendBuffer();
    }, shutdownTimeout);
  }
  (function checkDone() {
    // unsentCount is decremented in sendBuffer's sendMail callback
    // (even on send failure), so this loop terminates.
    if (unsentCount > 0) {
      setTimeout(checkDone, 100);
    } else {
      cb();
    }
  })();
}

exports.name = "smtp";
exports.appender = smtpAppender;
exports.configure = configure;
exports.shutdown = shutdown;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/stderr.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/stderr.js
new file mode 100644
index 00000000..c733865d
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/appenders/stderr.js
@@ -0,0 +1,21 @@
+"use strict";
+
+var layouts = require('../layouts');
+
/**
 * Writes each formatted logging event, newline-terminated, to stderr.
 * @param layout optional layout; defaults to colouredLayout
 * @param timezoneOffset forwarded to the layout
 */
function stderrAppender(layout, timezoneOffset) {
  var render = layout || layouts.colouredLayout;
  return function (loggingEvent) {
    process.stderr.write(render(loggingEvent, timezoneOffset) + '\n');
  };
}
+
/**
 * log4js entry point: builds a stderr appender from configuration.
 */
function configure(config) {
  var chosenLayout;
  if (config.layout) {
    chosenLayout = layouts.layout(config.layout.type, config.layout);
  }
  return stderrAppender(chosenLayout, config.timezoneOffset);
}

exports.appender = stderrAppender;
exports.configure = configure;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/connect-logger.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/connect-logger.js
new file mode 100644
index 00000000..2bded775
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/connect-logger.js
@@ -0,0 +1,262 @@
+"use strict";
+var levels = require("./levels");
+var DEFAULT_FORMAT = ':remote-addr - -' +
+ ' ":method :url HTTP/:http-version"' +
+ ' :status :content-length ":referrer"' +
+ ' ":user-agent"';
/**
 * Log requests with the given `options` or a `format` string.
 *
 * Options:
 *
 *   - `format`  Format string, see below for tokens
 *   - `level`   A log4js levels instance. Supports also 'auto'
 *
 * Tokens:
 *
 *   - `:req[header]` ex: `:req[Accept]`
 *   - `:res[header]` ex: `:res[Content-Length]`
 *   - `:http-version`
 *   - `:response-time`
 *   - `:remote-addr`
 *   - `:date`
 *   - `:method`
 *   - `:url`
 *   - `:referrer`
 *   - `:user-agent`
 *   - `:status`
 *
 * @param {String|Function|Object} format or options
 * @return {Function}
 * @api public
 */

function getLogger(logger4js, options) {
  // Accept an options object, a bare format string/function, or nothing.
  if ('object' == typeof options) {
    options = options || {};
  } else if (options) {
    options = { format: options };
  } else {
    options = {};
  }

  var thislogger = logger4js
  , level = levels.toLevel(options.level, levels.INFO)
  , fmt = options.format || DEFAULT_FORMAT
  , nolog = options.nolog ? createNoLogCondition(options.nolog) : null;

  return function (req, res, next) {
    // mount safety
    if (req._logging) return next();

    // nologs
    if (nolog && nolog.test(req.originalUrl)) return next();
    if (thislogger.isLevelEnabled(level) || options.level === 'auto') {

      var start = new Date()
      , statusCode
      , writeHead = res.writeHead
      , url = req.originalUrl;

      // flag as logging
      req._logging = true;

      // proxy for statusCode.
      res.writeHead = function(code, headers){
        // Restore the original before delegating, so we only wrap once.
        res.writeHead = writeHead;
        res.writeHead(code, headers);
        res.__statusCode = statusCode = code;
        res.__headers = headers || {};

        //status code response level handling
        if(options.level === 'auto'){
          level = levels.INFO;
          if(code >= 300) level = levels.WARN;
          if(code >= 400) level = levels.ERROR;
        } else {
          level = levels.toLevel(options.level, levels.INFO);
        }
      };

      //hook on end request to emit the log entry of the HTTP request.
      res.on('finish', function() {
        res.responseTime = new Date() - start;
        //status code response level handling
        if(res.statusCode && options.level === 'auto'){
          level = levels.INFO;
          if(res.statusCode >= 300) level = levels.WARN;
          if(res.statusCode >= 400) level = levels.ERROR;
        }
        if (thislogger.isLevelEnabled(level)) {
          var combined_tokens = assemble_tokens(req, res, options.tokens || []);
          // A function format gets a substitution helper; a string format
          // is expanded directly.
          if (typeof fmt === 'function') {
            var line = fmt(req, res, function(str){ return format(str, combined_tokens); });
            if (line) thislogger.log(level, line);
          } else {
            thislogger.log(level, format(fmt, combined_tokens));
          }
        }
      });
    }

    //ensure next gets always called
    next();
  };
}
+
/**
 * Adds custom {token, replacement} objects to defaults,
 * overwriting the defaults if any tokens clash
 *
 * @param {IncomingMessage} req
 * @param {ServerResponse} res
 * @param {Array} custom_tokens
 *    [{ token: string-or-regexp, replacement: string-or-replace-function }]
 * @return {Array}
 */
function assemble_tokens(req, res, custom_tokens) {
  // Custom tokens come first in the concat, so de-duplication keeps them
  // and drops clashing defaults.
  var array_unique_tokens = function(array) {
    var a = array.concat();
    for(var i=0; i<a.length; ++i) {
      for(var j=i+1; j<a.length; ++j) {
        if(a[i].token == a[j].token) { // not === because token can be regexp object
          a.splice(j--, 1);
        }
      }
    }
    return a;
  };

  var default_tokens = [];
  default_tokens.push({ token: ':url', replacement: getUrl(req) });
  default_tokens.push({ token: ':protocol', replacement: req.protocol });
  default_tokens.push({ token: ':hostname', replacement: req.hostname });
  default_tokens.push({ token: ':method', replacement: req.method });
  default_tokens.push({ token: ':status', replacement: res.__statusCode || res.statusCode });
  default_tokens.push({ token: ':response-time', replacement: res.responseTime });
  default_tokens.push({ token: ':date', replacement: new Date().toUTCString() });
  default_tokens.push({
    token: ':referrer',
    replacement: req.headers.referer || req.headers.referrer || ''
  });
  default_tokens.push({
    token: ':http-version',
    replacement: req.httpVersionMajor + '.' + req.httpVersionMinor
  });
  default_tokens.push({
    token: ':remote-addr',
    replacement:
      req.headers['x-forwarded-for'] ||
      req.ip ||
      req._remoteAddress ||
      (req.socket &&
        (req.socket.remoteAddress ||
          (req.socket.socket && req.socket.socket.remoteAddress)
        )
      )
    }
  );
  default_tokens.push({ token: ':user-agent', replacement: req.headers['user-agent'] });
  default_tokens.push({
    token: ':content-length',
    replacement:
      (res._headers && res._headers['content-length']) ||
      (res.__headers && res.__headers['Content-Length']) ||
      '-'
    }
  );
  default_tokens.push({ token: /:req\[([^\]]+)\]/g, replacement: function(_, field) {
    return req.headers[field.toLowerCase()];
  } });
  default_tokens.push({ token: /:res\[([^\]]+)\]/g, replacement: function(_, field) {
    // Bug fix: guard res.__headers in the first branch too; previously this
    // threw a TypeError when res._headers existed but lacked the field and
    // res.__headers was unset (i.e. writeHead was never proxied).
    return res._headers ?
      (res._headers[field.toLowerCase()] || (res.__headers && res.__headers[field]))
      : (res.__headers && res.__headers[field]);
  } });

  return array_unique_tokens(custom_tokens.concat(default_tokens));
}
+
/**
 * Return request url path,
 * adding this function prevents the Cyclomatic Complexity,
 * for the assemble_tokens function at low, to pass the tests.
 *
 * @param {IncomingMessage} req
 * @return {String}
 * @api private
 */
function getUrl(req) {
  return req.originalUrl ? req.originalUrl : req.url;
}
/**
 * Return formatted log line: applies each token's replacement to the
 * format string, in order.
 *
 * @param {String} str
 * @param {Array} tokens - [{ token, replacement }]
 * @return {String}
 * @api private
 */
function format(str, tokens) {
  return tokens.reduce(function (line, tok) {
    return line.replace(tok.token, tok.replacement);
  }, str);
}
+
/**
 * Return RegExp Object about nolog
 *
 * @param {String|RegExp|Array} nolog
 * @return {RegExp} or null when nolog is falsy
 * @api private
 *
 * Accepted forms:
 *  1. String — treated as a regexp source, e.g. "\\.gif" skips any URL
 *     containing ".gif"; "\\.(gif|jpe?g|png)$" skips URLs ending in those
 *     extensions (querystrings still log).
 *  2. RegExp — used as-is, e.g. /\.(gif|jpe?g|png)$/.
 *  3. Array of strings/regexps — sources joined with "|", e.g.
 *     ["\\.jpg$", "\\.png", "\\.gif"] behaves like "\\.jpg$|\\.png|\\.gif".
 */
function createNoLogCondition(nolog) {
  if (!nolog) {
    return null;
  }
  if (nolog instanceof RegExp) {
    return nolog;
  }
  if (typeof nolog === 'string') {
    return new RegExp(nolog);
  }
  if (Array.isArray(nolog)) {
    var sources = nolog.map(function (item) {
      return item.source ? item.source : item;
    });
    return new RegExp(sources.join('|'));
  }
  return null;
}

exports.connectLogger = getLogger;
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/date_format.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/date_format.js
new file mode 100644
index 00000000..b9e0131b
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/date_format.js
@@ -0,0 +1,74 @@
+"use strict";
+exports.ISO8601_FORMAT = "yyyy-MM-dd hh:mm:ss.SSS";
+exports.ISO8601_WITH_TZ_OFFSET_FORMAT = "yyyy-MM-ddThh:mm:ssO";
+exports.DATETIME_FORMAT = "dd MM yyyy hh:mm:ss.SSS";
+exports.ABSOLUTETIME_FORMAT = "hh:mm:ss.SSS";
+
// Left-pads the decimal form of vNumber with zeros to at least `width` chars.
function padWithZeros(vNumber, width) {
  var text = String(vNumber);
  while (text.length < width) {
    text = "0" + text;
  }
  return text;
}
+
// Convenience: two-digit zero padding (e.g. 5 -> "05").
function addZero(vNumber) {
  return padWithZeros(vNumber, 2);
}
+
/**
 * Formats the TimeOffset
 * Thanks to http://www.svendtofte.com/code/date_format/
 * Converts a JS getTimezoneOffset() value (minutes, positive = behind UTC)
 * into "+hhmm"/"-hhmm" notation.
 * @private
 */
function offset(timezoneOffset) {
  var absolute = Math.abs(timezoneOffset);
  var hours = String(Math.floor(absolute / 60));
  var minutes = String(absolute % 60);
  if (hours.length === 1) {
    hours = "0" + hours;
  }
  if (minutes.length === 1) {
    minutes = "0" + minutes;
  }
  // JS offsets are inverted relative to the printed sign convention.
  var sign = timezoneOffset < 0 ? "+" : "-";
  return sign + hours + minutes;
}
+
// Formats a Date using the given pattern (default ISO8601_FORMAT).
// Callable as asString(date[, tzOffset]) or asString(format, date[, tzOffset]).
// The date argument is shifted to UTC for formatting and restored afterwards.
exports.asString = function(/*format,*/ date, timezoneOffset) {
  /*jshint -W071 */
  var format = exports.ISO8601_FORMAT;
  // Overloaded signature: a leading string is the format pattern.
  if (typeof(date) === "string") {
    format = arguments[0];
    date = arguments[1];
    timezoneOffset = arguments[2];
  }
  // make the date independent of the system timezone by working with UTC
  if (timezoneOffset === undefined) {
    timezoneOffset = date.getTimezoneOffset();
  }
  // NOTE(review): this temporarily mutates the caller's Date; it is restored
  // below, but concurrent readers of the same Date would observe the shift.
  date.setUTCMinutes(date.getUTCMinutes() - timezoneOffset);
  var vDay = addZero(date.getUTCDate());
  var vMonth = addZero(date.getUTCMonth()+1);
  var vYearLong = addZero(date.getUTCFullYear());
  var vYearShort = addZero(date.getUTCFullYear().toString().substring(2,4));
  var vYear = (format.indexOf("yyyy") > -1 ? vYearLong : vYearShort);
  var vHour = addZero(date.getUTCHours());
  var vMinute = addZero(date.getUTCMinutes());
  var vSecond = addZero(date.getUTCSeconds());
  var vMillisecond = padWithZeros(date.getUTCMilliseconds(), 3);
  var vTimeZone = offset(timezoneOffset);
  // Undo the shift applied above.
  date.setUTCMinutes(date.getUTCMinutes() + timezoneOffset);
  var formatted = format
    .replace(/dd/g, vDay)
    .replace(/MM/g, vMonth)
    .replace(/y{1,4}/g, vYear)
    .replace(/hh/g, vHour)
    .replace(/mm/g, vMinute)
    .replace(/ss/g, vSecond)
    .replace(/SSS/g, vMillisecond)
    .replace(/O/g, vTimeZone);
  return formatted;

};
/*jshint +W071 */
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/debug.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/debug.js
new file mode 100644
index 00000000..e3e65816
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/debug.js
@@ -0,0 +1,15 @@
+"use strict";
+
+module.exports = function(label) {
+ var debug;
+
+ if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
+ debug = function(message) {
+ console.error('LOG4JS: (%s) %s', label, message);
+ };
+ } else {
+ debug = function() { };
+ }
+
+ return debug;
+};
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/layouts.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/layouts.js
new file mode 100644
index 00000000..75b6e1e6
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/layouts.js
@@ -0,0 +1,365 @@
+"use strict";
+var dateFormat = require('./date_format')
+, os = require('os')
+, eol = os.EOL || '\n'
+, util = require('util')
+, semver = require('semver')
+, replacementRegExp = /%[sdj]/g
+, layoutMakers = {
+ "messagePassThrough": function() { return messagePassThroughLayout; },
+ "basic": function() { return basicLayout; },
+ "colored": function() { return colouredLayout; },
+ "coloured": function() { return colouredLayout; },
+ "pattern": function (config) {
+ return patternLayout(config && config.pattern, config && config.tokens);
+ },
+ "dummy": function() { return dummyLayout; }
+}
+, colours = {
+ ALL: "grey",
+ TRACE: "blue",
+ DEBUG: "cyan",
+ INFO: "green",
+ WARN: "yellow",
+ ERROR: "red",
+ FATAL: "magenta",
+ OFF: "grey"
+};
+
+function wrapErrorsWithInspect(items) {
+ return items.map(function(item) {
+ if ((item instanceof Error) && item.stack) {
+ return { inspect: function() {
+ if (semver.satisfies(process.version, '>=6')) {
+ return util.format(item);
+ } else {
+ return util.format(item) + '\n' + item.stack;
+ }
+ } };
+ } else {
+ return item;
+ }
+ });
+}
+
+function formatLogData(logData) {
+ var data = Array.isArray(logData) ? logData : Array.prototype.slice.call(arguments);
+ return util.format.apply(util, wrapErrorsWithInspect(data));
+}
+
+var styles = {
+ //styles
+ 'bold' : [1, 22],
+ 'italic' : [3, 23],
+ 'underline' : [4, 24],
+ 'inverse' : [7, 27],
+ //grayscale
+ 'white' : [37, 39],
+ 'grey' : [90, 39],
+ 'black' : [90, 39],
+ //colors
+ 'blue' : [34, 39],
+ 'cyan' : [36, 39],
+ 'green' : [32, 39],
+ 'magenta' : [35, 39],
+ 'red' : [31, 39],
+ 'yellow' : [33, 39]
+};
+
+function colorizeStart(style) {
+ return style ? '\x1B[' + styles[style][0] + 'm' : '';
+}
+function colorizeEnd(style) {
+ return style ? '\x1B[' + styles[style][1] + 'm' : '';
+}
+/**
+ * Taken from masylum's fork (https://github.com/masylum/log4js-node)
+ */
+function colorize (str, style) {
+ return colorizeStart(style) + str + colorizeEnd(style);
+}
+
+function timestampLevelAndCategory(loggingEvent, colour, timezoneOffest) {
+ var output = colorize(
+ formatLogData(
+ '[%s] [%s] %s - '
+ , dateFormat.asString(loggingEvent.startTime, timezoneOffest)
+ , loggingEvent.level
+ , loggingEvent.categoryName
+ )
+ , colour
+ );
+ return output;
+}
+
+/**
+ * BasicLayout is a simple layout for storing the logs. The logs are stored
+ * in following format:
+ * <pre>
+ * [startTime] [logLevel] categoryName - message\n
+ * </pre>
+ *
+ * @author Stephan Strittmatter
+ */
+function basicLayout (loggingEvent, timezoneOffset) {
+ return timestampLevelAndCategory(
+ loggingEvent,
+ undefined,
+ timezoneOffset
+ ) + formatLogData(loggingEvent.data);
+}
+
+/**
+ * colouredLayout - taken from masylum's fork.
+ * same as basicLayout, but with colours.
+ */
+function colouredLayout (loggingEvent, timezoneOffset) {
+ return timestampLevelAndCategory(
+ loggingEvent,
+ colours[loggingEvent.level.toString()],
+ timezoneOffset
+ ) + formatLogData(loggingEvent.data);
+}
+
+function messagePassThroughLayout (loggingEvent) {
+ return formatLogData(loggingEvent.data);
+}
+
+function dummyLayout(loggingEvent) {
+ return loggingEvent.data[0];
+}
+
+/**
+ * PatternLayout
+ * Format for specifiers is %[padding].[truncation][field]{[format]}
+ * e.g. %5.10p - left pad the log level by 5 characters, up to a max of 10
+ * Fields can be any of:
+ * - %r time in toLocaleTimeString format
+ * - %p log level
+ * - %c log category
+ * - %h hostname
+ * - %m log data
+ * - %d date in various formats
+ * - %% %
+ * - %n newline
+ * - %z pid
+ * - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
+ * You can use %[ and %] to define a colored block.
+ *
+ * Tokens are specified as simple key:value objects.
+ * The key represents the token name whereas the value can be a string or function
+ * which is called to extract the value to put in the log message. If token is not
+ * found, it doesn't replace the field.
+ *
+ * A sample token would be: { "pid" : function() { return process.pid; } }
+ *
+ * Takes a pattern string, array of tokens and returns a layout function.
+ * @param {String} Log format pattern String
+ * @param {object} map object of different tokens
+ * @param {number} timezone offset in minutes
+ * @return {Function}
+ * @author Stephan Strittmatter
+ * @author Jan Schmidle
+ */
+function patternLayout (pattern, tokens, timezoneOffset) {
+ // jshint maxstatements:22
+ var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
+ var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdhmnprzxy%])(\{([^\}]+)\})?|([^%]+)/;
+
+ pattern = pattern || TTCC_CONVERSION_PATTERN;
+
+ function categoryName(loggingEvent, specifier) {
+ var loggerName = loggingEvent.categoryName;
+ if (specifier) {
+ var precision = parseInt(specifier, 10);
+ var loggerNameBits = loggerName.split(".");
+ if (precision < loggerNameBits.length) {
+ loggerName = loggerNameBits.slice(loggerNameBits.length - precision).join(".");
+ }
+ }
+ return loggerName;
+ }
+
+ function formatAsDate(loggingEvent, specifier) {
+ var format = dateFormat.ISO8601_FORMAT;
+ if (specifier) {
+ format = specifier;
+ // Pick up special cases
+ if (format == "ISO8601") {
+ format = dateFormat.ISO8601_FORMAT;
+ } else if (format == "ISO8601_WITH_TZ_OFFSET") {
+ format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
+ } else if (format == "ABSOLUTE") {
+ format = dateFormat.ABSOLUTETIME_FORMAT;
+ } else if (format == "DATE") {
+ format = dateFormat.DATETIME_FORMAT;
+ }
+ }
+ // Format the date
+ return dateFormat.asString(format, loggingEvent.startTime, timezoneOffset);
+ }
+
+ function hostname() {
+ return os.hostname().toString();
+ }
+
+ function formatMessage(loggingEvent) {
+ return formatLogData(loggingEvent.data);
+ }
+
+ function endOfLine() {
+ return eol;
+ }
+
+ function logLevel(loggingEvent) {
+ return loggingEvent.level.toString();
+ }
+
+ function startTime(loggingEvent) {
+ return dateFormat.asString('hh:mm:ss', loggingEvent.startTime, timezoneOffset);
+ }
+
+ function startColour(loggingEvent) {
+ return colorizeStart(colours[loggingEvent.level.toString()]);
+ }
+
+ function endColour(loggingEvent) {
+ return colorizeEnd(colours[loggingEvent.level.toString()]);
+ }
+
+ function percent() {
+ return '%';
+ }
+
+ function pid(loggingEvent) {
+ if (loggingEvent && loggingEvent.pid) {
+ return loggingEvent.pid;
+ } else {
+ return process.pid;
+ }
+ }
+
+ function clusterInfo(loggingEvent, specifier) {
+ if (loggingEvent.cluster && specifier) {
+ return specifier
+ .replace('%m', loggingEvent.cluster.master)
+ .replace('%w', loggingEvent.cluster.worker)
+ .replace('%i', loggingEvent.cluster.workerId);
+ } else if (loggingEvent.cluster) {
+ return loggingEvent.cluster.worker+'@'+loggingEvent.cluster.master;
+ } else {
+ return pid();
+ }
+ }
+
+ function userDefined(loggingEvent, specifier) {
+ if (typeof(tokens[specifier]) !== 'undefined') {
+ if (typeof(tokens[specifier]) === 'function') {
+ return tokens[specifier](loggingEvent);
+ } else {
+ return tokens[specifier];
+ }
+ }
+ return null;
+ }
+
+ var replacers = {
+ 'c': categoryName,
+ 'd': formatAsDate,
+ 'h': hostname,
+ 'm': formatMessage,
+ 'n': endOfLine,
+ 'p': logLevel,
+ 'r': startTime,
+ '[': startColour,
+ ']': endColour,
+ 'y': clusterInfo,
+ 'z': pid,
+ '%': percent,
+ 'x': userDefined
+ };
+
+ function replaceToken(conversionCharacter, loggingEvent, specifier) {
+ return replacers[conversionCharacter](loggingEvent, specifier);
+ }
+
+ function truncate(truncation, toTruncate) {
+ var len;
+ if (truncation) {
+ len = parseInt(truncation.substr(1), 10);
+ return toTruncate.substring(0, len);
+ }
+
+ return toTruncate;
+ }
+
+ function pad(padding, toPad) {
+ var len;
+ if (padding) {
+ if (padding.charAt(0) == "-") {
+ len = parseInt(padding.substr(1), 10);
+ // Right pad with spaces
+ while (toPad.length < len) {
+ toPad += " ";
+ }
+ } else {
+ len = parseInt(padding, 10);
+ // Left pad with spaces
+ while (toPad.length < len) {
+ toPad = " " + toPad;
+ }
+ }
+ }
+ return toPad;
+ }
+
+ function truncateAndPad(toTruncAndPad, truncation, padding) {
+ var replacement = toTruncAndPad;
+ replacement = truncate(truncation, replacement);
+ replacement = pad(padding, replacement);
+ return replacement;
+ }
+
+ return function(loggingEvent) {
+ var formattedString = "";
+ var result;
+ var searchString = pattern;
+
+ while ((result = regex.exec(searchString))) {
+ var matchedString = result[0];
+ var padding = result[1];
+ var truncation = result[2];
+ var conversionCharacter = result[3];
+ var specifier = result[5];
+ var text = result[6];
+
+ // Check if the pattern matched was just normal text
+ if (text) {
+ formattedString += "" + text;
+ } else {
+ // Create a raw replacement string based on the conversion
+ // character and specifier
+ var replacement = replaceToken(conversionCharacter, loggingEvent, specifier);
+ formattedString += truncateAndPad(replacement, truncation, padding);
+ }
+ searchString = searchString.substr(result.index + result[0].length);
+ }
+ return formattedString;
+ };
+
+}
+
+module.exports = {
+ basicLayout: basicLayout,
+ messagePassThroughLayout: messagePassThroughLayout,
+ patternLayout: patternLayout,
+ colouredLayout: colouredLayout,
+ coloredLayout: colouredLayout,
+ dummyLayout: dummyLayout,
+ addLayout: function(name, serializerGenerator) {
+ layoutMakers[name] = serializerGenerator;
+ },
+ layout: function(name, config) {
+ return layoutMakers[name] && layoutMakers[name](config);
+ }
+};
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/levels.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/levels.js
new file mode 100644
index 00000000..cb9243a2
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/levels.js
@@ -0,0 +1,66 @@
+"use strict";
+
+function Level(level, levelStr) {
+ this.level = level;
+ this.levelStr = levelStr;
+}
+
+/**
+ * converts given String to corresponding Level
+ * @param {String} sArg String value of Level OR Log4js.Level
+ * @param {Log4js.Level} defaultLevel default Level, if no String representation
+ * @return Level object
+ * @type Log4js.Level
+ */
+function toLevel(sArg, defaultLevel) {
+ if (!sArg) {
+ return defaultLevel;
+ }
+ if (sArg instanceof Level) {
+ module.exports[sArg.toString()] = sArg;
+ return sArg;
+ }
+ if (typeof sArg === "string") {
+ return module.exports[sArg.toUpperCase()] || defaultLevel;
+ }
+ return toLevel(sArg.toString());
+}
+
+Level.prototype.toString = function() {
+ return this.levelStr;
+};
+
+Level.prototype.isLessThanOrEqualTo = function(otherLevel) {
+ if (typeof otherLevel === "string") {
+ otherLevel = toLevel(otherLevel);
+ }
+ return this.level <= otherLevel.level;
+};
+
+Level.prototype.isGreaterThanOrEqualTo = function(otherLevel) {
+ if (typeof otherLevel === "string") {
+ otherLevel = toLevel(otherLevel);
+ }
+ return this.level >= otherLevel.level;
+};
+
+Level.prototype.isEqualTo = function(otherLevel) {
+ if (typeof otherLevel === "string") {
+ otherLevel = toLevel(otherLevel);
+ }
+ return this.level === otherLevel.level;
+};
+
+module.exports = {
+ ALL: new Level(Number.MIN_VALUE, "ALL"),
+ TRACE: new Level(5000, "TRACE"),
+ DEBUG: new Level(10000, "DEBUG"),
+ INFO: new Level(20000, "INFO"),
+ WARN: new Level(30000, "WARN"),
+ ERROR: new Level(40000, "ERROR"),
+ FATAL: new Level(50000, "FATAL"),
+ MARK: new Level(9007199254740992, "MARK"), // 2^53
+ OFF: new Level(Number.MAX_VALUE, "OFF"),
+ toLevel: toLevel,
+ Level: Level
+};
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/log4js.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/log4js.js
new file mode 100644
index 00000000..629aed51
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/log4js.js
@@ -0,0 +1,504 @@
+"use strict";
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @fileoverview log4js is a library to log in JavaScript in similar manner
+ * than in log4j for Java. The API should be nearly the same.
+ *
+ * <h3>Example:</h3>
+ * <pre>
+ * var logging = require('log4js');
+ * //add an appender that logs all messages to stdout.
+ * logging.addAppender(logging.consoleAppender());
+ * //add an appender that logs "some-category" to a file
+ * logging.addAppender(logging.fileAppender("file.log"), "some-category");
+ * //get a logger
+ * var log = logging.getLogger("some-category");
+ * log.setLevel(logging.levels.TRACE); //set the Level
+ *
+ * ...
+ *
+ * //call the log
+ * log.trace("trace me" );
+ * </pre>
+ *
+ * NOTE: the authors below are the original browser-based log4js authors
+ * don't try to contact them about bugs in this version :)
+ * @version 1.0
+ * @author Stephan Strittmatter - http://jroller.com/page/stritti
+ * @author Seth Chisamore - http://www.chisamore.com
+ * @since 2005-05-20
+ * @static
+ * Website: http://log4js.berlios.de
+ */
+var events = require('events')
+, fs = require('fs')
+, path = require('path')
+, util = require('util')
+, layouts = require('./layouts')
+, levels = require('./levels')
+, loggerModule = require('./logger')
+, LoggingEvent = loggerModule.LoggingEvent
+, Logger = loggerModule.Logger
+, ALL_CATEGORIES = '[all]'
+, appenders = {}
+, loggers = {}
+, appenderMakers = {}
+, appenderShutdowns = {}
+, defaultConfig = {
+ appenders: [
+ { type: "console" }
+ ],
+ replaceConsole: false
+};
+
+require('./appenders/console');
+
+function hasLogger(logger) {
+ return loggers.hasOwnProperty(logger);
+}
+
+levels.forName = function(levelStr, levelVal) {
+ var level;
+ if (typeof levelStr === "string" && typeof levelVal === "number") {
+ var levelUpper = levelStr.toUpperCase();
+ level = new levels.Level(levelVal, levelUpper);
+ loggerModule.addLevelMethods(level);
+ }
+ return level;
+};
+
+levels.getLevel = function(levelStr) {
+ var level;
+ if (typeof levelStr === "string") {
+ var levelUpper = levelStr.toUpperCase();
+ level = levels.toLevel(levelStr);
+ }
+ return level;
+};
+
+function getBufferedLogger(categoryName) {
+ var base_logger = getLogger(categoryName);
+ var logger = {};
+ logger.temp = [];
+ logger.target = base_logger;
+ logger.flush = function () {
+ for (var i = 0; i < logger.temp.length; i++) {
+ var log = logger.temp[i];
+ logger.target[log.level](log.message);
+ delete logger.temp[i];
+ }
+ };
+ logger.trace = function (message) { logger.temp.push({level: 'trace', message: message}); };
+ logger.debug = function (message) { logger.temp.push({level: 'debug', message: message}); };
+ logger.info = function (message) { logger.temp.push({level: 'info', message: message}); };
+ logger.warn = function (message) { logger.temp.push({level: 'warn', message: message}); };
+ logger.error = function (message) { logger.temp.push({level: 'error', message: message}); };
+ logger.fatal = function (message) { logger.temp.push({level: 'fatal', message: message}); };
+
+ return logger;
+}
+
+function normalizeCategory (category) {
+ return category + '.';
+}
+
+function doesLevelEntryContainsLogger (levelCategory, loggerCategory) {
+ var normalizedLevelCategory = normalizeCategory(levelCategory);
+ var normalizedLoggerCategory = normalizeCategory(loggerCategory);
+ return normalizedLoggerCategory.substring(0, normalizedLevelCategory.length) == normalizedLevelCategory; //jshint ignore:line
+}
+
+function doesAppenderContainsLogger (appenderCategory, loggerCategory) {
+ var normalizedAppenderCategory = normalizeCategory(appenderCategory);
+ var normalizedLoggerCategory = normalizeCategory(loggerCategory);
+ return normalizedLoggerCategory.substring(0, normalizedAppenderCategory.length) == normalizedAppenderCategory; //jshint ignore:line
+}
+
+
+/**
+ * Get a logger instance. Instance is cached on categoryName level.
+ * @param {String} categoryName name of category to log to.
+ * @return {Logger} instance of logger for the category
+ * @static
+ */
+function getLogger (loggerCategoryName) {
+
+ // Use default logger if categoryName is not specified or invalid
+ if (typeof loggerCategoryName !== "string") {
+ loggerCategoryName = Logger.DEFAULT_CATEGORY;
+ }
+
+ if (!hasLogger(loggerCategoryName)) {
+
+ var level;
+
+ /* jshint -W073 */
+ // If there's a "levels" entry in the configuration
+ if (levels.config) {
+ // Goes through the categories in the levels configuration entry,
+ // starting with the "higher" ones.
+ var keys = Object.keys(levels.config).sort();
+ for (var idx = 0; idx < keys.length; idx++) {
+ var levelCategory = keys[idx];
+ if (doesLevelEntryContainsLogger(levelCategory, loggerCategoryName)) {
+ // level for the logger
+ level = levels.config[levelCategory];
+ }
+ }
+ }
+ /* jshint +W073 */
+
+ // Create the logger for this name if it doesn't already exist
+ loggers[loggerCategoryName] = new Logger(loggerCategoryName, level);
+
+ /* jshint -W083 */
+ var appenderList;
+ for(var appenderCategory in appenders) {
+ if (doesAppenderContainsLogger(appenderCategory, loggerCategoryName)) {
+ appenderList = appenders[appenderCategory];
+ appenderList.forEach(function(appender) {
+ loggers[loggerCategoryName].addListener("log", appender);
+ });
+ }
+ }
+ /* jshint +W083 */
+
+ if (appenders[ALL_CATEGORIES]) {
+ appenderList = appenders[ALL_CATEGORIES];
+ appenderList.forEach(function(appender) {
+ loggers[loggerCategoryName].addListener("log", appender);
+ });
+ }
+ }
+
+ return loggers[loggerCategoryName];
+}
+
+/**
+ * args are appender, then zero or more categories
+ */
+function addAppender () {
+ var args = Array.prototype.slice.call(arguments);
+ var appender = args.shift();
+ if (args.length === 0 || args[0] === undefined) {
+ args = [ ALL_CATEGORIES ];
+ }
+ //argument may already be an array
+ if (Array.isArray(args[0])) {
+ args = args[0];
+ }
+
+ args.forEach(function(appenderCategory) {
+ addAppenderToCategory(appender, appenderCategory);
+
+ if (appenderCategory === ALL_CATEGORIES) {
+ addAppenderToAllLoggers(appender);
+ } else {
+
+ for(var loggerCategory in loggers) {
+ if (doesAppenderContainsLogger(appenderCategory,loggerCategory)) {
+ loggers[loggerCategory].addListener("log", appender);
+ }
+ }
+
+ }
+ });
+}
+
+function addAppenderToAllLoggers(appender) {
+ for (var logger in loggers) {
+ if (hasLogger(logger)) {
+ loggers[logger].addListener("log", appender);
+ }
+ }
+}
+
+function addAppenderToCategory(appender, category) {
+ if (!appenders[category]) {
+ appenders[category] = [];
+ }
+ appenders[category].push(appender);
+}
+
+function clearAppenders () {
+ appenders = {};
+ for (var logger in loggers) {
+ if (hasLogger(logger)) {
+ loggers[logger].removeAllListeners("log");
+ }
+ }
+}
+
+function configureAppenders(appenderList, options) {
+ clearAppenders();
+ if (appenderList) {
+ appenderList.forEach(function(appenderConfig) {
+ loadAppender(appenderConfig.type);
+ var appender;
+ appenderConfig.makers = appenderMakers;
+ try {
+ appender = appenderMakers[appenderConfig.type](appenderConfig, options);
+ addAppender(appender, appenderConfig.category);
+ } catch(e) {
+ throw new Error("log4js configuration problem for " + util.inspect(appenderConfig), e);
+ }
+ });
+ }
+}
+
+function configureLevels(_levels) {
+ levels.config = _levels; // Keep it so we can create loggers later using this cfg
+ if (_levels) {
+ var keys = Object.keys(levels.config).sort();
+ for (var idx in keys) {
+ var category = keys[idx];
+ if(category === ALL_CATEGORIES) {
+ setGlobalLogLevel(_levels[category]);
+ }
+ /* jshint -W073 */
+ for(var loggerCategory in loggers) {
+ if (doesLevelEntryContainsLogger(category, loggerCategory)) {
+ loggers[loggerCategory].setLevel(_levels[category]);
+ }
+ }
+ /* jshint +W073 */
+ }
+ }
+}
+
+function setGlobalLogLevel(level) {
+ Logger.prototype.level = levels.toLevel(level, levels.TRACE);
+}
+
+/**
+ * Get the default logger instance.
+ * @return {Logger} instance of default logger
+ * @static
+ */
+function getDefaultLogger () {
+ return getLogger(Logger.DEFAULT_CATEGORY);
+}
+
+var configState = {};
+
+function loadConfigurationFile(filename) {
+ if (filename) {
+ return JSON.parse(fs.readFileSync(filename, "utf8"));
+ }
+ return undefined;
+}
+
+function configureOnceOff(config, options) {
+ if (config) {
+ try {
+ configureLevels(config.levels);
+ configureAppenders(config.appenders, options);
+
+ if (config.replaceConsole) {
+ replaceConsole();
+ } else {
+ restoreConsole();
+ }
+ } catch (e) {
+ throw new Error(
+ "Problem reading log4js config " + util.inspect(config) +
+ ". Error was \"" + e.message + "\" (" + e.stack + ")"
+ );
+ }
+ }
+}
+
+function reloadConfiguration(options) {
+ var mtime = getMTime(configState.filename);
+ if (!mtime) return;
+
+ if (configState.lastMTime && (mtime.getTime() > configState.lastMTime.getTime())) {
+ configureOnceOff(loadConfigurationFile(configState.filename), options);
+ }
+ configState.lastMTime = mtime;
+}
+
+function getMTime(filename) {
+ var mtime;
+ try {
+ mtime = fs.statSync(configState.filename).mtime;
+ } catch (e) {
+ getLogger('log4js').warn('Failed to load configuration file ' + filename);
+ }
+ return mtime;
+}
+
+function initReloadConfiguration(filename, options) {
+ if (configState.timerId) {
+ clearInterval(configState.timerId);
+ delete configState.timerId;
+ }
+ configState.filename = filename;
+ configState.lastMTime = getMTime(filename);
+ configState.timerId = setInterval(reloadConfiguration, options.reloadSecs*1000, options);
+}
+
+function configure(configurationFileOrObject, options) {
+ var config = configurationFileOrObject;
+ config = config || process.env.LOG4JS_CONFIG;
+ options = options || {};
+
+ if (config === undefined || config === null || typeof(config) === 'string') {
+ if (options.reloadSecs) {
+ initReloadConfiguration(config, options);
+ }
+ config = loadConfigurationFile(config) || defaultConfig;
+ } else {
+ if (options.reloadSecs) {
+ getLogger('log4js').warn(
+ 'Ignoring configuration reload parameter for "object" configuration.'
+ );
+ }
+ }
+ configureOnceOff(config, options);
+}
+
+var originalConsoleFunctions = {
+ log: console.log,
+ debug: console.debug,
+ info: console.info,
+ warn: console.warn,
+ error: console.error
+};
+
+function replaceConsole(logger) {
+ function replaceWith(fn) {
+ return function() {
+ fn.apply(logger, arguments);
+ };
+ }
+ logger = logger || getLogger("console");
+ ['log','debug','info','warn','error'].forEach(function (item) {
+ console[item] = replaceWith(item === 'log' ? logger.info : logger[item]);
+ });
+}
+
+function restoreConsole() {
+ ['log', 'debug', 'info', 'warn', 'error'].forEach(function (item) {
+ console[item] = originalConsoleFunctions[item];
+ });
+}
+
+/**
+ * Load an appenderModule based on the provided appender filepath. Will first
+ * check if the appender path is a subpath of the log4js "lib/appenders" directory.
+ * If not, it will attempt to load the the appender as complete path.
+ *
+ * @param {string} appender The filepath for the appender.
+ * @returns {Object|null} The required appender or null if appender could not be loaded.
+ * @private
+ */
+function requireAppender(appender) {
+ var appenderModule;
+ try {
+ appenderModule = require('./appenders/' + appender);
+ } catch (e) {
+ appenderModule = require(appender);
+ }
+ return appenderModule;
+}
+
+/**
+ * Load an appender. Provided the appender path to be loaded. If appenderModule is defined,
+ * it will be used in place of requiring the appender module.
+ *
+ * @param {string} appender The path to the appender module.
+ * @param {Object|void} [appenderModule] The pre-required appender module. When provided,
+ * instead of requiring the appender by its path, this object will be used.
+ * @returns {void}
+ * @private
+ */
+function loadAppender(appender, appenderModule) {
+ appenderModule = appenderModule || requireAppender(appender);
+
+ if (!appenderModule) {
+ throw new Error("Invalid log4js appender: " + util.inspect(appender));
+ }
+
+ module.exports.appenders[appender] = appenderModule.appender.bind(appenderModule);
+ if (appenderModule.shutdown) {
+ appenderShutdowns[appender] = appenderModule.shutdown.bind(appenderModule);
+ }
+ appenderMakers[appender] = appenderModule.configure.bind(appenderModule);
+}
+
+/**
+ * Shutdown all log appenders. This will first disable all writing to appenders
+ * and then call the shutdown function each appender.
+ *
+ * @params {Function} cb - The callback to be invoked once all appenders have
+ * shutdown. If an error occurs, the callback will be given the error object
+ * as the first argument.
+ * @returns {void}
+ */
+function shutdown(cb) {
+ // First, disable all writing to appenders. This prevents appenders from
+ // not being able to be drained because of run-away log writes.
+ loggerModule.disableAllLogWrites();
+
+ // Call each of the shutdown functions in parallel
+ var completed = 0;
+ var error;
+ var shutdownFcts = [];
+ var complete = function(err) {
+ error = error || err;
+ completed++;
+ if (completed >= shutdownFcts.length) {
+ cb(error);
+ }
+ };
+ for (var category in appenderShutdowns) {
+ if (appenderShutdowns.hasOwnProperty(category)) {
+ shutdownFcts.push(appenderShutdowns[category]);
+ }
+ }
+ if (!shutdownFcts.length) {
+ return cb();
+ }
+ shutdownFcts.forEach(function(shutdownFct) { shutdownFct(complete); });
+}
+
+module.exports = {
+ getBufferedLogger: getBufferedLogger,
+ getLogger: getLogger,
+ getDefaultLogger: getDefaultLogger,
+ hasLogger: hasLogger,
+
+ addAppender: addAppender,
+ loadAppender: loadAppender,
+ clearAppenders: clearAppenders,
+ configure: configure,
+ shutdown: shutdown,
+
+ replaceConsole: replaceConsole,
+ restoreConsole: restoreConsole,
+
+ levels: levels,
+ setGlobalLogLevel: setGlobalLogLevel,
+
+ layouts: layouts,
+ appenders: {},
+ appenderMakers: appenderMakers,
+ connectLogger: require('./connect-logger').connectLogger
+};
+
+//set ourselves up
+configure();
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/log4js.json b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/log4js.json
new file mode 100644
index 00000000..7b6d3e7d
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/log4js.json
@@ -0,0 +1,7 @@
+{
+ "appenders": [
+ {
+ "type": "console"
+ }
+ ]
+} \ No newline at end of file
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/logger.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/logger.js
new file mode 100644
index 00000000..984bd384
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/logger.js
@@ -0,0 +1,123 @@
+"use strict";
+var levels = require('./levels')
+, util = require('util')
+, events = require('events')
+, DEFAULT_CATEGORY = '[default]';
+
+var logWritesEnabled = true;
+
+/**
+ * Models a logging event.
+ * @constructor
+ * @param {String} categoryName name of category
+ * @param {Log4js.Level} level level of message
+ * @param {Array} data objects to log
+ * @param {Log4js.Logger} logger the associated logger
+ * @author Seth Chisamore
+ */
+function LoggingEvent (categoryName, level, data, logger) {
+ this.startTime = new Date();
+ this.categoryName = categoryName;
+ this.data = data;
+ this.level = level;
+ this.logger = logger;
+}
+
+/**
+ * Logger to log messages.
+ * use {@see Log4js#getLogger(String)} to get an instance.
+ * @constructor
+ * @param name name of category to log to
+ * @author Stephan Strittmatter
+ */
+function Logger (name, level) {
+ this.category = name || DEFAULT_CATEGORY;
+
+ if (level) {
+ this.setLevel(level);
+ }
+}
+util.inherits(Logger, events.EventEmitter);
+Logger.DEFAULT_CATEGORY = DEFAULT_CATEGORY;
+Logger.prototype.level = levels.TRACE;
+
+Logger.prototype.setLevel = function(level) {
+ this.level = levels.toLevel(level, this.level || levels.TRACE);
+};
+
+Logger.prototype.removeLevel = function() {
+ delete this.level;
+};
+
+Logger.prototype.log = function() {
+ var logLevel = levels.toLevel(arguments[0], levels.INFO);
+ if (!this.isLevelEnabled(logLevel)) {
+ return;
+ }
+ var numArgs = arguments.length - 1;
+ var args = new Array(numArgs);
+ for (var i = 0; i < numArgs; i++) {
+ args[i] = arguments[i + 1];
+ }
+ this._log(logLevel, args);
+};
+
+Logger.prototype.isLevelEnabled = function(otherLevel) {
+ return this.level.isLessThanOrEqualTo(otherLevel);
+};
+
+['Trace','Debug','Info','Warn','Error','Fatal', 'Mark'].forEach(
+ function(levelString) {
+ addLevelMethods(levelString);
+ }
+);
+
+function addLevelMethods(level) {
+ level = levels.toLevel(level);
+
+ var levelStrLower = level.toString().toLowerCase();
+ var levelMethod = levelStrLower.replace(/_([a-z])/g, function(g) { return g[1].toUpperCase(); } );
+ var isLevelMethod = levelMethod[0].toUpperCase() + levelMethod.slice(1);
+
+ Logger.prototype['is'+isLevelMethod+'Enabled'] = function() {
+ return this.isLevelEnabled(level.toString());
+ };
+
+ Logger.prototype[levelMethod] = function () {
+ if (logWritesEnabled && this.isLevelEnabled(level)) {
+ var numArgs = arguments.length;
+ var args = new Array(numArgs);
+ for (var i = 0; i < numArgs; i++) {
+ args[i] = arguments[i];
+ }
+ this._log(level, args);
+ }
+ };
+}
+
+Logger.prototype._log = function(level, data) {
+ var loggingEvent = new LoggingEvent(this.category, level, data, this);
+ this.emit('log', loggingEvent);
+};
+
+/**
+ * Disable all log writes.
+ * @returns {void}
+ */
+function disableAllLogWrites() {
+ logWritesEnabled = false;
+}
+
+/**
+ * Enable log writes.
+ * @returns {void}
+ */
+function enableAllLogWrites() {
+ logWritesEnabled = true;
+}
+
+exports.LoggingEvent = LoggingEvent;
+exports.Logger = Logger;
+exports.disableAllLogWrites = disableAllLogWrites;
+exports.enableAllLogWrites = enableAllLogWrites;
+exports.addLevelMethods = addLevelMethods; \ No newline at end of file
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/BaseRollingFileStream.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/BaseRollingFileStream.js
new file mode 100644
index 00000000..9c441ad9
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/BaseRollingFileStream.js
@@ -0,0 +1,94 @@
+"use strict";
+var fs = require('fs')
+, stream
+, debug = require('../debug')('BaseRollingFileStream')
+, util = require('util')
+, semver = require('semver');
+
+if (semver.satisfies(process.version, '>=0.10.0')) {
+ stream = require('stream');
+} else {
+ stream = require('readable-stream');
+}
+
+module.exports = BaseRollingFileStream;
+
+function BaseRollingFileStream(filename, options) {
+ debug("In BaseRollingFileStream");
+ this.filename = filename;
+ this.options = options || {};
+ this.options.encoding = this.options.encoding || 'utf8';
+ this.options.mode = this.options.mode || parseInt('0644', 8);
+ this.options.flags = this.options.flags || 'a';
+
+ this.currentSize = 0;
+
+ function currentFileSize(file) {
+ var fileSize = 0;
+ try {
+ fileSize = fs.statSync(file).size;
+ } catch (e) {
+ // file does not exist
+ }
+ return fileSize;
+ }
+
+ function throwErrorIfArgumentsAreNotValid() {
+ if (!filename) {
+ throw new Error("You must specify a filename");
+ }
+ }
+
+ throwErrorIfArgumentsAreNotValid();
+ debug("Calling BaseRollingFileStream.super");
+ BaseRollingFileStream.super_.call(this);
+ this.openTheStream();
+ this.currentSize = currentFileSize(this.filename);
+}
+util.inherits(BaseRollingFileStream, stream.Writable);
+
+BaseRollingFileStream.prototype._write = function(chunk, encoding, callback) {
+ var that = this;
+ function writeTheChunk() {
+ debug("writing the chunk to the underlying stream");
+ that.currentSize += chunk.length;
+ try {
+ that.theStream.write(chunk, encoding, callback);
+ }
+ catch (err){
+ debug(err);
+ callback();
+ }
+ }
+
+ debug("in _write");
+
+ if (this.shouldRoll()) {
+ this.currentSize = 0;
+ this.roll(this.filename, writeTheChunk);
+ } else {
+ writeTheChunk();
+ }
+};
+
+BaseRollingFileStream.prototype.openTheStream = function(cb) {
+ debug("opening the underlying stream");
+ this.theStream = fs.createWriteStream(this.filename, this.options);
+ if (cb) {
+ this.theStream.on("open", cb);
+ }
+};
+
+BaseRollingFileStream.prototype.closeTheStream = function(cb) {
+ debug("closing the underlying stream");
+ this.theStream.end(cb);
+};
+
+BaseRollingFileStream.prototype.shouldRoll = function() {
+ return false; // default behaviour is never to roll
+};
+
+BaseRollingFileStream.prototype.roll = function(filename, callback) {
+ callback(); // default behaviour is not to do anything
+};
+
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/DateRollingFileStream.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/DateRollingFileStream.js
new file mode 100644
index 00000000..5ef2081f
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/DateRollingFileStream.js
@@ -0,0 +1,91 @@
+"use strict";
+var BaseRollingFileStream = require('./BaseRollingFileStream')
+, debug = require('../debug')('DateRollingFileStream')
+, format = require('../date_format')
+, fs = require('fs')
+, util = require('util');
+
+module.exports = DateRollingFileStream;
+
+function findTimestampFromFileIfExists(filename, now) {
+ return fs.existsSync(filename) ? fs.statSync(filename).mtime : new Date(now());
+}
+
+function DateRollingFileStream(filename, pattern, options, now) {
+ debug("Now is " + now);
+ if (pattern && typeof(pattern) === 'object') {
+ now = options;
+ options = pattern;
+ pattern = null;
+ }
+ this.pattern = pattern || '.yyyy-MM-dd';
+ this.now = now || Date.now;
+ this.lastTimeWeWroteSomething = format.asString(
+ this.pattern,
+ findTimestampFromFileIfExists(filename, this.now)
+ );
+
+ this.baseFilename = filename;
+ this.alwaysIncludePattern = false;
+
+ if (options) {
+ if (options.alwaysIncludePattern) {
+ this.alwaysIncludePattern = true;
+ filename = this.baseFilename + this.lastTimeWeWroteSomething;
+ }
+ delete options.alwaysIncludePattern;
+ if (Object.keys(options).length === 0) {
+ options = null;
+ }
+ }
+ debug("this.now is " + this.now + ", now is " + now);
+
+ DateRollingFileStream.super_.call(this, filename, options);
+}
+util.inherits(DateRollingFileStream, BaseRollingFileStream);
+
+DateRollingFileStream.prototype.shouldRoll = function() {
+ var lastTime = this.lastTimeWeWroteSomething,
+ thisTime = format.asString(this.pattern, new Date(this.now()));
+
+ debug("DateRollingFileStream.shouldRoll with now = " +
+ this.now() + ", thisTime = " + thisTime + ", lastTime = " + lastTime);
+
+ this.lastTimeWeWroteSomething = thisTime;
+ this.previousTime = lastTime;
+
+ return thisTime !== lastTime;
+};
+
+DateRollingFileStream.prototype.roll = function(filename, callback) {
+ var that = this;
+
+ debug("Starting roll");
+
+ if (this.alwaysIncludePattern) {
+ this.filename = this.baseFilename + this.lastTimeWeWroteSomething;
+ this.closeTheStream(this.openTheStream.bind(this, callback));
+ } else {
+ var newFilename = this.baseFilename + this.previousTime;
+ this.closeTheStream(
+ deleteAnyExistingFile.bind(null,
+ renameTheCurrentFile.bind(null,
+ this.openTheStream.bind(this,
+ callback))));
+ }
+
+ function deleteAnyExistingFile(cb) {
+ //on windows, you can get a EEXIST error if you rename a file to an existing file
+ //so, we'll try to delete the file we're renaming to first
+ fs.unlink(newFilename, function (err) {
+ //ignore err: if we could not delete, it's most likely that it doesn't exist
+ cb();
+ });
+ }
+
+ function renameTheCurrentFile(cb) {
+ debug("Renaming the " + filename + " -> " + newFilename);
+ fs.rename(filename, newFilename, cb);
+ }
+
+};
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/RollingFileStream.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/RollingFileStream.js
new file mode 100644
index 00000000..af1e52e2
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/RollingFileStream.js
@@ -0,0 +1,117 @@
+"use strict";
+var BaseRollingFileStream = require('./BaseRollingFileStream')
+, debug = require('../debug')('RollingFileStream')
+, util = require('util')
+, path = require('path')
+, child_process = require('child_process')
+, zlib = require("zlib")
+, fs = require('fs');
+
+module.exports = RollingFileStream;
+
+function RollingFileStream (filename, size, backups, options) {
+ this.size = size;
+ this.backups = backups || 1;
+
+ function throwErrorIfArgumentsAreNotValid() {
+ if (!filename || !size || size <= 0) {
+ throw new Error("You must specify a filename and file size");
+ }
+ }
+
+ throwErrorIfArgumentsAreNotValid();
+
+ RollingFileStream.super_.call(this, filename, options);
+}
+util.inherits(RollingFileStream, BaseRollingFileStream);
+
+RollingFileStream.prototype.shouldRoll = function() {
+ debug("should roll with current size " + this.currentSize + " and max size " + this.size);
+ return this.currentSize >= this.size;
+};
+
+RollingFileStream.prototype.roll = function(filename, callback) {
+ var that = this,
+ nameMatcher = new RegExp('^' + path.basename(filename));
+
+ function justTheseFiles (item) {
+ return nameMatcher.test(item);
+ }
+
+ function index(filename_) {
+ debug('Calculating index of '+filename_);
+ return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
+ }
+
+ function byIndex(a, b) {
+ if (index(a) > index(b)) {
+ return 1;
+ } else if (index(a) < index(b) ) {
+ return -1;
+ } else {
+ return 0;
+ }
+ }
+
+ function compress (filename, cb) {
+
+ var gzip = zlib.createGzip();
+ var inp = fs.createReadStream(filename);
+ var out = fs.createWriteStream(filename+".gz");
+ inp.pipe(gzip).pipe(out);
+ fs.unlink(filename, cb);
+
+ }
+
+ function increaseFileIndex (fileToRename, cb) {
+ var idx = index(fileToRename);
+ debug('Index of ' + fileToRename + ' is ' + idx);
+ if (idx < that.backups) {
+
+ var ext = path.extname(fileToRename);
+ var destination = filename + '.' + (idx+1);
+ if (that.options.compress && /^gz$/.test(ext.substring(1))) {
+ destination+=ext;
+ }
+ //on windows, you can get a EEXIST error if you rename a file to an existing file
+ //so, we'll try to delete the file we're renaming to first
+ fs.unlink(destination, function (err) {
+ //ignore err: if we could not delete, it's most likely that it doesn't exist
+ debug('Renaming ' + fileToRename + ' -> ' + destination);
+ fs.rename(path.join(path.dirname(filename), fileToRename), destination, function(err) {
+ if (err) {
+ cb(err);
+ } else {
+ if (that.options.compress && ext!=".gz") {
+ compress(destination, cb);
+ } else {
+ cb();
+ }
+ }
+ });
+ });
+ } else {
+ cb();
+ }
+ }
+
+ function renameTheFiles(cb) {
+ //roll the backups (rename file.n to file.n+1, where n <= numBackups)
+ debug("Renaming the old files");
+ fs.readdir(path.dirname(filename), function (err, files) {
+ var filesToProcess = files.filter(justTheseFiles).sort(byIndex);
+ (function processOne(err) {
+ var file = filesToProcess.pop();
+ if (!file || err) { return cb(err); }
+ increaseFileIndex(file, processOne);
+ })();
+ });
+ }
+
+ debug("Rolling, rolling, rolling");
+ this.closeTheStream(
+ renameTheFiles.bind(null,
+ this.openTheStream.bind(this,
+ callback)));
+
+};
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/index.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/index.js
new file mode 100644
index 00000000..d8e026dc
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/lib/streams/index.js
@@ -0,0 +1,3 @@
+"use strict";
+exports.RollingFileStream = require('./RollingFileStream');
+exports.DateRollingFileStream = require('./DateRollingFileStream');
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/README.md b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/README.md
new file mode 100644
index 00000000..052a62b8
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/README.md
@@ -0,0 +1,54 @@
+
+# isarray
+
+`Array#isArray` for older browsers.
+
+## Usage
+
+```js
+var isArray = require('isarray');
+
+console.log(isArray([])); // => true
+console.log(isArray({})); // => false
+```
+
+## Installation
+
+With [npm](http://npmjs.org) do
+
+```bash
+$ npm install isarray
+```
+
+Then bundle for the browser with
+[browserify](https://github.com/substack/browserify).
+
+With [component](http://component.io) do
+
+```bash
+$ component install juliangruber/isarray
+```
+
+## License
+
+(MIT)
+
+Copyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/build/build.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/build/build.js
new file mode 100644
index 00000000..ec58596a
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/build/build.js
@@ -0,0 +1,209 @@
+
+/**
+ * Require the given path.
+ *
+ * @param {String} path
+ * @return {Object} exports
+ * @api public
+ */
+
+function require(path, parent, orig) {
+ var resolved = require.resolve(path);
+
+ // lookup failed
+ if (null == resolved) {
+ orig = orig || path;
+ parent = parent || 'root';
+ var err = new Error('Failed to require "' + orig + '" from "' + parent + '"');
+ err.path = orig;
+ err.parent = parent;
+ err.require = true;
+ throw err;
+ }
+
+ var module = require.modules[resolved];
+
+ // perform real require()
+ // by invoking the module's
+ // registered function
+ if (!module.exports) {
+ module.exports = {};
+ module.client = module.component = true;
+ module.call(this, module.exports, require.relative(resolved), module);
+ }
+
+ return module.exports;
+}
+
+/**
+ * Registered modules.
+ */
+
+require.modules = {};
+
+/**
+ * Registered aliases.
+ */
+
+require.aliases = {};
+
+/**
+ * Resolve `path`.
+ *
+ * Lookup:
+ *
+ * - PATH/index.js
+ * - PATH.js
+ * - PATH
+ *
+ * @param {String} path
+ * @return {String} path or null
+ * @api private
+ */
+
+require.resolve = function(path) {
+ if (path.charAt(0) === '/') path = path.slice(1);
+ var index = path + '/index.js';
+
+ var paths = [
+ path,
+ path + '.js',
+ path + '.json',
+ path + '/index.js',
+ path + '/index.json'
+ ];
+
+ for (var i = 0; i < paths.length; i++) {
+ var path = paths[i];
+ if (require.modules.hasOwnProperty(path)) return path;
+ }
+
+ if (require.aliases.hasOwnProperty(index)) {
+ return require.aliases[index];
+ }
+};
+
+/**
+ * Normalize `path` relative to the current path.
+ *
+ * @param {String} curr
+ * @param {String} path
+ * @return {String}
+ * @api private
+ */
+
+require.normalize = function(curr, path) {
+ var segs = [];
+
+ if ('.' != path.charAt(0)) return path;
+
+ curr = curr.split('/');
+ path = path.split('/');
+
+ for (var i = 0; i < path.length; ++i) {
+ if ('..' == path[i]) {
+ curr.pop();
+ } else if ('.' != path[i] && '' != path[i]) {
+ segs.push(path[i]);
+ }
+ }
+
+ return curr.concat(segs).join('/');
+};
+
+/**
+ * Register module at `path` with callback `definition`.
+ *
+ * @param {String} path
+ * @param {Function} definition
+ * @api private
+ */
+
+require.register = function(path, definition) {
+ require.modules[path] = definition;
+};
+
+/**
+ * Alias a module definition.
+ *
+ * @param {String} from
+ * @param {String} to
+ * @api private
+ */
+
+require.alias = function(from, to) {
+ if (!require.modules.hasOwnProperty(from)) {
+ throw new Error('Failed to alias "' + from + '", it does not exist');
+ }
+ require.aliases[to] = from;
+};
+
+/**
+ * Return a require function relative to the `parent` path.
+ *
+ * @param {String} parent
+ * @return {Function}
+ * @api private
+ */
+
+require.relative = function(parent) {
+ var p = require.normalize(parent, '..');
+
+ /**
+ * lastIndexOf helper.
+ */
+
+ function lastIndexOf(arr, obj) {
+ var i = arr.length;
+ while (i--) {
+ if (arr[i] === obj) return i;
+ }
+ return -1;
+ }
+
+ /**
+ * The relative require() itself.
+ */
+
+ function localRequire(path) {
+ var resolved = localRequire.resolve(path);
+ return require(resolved, parent, path);
+ }
+
+ /**
+ * Resolve relative to the parent.
+ */
+
+ localRequire.resolve = function(path) {
+ var c = path.charAt(0);
+ if ('/' == c) return path.slice(1);
+ if ('.' == c) return require.normalize(p, path);
+
+ // resolve deps by returning
+ // the dep in the nearest "deps"
+ // directory
+ var segs = parent.split('/');
+ var i = lastIndexOf(segs, 'deps') + 1;
+ if (!i) i = 0;
+ path = segs.slice(0, i + 1).join('/') + '/deps/' + path;
+ return path;
+ };
+
+ /**
+ * Check if module is defined at `path`.
+ */
+
+ localRequire.exists = function(path) {
+ return require.modules.hasOwnProperty(localRequire.resolve(path));
+ };
+
+ return localRequire;
+};
+require.register("isarray/index.js", function(exports, require, module){
+module.exports = Array.isArray || function (arr) {
+ return Object.prototype.toString.call(arr) == '[object Array]';
+};
+
+});
+require.alias("isarray/index.js", "isarray/index.js");
+
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/component.json b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/component.json
new file mode 100644
index 00000000..9e31b683
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/component.json
@@ -0,0 +1,19 @@
+{
+ "name" : "isarray",
+ "description" : "Array#isArray for older browsers",
+ "version" : "0.0.1",
+ "repository" : "juliangruber/isarray",
+ "homepage": "https://github.com/juliangruber/isarray",
+ "main" : "index.js",
+ "scripts" : [
+ "index.js"
+ ],
+ "dependencies" : {},
+ "keywords": ["browser","isarray","array"],
+ "author": {
+ "name": "Julian Gruber",
+ "email": "mail@juliangruber.com",
+ "url": "http://juliangruber.com"
+ },
+ "license": "MIT"
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/index.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/index.js
new file mode 100644
index 00000000..5f5ad45d
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/index.js
@@ -0,0 +1,3 @@
+module.exports = Array.isArray || function (arr) {
+ return Object.prototype.toString.call(arr) == '[object Array]';
+};
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/package.json b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/package.json
new file mode 100644
index 00000000..f9604829
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/isarray/package.json
@@ -0,0 +1,86 @@
+{
+ "_args": [
+ [
+ {
+ "raw": "isarray@0.0.1",
+ "scope": null,
+ "escapedName": "isarray",
+ "name": "isarray",
+ "rawSpec": "0.0.1",
+ "spec": "0.0.1",
+ "type": "version"
+ },
+ "C:\\apache-tomcat-8.5.11\\webapps\\vnfmarket\\node_modules\\log4js\\node_modules\\readable-stream"
+ ]
+ ],
+ "_from": "isarray@0.0.1",
+ "_id": "isarray@0.0.1",
+ "_inCache": true,
+ "_location": "/log4js/isarray",
+ "_npmUser": {
+ "name": "juliangruber",
+ "email": "julian@juliangruber.com"
+ },
+ "_npmVersion": "1.2.18",
+ "_phantomChildren": {},
+ "_requested": {
+ "raw": "isarray@0.0.1",
+ "scope": null,
+ "escapedName": "isarray",
+ "name": "isarray",
+ "rawSpec": "0.0.1",
+ "spec": "0.0.1",
+ "type": "version"
+ },
+ "_requiredBy": [
+ "/log4js/readable-stream"
+ ],
+ "_resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
+ "_shasum": "8a18acfca9a8f4177e09abfc6038939b05d1eedf",
+ "_shrinkwrap": null,
+ "_spec": "isarray@0.0.1",
+ "_where": "C:\\apache-tomcat-8.5.11\\webapps\\vnfmarket\\node_modules\\log4js\\node_modules\\readable-stream",
+ "author": {
+ "name": "Julian Gruber",
+ "email": "mail@juliangruber.com",
+ "url": "http://juliangruber.com"
+ },
+ "bugs": {
+ "url": "https://github.com/juliangruber/isarray/issues"
+ },
+ "dependencies": {},
+ "description": "Array#isArray for older browsers",
+ "devDependencies": {
+ "tap": "*"
+ },
+ "directories": {},
+ "dist": {
+ "shasum": "8a18acfca9a8f4177e09abfc6038939b05d1eedf",
+ "tarball": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz"
+ },
+ "homepage": "https://github.com/juliangruber/isarray",
+ "keywords": [
+ "browser",
+ "isarray",
+ "array"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "maintainers": [
+ {
+ "name": "juliangruber",
+ "email": "julian@juliangruber.com"
+ }
+ ],
+ "name": "isarray",
+ "optionalDependencies": {},
+ "readme": "ERROR: No README data found!",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/juliangruber/isarray.git"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "version": "0.0.1"
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/.npmignore b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/.npmignore
new file mode 100644
index 00000000..38344f87
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/.npmignore
@@ -0,0 +1,5 @@
+build/
+test/
+examples/
+fs.js
+zlib.js \ No newline at end of file
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/LICENSE b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/LICENSE
new file mode 100644
index 00000000..e3d4e695
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/LICENSE
@@ -0,0 +1,18 @@
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/README.md b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/README.md
new file mode 100644
index 00000000..3fb3e802
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/README.md
@@ -0,0 +1,15 @@
+# readable-stream
+
+***Node-core streams for userland***
+
+[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/)
+[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/)
+
+This package is a mirror of the Streams2 and Streams3 implementations in Node-core.
+
+If you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core.
+
+**readable-stream** comes in two major versions, v1.0.x and v1.1.x. The former tracks the Streams2 implementation in Node 0.10, including bug-fixes and minor improvements as they are added. The latter tracks Streams3 as it develops in Node 0.11; we will likely see a v1.2.x branch for Node 0.12.
+
+**readable-stream** uses proper patch-level versioning so if you pin to `"~1.0.0"` you’ll get the latest Node 0.10 Streams2 implementation, including any fixes and minor non-breaking improvements. The patch-level versions of 1.0.x and 1.1.x should mirror the patch-level versions of Node-core releases. You should prefer the **1.0.x** releases for now and when you’re ready to start using Streams3, pin to `"~1.1.0"`
+
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/duplex.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/duplex.js
new file mode 100644
index 00000000..ca807af8
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/duplex.js
@@ -0,0 +1 @@
+module.exports = require("./lib/_stream_duplex.js")
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_duplex.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_duplex.js
new file mode 100644
index 00000000..b513d61a
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_duplex.js
@@ -0,0 +1,89 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// a duplex stream is just a stream that is both readable and writable.
+// Since JS doesn't have multiple prototypal inheritance, this class
+// prototypally inherits from Readable, and then parasitically from
+// Writable.
+
+module.exports = Duplex;
+
+/*<replacement>*/
+var objectKeys = Object.keys || function (obj) {
+ var keys = [];
+ for (var key in obj) keys.push(key);
+ return keys;
+}
+/*</replacement>*/
+
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+var Readable = require('./_stream_readable');
+var Writable = require('./_stream_writable');
+
+util.inherits(Duplex, Readable);
+
+forEach(objectKeys(Writable.prototype), function(method) {
+ if (!Duplex.prototype[method])
+ Duplex.prototype[method] = Writable.prototype[method];
+});
+
+function Duplex(options) {
+ if (!(this instanceof Duplex))
+ return new Duplex(options);
+
+ Readable.call(this, options);
+ Writable.call(this, options);
+
+ if (options && options.readable === false)
+ this.readable = false;
+
+ if (options && options.writable === false)
+ this.writable = false;
+
+ this.allowHalfOpen = true;
+ if (options && options.allowHalfOpen === false)
+ this.allowHalfOpen = false;
+
+ this.once('end', onend);
+}
+
+// the no-half-open enforcer
+function onend() {
+ // if we allow half-open state, or if the writable side ended,
+ // then we're ok.
+ if (this.allowHalfOpen || this._writableState.ended)
+ return;
+
+ // no more data can be written.
+ // But allow more writes to happen in this tick.
+ process.nextTick(this.end.bind(this));
+}
+
+function forEach (xs, f) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i);
+ }
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_passthrough.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_passthrough.js
new file mode 100644
index 00000000..895ca50a
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_passthrough.js
@@ -0,0 +1,46 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// a passthrough stream.
+// basically just the most minimal sort of Transform stream.
+// Every written chunk gets output as-is.
+
+module.exports = PassThrough;
+
+var Transform = require('./_stream_transform');
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+util.inherits(PassThrough, Transform);
+
+function PassThrough(options) {
+ if (!(this instanceof PassThrough))
+ return new PassThrough(options);
+
+ Transform.call(this, options);
+}
+
+PassThrough.prototype._transform = function(chunk, encoding, cb) {
+ cb(null, chunk);
+};
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_readable.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_readable.js
new file mode 100644
index 00000000..63072209
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_readable.js
@@ -0,0 +1,982 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+module.exports = Readable;
+
+/*<replacement>*/
+var isArray = require('isarray');
+/*</replacement>*/
+
+
+/*<replacement>*/
+var Buffer = require('buffer').Buffer;
+/*</replacement>*/
+
+Readable.ReadableState = ReadableState;
+
+var EE = require('events').EventEmitter;
+
+/*<replacement>*/
+if (!EE.listenerCount) EE.listenerCount = function(emitter, type) {
+ return emitter.listeners(type).length;
+};
+/*</replacement>*/
+
+var Stream = require('stream');
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+var StringDecoder;
+
+util.inherits(Readable, Stream);
+
+function ReadableState(options, stream) {
+ options = options || {};
+
+ // the point at which it stops calling _read() to fill the buffer
+ // Note: 0 is a valid value, means "don't call _read preemptively ever"
+ var hwm = options.highWaterMark;
+ this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
+
+ // cast to ints.
+ this.highWaterMark = ~~this.highWaterMark;
+
+ this.buffer = [];
+ this.length = 0;
+ this.pipes = null;
+ this.pipesCount = 0;
+ this.flowing = false;
+ this.ended = false;
+ this.endEmitted = false;
+ this.reading = false;
+
+ // In streams that never have any data, and do push(null) right away,
+ // the consumer can miss the 'end' event if they do some I/O before
+ // consuming the stream. So, we don't emit('end') until some reading
+ // happens.
+ this.calledRead = false;
+
+ // a flag to be able to tell if the onwrite cb is called immediately,
+ // or on a later tick. We set this to true at first, becuase any
+ // actions that shouldn't happen until "later" should generally also
+ // not happen before the first write call.
+ this.sync = true;
+
+ // whenever we return null, then we set a flag to say
+ // that we're awaiting a 'readable' event emission.
+ this.needReadable = false;
+ this.emittedReadable = false;
+ this.readableListening = false;
+
+
+ // object stream flag. Used to make read(n) ignore n and to
+ // make all the buffer merging and length checks go away
+ this.objectMode = !!options.objectMode;
+
+ // Crypto is kind of old and crusty. Historically, its default string
+ // encoding is 'binary' so we have to make this configurable.
+ // Everything else in the universe uses 'utf8', though.
+ this.defaultEncoding = options.defaultEncoding || 'utf8';
+
+ // when piping, we only care about 'readable' events that happen
+ // after read()ing all the bytes and not getting any pushback.
+ this.ranOut = false;
+
+ // the number of writers that are awaiting a drain event in .pipe()s
+ this.awaitDrain = 0;
+
+ // if true, a maybeReadMore has been scheduled
+ this.readingMore = false;
+
+ this.decoder = null;
+ this.encoding = null;
+ if (options.encoding) {
+ if (!StringDecoder)
+ StringDecoder = require('string_decoder/').StringDecoder;
+ this.decoder = new StringDecoder(options.encoding);
+ this.encoding = options.encoding;
+ }
+}
+
+function Readable(options) {
+ if (!(this instanceof Readable))
+ return new Readable(options);
+
+ this._readableState = new ReadableState(options, this);
+
+ // legacy
+ this.readable = true;
+
+ Stream.call(this);
+}
+
+// Manually shove something into the read() buffer.
+// This returns true if the highWaterMark has not been hit yet,
+// similar to how Writable.write() returns true if you should
+// write() some more.
+Readable.prototype.push = function(chunk, encoding) {
+ var state = this._readableState;
+
+ if (typeof chunk === 'string' && !state.objectMode) {
+ encoding = encoding || state.defaultEncoding;
+ if (encoding !== state.encoding) {
+ chunk = new Buffer(chunk, encoding);
+ encoding = '';
+ }
+ }
+
+ return readableAddChunk(this, state, chunk, encoding, false);
+};
+
+// Unshift should *always* be something directly out of read()
+Readable.prototype.unshift = function(chunk) {
+ var state = this._readableState;
+ return readableAddChunk(this, state, chunk, '', true);
+};
+
+function readableAddChunk(stream, state, chunk, encoding, addToFront) {
+ var er = chunkInvalid(state, chunk);
+ if (er) {
+ stream.emit('error', er);
+ } else if (chunk === null || chunk === undefined) {
+ state.reading = false;
+ if (!state.ended)
+ onEofChunk(stream, state);
+ } else if (state.objectMode || chunk && chunk.length > 0) {
+ if (state.ended && !addToFront) {
+ var e = new Error('stream.push() after EOF');
+ stream.emit('error', e);
+ } else if (state.endEmitted && addToFront) {
+ var e = new Error('stream.unshift() after end event');
+ stream.emit('error', e);
+ } else {
+ if (state.decoder && !addToFront && !encoding)
+ chunk = state.decoder.write(chunk);
+
+ // update the buffer info.
+ state.length += state.objectMode ? 1 : chunk.length;
+ if (addToFront) {
+ state.buffer.unshift(chunk);
+ } else {
+ state.reading = false;
+ state.buffer.push(chunk);
+ }
+
+ if (state.needReadable)
+ emitReadable(stream);
+
+ maybeReadMore(stream, state);
+ }
+ } else if (!addToFront) {
+ state.reading = false;
+ }
+
+ return needMoreData(state);
+}
+
+
+
+// if it's past the high water mark, we can push in some more.
+// Also, if we have no data yet, we can stand some
+// more bytes. This is to work around cases where hwm=0,
+// such as the repl. Also, if the push() triggered a
+// readable event, and the user called read(largeNumber) such that
+// needReadable was set, then we ought to push more, so that another
+// 'readable' event will be triggered.
+function needMoreData(state) {
+ return !state.ended &&
+ (state.needReadable ||
+ state.length < state.highWaterMark ||
+ state.length === 0);
+}
+
+// backwards compatibility.
+Readable.prototype.setEncoding = function(enc) {
+ if (!StringDecoder)
+ StringDecoder = require('string_decoder/').StringDecoder;
+ this._readableState.decoder = new StringDecoder(enc);
+ this._readableState.encoding = enc;
+};
+
+// Don't raise the hwm > 128MB
+var MAX_HWM = 0x800000;
+function roundUpToNextPowerOf2(n) {
+ if (n >= MAX_HWM) {
+ n = MAX_HWM;
+ } else {
+ // Get the next highest power of 2
+ n--;
+ for (var p = 1; p < 32; p <<= 1) n |= n >> p;
+ n++;
+ }
+ return n;
+}
+
+function howMuchToRead(n, state) {
+ if (state.length === 0 && state.ended)
+ return 0;
+
+ if (state.objectMode)
+ return n === 0 ? 0 : 1;
+
+ if (n === null || isNaN(n)) {
+ // only flow one buffer at a time
+ if (state.flowing && state.buffer.length)
+ return state.buffer[0].length;
+ else
+ return state.length;
+ }
+
+ if (n <= 0)
+ return 0;
+
+ // If we're asking for more than the target buffer level,
+ // then raise the water mark. Bump up to the next highest
+ // power of 2, to prevent increasing it excessively in tiny
+ // amounts.
+ if (n > state.highWaterMark)
+ state.highWaterMark = roundUpToNextPowerOf2(n);
+
+ // don't have that much. return null, unless we've ended.
+ if (n > state.length) {
+ if (!state.ended) {
+ state.needReadable = true;
+ return 0;
+ } else
+ return state.length;
+ }
+
+ return n;
+}
+
+// you can override either this method, or the async _read(n) below.
+Readable.prototype.read = function(n) {
+ var state = this._readableState;
+ state.calledRead = true;
+ var nOrig = n;
+ var ret;
+
+ if (typeof n !== 'number' || n > 0)
+ state.emittedReadable = false;
+
+ // if we're doing read(0) to trigger a readable event, but we
+ // already have a bunch of data in the buffer, then just trigger
+ // the 'readable' event and move on.
+ if (n === 0 &&
+ state.needReadable &&
+ (state.length >= state.highWaterMark || state.ended)) {
+ emitReadable(this);
+ return null;
+ }
+
+ n = howMuchToRead(n, state);
+
+ // if we've ended, and we're now clear, then finish it up.
+ if (n === 0 && state.ended) {
+ ret = null;
+
+ // In cases where the decoder did not receive enough data
+ // to produce a full chunk, then immediately received an
+ // EOF, state.buffer will contain [<Buffer >, <Buffer 00 ...>].
+ // howMuchToRead will see this and coerce the amount to
+ // read to zero (because it's looking at the length of the
+ // first <Buffer > in state.buffer), and we'll end up here.
+ //
+ // This can only happen via state.decoder -- no other venue
+ // exists for pushing a zero-length chunk into state.buffer
+ // and triggering this behavior. In this case, we return our
+ // remaining data and end the stream, if appropriate.
+ if (state.length > 0 && state.decoder) {
+ ret = fromList(n, state);
+ state.length -= ret.length;
+ }
+
+ if (state.length === 0)
+ endReadable(this);
+
+ return ret;
+ }
+
+ // All the actual chunk generation logic needs to be
+ // *below* the call to _read. The reason is that in certain
+ // synthetic stream cases, such as passthrough streams, _read
+ // may be a completely synchronous operation which may change
+ // the state of the read buffer, providing enough data when
+ // before there was *not* enough.
+ //
+ // So, the steps are:
+ // 1. Figure out what the state of things will be after we do
+ // a read from the buffer.
+ //
+ // 2. If that resulting state will trigger a _read, then call _read.
+ // Note that this may be asynchronous, or synchronous. Yes, it is
+ // deeply ugly to write APIs this way, but that still doesn't mean
+ // that the Readable class should behave improperly, as streams are
+ // designed to be sync/async agnostic.
+ // Take note if the _read call is sync or async (ie, if the read call
+ // has returned yet), so that we know whether or not it's safe to emit
+ // 'readable' etc.
+ //
+ // 3. Actually pull the requested chunks out of the buffer and return.
+
+ // if we need a readable event, then we need to do some reading.
+ var doRead = state.needReadable;
+
+ // if we currently have less than the highWaterMark, then also read some
+ if (state.length - n <= state.highWaterMark)
+ doRead = true;
+
+ // however, if we've ended, then there's no point, and if we're already
+ // reading, then it's unnecessary.
+ if (state.ended || state.reading)
+ doRead = false;
+
+ if (doRead) {
+ state.reading = true;
+ state.sync = true;
+ // if the length is currently zero, then we *need* a readable event.
+ if (state.length === 0)
+ state.needReadable = true;
+ // call internal read method
+ this._read(state.highWaterMark);
+ state.sync = false;
+ }
+
+ // If _read called its callback synchronously, then `reading`
+ // will be false, and we need to re-evaluate how much data we
+ // can return to the user.
+ if (doRead && !state.reading)
+ n = howMuchToRead(nOrig, state);
+
+ if (n > 0)
+ ret = fromList(n, state);
+ else
+ ret = null;
+
+ if (ret === null) {
+ state.needReadable = true;
+ n = 0;
+ }
+
+ state.length -= n;
+
+ // If we have nothing in the buffer, then we want to know
+ // as soon as we *do* get something into the buffer.
+ if (state.length === 0 && !state.ended)
+ state.needReadable = true;
+
+ // If we happened to read() exactly the remaining amount in the
+ // buffer, and the EOF has been seen at this point, then make sure
+ // that we emit 'end' on the very next tick.
+ if (state.ended && !state.endEmitted && state.length === 0)
+ endReadable(this);
+
+ return ret;
+};
+
+function chunkInvalid(state, chunk) {
+ var er = null;
+ if (!Buffer.isBuffer(chunk) &&
+ 'string' !== typeof chunk &&
+ chunk !== null &&
+ chunk !== undefined &&
+ !state.objectMode) {
+ er = new TypeError('Invalid non-string/buffer chunk');
+ }
+ return er;
+}
+
+
+function onEofChunk(stream, state) {
+ if (state.decoder && !state.ended) {
+ var chunk = state.decoder.end();
+ if (chunk && chunk.length) {
+ state.buffer.push(chunk);
+ state.length += state.objectMode ? 1 : chunk.length;
+ }
+ }
+ state.ended = true;
+
+ // if we've ended and we have some data left, then emit
+ // 'readable' now to make sure it gets picked up.
+ if (state.length > 0)
+ emitReadable(stream);
+ else
+ endReadable(stream);
+}
+
+// Don't emit readable right away in sync mode, because this can trigger
+// another read() call => stack overflow. This way, it might trigger
+// a nextTick recursion warning, but that's not so bad.
+function emitReadable(stream) {
+ var state = stream._readableState;
+ state.needReadable = false;
+ if (state.emittedReadable)
+ return;
+
+ state.emittedReadable = true;
+ if (state.sync)
+ process.nextTick(function() {
+ emitReadable_(stream);
+ });
+ else
+ emitReadable_(stream);
+}
+
+function emitReadable_(stream) {
+ stream.emit('readable');
+}
+
+
+// at this point, the user has presumably seen the 'readable' event,
+// and called read() to consume some data. that may have triggered
+// in turn another _read(n) call, in which case reading = true if
+// it's in progress.
+// However, if we're not ended, or reading, and the length < hwm,
+// then go ahead and try to read some more preemptively.
+function maybeReadMore(stream, state) {
+ if (!state.readingMore) {
+ state.readingMore = true;
+ process.nextTick(function() {
+ maybeReadMore_(stream, state);
+ });
+ }
+}
+
+function maybeReadMore_(stream, state) {
+ var len = state.length;
+ while (!state.reading && !state.flowing && !state.ended &&
+ state.length < state.highWaterMark) {
+ stream.read(0);
+ if (len === state.length)
+ // didn't get any data, stop spinning.
+ break;
+ else
+ len = state.length;
+ }
+ state.readingMore = false;
+}
+
+// abstract method. to be overridden in specific implementation classes.
+// call cb(er, data) where data is <= n in length.
+// for virtual (non-string, non-buffer) streams, "length" is somewhat
+// arbitrary, and perhaps not very meaningful.
+Readable.prototype._read = function(n) {
+ this.emit('error', new Error('not implemented'));
+};
+
+Readable.prototype.pipe = function(dest, pipeOpts) {
+ var src = this;
+ var state = this._readableState;
+
+ switch (state.pipesCount) {
+ case 0:
+ state.pipes = dest;
+ break;
+ case 1:
+ state.pipes = [state.pipes, dest];
+ break;
+ default:
+ state.pipes.push(dest);
+ break;
+ }
+ state.pipesCount += 1;
+
+ var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
+ dest !== process.stdout &&
+ dest !== process.stderr;
+
+ var endFn = doEnd ? onend : cleanup;
+ if (state.endEmitted)
+ process.nextTick(endFn);
+ else
+ src.once('end', endFn);
+
+ dest.on('unpipe', onunpipe);
+ function onunpipe(readable) {
+ if (readable !== src) return;
+ cleanup();
+ }
+
+ function onend() {
+ dest.end();
+ }
+
+ // when the dest drains, it reduces the awaitDrain counter
+ // on the source. This would be more elegant with a .once()
+ // handler in flow(), but adding and removing repeatedly is
+ // too slow.
+ var ondrain = pipeOnDrain(src);
+ dest.on('drain', ondrain);
+
+ function cleanup() {
+ // cleanup event handlers once the pipe is broken
+ dest.removeListener('close', onclose);
+ dest.removeListener('finish', onfinish);
+ dest.removeListener('drain', ondrain);
+ dest.removeListener('error', onerror);
+ dest.removeListener('unpipe', onunpipe);
+ src.removeListener('end', onend);
+ src.removeListener('end', cleanup);
+
+ // if the reader is waiting for a drain event from this
+ // specific writer, then it would cause it to never start
+ // flowing again.
+ // So, if this is awaiting a drain, then we just call it now.
+ // If we don't know, then assume that we are waiting for one.
+ if (!dest._writableState || dest._writableState.needDrain)
+ ondrain();
+ }
+
+ // if the dest has an error, then stop piping into it.
+ // however, don't suppress the throwing behavior for this.
+ function onerror(er) {
+ unpipe();
+ dest.removeListener('error', onerror);
+ if (EE.listenerCount(dest, 'error') === 0)
+ dest.emit('error', er);
+ }
+ // This is a brutally ugly hack to make sure that our error handler
+ // is attached before any userland ones. NEVER DO THIS.
+ if (!dest._events || !dest._events.error)
+ dest.on('error', onerror);
+ else if (isArray(dest._events.error))
+ dest._events.error.unshift(onerror);
+ else
+ dest._events.error = [onerror, dest._events.error];
+
+
+
+ // Both close and finish should trigger unpipe, but only once.
+ function onclose() {
+ dest.removeListener('finish', onfinish);
+ unpipe();
+ }
+ dest.once('close', onclose);
+ function onfinish() {
+ dest.removeListener('close', onclose);
+ unpipe();
+ }
+ dest.once('finish', onfinish);
+
+ function unpipe() {
+ src.unpipe(dest);
+ }
+
+ // tell the dest that it's being piped to
+ dest.emit('pipe', src);
+
+ // start the flow if it hasn't been started already.
+ if (!state.flowing) {
+ // the handler that waits for readable events after all
+ // the data gets sucked out in flow.
+ // This would be easier to follow with a .once() handler
+ // in flow(), but that is too slow.
+ this.on('readable', pipeOnReadable);
+
+ state.flowing = true;
+ process.nextTick(function() {
+ flow(src);
+ });
+ }
+
+ return dest;
+};
+
+function pipeOnDrain(src) {
+ return function() {
+ var dest = this;
+ var state = src._readableState;
+ state.awaitDrain--;
+ if (state.awaitDrain === 0)
+ flow(src);
+ };
+}
+
+function flow(src) {
+ var state = src._readableState;
+ var chunk;
+ state.awaitDrain = 0;
+
+ function write(dest, i, list) {
+ var written = dest.write(chunk);
+ if (false === written) {
+ state.awaitDrain++;
+ }
+ }
+
+ while (state.pipesCount && null !== (chunk = src.read())) {
+
+ if (state.pipesCount === 1)
+ write(state.pipes, 0, null);
+ else
+ forEach(state.pipes, write);
+
+ src.emit('data', chunk);
+
+ // if anyone needs a drain, then we have to wait for that.
+ if (state.awaitDrain > 0)
+ return;
+ }
+
+ // if every destination was unpiped, either before entering this
+ // function, or in the while loop, then stop flowing.
+ //
+ // NB: This is a pretty rare edge case.
+ if (state.pipesCount === 0) {
+ state.flowing = false;
+
+ // if there were data event listeners added, then switch to old mode.
+ if (EE.listenerCount(src, 'data') > 0)
+ emitDataEvents(src);
+ return;
+ }
+
+ // at this point, no one needed a drain, so we just ran out of data
+ // on the next readable event, start it over again.
+ state.ranOut = true;
+}
+
+function pipeOnReadable() {
+ if (this._readableState.ranOut) {
+ this._readableState.ranOut = false;
+ flow(this);
+ }
+}
+
+
+Readable.prototype.unpipe = function(dest) {
+ var state = this._readableState;
+
+ // if we're not piping anywhere, then do nothing.
+ if (state.pipesCount === 0)
+ return this;
+
+ // just one destination. most common case.
+ if (state.pipesCount === 1) {
+ // passed in one, but it's not the right one.
+ if (dest && dest !== state.pipes)
+ return this;
+
+ if (!dest)
+ dest = state.pipes;
+
+ // got a match.
+ state.pipes = null;
+ state.pipesCount = 0;
+ this.removeListener('readable', pipeOnReadable);
+ state.flowing = false;
+ if (dest)
+ dest.emit('unpipe', this);
+ return this;
+ }
+
+ // slow case. multiple pipe destinations.
+
+ if (!dest) {
+ // remove all.
+ var dests = state.pipes;
+ var len = state.pipesCount;
+ state.pipes = null;
+ state.pipesCount = 0;
+ this.removeListener('readable', pipeOnReadable);
+ state.flowing = false;
+
+ for (var i = 0; i < len; i++)
+ dests[i].emit('unpipe', this);
+ return this;
+ }
+
+ // try to find the right one.
+ var i = indexOf(state.pipes, dest);
+ if (i === -1)
+ return this;
+
+ state.pipes.splice(i, 1);
+ state.pipesCount -= 1;
+ if (state.pipesCount === 1)
+ state.pipes = state.pipes[0];
+
+ dest.emit('unpipe', this);
+
+ return this;
+};
+
+// set up data events if they are asked for
+// Ensure readable listeners eventually get something
+Readable.prototype.on = function(ev, fn) {
+ var res = Stream.prototype.on.call(this, ev, fn);
+
+ if (ev === 'data' && !this._readableState.flowing)
+ emitDataEvents(this);
+
+ if (ev === 'readable' && this.readable) {
+ var state = this._readableState;
+ if (!state.readableListening) {
+ state.readableListening = true;
+ state.emittedReadable = false;
+ state.needReadable = true;
+ if (!state.reading) {
+ this.read(0);
+ } else if (state.length) {
+ emitReadable(this, state);
+ }
+ }
+ }
+
+ return res;
+};
+Readable.prototype.addListener = Readable.prototype.on;
+
+// pause() and resume() are remnants of the legacy readable stream API
+// If the user uses them, then switch into old mode.
+Readable.prototype.resume = function() {
+ emitDataEvents(this);
+ this.read(0);
+ this.emit('resume');
+};
+
+Readable.prototype.pause = function() {
+ emitDataEvents(this, true);
+ this.emit('pause');
+};
+
+function emitDataEvents(stream, startPaused) {
+ var state = stream._readableState;
+
+ if (state.flowing) {
+ // https://github.com/isaacs/readable-stream/issues/16
+ throw new Error('Cannot switch to old mode now.');
+ }
+
+ var paused = startPaused || false;
+ var readable = false;
+
+ // convert to an old-style stream.
+ stream.readable = true;
+ stream.pipe = Stream.prototype.pipe;
+ stream.on = stream.addListener = Stream.prototype.on;
+
+ stream.on('readable', function() {
+ readable = true;
+
+ var c;
+ while (!paused && (null !== (c = stream.read())))
+ stream.emit('data', c);
+
+ if (c === null) {
+ readable = false;
+ stream._readableState.needReadable = true;
+ }
+ });
+
+ stream.pause = function() {
+ paused = true;
+ this.emit('pause');
+ };
+
+ stream.resume = function() {
+ paused = false;
+ if (readable)
+ process.nextTick(function() {
+ stream.emit('readable');
+ });
+ else
+ this.read(0);
+ this.emit('resume');
+ };
+
+ // now make it start, just in case it hadn't already.
+ stream.emit('readable');
+}
+
+// wrap an old-style stream as the async data source.
+// This is *not* part of the readable stream interface.
+// It is an ugly unfortunate mess of history.
+Readable.prototype.wrap = function(stream) {
+ var state = this._readableState;
+ var paused = false;
+
+ var self = this;
+ stream.on('end', function() {
+ if (state.decoder && !state.ended) {
+ var chunk = state.decoder.end();
+ if (chunk && chunk.length)
+ self.push(chunk);
+ }
+
+ self.push(null);
+ });
+
+ stream.on('data', function(chunk) {
+ if (state.decoder)
+ chunk = state.decoder.write(chunk);
+
+ // don't skip over falsy values in objectMode
+ //if (state.objectMode && util.isNullOrUndefined(chunk))
+ if (state.objectMode && (chunk === null || chunk === undefined))
+ return;
+ else if (!state.objectMode && (!chunk || !chunk.length))
+ return;
+
+ var ret = self.push(chunk);
+ if (!ret) {
+ paused = true;
+ stream.pause();
+ }
+ });
+
+ // proxy all the other methods.
+ // important when wrapping filters and duplexes.
+ for (var i in stream) {
+ if (typeof stream[i] === 'function' &&
+ typeof this[i] === 'undefined') {
+ this[i] = function(method) { return function() {
+ return stream[method].apply(stream, arguments);
+ }}(i);
+ }
+ }
+
+ // proxy certain important events.
+ var events = ['error', 'close', 'destroy', 'pause', 'resume'];
+ forEach(events, function(ev) {
+ stream.on(ev, self.emit.bind(self, ev));
+ });
+
+ // when we try to consume some more bytes, simply unpause the
+ // underlying stream.
+ self._read = function(n) {
+ if (paused) {
+ paused = false;
+ stream.resume();
+ }
+ };
+
+ return self;
+};
+
+
+
+// exposed for testing purposes only.
+Readable._fromList = fromList;
+
+// Pluck off n bytes from an array of buffers.
+// Length is the combined lengths of all the buffers in the list.
+function fromList(n, state) {
+ var list = state.buffer;
+ var length = state.length;
+ var stringMode = !!state.decoder;
+ var objectMode = !!state.objectMode;
+ var ret;
+
+ // nothing in the list, definitely empty.
+ if (list.length === 0)
+ return null;
+
+ if (length === 0)
+ ret = null;
+ else if (objectMode)
+ ret = list.shift();
+ else if (!n || n >= length) {
+ // read it all, truncate the array.
+ if (stringMode)
+ ret = list.join('');
+ else
+ ret = Buffer.concat(list, length);
+ list.length = 0;
+ } else {
+ // read just some of it.
+ if (n < list[0].length) {
+ // just take a part of the first list item.
+ // slice is the same for buffers and strings.
+ var buf = list[0];
+ ret = buf.slice(0, n);
+ list[0] = buf.slice(n);
+ } else if (n === list[0].length) {
+ // first list is a perfect match
+ ret = list.shift();
+ } else {
+ // complex case.
+ // we have enough to cover it, but it spans past the first buffer.
+ if (stringMode)
+ ret = '';
+ else
+ ret = new Buffer(n);
+
+ var c = 0;
+ for (var i = 0, l = list.length; i < l && c < n; i++) {
+ var buf = list[0];
+ var cpy = Math.min(n - c, buf.length);
+
+ if (stringMode)
+ ret += buf.slice(0, cpy);
+ else
+ buf.copy(ret, c, 0, cpy);
+
+ if (cpy < buf.length)
+ list[0] = buf.slice(cpy);
+ else
+ list.shift();
+
+ c += cpy;
+ }
+ }
+ }
+
+ return ret;
+}
+
+function endReadable(stream) {
+ var state = stream._readableState;
+
+ // If we get here before consuming all the bytes, then that is a
+ // bug in node. Should never happen.
+ if (state.length > 0)
+ throw new Error('endReadable called on non-empty stream');
+
+ if (!state.endEmitted && state.calledRead) {
+ state.ended = true;
+ process.nextTick(function() {
+ // Check that we didn't get one last unshift.
+ if (!state.endEmitted && state.length === 0) {
+ state.endEmitted = true;
+ stream.readable = false;
+ stream.emit('end');
+ }
+ });
+ }
+}
+
+function forEach (xs, f) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i);
+ }
+}
+
+function indexOf (xs, x) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ if (xs[i] === x) return i;
+ }
+ return -1;
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_transform.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_transform.js
new file mode 100644
index 00000000..eb188df3
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_transform.js
@@ -0,0 +1,210 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+// a transform stream is a readable/writable stream where you do
+// something with the data. Sometimes it's called a "filter",
+// but that's not a great name for it, since that implies a thing where
+// some bits pass through, and others are simply ignored. (That would
+// be a valid example of a transform, of course.)
+//
+// While the output is causally related to the input, it's not a
+// necessarily symmetric or synchronous transformation. For example,
+// a zlib stream might take multiple plain-text writes(), and then
+// emit a single compressed chunk some time in the future.
+//
+// Here's how this works:
+//
+// The Transform stream has all the aspects of the readable and writable
+// stream classes. When you write(chunk), that calls _write(chunk,cb)
+// internally, and returns false if there's a lot of pending writes
+// buffered up. When you call read(), that calls _read(n) until
+// there's enough pending readable data buffered up.
+//
+// In a transform stream, the written data is placed in a buffer. When
+// _read(n) is called, it transforms the queued up data, calling the
+// buffered _write cb's as it consumes chunks. If consuming a single
+// written chunk would result in multiple output chunks, then the first
+// outputted bit calls the readcb, and subsequent chunks just go into
+// the read buffer, and will cause it to emit 'readable' if necessary.
+//
+// This way, back-pressure is actually determined by the reading side,
+// since _read has to be called to start processing a new chunk. However,
+// a pathological inflate type of transform can cause excessive buffering
+// here. For example, imagine a stream where every byte of input is
+// interpreted as an integer from 0-255, and then results in that many
+// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
+// 1kb of data being output. In this case, you could write a very small
+// amount of input, and end up with a very large amount of output. In
+// such a pathological inflating mechanism, there'd be no way to tell
+// the system to stop doing the transform. A single 4MB write could
+// cause the system to run out of memory.
+//
+// However, even in such a pathological case, only a single written chunk
+// would be consumed, and then the rest would wait (un-transformed) until
+// the results of the previous transformed chunk were consumed.
+
+module.exports = Transform;
+
+var Duplex = require('./_stream_duplex');
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+util.inherits(Transform, Duplex);
+
+
+function TransformState(options, stream) {
+ this.afterTransform = function(er, data) {
+ return afterTransform(stream, er, data);
+ };
+
+ this.needTransform = false;
+ this.transforming = false;
+ this.writecb = null;
+ this.writechunk = null;
+}
+
+function afterTransform(stream, er, data) {
+ var ts = stream._transformState;
+ ts.transforming = false;
+
+ var cb = ts.writecb;
+
+ if (!cb)
+ return stream.emit('error', new Error('no writecb in Transform class'));
+
+ ts.writechunk = null;
+ ts.writecb = null;
+
+ if (data !== null && data !== undefined)
+ stream.push(data);
+
+ if (cb)
+ cb(er);
+
+ var rs = stream._readableState;
+ rs.reading = false;
+ if (rs.needReadable || rs.length < rs.highWaterMark) {
+ stream._read(rs.highWaterMark);
+ }
+}
+
+
+function Transform(options) {
+ if (!(this instanceof Transform))
+ return new Transform(options);
+
+ Duplex.call(this, options);
+
+ var ts = this._transformState = new TransformState(options, this);
+
+ // when the writable side finishes, then flush out anything remaining.
+ var stream = this;
+
+ // start out asking for a readable event once data is transformed.
+ this._readableState.needReadable = true;
+
+ // we have implemented the _read method, and done the other things
+ // that Readable wants before the first _read call, so unset the
+ // sync guard flag.
+ this._readableState.sync = false;
+
+ this.once('finish', function() {
+ if ('function' === typeof this._flush)
+ this._flush(function(er) {
+ done(stream, er);
+ });
+ else
+ done(stream);
+ });
+}
+
+Transform.prototype.push = function(chunk, encoding) {
+ this._transformState.needTransform = false;
+ return Duplex.prototype.push.call(this, chunk, encoding);
+};
+
+// This is the part where you do stuff!
+// override this function in implementation classes.
+// 'chunk' is an input chunk.
+//
+// Call `push(newChunk)` to pass along transformed output
+// to the readable side. You may call 'push' zero or more times.
+//
+// Call `cb(err)` when you are done with this chunk. If you pass
+// an error, then that'll put the hurt on the whole operation. If you
+// never call cb(), then you'll never get another chunk.
+Transform.prototype._transform = function(chunk, encoding, cb) {
+ throw new Error('not implemented');
+};
+
+Transform.prototype._write = function(chunk, encoding, cb) {
+ var ts = this._transformState;
+ ts.writecb = cb;
+ ts.writechunk = chunk;
+ ts.writeencoding = encoding;
+ if (!ts.transforming) {
+ var rs = this._readableState;
+ if (ts.needTransform ||
+ rs.needReadable ||
+ rs.length < rs.highWaterMark)
+ this._read(rs.highWaterMark);
+ }
+};
+
+// Doesn't matter what the args are here.
+// _transform does all the work.
+// That we got here means that the readable side wants more data.
+Transform.prototype._read = function(n) {
+ var ts = this._transformState;
+
+ if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
+ ts.transforming = true;
+ this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
+ } else {
+ // mark that we need a transform, so that any data that comes in
+ // will get processed, now that we've asked for it.
+ ts.needTransform = true;
+ }
+};
+
+
+function done(stream, er) {
+ if (er)
+ return stream.emit('error', er);
+
+ // if there's nothing in the write buffer, then that means
+ // that nothing more will ever be provided
+ var ws = stream._writableState;
+ var rs = stream._readableState;
+ var ts = stream._transformState;
+
+ if (ws.length)
+ throw new Error('calling transform done when ws.length != 0');
+
+ if (ts.transforming)
+ throw new Error('calling transform done when still transforming');
+
+ return stream.push(null);
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_writable.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_writable.js
new file mode 100644
index 00000000..4bdaa4fa
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/lib/_stream_writable.js
@@ -0,0 +1,386 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// A bit simpler than readable streams.
+// Implement an async ._write(chunk, cb), and it'll handle all
+// the drain event emission and buffering.
+
+module.exports = Writable;
+
+/*<replacement>*/
+var Buffer = require('buffer').Buffer;
+/*</replacement>*/
+
+Writable.WritableState = WritableState;
+
+
+/*<replacement>*/
+var util = require('core-util-is');
+util.inherits = require('inherits');
+/*</replacement>*/
+
+var Stream = require('stream');
+
+util.inherits(Writable, Stream);
+
+function WriteReq(chunk, encoding, cb) {
+ this.chunk = chunk;
+ this.encoding = encoding;
+ this.callback = cb;
+}
+
+function WritableState(options, stream) {
+ options = options || {};
+
+ // the point at which write() starts returning false
+ // Note: 0 is a valid value, means that we always return false if
+ // the entire buffer is not flushed immediately on write()
+ var hwm = options.highWaterMark;
+ this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
+
+ // object stream flag to indicate whether or not this stream
+ // contains buffers or objects.
+ this.objectMode = !!options.objectMode;
+
+ // cast to ints.
+ this.highWaterMark = ~~this.highWaterMark;
+
+ this.needDrain = false;
+ // at the start of calling end()
+ this.ending = false;
+ // when end() has been called, and returned
+ this.ended = false;
+ // when 'finish' is emitted
+ this.finished = false;
+
+ // should we decode strings into buffers before passing to _write?
+ // this is here so that some node-core streams can optimize string
+ // handling at a lower level.
+ var noDecode = options.decodeStrings === false;
+ this.decodeStrings = !noDecode;
+
+ // Crypto is kind of old and crusty. Historically, its default string
+ // encoding is 'binary' so we have to make this configurable.
+ // Everything else in the universe uses 'utf8', though.
+ this.defaultEncoding = options.defaultEncoding || 'utf8';
+
+ // not an actual buffer we keep track of, but a measurement
+ // of how much we're waiting to get pushed to some underlying
+ // socket or file.
+ this.length = 0;
+
+ // a flag to see when we're in the middle of a write.
+ this.writing = false;
+
+ // a flag to be able to tell if the onwrite cb is called immediately,
+ // or on a later tick. We set this to true at first, becuase any
+ // actions that shouldn't happen until "later" should generally also
+ // not happen before the first write call.
+ this.sync = true;
+
+ // a flag to know if we're processing previously buffered items, which
+ // may call the _write() callback in the same tick, so that we don't
+ // end up in an overlapped onwrite situation.
+ this.bufferProcessing = false;
+
+ // the callback that's passed to _write(chunk,cb)
+ this.onwrite = function(er) {
+ onwrite(stream, er);
+ };
+
+ // the callback that the user supplies to write(chunk,encoding,cb)
+ this.writecb = null;
+
+ // the amount that is being written when _write is called.
+ this.writelen = 0;
+
+ this.buffer = [];
+
+ // True if the error was already emitted and should not be thrown again
+ this.errorEmitted = false;
+}
+
+function Writable(options) {
+ var Duplex = require('./_stream_duplex');
+
+ // Writable ctor is applied to Duplexes, though they're not
+ // instanceof Writable, they're instanceof Readable.
+ if (!(this instanceof Writable) && !(this instanceof Duplex))
+ return new Writable(options);
+
+ this._writableState = new WritableState(options, this);
+
+ // legacy.
+ this.writable = true;
+
+ Stream.call(this);
+}
+
+// Otherwise people can pipe Writable streams, which is just wrong.
+Writable.prototype.pipe = function() {
+ this.emit('error', new Error('Cannot pipe. Not readable.'));
+};
+
+
+function writeAfterEnd(stream, state, cb) {
+ var er = new Error('write after end');
+ // TODO: defer error events consistently everywhere, not just the cb
+ stream.emit('error', er);
+ process.nextTick(function() {
+ cb(er);
+ });
+}
+
+// If we get something that is not a buffer, string, null, or undefined,
+// and we're not in objectMode, then that's an error.
+// Otherwise stream chunks are all considered to be of length=1, and the
+// watermarks determine how many objects to keep in the buffer, rather than
+// how many bytes or characters.
+function validChunk(stream, state, chunk, cb) {
+ var valid = true;
+ if (!Buffer.isBuffer(chunk) &&
+ 'string' !== typeof chunk &&
+ chunk !== null &&
+ chunk !== undefined &&
+ !state.objectMode) {
+ var er = new TypeError('Invalid non-string/buffer chunk');
+ stream.emit('error', er);
+ process.nextTick(function() {
+ cb(er);
+ });
+ valid = false;
+ }
+ return valid;
+}
+
+Writable.prototype.write = function(chunk, encoding, cb) {
+ var state = this._writableState;
+ var ret = false;
+
+ if (typeof encoding === 'function') {
+ cb = encoding;
+ encoding = null;
+ }
+
+ if (Buffer.isBuffer(chunk))
+ encoding = 'buffer';
+ else if (!encoding)
+ encoding = state.defaultEncoding;
+
+ if (typeof cb !== 'function')
+ cb = function() {};
+
+ if (state.ended)
+ writeAfterEnd(this, state, cb);
+ else if (validChunk(this, state, chunk, cb))
+ ret = writeOrBuffer(this, state, chunk, encoding, cb);
+
+ return ret;
+};
+
+function decodeChunk(state, chunk, encoding) {
+ if (!state.objectMode &&
+ state.decodeStrings !== false &&
+ typeof chunk === 'string') {
+ chunk = new Buffer(chunk, encoding);
+ }
+ return chunk;
+}
+
+// if we're already writing something, then just put this
+// in the queue, and wait our turn. Otherwise, call _write
+// If we return false, then we need a drain event, so set that flag.
+function writeOrBuffer(stream, state, chunk, encoding, cb) {
+ chunk = decodeChunk(state, chunk, encoding);
+ if (Buffer.isBuffer(chunk))
+ encoding = 'buffer';
+ var len = state.objectMode ? 1 : chunk.length;
+
+ state.length += len;
+
+ var ret = state.length < state.highWaterMark;
+ // we must ensure that previous needDrain will not be reset to false.
+ if (!ret)
+ state.needDrain = true;
+
+ if (state.writing)
+ state.buffer.push(new WriteReq(chunk, encoding, cb));
+ else
+ doWrite(stream, state, len, chunk, encoding, cb);
+
+ return ret;
+}
+
+function doWrite(stream, state, len, chunk, encoding, cb) {
+ state.writelen = len;
+ state.writecb = cb;
+ state.writing = true;
+ state.sync = true;
+ stream._write(chunk, encoding, state.onwrite);
+ state.sync = false;
+}
+
+function onwriteError(stream, state, sync, er, cb) {
+ if (sync)
+ process.nextTick(function() {
+ cb(er);
+ });
+ else
+ cb(er);
+
+ stream._writableState.errorEmitted = true;
+ stream.emit('error', er);
+}
+
+function onwriteStateUpdate(state) {
+ state.writing = false;
+ state.writecb = null;
+ state.length -= state.writelen;
+ state.writelen = 0;
+}
+
+function onwrite(stream, er) {
+ var state = stream._writableState;
+ var sync = state.sync;
+ var cb = state.writecb;
+
+ onwriteStateUpdate(state);
+
+ if (er)
+ onwriteError(stream, state, sync, er, cb);
+ else {
+ // Check if we're actually ready to finish, but don't emit yet
+ var finished = needFinish(stream, state);
+
+ if (!finished && !state.bufferProcessing && state.buffer.length)
+ clearBuffer(stream, state);
+
+ if (sync) {
+ process.nextTick(function() {
+ afterWrite(stream, state, finished, cb);
+ });
+ } else {
+ afterWrite(stream, state, finished, cb);
+ }
+ }
+}
+
+function afterWrite(stream, state, finished, cb) {
+ if (!finished)
+ onwriteDrain(stream, state);
+ cb();
+ if (finished)
+ finishMaybe(stream, state);
+}
+
+// Must force callback to be called on nextTick, so that we don't
+// emit 'drain' before the write() consumer gets the 'false' return
+// value, and has a chance to attach a 'drain' listener.
+function onwriteDrain(stream, state) {
+ if (state.length === 0 && state.needDrain) {
+ state.needDrain = false;
+ stream.emit('drain');
+ }
+}
+
+
+// if there's something in the buffer waiting, then process it
+function clearBuffer(stream, state) {
+ state.bufferProcessing = true;
+
+ for (var c = 0; c < state.buffer.length; c++) {
+ var entry = state.buffer[c];
+ var chunk = entry.chunk;
+ var encoding = entry.encoding;
+ var cb = entry.callback;
+ var len = state.objectMode ? 1 : chunk.length;
+
+ doWrite(stream, state, len, chunk, encoding, cb);
+
+ // if we didn't call the onwrite immediately, then
+ // it means that we need to wait until it does.
+ // also, that means that the chunk and cb are currently
+ // being processed, so move the buffer counter past them.
+ if (state.writing) {
+ c++;
+ break;
+ }
+ }
+
+ state.bufferProcessing = false;
+ if (c < state.buffer.length)
+ state.buffer = state.buffer.slice(c);
+ else
+ state.buffer.length = 0;
+}
+
+Writable.prototype._write = function(chunk, encoding, cb) {
+ cb(new Error('not implemented'));
+};
+
+Writable.prototype.end = function(chunk, encoding, cb) {
+ var state = this._writableState;
+
+ if (typeof chunk === 'function') {
+ cb = chunk;
+ chunk = null;
+ encoding = null;
+ } else if (typeof encoding === 'function') {
+ cb = encoding;
+ encoding = null;
+ }
+
+ if (typeof chunk !== 'undefined' && chunk !== null)
+ this.write(chunk, encoding);
+
+ // ignore unnecessary end() calls.
+ if (!state.ending && !state.finished)
+ endWritable(this, state, cb);
+};
+
+
+function needFinish(stream, state) {
+ return (state.ending &&
+ state.length === 0 &&
+ !state.finished &&
+ !state.writing);
+}
+
+function finishMaybe(stream, state) {
+ var need = needFinish(stream, state);
+ if (need) {
+ state.finished = true;
+ stream.emit('finish');
+ }
+ return need;
+}
+
+function endWritable(stream, state, cb) {
+ state.ending = true;
+ finishMaybe(stream, state);
+ if (cb) {
+ if (state.finished)
+ process.nextTick(cb);
+ else
+ stream.once('finish', cb);
+ }
+ state.ended = true;
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/package.json b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/package.json
new file mode 100644
index 00000000..bec9604f
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/package.json
@@ -0,0 +1,112 @@
+{
+ "_args": [
+ [
+ {
+ "raw": "readable-stream@~1.0.2",
+ "scope": null,
+ "escapedName": "readable-stream",
+ "name": "readable-stream",
+ "rawSpec": "~1.0.2",
+ "spec": ">=1.0.2 <1.1.0",
+ "type": "range"
+ },
+ "C:\\apache-tomcat-8.5.11\\webapps\\vnfmarket\\node_modules\\log4js"
+ ]
+ ],
+ "_from": "readable-stream@>=1.0.2 <1.1.0",
+ "_id": "readable-stream@1.0.34",
+ "_inCache": true,
+ "_location": "/log4js/readable-stream",
+ "_nodeVersion": "5.10.1",
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/readable-stream-1.0.34.tgz_1460562521506_0.019665231462568045"
+ },
+ "_npmUser": {
+ "name": "cwmma",
+ "email": "calvin.metcalf@gmail.com"
+ },
+ "_npmVersion": "3.8.3",
+ "_phantomChildren": {},
+ "_requested": {
+ "raw": "readable-stream@~1.0.2",
+ "scope": null,
+ "escapedName": "readable-stream",
+ "name": "readable-stream",
+ "rawSpec": "~1.0.2",
+ "spec": ">=1.0.2 <1.1.0",
+ "type": "range"
+ },
+ "_requiredBy": [
+ "/log4js"
+ ],
+ "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz",
+ "_shasum": "125820e34bc842d2f2aaafafe4c2916ee32c157c",
+ "_shrinkwrap": null,
+ "_spec": "readable-stream@~1.0.2",
+ "_where": "C:\\apache-tomcat-8.5.11\\webapps\\vnfmarket\\node_modules\\log4js",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "browser": {
+ "util": false
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/readable-stream/issues"
+ },
+ "dependencies": {
+ "core-util-is": "~1.0.0",
+ "inherits": "~2.0.1",
+ "isarray": "0.0.1",
+ "string_decoder": "~0.10.x"
+ },
+ "description": "Streams2, a user-land copy of the stream library from Node.js v0.10.x",
+ "devDependencies": {
+ "tap": "~0.2.6"
+ },
+ "directories": {},
+ "dist": {
+ "shasum": "125820e34bc842d2f2aaafafe4c2916ee32c157c",
+ "tarball": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz"
+ },
+ "gitHead": "1227c7b66deedb1dc5284a89425854d5f7ad9576",
+ "homepage": "https://github.com/isaacs/readable-stream#readme",
+ "keywords": [
+ "readable",
+ "stream",
+ "pipe"
+ ],
+ "license": "MIT",
+ "main": "readable.js",
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "isaacs@npmjs.com"
+ },
+ {
+ "name": "tootallnate",
+ "email": "nathan@tootallnate.net"
+ },
+ {
+ "name": "rvagg",
+ "email": "rod@vagg.org"
+ },
+ {
+ "name": "cwmma",
+ "email": "calvin.metcalf@gmail.com"
+ }
+ ],
+ "name": "readable-stream",
+ "optionalDependencies": {},
+ "readme": "ERROR: No README data found!",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/readable-stream.git"
+ },
+ "scripts": {
+ "test": "tap test/simple/*.js"
+ },
+ "version": "1.0.34"
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/passthrough.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/passthrough.js
new file mode 100644
index 00000000..27e8d8a5
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/passthrough.js
@@ -0,0 +1 @@
+module.exports = require("./lib/_stream_passthrough.js")
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/readable.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/readable.js
new file mode 100644
index 00000000..26511e87
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/readable.js
@@ -0,0 +1,11 @@
+var Stream = require('stream'); // hack to fix a circular dependency issue when used with browserify
+exports = module.exports = require('./lib/_stream_readable.js');
+exports.Stream = Stream;
+exports.Readable = exports;
+exports.Writable = require('./lib/_stream_writable.js');
+exports.Duplex = require('./lib/_stream_duplex.js');
+exports.Transform = require('./lib/_stream_transform.js');
+exports.PassThrough = require('./lib/_stream_passthrough.js');
+if (!process.browser && process.env.READABLE_STREAM === 'disable') {
+ module.exports = require('stream');
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/transform.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/transform.js
new file mode 100644
index 00000000..5d482f07
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/transform.js
@@ -0,0 +1 @@
+module.exports = require("./lib/_stream_transform.js")
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/writable.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/writable.js
new file mode 100644
index 00000000..e1e9efdf
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/node_modules/readable-stream/writable.js
@@ -0,0 +1 @@
+module.exports = require("./lib/_stream_writable.js")
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/package.json b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/package.json
new file mode 100644
index 00000000..46694d13
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/package.json
@@ -0,0 +1,111 @@
+{
+ "_args": [
+ [
+ {
+ "raw": "log4js@^0.6.25",
+ "scope": null,
+ "escapedName": "log4js",
+ "name": "log4js",
+ "rawSpec": "^0.6.25",
+ "spec": ">=0.6.25 <0.7.0",
+ "type": "range"
+ },
+ "C:\\apache-tomcat-8.5.11\\webapps\\vnfmarket\\node_modules\\karma"
+ ]
+ ],
+ "_from": "log4js@>=0.6.25 <0.7.0",
+ "_id": "log4js@0.6.38",
+ "_inCache": true,
+ "_location": "/log4js",
+ "_nodeVersion": "6.2.0",
+ "_npmOperationalInternal": {
+ "host": "packages-16-east.internal.npmjs.com",
+ "tmp": "tmp/log4js-0.6.38.tgz_1468752725179_0.8987619976978749"
+ },
+ "_npmUser": {
+ "name": "csausdev",
+ "email": "gareth.nomiddlename@gmail.com"
+ },
+ "_npmVersion": "3.8.9",
+ "_phantomChildren": {
+ "core-util-is": "1.0.2",
+ "inherits": "2.0.3",
+ "string_decoder": "0.10.31"
+ },
+ "_requested": {
+ "raw": "log4js@^0.6.25",
+ "scope": null,
+ "escapedName": "log4js",
+ "name": "log4js",
+ "rawSpec": "^0.6.25",
+ "spec": ">=0.6.25 <0.7.0",
+ "type": "range"
+ },
+ "_requiredBy": [
+ "/karma"
+ ],
+ "_resolved": "https://registry.npmjs.org/log4js/-/log4js-0.6.38.tgz",
+ "_shasum": "2c494116695d6fb25480943d3fc872e662a522fd",
+ "_shrinkwrap": null,
+ "_spec": "log4js@^0.6.25",
+ "_where": "C:\\apache-tomcat-8.5.11\\webapps\\vnfmarket\\node_modules\\karma",
+ "author": {
+ "name": "Gareth Jones",
+ "email": "gareth.nomiddlename@gmail.com"
+ },
+ "browser": {
+ "os": false
+ },
+ "bugs": {
+ "url": "http://github.com/nomiddlename/log4js-node/issues"
+ },
+ "dependencies": {
+ "readable-stream": "~1.0.2",
+ "semver": "~4.3.3"
+ },
+ "description": "Port of Log4js to work with node.",
+ "devDependencies": {
+ "jshint": "^2.9.2",
+ "sandboxed-module": "0.1.3",
+ "vows": "0.7.0"
+ },
+ "directories": {
+ "test": "test",
+ "lib": "lib"
+ },
+ "dist": {
+ "shasum": "2c494116695d6fb25480943d3fc872e662a522fd",
+ "tarball": "https://registry.npmjs.org/log4js/-/log4js-0.6.38.tgz"
+ },
+ "engines": {
+ "node": ">=0.8"
+ },
+ "gitHead": "6c21e4acd90047525d34ac4f4740e8ee0dba3bc2",
+ "homepage": "https://github.com/nomiddlename/log4js-node#readme",
+ "keywords": [
+ "logging",
+ "log",
+ "log4j",
+ "node"
+ ],
+ "license": "Apache-2.0",
+ "main": "./lib/log4js",
+ "maintainers": [
+ {
+ "name": "csausdev",
+ "email": "gareth.jones@sensis.com.au"
+ }
+ ],
+ "name": "log4js",
+ "optionalDependencies": {},
+ "readme": "ERROR: No README data found!",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/nomiddlename/log4js-node.git"
+ },
+ "scripts": {
+ "pretest": "jshint lib/ test/",
+ "test": "vows"
+ },
+ "version": "0.6.38"
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/categoryFilter-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/categoryFilter-test.js
new file mode 100644
index 00000000..15a7b906
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/categoryFilter-test.js
@@ -0,0 +1,84 @@
+'use strict';
+
+var vows = require('vows')
+, fs = require('fs')
+, assert = require('assert')
+, EOL = require('os').EOL || '\n';
+
+function remove(filename) {
+ try {
+ fs.unlinkSync(filename);
+ } catch (e) {
+ //doesn't really matter if it failed
+ }
+}
+
+vows.describe('log4js categoryFilter').addBatch({
+ 'appender': {
+ topic: function() {
+
+ var log4js = require('../lib/log4js'), logEvents = [], webLogger, appLogger;
+ log4js.clearAppenders();
+ var appender = require('../lib/appenders/categoryFilter')
+ .appender(
+ ['app'],
+ function(evt) { logEvents.push(evt); }
+ );
+ log4js.addAppender(appender, ["app","web"]);
+
+ webLogger = log4js.getLogger("web");
+ appLogger = log4js.getLogger("app");
+
+ webLogger.debug('This should get logged');
+ appLogger.debug('This should not');
+ webLogger.debug('Hello again');
+ log4js.getLogger('db').debug('This shouldn\'t be included by the appender anyway');
+
+ return logEvents;
+ },
+ 'should only pass matching category' : function(logEvents) {
+ assert.equal(logEvents.length, 2);
+ assert.equal(logEvents[0].data[0], 'This should get logged');
+ assert.equal(logEvents[1].data[0], 'Hello again');
+ }
+ },
+
+ 'configure': {
+ topic: function() {
+ var log4js = require('../lib/log4js')
+ , logger, weblogger;
+
+ remove(__dirname + '/categoryFilter-web.log');
+ remove(__dirname + '/categoryFilter-noweb.log');
+
+ log4js.configure('test/with-categoryFilter.json');
+ logger = log4js.getLogger("app");
+ weblogger = log4js.getLogger("web");
+
+ logger.info('Loading app');
+ logger.debug('Initialising indexes');
+ weblogger.info('00:00:00 GET / 200');
+ weblogger.warn('00:00:00 GET / 500');
+ //wait for the file system to catch up
+ setTimeout(this.callback, 500);
+ },
+ 'tmp-tests.log': {
+ topic: function() {
+ fs.readFile(__dirname + '/categoryFilter-noweb.log', 'utf8', this.callback);
+ },
+ 'should contain all log messages': function(contents) {
+ var messages = contents.trim().split(EOL);
+ assert.deepEqual(messages, ['Loading app','Initialising indexes']);
+ }
+ },
+ 'tmp-tests-web.log': {
+ topic: function() {
+ fs.readFile(__dirname + '/categoryFilter-web.log','utf8',this.callback);
+ },
+ 'should contain only error and warning log messages': function(contents) {
+ var messages = contents.trim().split(EOL);
+ assert.deepEqual(messages, ['00:00:00 GET / 200','00:00:00 GET / 500']);
+ }
+ }
+ }
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/clusteredAppender-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/clusteredAppender-test.js
new file mode 100644
index 00000000..76cb37a8
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/clusteredAppender-test.js
@@ -0,0 +1,166 @@
+"use strict";
+var assert = require('assert');
+var vows = require('vows');
+var layouts = require('../lib/layouts');
+var sandbox = require('sandboxed-module');
+var LoggingEvent = require('../lib/logger').LoggingEvent;
+var cluster = require('cluster');
+
+vows.describe('log4js cluster appender').addBatch({
+ 'when in master mode': {
+ topic: function() {
+
+ var registeredClusterEvents = [];
+ var loggingEvents = [];
+ var onChildProcessForked;
+ var onMasterReceiveChildMessage;
+
+ // Fake cluster module, so no real cluster listeners be really added
+ var fakeCluster = {
+
+ on: function(event, callback) {
+ registeredClusterEvents.push(event);
+ onChildProcessForked = callback;
+ },
+
+ isMaster: true,
+ isWorker: false,
+
+ };
+ var fakeWorker = {
+ on: function(event, callback) {
+ onMasterReceiveChildMessage = callback;
+ },
+ process: {
+ pid: 123
+ },
+ id: 'workerid'
+ };
+
+ var fakeActualAppender = function(loggingEvent) {
+ loggingEvents.push(loggingEvent);
+ };
+
+ // Load appender and fake modules in it
+ var appenderModule = sandbox.require('../lib/appenders/clustered', {
+ requires: {
+ 'cluster': fakeCluster,
+ }
+ });
+
+ var masterAppender = appenderModule.appender({
+ actualAppenders: [fakeActualAppender, fakeActualAppender, fakeActualAppender],
+ appenders: [{}, {category: "test"}, {category: "wovs"}]
+ });
+
+ // Actual test - log message using masterAppender
+ masterAppender(new LoggingEvent('wovs', 'Info', ['masterAppender test']));
+
+ // Simulate a 'fork' event to register the master's message handler on our fake worker.
+ onChildProcessForked(fakeWorker);
+ // Simulate a cluster message received by the masterAppender.
+ var simulatedLoggingEvent = new LoggingEvent(
+ 'wovs',
+ 'Error',
+ [
+ 'message deserialization test',
+ {stack: 'my wrapped stack'}
+ ]
+ );
+ onMasterReceiveChildMessage({
+ type : '::log-message',
+ event : JSON.stringify(simulatedLoggingEvent)
+ });
+
+ var returnValue = {
+ registeredClusterEvents: registeredClusterEvents,
+ loggingEvents: loggingEvents,
+ };
+
+ return returnValue;
+ },
+
+ "should register 'fork' event listener on 'cluster'": function(topic) {
+ assert.equal(topic.registeredClusterEvents[0], 'fork');
+ },
+
+ "should log using actual appender": function(topic) {
+ assert.equal(topic.loggingEvents.length, 4);
+ assert.equal(topic.loggingEvents[0].data[0], 'masterAppender test');
+ assert.equal(topic.loggingEvents[1].data[0], 'masterAppender test');
+ assert.equal(topic.loggingEvents[2].data[0], 'message deserialization test');
+ assert.equal(topic.loggingEvents[2].data[1], 'my wrapped stack');
+ assert.equal(topic.loggingEvents[3].data[0], 'message deserialization test');
+ assert.equal(topic.loggingEvents[3].data[1], 'my wrapped stack');
+ },
+
+ },
+
+ 'when in worker mode': {
+
+ topic: function() {
+
+ var registeredProcessEvents = [];
+
+ // Fake cluster module, to fake we're inside a worker process
+ var fakeCluster = {
+
+ isMaster: false,
+ isWorker: true,
+
+ };
+
+ var fakeProcess = {
+
+ send: function(data) {
+ registeredProcessEvents.push(data);
+ },
+
+ };
+
+ // Load appender and fake modules in it
+ var appenderModule = sandbox.require('../lib/appenders/clustered', {
+ requires: {
+ 'cluster': fakeCluster,
+ },
+ globals: {
+ 'process': fakeProcess,
+ }
+ });
+
+ var workerAppender = appenderModule.appender();
+
+ // Actual test - log message using masterAppender
+ workerAppender(new LoggingEvent('wovs', 'Info', ['workerAppender test']));
+ workerAppender(new LoggingEvent('wovs', 'Info', [new Error('Error test')]));
+
+ var returnValue = {
+ registeredProcessEvents: registeredProcessEvents,
+ };
+
+ return returnValue;
+
+ },
+
+ "worker appender should call process.send" : function(topic) {
+ assert.equal(topic.registeredProcessEvents[0].type, '::log-message');
+ assert.equal(
+ JSON.parse(topic.registeredProcessEvents[0].event).data[0],
+ "workerAppender test"
+ );
+ },
+
+ "worker should serialize an Error correctly" : function(topic) {
+ assert.equal(topic.registeredProcessEvents[1].type, '::log-message');
+ assert(JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack);
+ var actual = JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack;
+ var expectedRegex = /^Error: Error test/;
+ assert(
+ actual.match(expectedRegex),
+ "Expected: \n\n " + actual + "\n\n to match " + expectedRegex
+ );
+ }
+
+ }
+
+}).exportTo(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/configuration-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/configuration-test.js
new file mode 100644
index 00000000..ddbf7dff
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/configuration-test.js
@@ -0,0 +1,149 @@
+"use strict";
+var assert = require('assert')
+, vows = require('vows')
+, sandbox = require('sandboxed-module');
+
+function makeTestAppender() {
+ return {
+ configure: function(config, options) {
+ this.configureCalled = true;
+ this.config = config;
+ this.options = options;
+ return this.appender();
+ },
+ appender: function() {
+ var self = this;
+ return function(logEvt) { self.logEvt = logEvt; };
+ }
+ };
+}
+
+vows.describe('log4js configure').addBatch({
+ 'appenders': {
+ 'when specified by type': {
+ topic: function() {
+ var testAppender = makeTestAppender(),
+ log4js = sandbox.require(
+ '../lib/log4js',
+ {
+ requires: {
+ './appenders/cheese': testAppender
+ }
+ }
+ );
+ log4js.configure(
+ {
+ appenders: [
+ { type: "cheese", flavour: "gouda" }
+ ]
+ },
+ { pants: "yes" }
+ );
+ return testAppender;
+ },
+ 'should load appender': function(testAppender) {
+ assert.ok(testAppender.configureCalled);
+ },
+ 'should pass config to appender': function(testAppender) {
+ assert.equal(testAppender.config.flavour, 'gouda');
+ },
+ 'should pass log4js options to appender': function(testAppender) {
+ assert.equal(testAppender.options.pants, 'yes');
+ }
+ },
+ 'when core appender loaded via loadAppender': {
+ topic: function() {
+ var testAppender = makeTestAppender(),
+ log4js = sandbox.require(
+ '../lib/log4js',
+ { requires: { './appenders/cheese': testAppender } }
+ );
+
+ log4js.loadAppender('cheese');
+ return log4js;
+ },
+ 'should load appender from ../lib/appenders': function(log4js) {
+ assert.ok(log4js.appenders.cheese);
+ },
+ 'should add appender configure function to appenderMakers' : function(log4js) {
+ assert.isFunction(log4js.appenderMakers.cheese);
+ }
+ },
+ 'when appender in node_modules loaded via loadAppender': {
+ topic: function() {
+ var testAppender = makeTestAppender(),
+ log4js = sandbox.require(
+ '../lib/log4js',
+ { requires: { 'some/other/external': testAppender } }
+ );
+ log4js.loadAppender('some/other/external');
+ return log4js;
+ },
+ 'should load appender via require': function(log4js) {
+ assert.ok(log4js.appenders['some/other/external']);
+ },
+ 'should add appender configure function to appenderMakers': function(log4js) {
+ assert.isFunction(log4js.appenderMakers['some/other/external']);
+ }
+ },
+ 'when appender object loaded via loadAppender': {
+ topic: function() {
+ var testAppender = makeTestAppender(),
+ log4js = sandbox.require('../lib/log4js');
+
+ log4js.loadAppender('some/other/external', testAppender);
+ return log4js;
+ },
+ 'should load appender with provided object': function(log4js) {
+ assert.ok(log4js.appenders['some/other/external']);
+ },
+ 'should add appender configure function to appenderMakers': function(log4js) {
+ assert.isFunction(log4js.appenderMakers['some/other/external']);
+ }
+ },
+ 'when configuration file loaded via LOG4JS_CONFIG environment variable': {
+ topic: function() {
+ process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
+ var fileRead = 0,
+ modulePath = 'some/path/to/mylog4js.json',
+ pathsChecked = [],
+ mtime = new Date(),
+ fakeFS = {
+ config: { appenders: [ { type: 'console', layout: { type: 'messagePassThrough' } } ],
+ levels: { 'a-test' : 'INFO' } },
+ readdirSync: function(dir) {
+ return require('fs').readdirSync(dir);
+ },
+ readFileSync: function (file, encoding) {
+ fileRead += 1;
+ assert.isString(file);
+ assert.equal(file, modulePath);
+ assert.equal(encoding, 'utf8');
+ return JSON.stringify(fakeFS.config);
+ },
+ statSync: function (path) {
+ pathsChecked.push(path);
+ if (path === modulePath) {
+ return { mtime: mtime };
+ } else {
+ throw new Error("no such file");
+ }
+ }
+ },
+ log4js = sandbox.require(
+ '../lib/log4js',
+ {
+ requires: {
+ 'fs': fakeFS,
+ }
+ }
+ );
+ delete process.env.LOG4JS_CONFIG;
+ return fileRead;
+ },
+ 'should load the specified local configuration file' : function(fileRead) {
+ assert.equal(fileRead, 1);
+ }
+ }
+ }
+}).exportTo(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/configureNoLevels-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/configureNoLevels-test.js
new file mode 100644
index 00000000..55bd987b
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/configureNoLevels-test.js
@@ -0,0 +1,173 @@
+"use strict";
+// This test shows unexpected behaviour for log4js.configure() in log4js-node@0.4.3 and earlier:
+// 1) log4js.configure(), log4js.configure(null),
+// log4js.configure({}), log4js.configure(<some object with no levels prop>)
+// all set all loggers levels to trace, even if they were previously set to something else.
+// 2) log4js.configure({levels:{}}), log4js.configure({levels: {foo:
+// bar}}) leaves previously set logger levels intact.
+//
+
+// Basic set up
+var vows = require('vows');
+var assert = require('assert');
+var toLevel = require('../lib/levels').toLevel;
+
+// uncomment one or other of the following to see progress (or not) while running the tests
+// var showProgress = console.log;
+var showProgress = function() {};
+
+
+// Define the array of levels as string to iterate over.
+// Order matters: the index positions are used later to compute the
+// expected result of isLevelEnabled() comparisons independently of log4js.
+var strLevels= ['Trace','Debug','Info','Warn','Error','Fatal'];
+
+// setup the configurations we want to test
+// Each value is handed verbatim to log4js.configure(); the key is a
+// human-readable description used to build the vows context name.
+var configs = {
+ 'nop': 'nop', // special case where the iterating vows generator will not call log4js.configure
+ 'is undefined': undefined,
+ 'is null': null,
+ 'is empty': {},
+ 'has no levels': {foo: 'bar'},
+ 'has null levels': {levels: null},
+ 'has empty levels': {levels: {}},
+ 'has random levels': {levels: {foo: 'bar'}},
+ 'has some valid levels': {levels: {A: 'INFO'}}
+};
+
+// Set up the basic vows batches for this test
+// (one batch per entry in `configs`, populated by the loop further down).
+var batches = [];
+
+
+// Maps a level string to the name of the logger dedicated to it,
+// e.g. 'Info' -> 'Info-logger', keeping test failures traceable to a level.
+function getLoggerName(level) {
+ return level+'-logger';
+}
+
+// the common vows top-level context, whether log4js.configure is called or not
+// just making sure that the code is common,
+// so that there are no spurious errors in the tests themselves.
+// Builds the shared top-level vows context used by every batch: the topic
+// creates one logger per entry in strLevels (named via getLoggerName) and
+// sets that level on it, then either calls log4js.configure(configToTest)
+// or — when `nop` is true — skips configuration entirely. The log4js
+// module itself is returned so sub-contexts can query loggers and levels.
+// NOTE(review): the `name` parameter is unused in this body — presumably
+// kept for call-site readability; confirm before removing.
+function getTopLevelContext(nop, configToTest, name) {
+ return {
+ topic: function() {
+ var log4js = require('../lib/log4js');
+ // create loggers for each level,
+ // keeping the level in the logger's name for traceability
+ strLevels.forEach(function(l) {
+ log4js.getLogger(getLoggerName(l)).setLevel(l);
+ });
+
+ if (!nop) {
+ showProgress('** Configuring log4js with', configToTest);
+ log4js.configure(configToTest);
+ }
+ else {
+ showProgress('** Not configuring log4js');
+ }
+ return log4js;
+ }
+ };
+}
+
+showProgress('Populating batch object...');
+
+// Vow: cross-checks that the expectedResult we computed from strLevels
+// index arithmetic agrees with log4js' own level comparison
+// (toLevel(...).isLessThanOrEqualTo(...)), so a mismatch in the test's
+// assumptions is reported separately from a real log4js failure.
+function checkForMismatch(topic) {
+ var er = topic.log4js.levels.toLevel(topic.baseLevel)
+ .isLessThanOrEqualTo(topic.log4js.levels.toLevel(topic.comparisonLevel));
+
+ assert.equal(
+ er,
+ topic.expectedResult,
+ 'Mismatch: for setLevel(' + topic.baseLevel +
+ ') was expecting a comparison with ' + topic.comparisonLevel +
+ ' to be ' + topic.expectedResult
+ );
+}
+
+// Vow: asserts that the logger whose level was set to topic.baseLevel
+// answers isLevelEnabled(comparisonLevel) with the independently
+// pre-computed topic.expectedResult.
+function checkExpectedResult(topic) {
+ var result = topic.log4js
+ .getLogger(getLoggerName(topic.baseLevel))
+ .isLevelEnabled(topic.log4js.levels.toLevel(topic.comparisonLevel));
+
+ assert.equal(
+ result,
+ topic.expectedResult,
+ 'Failed: ' + getLoggerName(topic.baseLevel) +
+ '.isLevelEnabled( ' + topic.comparisonLevel + ' ) returned ' + result
+ );
+}
+
+// Adds, under the current top-level context, one sub-context for the logger
+// that was set to `baseLevel`, then fans out to every comparison level.
+// NOTE: this reads the module-level `batch` and `context` variables that are
+// (re)assigned by the population loop below (they are var-hoisted to module
+// scope), so it must only be invoked from inside that loop.
+function setupBaseLevelAndCompareToOtherLevels(baseLevel) {
+ var baseLevelSubContext = 'and checking the logger whose level was set to '+baseLevel ;
+ var subContext = { topic: baseLevel };
+ batch[context][baseLevelSubContext] = subContext;
+
+ // each logging level has strLevels sub-contexts,
+ // to exhaustively test all the combinations of
+ // setLevel(baseLevel) and isLevelEnabled(comparisonLevel) per config
+ strLevels.forEach(compareToOtherLevels(subContext));
+}
+
+// Returns the iterator callback that, for each comparisonLevel, attaches an
+// 'isLevelEnabled(...)' sub-sub-context to `subContext`. Each one carries:
+//   - a topic gathering (comparisonLevel, baseLevel, log4js, expectedResult),
+//     where baseLevel and log4js flow in from the parent topics;
+//   - a vow checking the logger's answer (checkExpectedResult);
+//   - a vow checking our own expectation against log4js (checkForMismatch).
+function compareToOtherLevels(subContext) {
+ var baseLevel = subContext.topic;
+
+ return function (comparisonLevel) {
+ var comparisonLevelSubContext = 'with isLevelEnabled('+comparisonLevel+')';
+
+ // calculate this independently of log4js, but we'll add a vow
+ // later on to check that we're not mismatched with log4js
+ var expectedResult = strLevels.indexOf(baseLevel) <= strLevels.indexOf(comparisonLevel);
+
+ // the topic simply gathers all the parameters for the vow
+ // into an object, to simplify the vow's work.
+ subContext[comparisonLevelSubContext] = {
+ topic: function(baseLevel, log4js) {
+ return {
+ comparisonLevel: comparisonLevel,
+ baseLevel: baseLevel,
+ log4js: log4js,
+ expectedResult: expectedResult
+ };
+ }
+ };
+
+ var vow = 'should return '+expectedResult;
+ subContext[comparisonLevelSubContext][vow] = checkExpectedResult;
+
+ // the extra vow to check the comparison between baseLevel and
+ // comparisonLevel we performed earlier matches log4js'
+ // comparison too
+ var subSubContext = subContext[comparisonLevelSubContext];
+ subSubContext['finally checking for comparison mismatch with log4js'] = checkForMismatch;
+ };
+}
+
+// Populating the batches programmatically, as there are
+// (configs.length x strLevels.length x strLevels.length) = 324
+// possible test combinations
+// Note: `batch` and `context` are deliberately var-scoped to the module so
+// that setupBaseLevelAndCompareToOtherLevels (above) can read the values of
+// the current iteration.
+for (var cfg in configs) {
+ var configToTest = configs[cfg];
+ var nop = configToTest === 'nop';
+ var context;
+ if (nop) {
+ context = 'Setting up loggers with initial levels, then NOT setting a configuration,';
+ }
+ else {
+ context = 'Setting up loggers with initial levels, then setting a configuration which '+cfg+',';
+ }
+
+ showProgress('Setting up the vows batch and context for '+context);
+ // each config to be tested has its own vows batch with a single top-level context
+ var batch={};
+ batch[context]= getTopLevelContext(nop, configToTest, context);
+ batches.push(batch);
+
+ // each top-level context has strLevels sub-contexts, one per logger
+ // that was set to a specific level in the top-level context's topic
+ strLevels.forEach(setupBaseLevelAndCompareToOtherLevels);
+}
+
+showProgress('Running tests');
+var v = vows.describe('log4js.configure(), with or without a "levels" property');
+
+// Chain all generated batches onto the suite and hand it to vows.
+batches.forEach(function(batch) {v=v.addBatch(batch);});
+
+v.export(module);
+
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/connect-logger-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/connect-logger-test.js
new file mode 100644
index 00000000..9fda2575
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/connect-logger-test.js
@@ -0,0 +1,303 @@
+/* jshint maxparams:7 */
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, util = require('util')
+, EE = require('events').EventEmitter
+, levels = require('../lib/levels');
+
+// Minimal stand-in for a log4js logger: records every log() call in
+// `messages` (level + message; the exception argument is ignored) and
+// gates via isLevelEnabled() against `level` like the real logger does.
+// Defaults to TRACE so everything is captured unless a test raises it.
+function MockLogger() {
+
+ var that = this;
+ this.messages = [];
+
+ this.log = function(level, message, exception) {
+ that.messages.push({ level: level, message: message });
+ };
+
+ this.isLevelEnabled = function(level) {
+ return level.isGreaterThanOrEqualTo(that.level);
+ };
+
+ this.level = levels.TRACE;
+
+}
+
+// Fake connect/express request carrying just the fields the connect-logger
+// format tokens read: socket.remoteAddress, method, originalUrl, HTTP
+// version, and headers. Header keys are additionally copied in lower-case
+// form, mirroring node's lower-cased req.headers lookup.
+function MockRequest(remoteAddr, method, originalUrl, headers) {
+
+ this.socket = { remoteAddress: remoteAddr };
+ this.originalUrl = originalUrl;
+ this.method = method;
+ this.httpVersionMajor = '5';
+ this.httpVersionMinor = '0';
+ this.headers = headers || {};
+
+ var self = this;
+ Object.keys(this.headers).forEach(function(key) {
+ self.headers[key.toLowerCase()] = self.headers[key];
+ });
+}
+
+// Fake response object: end() emits the 'finish' event (which the
+// connect-logger listens for to emit its log line), and writeHead()
+// records the status code and headers for the :res[...] tokens.
+// EventEmitter behaviour comes from the util.inherits call below.
+function MockResponse() {
+ var r = this;
+ this.end = function(chunk, encoding) {
+ r.emit('finish');
+ };
+
+ this.writeHead = function(code, headers) {
+ this.statusCode = code;
+ this._headers = headers;
+ };
+}
+
+util.inherits(MockResponse, EE);
+
+// Drives one simulated request/response cycle through the connect-logger
+// middleware `cl`: invokes the middleware with a no-op next(), then writes
+// the response head (status + headers) and ends it, which fires 'finish'
+// and lets the middleware log the request.
+function request(cl, method, url, code, reqHeaders, resHeaders) {
+ var req = new MockRequest('my.remote.addr', method, url, reqHeaders);
+ var res = new MockResponse();
+ cl(req, res, function() {});
+ res.writeHead(code, resHeaders);
+ res.end('chunk','encoding');
+}
+
+// End-to-end tests for the connect-logger middleware factory. Each vow
+// drives a fake request/response pair through the middleware (see request()
+// above) and inspects what the MockLogger recorded. Topics that depend on
+// the response 'finish' event hand their results back via this.callback
+// after a short setTimeout, since the middleware logs asynchronously.
+vows.describe('log4js connect logger').addBatch({
+ 'getConnectLoggerModule': {
+ topic: function() {
+ var clm = require('../lib/connect-logger');
+ return clm;
+ },
+
+ 'should return a "connect logger" factory' : function(clm) {
+ assert.isObject(clm);
+ },
+
+ 'take a log4js logger and return a "connect logger"' : {
+ topic: function(clm) {
+ var ml = new MockLogger();
+ var cl = clm.connectLogger(ml);
+ return cl;
+ },
+
+ 'should return a "connect logger"': function(cl) {
+ assert.isFunction(cl);
+ }
+ },
+
+ 'log events' : {
+ topic: function(clm) {
+ var ml = new MockLogger();
+ var cl = clm.connectLogger(ml);
+ var cb = this.callback;
+ request(cl, 'GET', 'http://url', 200);
+ setTimeout(function() {
+ cb(null, ml.messages);
+ },10);
+ },
+
+ 'check message': function(messages) {
+ assert.isArray(messages);
+ assert.equal(messages.length, 1);
+ assert.ok(levels.INFO.isEqualTo(messages[0].level));
+ assert.include(messages[0].message, 'GET');
+ assert.include(messages[0].message, 'http://url');
+ assert.include(messages[0].message, 'my.remote.addr');
+ assert.include(messages[0].message, '200');
+ }
+ },
+
+ 'log events with level below logging level' : {
+ topic: function(clm) {
+ var ml = new MockLogger();
+ ml.level = levels.FATAL;
+ var cl = clm.connectLogger(ml);
+ request(cl, 'GET', 'http://url', 200);
+ return ml.messages;
+ },
+
+ 'check message': function(messages) {
+ assert.isArray(messages);
+ assert.isEmpty(messages);
+ }
+ },
+
+ 'log events with non-default level and custom format' : {
+ topic: function(clm) {
+ var ml = new MockLogger();
+ var cb = this.callback;
+ ml.level = levels.INFO;
+ var cl = clm.connectLogger(ml, { level: levels.INFO, format: ':method :url' } );
+ request(cl, 'GET', 'http://url', 200);
+ setTimeout(function() {
+ cb(null, ml.messages);
+ },10); },
+
+ 'check message': function(messages) {
+ assert.isArray(messages);
+ assert.equal(messages.length, 1);
+ assert.ok(levels.INFO.isEqualTo(messages[0].level));
+ assert.equal(messages[0].message, 'GET http://url');
+ }
+ },
+
+ 'logger with options as string': {
+ topic: function(clm) {
+ var ml = new MockLogger();
+ var cb = this.callback;
+ ml.level = levels.INFO;
+ var cl = clm.connectLogger(ml, ':method :url');
+ request(cl, 'POST', 'http://meh', 200);
+ setTimeout(function() {
+ cb(null, ml.messages);
+ },10);
+ },
+ 'should use the passed in format': function(messages) {
+ assert.equal(messages[0].message, 'POST http://meh');
+ }
+ },
+
+ // 'auto' picks the log level from the response status code.
+ 'auto log levels': {
+ topic: function(clm) {
+ var ml = new MockLogger();
+ var cb = this.callback;
+ ml.level = levels.INFO;
+ var cl = clm.connectLogger(ml, { level: 'auto', format: ':method :url' });
+ request(cl, 'GET', 'http://meh', 200);
+ request(cl, 'GET', 'http://meh', 201);
+ request(cl, 'GET', 'http://meh', 302);
+ request(cl, 'GET', 'http://meh', 404);
+ request(cl, 'GET', 'http://meh', 500);
+ setTimeout(function() {
+ cb(null, ml.messages);
+ },10);
+ },
+
+ 'should use INFO for 2xx': function(messages) {
+ assert.ok(levels.INFO.isEqualTo(messages[0].level));
+ assert.ok(levels.INFO.isEqualTo(messages[1].level));
+ },
+
+ 'should use WARN for 3xx': function(messages) {
+ assert.ok(levels.WARN.isEqualTo(messages[2].level));
+ },
+
+ 'should use ERROR for 4xx': function(messages) {
+ assert.ok(levels.ERROR.isEqualTo(messages[3].level));
+ },
+
+ 'should use ERROR for 5xx': function(messages) {
+ assert.ok(levels.ERROR.isEqualTo(messages[4].level));
+ }
+ },
+
+ 'format using a function': {
+ topic: function(clm) {
+ var ml = new MockLogger();
+ var cb = this.callback;
+ ml.level = levels.INFO;
+ var cl = clm.connectLogger(ml, function(req, res, formatFn) { return "I was called"; });
+ request(cl, 'GET', 'http://blah', 200);
+ setTimeout(function() {
+ cb(null, ml.messages);
+ },10);
+ },
+
+ 'should call the format function': function(messages) {
+ assert.equal(messages[0].message, 'I was called');
+ }
+ },
+
+ 'format that includes request headers': {
+ topic: function(clm) {
+ var ml = new MockLogger();
+ var cb = this.callback;
+ ml.level = levels.INFO;
+ var cl = clm.connectLogger(ml, ':req[Content-Type]');
+ request(
+ cl,
+ 'GET', 'http://blah', 200,
+ { 'Content-Type': 'application/json' }
+ );
+ setTimeout(function() {
+ cb(null, ml.messages);
+ },10);
+ },
+ 'should output the request header': function(messages) {
+ assert.equal(messages[0].message, 'application/json');
+ }
+ },
+
+ 'format that includes response headers': {
+ topic: function(clm) {
+ var ml = new MockLogger();
+ var cb = this.callback;
+ ml.level = levels.INFO;
+ var cl = clm.connectLogger(ml, ':res[Content-Type]');
+ request(
+ cl,
+ 'GET', 'http://blah', 200,
+ null,
+ { 'Content-Type': 'application/cheese' }
+ );
+ setTimeout(function() {
+ cb(null, ml.messages);
+ },10);
+ },
+
+ 'should output the response header': function(messages) {
+ assert.equal(messages[0].message, 'application/cheese');
+ }
+ },
+
+ // Custom tokens extend the format vocabulary...
+ 'log events with custom token' : {
+ topic: function(clm) {
+ var ml = new MockLogger();
+ var cb = this.callback;
+ ml.level = levels.INFO;
+ var cl = clm.connectLogger(ml, {
+ level: levels.INFO,
+ format: ':method :url :custom_string',
+ tokens: [{
+ token: ':custom_string', replacement: 'fooBAR'
+ }]
+ });
+ request(cl, 'GET', 'http://url', 200);
+ setTimeout(function() {
+ cb(null, ml.messages);
+ },10);
+ },
+
+ 'check message': function(messages) {
+ assert.isArray(messages);
+ assert.equal(messages.length, 1);
+ assert.ok(levels.INFO.isEqualTo(messages[0].level));
+ assert.equal(messages[0].message, 'GET http://url fooBAR');
+ }
+ },
+
+ // ...and may also override built-in tokens such as :date.
+ 'log events with custom override token' : {
+ topic: function(clm) {
+ var ml = new MockLogger();
+ var cb = this.callback;
+ ml.level = levels.INFO;
+ var cl = clm.connectLogger(ml, {
+ level: levels.INFO,
+ format: ':method :url :date',
+ tokens: [{
+ token: ':date', replacement: "20150310"
+ }]
+ });
+ request(cl, 'GET', 'http://url', 200);
+ setTimeout(function() {
+ cb(null, ml.messages);
+ },10);
+ },
+
+ 'check message': function(messages) {
+ assert.isArray(messages);
+ assert.equal(messages.length, 1);
+ assert.ok(levels.INFO.isEqualTo(messages[0].level));
+ assert.equal(messages[0].message, 'GET http://url 20150310');
+ }
+ }
+ }
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/consoleAppender-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/consoleAppender-test.js
new file mode 100644
index 00000000..3887ce5a
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/consoleAppender-test.js
@@ -0,0 +1,33 @@
+"use strict";
+var assert = require('assert')
+, vows = require('vows')
+, layouts = require('../lib/layouts')
+, sandbox = require('sandboxed-module');
+
+// Tests the console appender by sandboxing the module with a fake global
+// `console` that records log() calls, then firing one event through an
+// appender built with the messagePassThrough layout.
+vows.describe('../lib/appenders/console').addBatch({
+ 'appender': {
+ topic: function() {
+ var messages = []
+ , fakeConsole = {
+ log: function(msg) { messages.push(msg); }
+ }
+ , appenderModule = sandbox.require(
+ '../lib/appenders/console',
+ {
+ globals: {
+ 'console': fakeConsole
+ }
+ }
+ )
+ , appender = appenderModule.appender(layouts.messagePassThroughLayout);
+
+ appender({ data: ["blah"] });
+ return messages;
+ },
+
+ 'should output to console': function(messages) {
+ assert.equal(messages[0], 'blah');
+ }
+ }
+
+}).exportTo(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/dateFileAppender-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/dateFileAppender-test.js
new file mode 100644
index 00000000..8fa115f0
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/dateFileAppender-test.js
@@ -0,0 +1,223 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, path = require('path')
+, fs = require('fs')
+, sandbox = require('sandboxed-module')
+, log4js = require('../lib/log4js')
+, EOL = require('os').EOL || '\n';
+
+// Returns a teardown callback that asynchronously deletes `filename`
+// (relative to this test directory). Failures are only logged, never
+// thrown — cleanup is best-effort.
+function removeFile(filename) {
+ return function() {
+ fs.unlink(path.join(__dirname, filename), function(err) {
+ if (err) {
+ console.log("Could not delete ", filename, err);
+ }
+ });
+ };
+}
+
+// Tests for the dateFile appender, in two batches: (1) appender-level
+// bookkeeping — a single process 'exit' listener regardless of how many
+// appenders exist, stream cleanup on exit, and default-layout writes;
+// (2) configure()-driven setups — JSON config file, alwaysIncludePattern,
+// and the cwd option (the last two sandbox the streams module so no real
+// files are opened where not needed).
+vows.describe('../lib/appenders/dateFile').addBatch({
+ 'appender': {
+ 'adding multiple dateFileAppenders': {
+ topic: function () {
+ var listenersCount = process.listeners('exit').length,
+ dateFileAppender = require('../lib/appenders/dateFile'),
+ count = 5,
+ logfile;
+
+ while (count--) {
+ logfile = path.join(__dirname, 'datefa-default-test' + count + '.log');
+ log4js.addAppender(dateFileAppender.appender(logfile));
+ }
+
+ return listenersCount;
+ },
+ teardown: function() {
+ removeFile('datefa-default-test0.log')();
+ removeFile('datefa-default-test1.log')();
+ removeFile('datefa-default-test2.log')();
+ removeFile('datefa-default-test3.log')();
+ removeFile('datefa-default-test4.log')();
+ },
+
+ 'should only add one `exit` listener': function (initialCount) {
+ assert.equal(process.listeners('exit').length, initialCount + 1);
+ },
+
+ },
+
+ 'exit listener': {
+ topic: function() {
+ var exitListener
+ , openedFiles = []
+ , dateFileAppender = sandbox.require(
+ '../lib/appenders/dateFile',
+ {
+ globals: {
+ process: {
+ on: function(evt, listener) {
+ exitListener = listener;
+ }
+ }
+ },
+ requires: {
+ '../streams': {
+ DateRollingFileStream: function(filename) {
+ openedFiles.push(filename);
+
+ this.end = function() {
+ openedFiles.shift();
+ };
+ }
+ }
+ }
+ }
+ );
+ for (var i=0; i < 5; i += 1) {
+ dateFileAppender.appender('test' + i);
+ }
+ assert.isNotEmpty(openedFiles);
+ exitListener();
+ return openedFiles;
+ },
+ 'should close all open files': function(openedFiles) {
+ assert.isEmpty(openedFiles);
+ }
+ },
+
+ 'with default settings': {
+ topic: function() {
+ var that = this,
+ testFile = path.join(__dirname, 'date-appender-default.log'),
+ appender = require('../lib/appenders/dateFile').appender(testFile),
+ logger = log4js.getLogger('default-settings');
+ log4js.clearAppenders();
+ log4js.addAppender(appender, 'default-settings');
+
+ logger.info("This should be in the file.");
+
+ // give the file system a moment to flush before reading back
+ setTimeout(function() {
+ fs.readFile(testFile, "utf8", that.callback);
+ }, 100);
+
+ },
+ teardown: removeFile('date-appender-default.log'),
+
+ 'should write to the file': function(contents) {
+ assert.include(contents, 'This should be in the file');
+ },
+
+ 'should use the basic layout': function(contents) {
+ assert.match(
+ contents,
+ /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
+ );
+ }
+ }
+
+ }
+}).addBatch({
+ 'configure': {
+ 'with dateFileAppender': {
+ topic: function() {
+ var log4js = require('../lib/log4js')
+ , logger;
+ //this config file defines one file appender (to ./date-file-test.log)
+ //and sets the log level for "tests" to WARN
+ log4js.configure('test/with-dateFile.json');
+ logger = log4js.getLogger('tests');
+ logger.info('this should not be written to the file');
+ logger.warn('this should be written to the file');
+
+ fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', this.callback);
+ },
+ teardown: removeFile('date-file-test.log'),
+
+ 'should load appender configuration from a json file': function(err, contents) {
+ if (err) {
+ throw err;
+ }
+ assert.include(contents, 'this should be written to the file' + EOL);
+ assert.equal(contents.indexOf('this should not be written to the file'), -1);
+ }
+ },
+ 'with options.alwaysIncludePattern': {
+ topic: function() {
+ var self = this
+ , log4js = require('../lib/log4js')
+ , format = require('../lib/date_format')
+ , logger
+ , options = {
+ "appenders": [
+ {
+ "category": "tests",
+ "type": "dateFile",
+ "filename": "test/date-file-test",
+ "pattern": "-from-MM-dd.log",
+ "alwaysIncludePattern": true,
+ "layout": {
+ "type": "messagePassThrough"
+ }
+ }
+ ]
+ }
+ , thisTime = format.asString(options.appenders[0].pattern, new Date());
+ // pre-seed the dated file so we can detect accidental truncation
+ fs.writeFileSync(
+ path.join(__dirname, 'date-file-test' + thisTime),
+ "this is existing data" + EOL,
+ 'utf8'
+ );
+ log4js.clearAppenders();
+ log4js.configure(options);
+ logger = log4js.getLogger('tests');
+ logger.warn('this should be written to the file with the appended date');
+ this.teardown = removeFile('date-file-test' + thisTime);
+ //wait for filesystem to catch up
+ setTimeout(function() {
+ fs.readFile(path.join(__dirname, 'date-file-test' + thisTime), 'utf8', self.callback);
+ }, 100);
+ },
+ 'should create file with the correct pattern': function(contents) {
+ assert.include(contents, 'this should be written to the file with the appended date');
+ },
+ 'should not overwrite the file on open (bug found in issue #132)': function(contents) {
+ assert.include(contents, 'this is existing data');
+ }
+ },
+ 'with cwd option': {
+ topic: function () {
+ var fileOpened,
+ appender = sandbox.require(
+ '../lib/appenders/dateFile',
+ { requires:
+ { '../streams':
+ { DateRollingFileStream:
+ function(file) {
+ fileOpened = file;
+ return {
+ on: function() {},
+ end: function() {}
+ };
+ }
+ }
+ }
+ }
+ );
+ appender.configure(
+ {
+ filename: "whatever.log",
+ maxLogSize: 10
+ },
+ { cwd: '/absolute/path/to' }
+ );
+ return fileOpened;
+ },
+ 'should prepend options.cwd to config.filename': function (fileOpened) {
+ var expected = path.sep + path.join("absolute", "path", "to", "whatever.log");
+ assert.equal(fileOpened, expected);
+ }
+ }
+
+ }
+}).exportTo(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/date_format-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/date_format-test.js
new file mode 100644
index 00000000..04adb08b
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/date_format-test.js
@@ -0,0 +1,58 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, dateFormat = require('../lib/date_format');
+
+// Fixed reference date for all format assertions:
+// 11 Jan 2010 14:31:30.005 local time (Date months are 0-indexed).
+function createFixedDate() {
+ return new Date(2010, 0, 11, 14, 31, 30, 5);
+}
+
+// Checks dateFormat.asString() against the fixed reference date for the
+// built-in patterns and a custom pattern. Timezone-dependent vows stub
+// getTimezoneOffset (and shift the minutes accordingly) so the expected
+// strings are stable regardless of where the tests run.
+vows.describe('date_format').addBatch({
+ 'Date extensions': {
+ topic: createFixedDate,
+ 'should format a date as string using a pattern': function(date) {
+ assert.equal(
+ dateFormat.asString(dateFormat.DATETIME_FORMAT, date),
+ "11 01 2010 14:31:30.005"
+ );
+ },
+ 'should default to the ISO8601 format': function(date) {
+ assert.equal(
+ dateFormat.asString(date),
+ '2010-01-11 14:31:30.005'
+ );
+ },
+ 'should provide a ISO8601 with timezone offset format': function() {
+ var date = createFixedDate();
+ date.setMinutes(date.getMinutes() - date.getTimezoneOffset() - 660);
+ date.getTimezoneOffset = function() { return -660; };
+ assert.equal(
+ dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date),
+ "2010-01-11T14:31:30+1100"
+ );
+ date = createFixedDate();
+ date.setMinutes(date.getMinutes() - date.getTimezoneOffset() + 120);
+ date.getTimezoneOffset = function() { return 120; };
+ assert.equal(
+ dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date),
+ "2010-01-11T14:31:30-0200"
+ );
+
+ },
+ 'should provide a just-the-time format': function(date) {
+ assert.equal(
+ dateFormat.asString(dateFormat.ABSOLUTETIME_FORMAT, date),
+ '14:31:30.005'
+ );
+ },
+ 'should provide a custom format': function() {
+ var date = createFixedDate();
+ date.setMinutes(date.getMinutes() - date.getTimezoneOffset() + 120);
+ date.getTimezoneOffset = function() { return 120; };
+ assert.equal(
+ dateFormat.asString("O.SSS.ss.mm.hh.dd.MM.yy", date),
+ '-0200.005.30.31.14.11.01.10'
+ );
+ }
+ }
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/debug-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/debug-test.js
new file mode 100644
index 00000000..92dd915b
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/debug-test.js
@@ -0,0 +1,72 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, sandbox = require('sandboxed-module')
+// Captures the most recent console.error call (format string + two args)
+// so vows can inspect what the debug helper wrote.
+, fakeConsole = {
+ error: function(format, label, message) {
+ this.logged = [ format, label, message ];
+ }
+}
+// Builds the sandbox `globals` object for ../lib/debug with NODE_DEBUG
+// set to the given value and console replaced by fakeConsole.
+, globals = function(debugValue) {
+ return {
+ process: {
+ env: {
+ 'NODE_DEBUG': debugValue
+ }
+ },
+ console: fakeConsole
+ };
+};
+
+// The debug helper should write to console.error only when NODE_DEBUG
+// includes 'log4js'; these contexts sandbox ../lib/debug with different
+// NODE_DEBUG values and inspect what fakeConsole captured.
+vows.describe('../lib/debug').addBatch({
+ 'when NODE_DEBUG is set to log4js': {
+ topic: function() {
+ var debug = sandbox.require(
+ '../lib/debug',
+ { 'globals': globals('log4js') }
+ );
+
+ fakeConsole.logged = [];
+ debug('cheese')('biscuits');
+ return fakeConsole.logged;
+ },
+ 'it should log to console.error': function(logged) {
+ assert.equal(logged[0], 'LOG4JS: (%s) %s');
+ assert.equal(logged[1], 'cheese');
+ assert.equal(logged[2], 'biscuits');
+ }
+ },
+
+ 'when NODE_DEBUG is set to not log4js': {
+ topic: function() {
+ var debug = sandbox.require(
+ '../lib/debug',
+ { globals: globals('other_module') }
+ );
+
+ fakeConsole.logged = [];
+ debug('cheese')('biscuits');
+ return fakeConsole.logged;
+ },
+ 'it should not log to console.error': function(logged) {
+ assert.equal(logged.length, 0);
+ }
+ },
+
+ 'when NODE_DEBUG is not set': {
+ topic: function() {
+ var debug = sandbox.require(
+ '../lib/debug',
+ { globals: globals(null) }
+ );
+
+ fakeConsole.logged = [];
+ debug('cheese')('biscuits');
+ return fakeConsole.logged;
+ },
+ 'it should not log to console.error': function(logged) {
+ assert.equal(logged.length, 0);
+ }
+ }
+
+}).exportTo(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/fileAppender-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/fileAppender-test.js
new file mode 100644
index 00000000..007dbbd5
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/fileAppender-test.js
@@ -0,0 +1,442 @@
+"use strict";
+var vows = require('vows')
+, fs = require('fs')
+, path = require('path')
+, sandbox = require('sandboxed-module')
+, log4js = require('../lib/log4js')
+, assert = require('assert')
+, zlib = require('zlib')
+, EOL = require('os').EOL || '\n';
+
+log4js.clearAppenders();
+
+// Synchronously deletes `filename`, swallowing any error — deliberate
+// best-effort cleanup before tests create fresh log files.
+function remove(filename) {
+ try {
+ fs.unlinkSync(filename);
+ } catch (e) {
+ //doesn't really matter if it failed
+ }
+}
+
+vows.describe('log4js fileAppender').addBatch({
+ 'adding multiple fileAppenders': {
+ topic: function () {
+ var listenersCount = process.listeners('exit').length
+ , logger = log4js.getLogger('default-settings')
+ , count = 5, logfile;
+
+ while (count--) {
+ logfile = path.join(__dirname, '/fa-default-test' + count + '.log');
+ log4js.addAppender(require('../lib/appenders/file').appender(logfile), 'default-settings');
+ }
+
+ return listenersCount;
+ },
+
+ 'does not add more than one `exit` listeners': function (initialCount) {
+ assert.ok(process.listeners('exit').length <= initialCount + 1);
+ }
+ },
+
+ 'exit listener': {
+ topic: function() {
+ var exitListener
+ , openedFiles = []
+ , fileAppender = sandbox.require(
+ '../lib/appenders/file',
+ {
+ globals: {
+ process: {
+ on: function(evt, listener) {
+ exitListener = listener;
+ }
+ }
+ },
+ requires: {
+ '../streams': {
+ RollingFileStream: function(filename) {
+ openedFiles.push(filename);
+
+ this.end = function() {
+ openedFiles.shift();
+ };
+
+ this.on = function() {};
+ }
+ }
+ }
+ }
+ );
+ for (var i=0; i < 5; i += 1) {
+ fileAppender.appender('test' + i, null, 100);
+ }
+ assert.isNotEmpty(openedFiles);
+ exitListener();
+ return openedFiles;
+ },
+ 'should close all open files': function(openedFiles) {
+ assert.isEmpty(openedFiles);
+ }
+ },
+
+ 'with default fileAppender settings': {
+ topic: function() {
+ var that = this
+ , testFile = path.join(__dirname, '/fa-default-test.log')
+ , logger = log4js.getLogger('default-settings');
+ remove(testFile);
+
+ log4js.clearAppenders();
+ log4js.addAppender(require('../lib/appenders/file').appender(testFile), 'default-settings');
+
+ logger.info("This should be in the file.");
+
+ setTimeout(function() {
+ fs.readFile(testFile, "utf8", that.callback);
+ }, 100);
+ },
+ 'should write log messages to the file': function (err, fileContents) {
+ assert.include(fileContents, "This should be in the file." + EOL);
+ },
+ 'log messages should be in the basic layout format': function(err, fileContents) {
+ assert.match(
+ fileContents,
+ /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
+ );
+ }
+ },
+ 'fileAppender subcategories': {
+ topic: function() {
+ var that = this;
+
+ log4js.clearAppenders();
+
+ function addAppender(cat) {
+ var testFile = path.join(
+ __dirname,
+ '/fa-subcategories-test-'+cat.join('-').replace(/\./g, "_")+'.log'
+ );
+ remove(testFile);
+ log4js.addAppender(require('../lib/appenders/file').appender(testFile), cat);
+ return testFile;
+ }
+
+ var file_sub1 = addAppender([ 'sub1']);
+
+ var file_sub1_sub12$sub1_sub13 = addAppender([ 'sub1.sub12', 'sub1.sub13' ]);
+
+ var file_sub1_sub12 = addAppender([ 'sub1.sub12' ]);
+
+
+ var logger_sub1_sub12_sub123 = log4js.getLogger('sub1.sub12.sub123');
+
+ var logger_sub1_sub13_sub133 = log4js.getLogger('sub1.sub13.sub133');
+
+ var logger_sub1_sub14 = log4js.getLogger('sub1.sub14');
+
+ var logger_sub2 = log4js.getLogger('sub2');
+
+
+ logger_sub1_sub12_sub123.info('sub1_sub12_sub123');
+
+ logger_sub1_sub13_sub133.info('sub1_sub13_sub133');
+
+ logger_sub1_sub14.info('sub1_sub14');
+
+ logger_sub2.info('sub2');
+
+
+ setTimeout(function() {
+ that.callback(null, {
+ file_sub1: fs.readFileSync(file_sub1).toString(),
+ file_sub1_sub12$sub1_sub13: fs.readFileSync(file_sub1_sub12$sub1_sub13).toString(),
+ file_sub1_sub12: fs.readFileSync(file_sub1_sub12).toString()
+ });
+ }, 3000);
+ },
+ 'check file contents': function (err, fileContents) {
+
+ // everything but category 'sub2'
+ assert.match(
+ fileContents.file_sub1,
+ /^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123|sub1.sub13.sub133 - sub1_sub13_sub133|sub1.sub14 - sub1_sub14)[\s\S]){3}$/ // jshint ignore:line
+ );
+ assert.ok(
+ fileContents.file_sub1.match(/sub123/) &&
+ fileContents.file_sub1.match(/sub133/) &&
+ fileContents.file_sub1.match(/sub14/)
+ );
+ assert.ok(!fileContents.file_sub1.match(/sub2/));
+
+ // only categories starting with 'sub1.sub12' and 'sub1.sub13'
+ assert.match(
+ fileContents.file_sub1_sub12$sub1_sub13,
+ /^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123|sub1.sub13.sub133 - sub1_sub13_sub133)[\s\S]){2}$/ //jshint ignore:line
+ );
+ assert.ok(
+ fileContents.file_sub1_sub12$sub1_sub13.match(/sub123/) &&
+ fileContents.file_sub1_sub12$sub1_sub13.match(/sub133/)
+ );
+ assert.ok(!fileContents.file_sub1_sub12$sub1_sub13.match(/sub14|sub2/));
+
+ // only categories starting with 'sub1.sub12'
+ assert.match(
+ fileContents.file_sub1_sub12,
+ /^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123)[\s\S]){1}$/ //jshint ignore:line
+ );
+ assert.ok(!fileContents.file_sub1_sub12.match(/sub14|sub2|sub13/));
+
+ }
+ },
+ 'with a max file size and no backups': {
+ topic: function() {
+ var testFile = path.join(__dirname, '/fa-maxFileSize-test.log')
+ , logger = log4js.getLogger('max-file-size')
+ , that = this;
+ remove(testFile);
+ remove(testFile + '.1');
+ //log file of 100 bytes maximum, no backups
+ log4js.clearAppenders();
+ log4js.addAppender(
+ require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0),
+ 'max-file-size'
+ );
+ logger.info("This is the first log message.");
+ logger.info("This is an intermediate log message.");
+ logger.info("This is the second log message.");
+ //wait for the file system to catch up
+ setTimeout(function() {
+ fs.readFile(testFile, "utf8", that.callback);
+ }, 100);
+ },
+ 'log file should only contain the second message': function(err, fileContents) {
+ assert.include(fileContents, "This is the second log message.");
+ assert.equal(fileContents.indexOf("This is the first log message."), -1);
+ },
+ 'the number of files': {
+ topic: function() {
+ fs.readdir(__dirname, this.callback);
+ },
+ 'starting with the test file name should be two': function(err, files) {
+ //there will always be one backup if you've specified a max log size
+ var logFiles = files.filter(
+ function(file) { return file.indexOf('fa-maxFileSize-test.log') > -1; }
+ );
+ assert.equal(logFiles.length, 2);
+ }
+ }
+ },
+ 'with a max file size and 2 backups': {
+ topic: function() {
+ var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-test.log')
+ , logger = log4js.getLogger('max-file-size-backups');
+ remove(testFile);
+ remove(testFile+'.1');
+ remove(testFile+'.2');
+
+ //log file of 50 bytes maximum, 2 backups
+ log4js.clearAppenders();
+ log4js.addAppender(
+ require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2),
+ 'max-file-size-backups'
+ );
+ logger.info("This is the first log message.");
+ logger.info("This is the second log message.");
+ logger.info("This is the third log message.");
+ logger.info("This is the fourth log message.");
+ var that = this;
+ //give the system a chance to open the stream
+ setTimeout(function() {
+ fs.readdir(__dirname, function(err, files) {
+ if (files) {
+ that.callback(null, files.sort());
+ } else {
+ that.callback(err, files);
+ }
+ });
+ }, 200);
+ },
+ 'the log files': {
+ topic: function(files) {
+ var logFiles = files.filter(
+ function(file) { return file.indexOf('fa-maxFileSize-with-backups-test.log') > -1; }
+ );
+ return logFiles;
+ },
+ 'should be 3': function (files) {
+ assert.equal(files.length, 3);
+ },
+ 'should be named in sequence': function (files) {
+ assert.deepEqual(files, [
+ 'fa-maxFileSize-with-backups-test.log',
+ 'fa-maxFileSize-with-backups-test.log.1',
+ 'fa-maxFileSize-with-backups-test.log.2'
+ ]);
+ },
+ 'and the contents of the first file': {
+ topic: function(logFiles) {
+ fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
+ },
+ 'should be the last log message': function(contents) {
+ assert.include(contents, 'This is the fourth log message.');
+ }
+ },
+ 'and the contents of the second file': {
+ topic: function(logFiles) {
+ fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
+ },
+ 'should be the third log message': function(contents) {
+ assert.include(contents, 'This is the third log message.');
+ }
+ },
+ 'and the contents of the third file': {
+ topic: function(logFiles) {
+ fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
+ },
+ 'should be the second log message': function(contents) {
+ assert.include(contents, 'This is the second log message.');
+ }
+ }
+ }
+ },
+ 'with a max file size and 2 compressed backups': {
+ topic: function() {
+ var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-compressed-test.log')
+ , logger = log4js.getLogger('max-file-size-backups');
+ remove(testFile);
+ remove(testFile+'.1.gz');
+ remove(testFile+'.2.gz');
+
+ //log file of 50 bytes maximum, 2 backups
+ log4js.clearAppenders();
+ log4js.addAppender(
+ require('../lib/appenders/file').appender(
+ testFile, log4js.layouts.basicLayout, 50, 2, true
+ ),
+ 'max-file-size-backups'
+ );
+ logger.info("This is the first log message.");
+ logger.info("This is the second log message.");
+ logger.info("This is the third log message.");
+ logger.info("This is the fourth log message.");
+ var that = this;
+ //give the system a chance to open the stream
+ setTimeout(function() {
+ fs.readdir(__dirname, function(err, files) {
+ if (files) {
+ that.callback(null, files.sort());
+ } else {
+ that.callback(err, files);
+ }
+ });
+ }, 1000);
+ },
+ 'the log files': {
+ topic: function(files) {
+ var logFiles = files.filter(
+ function(file) {
+ return file.indexOf('fa-maxFileSize-with-backups-compressed-test.log') > -1;
+ }
+ );
+ return logFiles;
+ },
+ 'should be 3': function (files) {
+ assert.equal(files.length, 3);
+ },
+ 'should be named in sequence': function (files) {
+ assert.deepEqual(files, [
+ 'fa-maxFileSize-with-backups-compressed-test.log',
+ 'fa-maxFileSize-with-backups-compressed-test.log.1.gz',
+ 'fa-maxFileSize-with-backups-compressed-test.log.2.gz'
+ ]);
+ },
+ 'and the contents of the first file': {
+ topic: function(logFiles) {
+ fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
+ },
+ 'should be the last log message': function(contents) {
+ assert.include(contents, 'This is the fourth log message.');
+ }
+ },
+ 'and the contents of the second file': {
+ topic: function(logFiles) {
+ zlib.gunzip(fs.readFileSync(path.join(__dirname, logFiles[1])), this.callback);
+ },
+ 'should be the third log message': function(contents) {
+ assert.include(contents.toString('utf8'), 'This is the third log message.');
+ }
+ },
+ 'and the contents of the third file': {
+ topic: function(logFiles) {
+ zlib.gunzip(fs.readFileSync(path.join(__dirname, logFiles[2])), this.callback);
+ },
+ 'should be the second log message': function(contents) {
+ assert.include(contents.toString('utf8'), 'This is the second log message.');
+ }
+ }
+ }
+ }
+}).addBatch({
+ 'configure' : {
+ 'with fileAppender': {
+ topic: function() {
+ var log4js = require('../lib/log4js')
+ , logger;
+ //this config file defines one file appender (to ./tmp-tests.log)
+ //and sets the log level for "tests" to WARN
+ log4js.configure('./test/log4js.json');
+ logger = log4js.getLogger('tests');
+ logger.info('this should not be written to the file');
+ logger.warn('this should be written to the file');
+
+ fs.readFile('tmp-tests.log', 'utf8', this.callback);
+ },
+ 'should load appender configuration from a json file': function (err, contents) {
+ assert.include(contents, 'this should be written to the file' + EOL);
+ assert.equal(contents.indexOf('this should not be written to the file'), -1);
+ }
+ }
+ }
+}).addBatch({
+ 'when underlying stream errors': {
+ topic: function() {
+ var consoleArgs
+ , errorHandler
+ , fileAppender = sandbox.require(
+ '../lib/appenders/file',
+ {
+ globals: {
+ console: {
+ error: function() {
+ consoleArgs = Array.prototype.slice.call(arguments);
+ }
+ }
+ },
+ requires: {
+ '../streams': {
+ RollingFileStream: function(filename) {
+
+ this.end = function() {};
+ this.on = function(evt, cb) {
+ if (evt === 'error') {
+ errorHandler = cb;
+ }
+ };
+ }
+ }
+ }
+ }
+ );
+ fileAppender.appender('test1.log', null, 100);
+ errorHandler({ error: 'aargh' });
+ return consoleArgs;
+ },
+ 'should log the error to console.error': function(consoleArgs) {
+ assert.isNotEmpty(consoleArgs);
+ assert.equal(consoleArgs[0], 'log4js.fileAppender - Writing to file %s, error happened ');
+ assert.equal(consoleArgs[1], 'test1.log');
+ assert.equal(consoleArgs[2].error, 'aargh');
+ }
+ }
+
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/fileSyncAppender-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/fileSyncAppender-test.js
new file mode 100644
index 00000000..d6e2b29a
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/fileSyncAppender-test.js
@@ -0,0 +1,185 @@
+"use strict";
+var vows = require('vows')
+, fs = require('fs')
+, path = require('path')
+, sandbox = require('sandboxed-module')
+, log4js = require('../lib/log4js')
+, assert = require('assert')
+, EOL = require('os').EOL || '\n';
+
+log4js.clearAppenders();
+
+function remove(filename) {
+ try {
+ fs.unlinkSync(filename);
+ } catch (e) {
+ //doesn't really matter if it failed
+ }
+}
+
+vows.describe('log4js fileSyncAppender').addBatch({
+ 'with default fileSyncAppender settings': {
+ topic: function() {
+ var that = this
+ , testFile = path.join(__dirname, '/fa-default-sync-test.log')
+ , logger = log4js.getLogger('default-settings');
+ remove(testFile);
+
+ log4js.clearAppenders();
+ log4js.addAppender(
+ require('../lib/appenders/fileSync').appender(testFile),
+ 'default-settings'
+ );
+
+ logger.info("This should be in the file.");
+
+ fs.readFile(testFile, "utf8", that.callback);
+ },
+ 'should write log messages to the file': function (err, fileContents) {
+ assert.include(fileContents, "This should be in the file." + EOL);
+ },
+ 'log messages should be in the basic layout format': function(err, fileContents) {
+ assert.match(
+ fileContents,
+ /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
+ );
+ }
+ },
+ 'with a max file size and no backups': {
+ topic: function() {
+ var testFile = path.join(__dirname, '/fa-maxFileSize-sync-test.log')
+ , logger = log4js.getLogger('max-file-size')
+ , that = this;
+ remove(testFile);
+ remove(testFile + '.1');
+ //log file of 100 bytes maximum, no backups
+ log4js.clearAppenders();
+ log4js.addAppender(
+ require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 100, 0),
+ 'max-file-size'
+ );
+ logger.info("This is the first log message.");
+ logger.info("This is an intermediate log message.");
+ logger.info("This is the second log message.");
+
+ fs.readFile(testFile, "utf8", that.callback);
+ },
+ 'log file should only contain the second message': function (err, fileContents) {
+ assert.include(fileContents, "This is the second log message." + EOL);
+ assert.equal(fileContents.indexOf("This is the first log message."), -1);
+ },
+ 'the number of files': {
+ topic: function() {
+ fs.readdir(__dirname, this.callback);
+ },
+ 'starting with the test file name should be two': function(err, files) {
+ //there will always be one backup if you've specified a max log size
+ var logFiles = files.filter(
+ function(file) { return file.indexOf('fa-maxFileSize-sync-test.log') > -1; }
+ );
+ assert.equal(logFiles.length, 2);
+ }
+ }
+ },
+ 'with a max file size and 2 backups': {
+ topic: function() {
+ var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-sync-test.log')
+ , logger = log4js.getLogger('max-file-size-backups');
+ remove(testFile);
+ remove(testFile+'.1');
+ remove(testFile+'.2');
+
+ //log file of 50 bytes maximum, 2 backups
+ log4js.clearAppenders();
+ log4js.addAppender(
+ require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 50, 2),
+ 'max-file-size-backups'
+ );
+ logger.info("This is the first log message.");
+ logger.info("This is the second log message.");
+ logger.info("This is the third log message.");
+ logger.info("This is the fourth log message.");
+ var that = this;
+
+ fs.readdir(__dirname, function(err, files) {
+ if (files) {
+ that.callback(null, files.sort());
+ } else {
+ that.callback(err, files);
+ }
+ });
+ },
+ 'the log files': {
+ topic: function(files) {
+ var logFiles = files.filter(
+ function(file) { return file.indexOf('fa-maxFileSize-with-backups-sync-test.log') > -1; }
+ );
+ return logFiles;
+ },
+ 'should be 3': function (files) {
+ assert.equal(files.length, 3);
+ },
+ 'should be named in sequence': function (files) {
+ assert.deepEqual(files, [
+ 'fa-maxFileSize-with-backups-sync-test.log',
+ 'fa-maxFileSize-with-backups-sync-test.log.1',
+ 'fa-maxFileSize-with-backups-sync-test.log.2'
+ ]);
+ },
+ 'and the contents of the first file': {
+ topic: function(logFiles) {
+ fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
+ },
+ 'should be the last log message': function(contents) {
+ assert.include(contents, 'This is the fourth log message.');
+ }
+ },
+ 'and the contents of the second file': {
+ topic: function(logFiles) {
+ fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
+ },
+ 'should be the third log message': function(contents) {
+ assert.include(contents, 'This is the third log message.');
+ }
+ },
+ 'and the contents of the third file': {
+ topic: function(logFiles) {
+ fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
+ },
+ 'should be the second log message': function(contents) {
+ assert.include(contents, 'This is the second log message.');
+ }
+ }
+ }
+ }
+}).addBatch({
+ 'configure' : {
+ 'with fileSyncAppender': {
+ topic: function() {
+ var log4js = require('../lib/log4js')
+ , logger;
+ //this config defines one file appender (to ./tmp-sync-tests.log)
+ //and sets the log level for "tests" to WARN
+ log4js.configure({
+ appenders: [{
+ category: "tests",
+ type: "file",
+ filename: "tmp-sync-tests.log",
+ layout: { type: "messagePassThrough" }
+ }],
+
+ levels: { tests: "WARN" }
+ });
+ logger = log4js.getLogger('tests');
+ logger.info('this should not be written to the file');
+ logger.warn('this should be written to the file');
+
+ fs.readFile('tmp-sync-tests.log', 'utf8', this.callback);
+ },
+ 'should load appender configuration from a json file': function(err, contents) {
+ assert.include(contents, 'this should be written to the file' + EOL);
+ assert.equal(contents.indexOf('this should not be written to the file'), -1);
+ }
+ }
+ }
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/gelfAppender-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/gelfAppender-test.js
new file mode 100644
index 00000000..76fb5ea3
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/gelfAppender-test.js
@@ -0,0 +1,257 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, sandbox = require('sandboxed-module')
+, log4js = require('../lib/log4js')
+, realLayouts = require('../lib/layouts')
+, setupLogging = function(options, category, compressedLength) {
+ var fakeDgram = {
+ sent: false,
+ socket: {
+ packetLength: 0,
+ closed: false,
+ close: function() {
+ this.closed = true;
+ },
+ send: function(pkt, offset, pktLength, port, host) {
+ fakeDgram.sent = true;
+ this.packet = pkt;
+ this.offset = offset;
+ this.packetLength = pktLength;
+ this.port = port;
+ this.host = host;
+ }
+ },
+ createSocket: function(type) {
+ this.type = type;
+ return this.socket;
+ }
+ }
+ , fakeZlib = {
+ gzip: function(objectToCompress, callback) {
+ fakeZlib.uncompressed = objectToCompress;
+ if (this.shouldError) {
+ callback({ stack: "oh noes" });
+ return;
+ }
+
+ if (compressedLength) {
+ callback(null, { length: compressedLength });
+ } else {
+ callback(null, "I've been compressed");
+ }
+ }
+ }
+ , exitHandler
+ , fakeConsole = {
+ error: function(message) {
+ this.message = message;
+ }
+ }
+ , fakeLayouts = {
+ layout: function(type, options) {
+ this.type = type;
+ this.options = options;
+ return realLayouts.messagePassThroughLayout;
+ },
+ messagePassThroughLayout: realLayouts.messagePassThroughLayout
+ }
+ , appender = sandbox.require('../lib/appenders/gelf', {
+ requires: {
+ dgram: fakeDgram,
+ zlib: fakeZlib,
+ '../layouts': fakeLayouts
+ },
+ globals: {
+ process: {
+ on: function(evt, handler) {
+ if (evt === 'exit') {
+ exitHandler = handler;
+ }
+ }
+ },
+ console: fakeConsole
+ }
+ });
+
+ log4js.clearAppenders();
+ log4js.addAppender(appender.configure(options || {}), category || "gelf-test");
+ return {
+ dgram: fakeDgram,
+ compress: fakeZlib,
+ exitHandler: exitHandler,
+ console: fakeConsole,
+ layouts: fakeLayouts,
+ logger: log4js.getLogger(category || "gelf-test")
+ };
+};
+
+vows.describe('log4js gelfAppender').addBatch({
+
+ 'with default gelfAppender settings': {
+ topic: function() {
+ var setup = setupLogging();
+ setup.logger.info("This is a test");
+ return setup;
+ },
+ 'the dgram packet': {
+ topic: function(setup) {
+ return setup.dgram;
+ },
+ 'should be sent via udp to the localhost gelf server': function(dgram) {
+ assert.equal(dgram.type, "udp4");
+ assert.equal(dgram.socket.host, "localhost");
+ assert.equal(dgram.socket.port, 12201);
+ assert.equal(dgram.socket.offset, 0);
+ assert.ok(dgram.socket.packetLength > 0, "Received blank message");
+ },
+ 'should be compressed': function(dgram) {
+ assert.equal(dgram.socket.packet, "I've been compressed");
+ }
+ },
+ 'the uncompressed log message': {
+ topic: function(setup) {
+ var message = JSON.parse(setup.compress.uncompressed);
+ return message;
+ },
+ 'should be in the gelf format': function(message) {
+ assert.equal(message.version, '1.1');
+ assert.equal(message.host, require('os').hostname());
+ assert.equal(message.level, 6); //INFO
+ assert.equal(message.short_message, 'This is a test');
+ }
+ }
+ },
+ 'with a message longer than 8k': {
+ topic: function() {
+ var setup = setupLogging(undefined, undefined, 10240);
+ setup.logger.info("Blah.");
+ return setup;
+ },
+ 'the dgram packet': {
+ topic: function(setup) {
+ return setup.dgram;
+ },
+ 'should not be sent': function(dgram) {
+ assert.equal(dgram.sent, false);
+ }
+ }
+ },
+ 'with non-default options': {
+ topic: function() {
+ var setup = setupLogging({
+ host: 'somewhere',
+ port: 12345,
+ hostname: 'cheese',
+ facility: 'nonsense'
+ });
+ setup.logger.debug("Just testing.");
+ return setup;
+ },
+ 'the dgram packet': {
+ topic: function(setup) {
+ return setup.dgram;
+ },
+ 'should pick up the options': function(dgram) {
+ assert.equal(dgram.socket.host, 'somewhere');
+ assert.equal(dgram.socket.port, 12345);
+ }
+ },
+ 'the uncompressed packet': {
+ topic: function(setup) {
+ var message = JSON.parse(setup.compress.uncompressed);
+ return message;
+ },
+ 'should pick up the options': function(message) {
+ assert.equal(message.host, 'cheese');
+ assert.equal(message._facility, 'nonsense');
+ }
+ }
+ },
+
+ 'on process.exit': {
+ topic: function() {
+ var setup = setupLogging();
+ setup.exitHandler();
+ return setup;
+ },
+ 'should close open sockets': function(setup) {
+ assert.isTrue(setup.dgram.socket.closed);
+ }
+ },
+
+ 'on zlib error': {
+ topic: function() {
+ var setup = setupLogging();
+ setup.compress.shouldError = true;
+ setup.logger.info('whatever');
+ return setup;
+ },
+ 'should output to console.error': function(setup) {
+ assert.equal(setup.console.message, 'oh noes');
+ }
+ },
+
+ 'with layout in configuration': {
+ topic: function() {
+ var setup = setupLogging({
+ layout: {
+ type: 'madeuplayout',
+ earlgrey: 'yes, please'
+ }
+ });
+ return setup;
+ },
+ 'should pass options to layout': function(setup) {
+ assert.equal(setup.layouts.type, 'madeuplayout');
+ assert.equal(setup.layouts.options.earlgrey, 'yes, please');
+ }
+ },
+
+ 'with custom fields options': {
+ topic: function() {
+ var setup = setupLogging({
+ host: 'somewhere',
+ port: 12345,
+ hostname: 'cheese',
+ facility: 'nonsense',
+ customFields: {
+ _every1: 'Hello every one',
+ _every2: 'Hello every two'
+ }
+ });
+ var myFields = {
+ GELF: true,
+ _every2: 'Overwritten!',
+ _myField: 'This is my field!'
+ };
+ setup.logger.debug(myFields, "Just testing.");
+ return setup;
+ },
+ 'the dgram packet': {
+ topic: function(setup) {
+ return setup.dgram;
+ },
+ 'should pick up the options': function(dgram) {
+ assert.equal(dgram.socket.host, 'somewhere');
+ assert.equal(dgram.socket.port, 12345);
+ }
+ },
+ 'the uncompressed packet': {
+ topic: function(setup) {
+ var message = JSON.parse(setup.compress.uncompressed);
+ return message;
+ },
+ 'should pick up the options': function(message) {
+ assert.equal(message.host, 'cheese');
+ assert.isUndefined(message.GELF); // make sure flag was removed
+ assert.equal(message._facility, 'nonsense');
+ assert.equal(message._every1, 'Hello every one'); // the default value
+ assert.equal(message._every2, 'Overwritten!'); // the overwritten value
+ assert.equal(message._myField, 'This is my field!'); // the value for this message only
+ assert.equal(message.short_message, 'Just testing.'); // skip the field object
+ }
+ }
+ }
+
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/global-log-level-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/global-log-level-test.js
new file mode 100644
index 00000000..4ccc5832
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/global-log-level-test.js
@@ -0,0 +1,121 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert');
+
+vows.describe('log4js global loglevel').addBatch({
+ 'global loglevel' : {
+ topic: function() {
+ var log4js = require('../lib/log4js');
+ return log4js;
+ },
+
+ 'set global loglevel on creation': function(log4js) {
+ var log1 = log4js.getLogger('log1');
+ var level = 'OFF';
+ if (log1.level.toString() == level) {
+ level = 'TRACE';
+ }
+ assert.notEqual(log1.level.toString(), level);
+
+ log4js.setGlobalLogLevel(level);
+ assert.equal(log1.level.toString(), level);
+
+ var log2 = log4js.getLogger('log2');
+ assert.equal(log2.level.toString(), level);
+ },
+
+ 'global change loglevel': function(log4js) {
+ var log1 = log4js.getLogger('log1');
+ var log2 = log4js.getLogger('log2');
+ var level = 'OFF';
+ if (log1.level.toString() == level) {
+ level = 'TRACE';
+ }
+ assert.notEqual(log1.level.toString(), level);
+
+ log4js.setGlobalLogLevel(level);
+ assert.equal(log1.level.toString(), level);
+ assert.equal(log2.level.toString(), level);
+ },
+
+ 'override loglevel': function(log4js) {
+ var log1 = log4js.getLogger('log1');
+ var log2 = log4js.getLogger('log2');
+ var level = 'OFF';
+ if (log1.level.toString() == level) {
+ level = 'TRACE';
+ }
+ assert.notEqual(log1.level.toString(), level);
+
+ var oldLevel = log1.level.toString();
+ assert.equal(log2.level.toString(), oldLevel);
+
+ log2.setLevel(level);
+ assert.equal(log1.level.toString(), oldLevel);
+ assert.equal(log2.level.toString(), level);
+ assert.notEqual(oldLevel, level);
+
+ log2.removeLevel();
+ assert.equal(log1.level.toString(), oldLevel);
+ assert.equal(log2.level.toString(), oldLevel);
+ },
+
+ 'preload loglevel': function(log4js) {
+ var log1 = log4js.getLogger('log1');
+ var level = 'OFF';
+ if (log1.level.toString() == level) {
+ level = 'TRACE';
+ }
+ assert.notEqual(log1.level.toString(), level);
+
+ var oldLevel = log1.level.toString();
+ log4js.getLogger('log2').setLevel(level);
+
+ assert.equal(log1.level.toString(), oldLevel);
+
+ // get again same logger but as different variable
+ var log2 = log4js.getLogger('log2');
+ assert.equal(log2.level.toString(), level);
+ assert.notEqual(oldLevel, level);
+
+ log2.removeLevel();
+ assert.equal(log1.level.toString(), oldLevel);
+ assert.equal(log2.level.toString(), oldLevel);
+ },
+
+ 'set level on all categories': function(log4js) {
+ // Get 2 loggers
+ var log1 = log4js.getLogger('log1');
+ var log2 = log4js.getLogger('log2');
+
+ // First a test with 2 categories with different levels
+ var config = {
+ 'levels': {
+ 'log1': 'ERROR',
+ 'log2': 'WARN'
+ }
+ };
+ log4js.configure(config);
+
+ // Check if the levels are set correctly
+ assert.equal('ERROR', log1.level.toString());
+ assert.equal('WARN', log2.level.toString());
+
+ log1.removeLevel();
+ log2.removeLevel();
+
+ // Almost identical test, but now we set
+ // level on all categories
+ var config2 = {
+ 'levels': {
+ '[all]': 'DEBUG'
+ }
+ };
+ log4js.configure(config2);
+
+ // Check if the loggers got the DEBUG level
+ assert.equal('DEBUG', log1.level.toString());
+ assert.equal('DEBUG', log2.level.toString());
+ }
+ }
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/hipchatAppender-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/hipchatAppender-test.js
new file mode 100644
index 00000000..4769c3a2
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/hipchatAppender-test.js
@@ -0,0 +1,112 @@
+"use strict";
+var vows = require('vows'),
+ assert = require('assert'),
+ log4js = require('../lib/log4js'),
+ sandbox = require('sandboxed-module');
+
+function setupLogging(category, options) {
+ var lastRequest = {};
+
+ var fakeRequest = function(args, level){
+ lastRequest.notifier = this;
+ lastRequest.body = args[0];
+ lastRequest.callback = args[1];
+ lastRequest.level = level;
+ };
+
+ var fakeHipchatNotifier = {
+ 'make': function(room, token, from, host, notify){
+ return {
+ 'room': room,
+ 'token': token,
+ 'from': from || '',
+ 'host': host || 'api.hipchat.com',
+ 'notify': notify || false,
+ 'setRoom': function(val){ this.room = val; },
+ 'setFrom': function(val){ this.from = val; },
+ 'setHost': function(val){ this.host = val; },
+ 'setNotify': function(val){ this.notify = val; },
+ 'info': function(){ fakeRequest.call(this, arguments, 'info'); },
+ 'warning': function(){ fakeRequest.call(this, arguments, 'warning'); },
+ 'failure': function(){ fakeRequest.call(this, arguments, 'failure'); },
+ 'success': function(){ fakeRequest.call(this, arguments, 'success'); }
+ };
+ }
+ };
+
+ var hipchatModule = sandbox.require('../lib/appenders/hipchat', {
+ requires: {
+ 'hipchat-notifier': fakeHipchatNotifier
+ }
+ });
+ log4js.clearAppenders();
+ log4js.addAppender(hipchatModule.configure(options), category);
+
+ return {
+ logger: log4js.getLogger(category),
+ lastRequest: lastRequest
+ };
+}
+
+vows.describe('HipChat appender').addBatch({
+ 'when logging to HipChat v2 API': {
+ topic: function() {
+ var customCallback = function(err, res, body){ return 'works'; };
+
+ var setup = setupLogging('myCategory', {
+ "type": "hipchat",
+ "hipchat_token": "User_Token_With_Notification_Privs",
+ "hipchat_room": "Room_ID_Or_Name",
+ "hipchat_from": "Log4js_Test",
+ "hipchat_notify": true,
+ "hipchat_host": "hipchat.your-company.tld",
+ "hipchat_response_callback": customCallback
+ });
+ setup.logger.warn('Log event #1');
+ return setup;
+ },
+ 'a request to hipchat_host should be sent': function (topic) {
+ assert.equal(topic.lastRequest.notifier.host, "hipchat.your-company.tld");
+ assert.equal(topic.lastRequest.notifier.notify, true);
+ assert.equal(topic.lastRequest.body, 'Log event #1');
+ assert.equal(topic.lastRequest.level, 'warning');
+ },
+ 'a custom callback to the HipChat response is supported': function(topic) {
+ assert.equal(topic.lastRequest.callback(), 'works');
+ }
+ },
+ 'when missing options': {
+ topic: function() {
+ var setup = setupLogging('myLogger', {
+ "type": "hipchat",
+ });
+ setup.logger.error('Log event #2');
+ return setup;
+ },
+ 'it sets some defaults': function (topic) {
+ assert.equal(topic.lastRequest.notifier.host, "api.hipchat.com");
+ assert.equal(topic.lastRequest.notifier.notify, false);
+ assert.equal(topic.lastRequest.body, 'Log event #2');
+ assert.equal(topic.lastRequest.level, 'failure');
+ }
+ },
+ 'when basicLayout is provided': {
+ topic: function() {
+ var setup = setupLogging('myLogger', {
+ "type": "hipchat",
+ "layout": log4js.layouts.basicLayout
+ });
+ setup.logger.debug('Log event #3');
+ return setup;
+ },
+ 'it should include the timestamp': function (topic) {
+
+ // basicLayout adds [TIMESTAMP] [LEVEL] category - message
+        // e.g. [2016-06-10 11:50:53.819] [DEBUG] myLogger - Log event #3
+
+ assert.match(topic.lastRequest.body, /^\[[^\]]+\] \[[^\]]+\].*Log event \#3$/);
+ assert.equal(topic.lastRequest.level, 'info');
+ }
+ }
+
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/layouts-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/layouts-test.js
new file mode 100644
index 00000000..1b7d2ef0
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/layouts-test.js
@@ -0,0 +1,330 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, os = require('os')
+, semver = require('semver')
+, EOL = os.EOL || '\n';
+
+//used for patternLayout tests.
+function test(args, pattern, value) {
+ var layout = args[0]
+ , event = args[1]
+ , tokens = args[2];
+
+ assert.equal(layout(pattern, tokens)(event), value);
+}
+
+vows.describe('log4js layouts').addBatch({
+ 'colouredLayout': {
+ topic: function() {
+ return require('../lib/layouts').colouredLayout;
+ },
+
+ 'should apply level colour codes to output': function(layout) {
+ var output = layout({
+ data: ["nonsense"],
+ startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
+ categoryName: "cheese",
+ level: {
+ toString: function() { return "ERROR"; }
+ }
+ });
+ assert.equal(output, '\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mnonsense');
+ },
+ 'should support the console.log format for the message': function(layout) {
+ var output = layout({
+ data: ["thing %d", 2],
+ startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
+ categoryName: "cheese",
+ level: {
+ toString: function() { return "ERROR"; }
+ }
+ });
+ assert.equal(output, '\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mthing 2');
+ }
+ },
+
+ 'messagePassThroughLayout': {
+ topic: function() {
+ return require('../lib/layouts').messagePassThroughLayout;
+ },
+ 'should take a logevent and output only the message' : function(layout) {
+ assert.equal(layout({
+ data: ["nonsense"],
+ startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
+ categoryName: "cheese",
+ level: {
+ colour: "green",
+ toString: function() { return "ERROR"; }
+ }
+ }), "nonsense");
+ },
+ 'should support the console.log format for the message' : function(layout) {
+ assert.equal(layout({
+ data: ["thing %d", 1, "cheese"],
+ startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
+ categoryName: "cheese",
+ level : {
+ colour: "green",
+ toString: function() { return "ERROR"; }
+ }
+ }), "thing 1 cheese");
+ },
+ 'should output the first item even if it is not a string': function(layout) {
+ assert.equal(layout({
+ data: [ { thing: 1} ],
+ startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
+ categoryName: "cheese",
+ level: {
+ colour: "green",
+ toString: function() { return "ERROR"; }
+ }
+ }), "{ thing: 1 }");
+ },
+ 'should print the stacks of a passed error objects': function(layout) {
+ assert.isArray(layout({
+ data: [ new Error() ],
+ startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
+ categoryName: "cheese",
+ level: {
+ colour: "green",
+ toString: function() { return "ERROR"; }
+ }
+ }).match(/Error\s+at Object\..*\s+\((.*)test[\\\/]layouts-test\.js\:\d+\:\d+\)\s+at runTest/)
+ , 'regexp did not return a match');
+ },
+ 'with passed augmented errors': {
+ topic: function(layout){
+ var e = new Error("My Unique Error Message");
+ e.augmented = "My Unique attribute value";
+ e.augObj = { at1: "at2" };
+ return layout({
+ data: [ e ],
+ startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
+ categoryName: "cheese",
+ level: {
+ colour: "green",
+ toString: function() { return "ERROR"; }
+ }
+ });
+ },
+ 'should print error the contained error message': function(layoutOutput) {
+ var m = layoutOutput.match(/Error: My Unique Error Message/);
+ assert.isArray(m);
+ },
+ 'should print error augmented string attributes': function(layoutOutput) {
+ var m = layoutOutput.match(/augmented:\s'My Unique attribute value'/);
+ assert.isArray(m);
+ },
+ 'should print error augmented object attributes': function(layoutOutput) {
+ var m = layoutOutput.match(/augObj:\s\{ at1: 'at2' \}/);
+ assert.isArray(m);
+ }
+ }
+
+
+ },
+
+ 'basicLayout': {
+ topic: function() {
+ var layout = require('../lib/layouts').basicLayout,
+ event = {
+ data: ['this is a test'],
+ startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
+ categoryName: "tests",
+ level: {
+ toString: function() { return "DEBUG"; }
+ }
+ };
+ return [layout, event];
+ },
+ 'should take a logevent and output a formatted string': function(args) {
+ var layout = args[0], event = args[1];
+ assert.equal(layout(event), "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test");
+ },
+ 'should output a stacktrace, message if the event has an error attached': function(args) {
+ var i, layout = args[0], event = args[1], output, lines,
+ error = new Error("Some made-up error"),
+ stack = error.stack.split(/\n/);
+
+ event.data = ['this is a test', error];
+ output = layout(event);
+ lines = output.split(/\n/);
+
+ if (semver.satisfies(process.version, '>=6')) {
+ assert.equal(lines.length, stack.length);
+ assert.equal(
+ lines[0],
+ "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error"
+ );
+ for (i = 1; i < stack.length; i++) {
+ assert.equal(lines[i], stack[i]);
+ }
+ } else {
+ assert.equal(lines.length - 1, stack.length);
+ assert.equal(
+ lines[0],
+ "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test [Error: Some made-up error]"
+ );
+ for (i = 1; i < stack.length; i++) {
+ assert.equal(lines[i+2], stack[i+1]);
+ }
+ }
+
+ },
+ 'should output any extra data in the log event as util.inspect strings': function(args) {
+ var layout = args[0], event = args[1], output, lines;
+ event.data = ['this is a test', {
+ name: 'Cheese',
+ message: 'Gorgonzola smells.'
+ }];
+ output = layout(event);
+ assert.equal(
+ output,
+ "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test " +
+ "{ name: 'Cheese', message: 'Gorgonzola smells.' }"
+ );
+ }
+ },
+
+ 'patternLayout': {
+ topic: function() {
+ var event = {
+ data: ['this is a test'],
+ startTime: new Date('2010-12-05T14:18:30.045Z'), //new Date(2010, 11, 5, 14, 18, 30, 45),
+ categoryName: "multiple.levels.of.tests",
+ level: {
+ toString: function() { return "DEBUG"; }
+ }
+ }, layout = require('../lib/layouts').patternLayout
+ , tokens = {
+ testString: 'testStringToken',
+ testFunction: function() { return 'testFunctionToken'; },
+ fnThatUsesLogEvent: function(logEvent) { return logEvent.level.toString(); }
+ };
+
+ //override getTimezoneOffset
+ event.startTime.getTimezoneOffset = function() { return 0; };
+ return [layout, event, tokens];
+ },
+
+ 'should default to "time logLevel loggerName - message"': function(args) {
+ test(args, null, "14:18:30 DEBUG multiple.levels.of.tests - this is a test" + EOL);
+ },
+ '%r should output time only': function(args) {
+ test(args, '%r', '14:18:30');
+ },
+ '%p should output the log level': function(args) {
+ test(args, '%p', 'DEBUG');
+ },
+ '%c should output the log category': function(args) {
+ test(args, '%c', 'multiple.levels.of.tests');
+ },
+ '%m should output the log data': function(args) {
+ test(args, '%m', 'this is a test');
+ },
+ '%n should output a new line': function(args) {
+ test(args, '%n', EOL);
+ },
+ '%h should output hostname' : function(args) {
+ test(args, '%h', os.hostname().toString());
+ },
+ '%z should output pid' : function(args) {
+ test(args, '%z', process.pid);
+ },
+ '%c should handle category names like java-style package names': function(args) {
+ test(args, '%c{1}', 'tests');
+ test(args, '%c{2}', 'of.tests');
+ test(args, '%c{3}', 'levels.of.tests');
+ test(args, '%c{4}', 'multiple.levels.of.tests');
+ test(args, '%c{5}', 'multiple.levels.of.tests');
+ test(args, '%c{99}', 'multiple.levels.of.tests');
+ },
+ '%d should output the date in ISO8601 format': function(args) {
+ test(args, '%d', '2010-12-05 14:18:30.045');
+ },
+ '%d should allow for format specification': function(args) {
+ test(args, '%d{ISO8601_WITH_TZ_OFFSET}', '2010-12-05T14:18:30-0000');
+ test(args, '%d{ISO8601}', '2010-12-05 14:18:30.045');
+ test(args, '%d{ABSOLUTE}', '14:18:30.045');
+ test(args, '%d{DATE}', '05 12 2010 14:18:30.045');
+ test(args, '%d{yy MM dd hh mm ss}', '10 12 05 14 18 30');
+ test(args, '%d{yyyy MM dd}', '2010 12 05');
+ test(args, '%d{yyyy MM dd hh mm ss SSS}', '2010 12 05 14 18 30 045');
+ },
+ '%% should output %': function(args) {
+ test(args, '%%', '%');
+ },
+ 'should output anything not preceded by % as literal': function(args) {
+ test(args, 'blah blah blah', 'blah blah blah');
+ },
+ 'should output the original string if no replacer matches the token': function(args) {
+ test(args, '%a{3}', 'a{3}');
+ },
+ 'should handle complicated patterns': function(args) {
+ test(args,
+ '%m%n %c{2} at %d{ABSOLUTE} cheese %p%n',
+ 'this is a test'+ EOL +' of.tests at 14:18:30.045 cheese DEBUG' + EOL
+ );
+ },
+ 'should truncate fields if specified': function(args) {
+ test(args, '%.4m', 'this');
+ test(args, '%.7m', 'this is');
+ test(args, '%.9m', 'this is a');
+ test(args, '%.14m', 'this is a test');
+ test(args, '%.2919102m', 'this is a test');
+ },
+ 'should pad fields if specified': function(args) {
+ test(args, '%10p', ' DEBUG');
+ test(args, '%8p', ' DEBUG');
+ test(args, '%6p', ' DEBUG');
+ test(args, '%4p', 'DEBUG');
+ test(args, '%-4p', 'DEBUG');
+ test(args, '%-6p', 'DEBUG ');
+ test(args, '%-8p', 'DEBUG ');
+ test(args, '%-10p', 'DEBUG ');
+ },
+ '%[%r%] should output colored time': function(args) {
+ test(args, '%[%r%]', '\x1B[36m14:18:30\x1B[39m');
+ },
+ '%x{testString} should output the string stored in tokens': function(args) {
+ test(args, '%x{testString}', 'testStringToken');
+ },
+ '%x{testFunction} should output the result of the function stored in tokens': function(args) {
+ test(args, '%x{testFunction}', 'testFunctionToken');
+ },
+ '%x{doesNotExist} should output the string stored in tokens': function(args) {
+ test(args, '%x{doesNotExist}', 'null');
+ },
+ '%x{fnThatUsesLogEvent} should be able to use the logEvent': function(args) {
+ test(args, '%x{fnThatUsesLogEvent}', 'DEBUG');
+ },
+ '%x should output the string stored in tokens': function(args) {
+ test(args, '%x', 'null');
+ }
+ },
+ 'layout makers': {
+ topic: require('../lib/layouts'),
+ 'should have a maker for each layout': function(layouts) {
+ assert.ok(layouts.layout("messagePassThrough"));
+ assert.ok(layouts.layout("basic"));
+ assert.ok(layouts.layout("colored"));
+ assert.ok(layouts.layout("coloured"));
+ assert.ok(layouts.layout("pattern"));
+ }
+ },
+ 'add layout': {
+ topic: require('../lib/layouts'),
+ 'should be able to add a layout': function(layouts) {
+ layouts.addLayout('test_layout', function(config){
+ assert.equal(config, 'test_config');
+ return function(logEvent) {
+ return "TEST LAYOUT >"+logEvent.data;
+ };
+ });
+ var serializer = layouts.layout('test_layout', 'test_config');
+ assert.ok(serializer);
+ assert.equal(serializer({data: "INPUT"}), "TEST LAYOUT >INPUT");
+ }
+ }
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/levels-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/levels-test.js
new file mode 100644
index 00000000..df655fd1
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/levels-test.js
@@ -0,0 +1,464 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, levels = require('../lib/levels');
+
+function assertThat(level) {
+ function assertForEach(assertion, test, otherLevels) {
+ otherLevels.forEach(function(other) {
+ assertion.call(assert, test.call(level, other));
+ });
+ }
+
+ return {
+ isLessThanOrEqualTo: function(levels) {
+ assertForEach(assert.isTrue, level.isLessThanOrEqualTo, levels);
+ },
+ isNotLessThanOrEqualTo: function(levels) {
+ assertForEach(assert.isFalse, level.isLessThanOrEqualTo, levels);
+ },
+ isGreaterThanOrEqualTo: function(levels) {
+ assertForEach(assert.isTrue, level.isGreaterThanOrEqualTo, levels);
+ },
+ isNotGreaterThanOrEqualTo: function(levels) {
+ assertForEach(assert.isFalse, level.isGreaterThanOrEqualTo, levels);
+ },
+ isEqualTo: function(levels) {
+ assertForEach(assert.isTrue, level.isEqualTo, levels);
+ },
+ isNotEqualTo: function(levels) {
+ assertForEach(assert.isFalse, level.isEqualTo, levels);
+ }
+ };
+}
+
+vows.describe('levels').addBatch({
+ 'values': {
+ topic: levels,
+ 'should define some levels': function(levels) {
+ assert.isNotNull(levels.ALL);
+ assert.isNotNull(levels.TRACE);
+ assert.isNotNull(levels.DEBUG);
+ assert.isNotNull(levels.INFO);
+ assert.isNotNull(levels.WARN);
+ assert.isNotNull(levels.ERROR);
+ assert.isNotNull(levels.FATAL);
+ assert.isNotNull(levels.MARK);
+ assert.isNotNull(levels.OFF);
+ },
+ 'ALL': {
+ topic: levels.ALL,
+ 'should be less than the other levels': function(all) {
+ assertThat(all).isLessThanOrEqualTo(
+ [
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.MARK,
+ levels.OFF
+ ]
+ );
+ },
+ 'should be greater than no levels': function(all) {
+ assertThat(all).isNotGreaterThanOrEqualTo(
+ [
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.MARK,
+ levels.OFF
+ ]
+ );
+ },
+ 'should only be equal to ALL': function(all) {
+ assertThat(all).isEqualTo([levels.toLevel("ALL")]);
+ assertThat(all).isNotEqualTo(
+ [
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.MARK,
+ levels.OFF
+ ]
+ );
+ }
+ },
+ 'TRACE': {
+ topic: levels.TRACE,
+ 'should be less than DEBUG': function(trace) {
+ assertThat(trace).isLessThanOrEqualTo(
+ [
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.MARK,
+ levels.OFF
+ ]
+ );
+ assertThat(trace).isNotLessThanOrEqualTo([levels.ALL]);
+ },
+ 'should be greater than ALL': function(trace) {
+ assertThat(trace).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
+ assertThat(trace).isNotGreaterThanOrEqualTo(
+ [
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.MARK,
+ levels.OFF
+ ]
+ );
+ },
+ 'should only be equal to TRACE': function(trace) {
+ assertThat(trace).isEqualTo([levels.toLevel("TRACE")]);
+ assertThat(trace).isNotEqualTo(
+ [
+ levels.ALL,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.MARK,
+ levels.OFF
+ ]
+ );
+ }
+ },
+ 'DEBUG': {
+ topic: levels.DEBUG,
+ 'should be less than INFO': function(debug) {
+ assertThat(debug).isLessThanOrEqualTo(
+ [
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.MARK,
+ levels.OFF
+ ]
+ );
+ assertThat(debug).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE]);
+ },
+ 'should be greater than TRACE': function(debug) {
+ assertThat(debug).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
+ assertThat(debug).isNotGreaterThanOrEqualTo(
+ [
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.MARK,
+ levels.OFF
+ ]
+ );
+ },
+ 'should only be equal to DEBUG': function(trace) {
+ assertThat(trace).isEqualTo([levels.toLevel("DEBUG")]);
+ assertThat(trace).isNotEqualTo(
+ [
+ levels.ALL,
+ levels.TRACE,
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.MARK,
+ levels.OFF
+ ]
+ );
+ }
+ },
+ 'INFO': {
+ topic: levels.INFO,
+ 'should be less than WARN': function(info) {
+ assertThat(info).isLessThanOrEqualTo([
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.MARK,
+ levels.OFF
+ ]);
+ assertThat(info).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
+ },
+ 'should be greater than DEBUG': function(info) {
+ assertThat(info).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
+ assertThat(info).isNotGreaterThanOrEqualTo([
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.MARK,
+ levels.OFF
+ ]);
+ },
+ 'should only be equal to INFO': function(trace) {
+ assertThat(trace).isEqualTo([levels.toLevel("INFO")]);
+ assertThat(trace).isNotEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.MARK,
+ levels.OFF
+ ]);
+ }
+ },
+ 'WARN': {
+ topic: levels.WARN,
+ 'should be less than ERROR': function(warn) {
+ assertThat(warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.MARK, levels.OFF]);
+ assertThat(warn).isNotLessThanOrEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO
+ ]);
+ },
+ 'should be greater than INFO': function(warn) {
+ assertThat(warn).isGreaterThanOrEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO
+ ]);
+ assertThat(warn).isNotGreaterThanOrEqualTo([
+ levels.ERROR, levels.FATAL, levels.MARK, levels.OFF
+ ]);
+ },
+ 'should only be equal to WARN': function(trace) {
+ assertThat(trace).isEqualTo([levels.toLevel("WARN")]);
+ assertThat(trace).isNotEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.ERROR,
+ levels.FATAL,
+ levels.OFF
+ ]);
+ }
+ },
+ 'ERROR': {
+ topic: levels.ERROR,
+ 'should be less than FATAL': function(error) {
+ assertThat(error).isLessThanOrEqualTo([levels.FATAL, levels.MARK, levels.OFF]);
+ assertThat(error).isNotLessThanOrEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN
+ ]);
+ },
+ 'should be greater than WARN': function(error) {
+ assertThat(error).isGreaterThanOrEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN
+ ]);
+ assertThat(error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.MARK, levels.OFF]);
+ },
+ 'should only be equal to ERROR': function(trace) {
+ assertThat(trace).isEqualTo([levels.toLevel("ERROR")]);
+ assertThat(trace).isNotEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.FATAL,
+ levels.MARK,
+ levels.OFF
+ ]);
+ }
+ },
+ 'FATAL': {
+ topic: levels.FATAL,
+ 'should be less than OFF': function(fatal) {
+ assertThat(fatal).isLessThanOrEqualTo([levels.MARK, levels.OFF]);
+ assertThat(fatal).isNotLessThanOrEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR
+ ]);
+ },
+ 'should be greater than ERROR': function(fatal) {
+ assertThat(fatal).isGreaterThanOrEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR
+ ]);
+ assertThat(fatal).isNotGreaterThanOrEqualTo([levels.MARK, levels.OFF]);
+ },
+ 'should only be equal to FATAL': function(fatal) {
+ assertThat(fatal).isEqualTo([levels.toLevel("FATAL")]);
+ assertThat(fatal).isNotEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR,
+ levels.MARK,
+ levels.OFF
+ ]);
+ }
+ },
+ 'MARK': {
+ topic: levels.MARK,
+ 'should be less than OFF': function(mark) {
+ assertThat(mark).isLessThanOrEqualTo([levels.OFF]);
+ assertThat(mark).isNotLessThanOrEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.FATAL,
+ levels.ERROR
+ ]);
+ },
+ 'should be greater than FATAL': function(mark) {
+ assertThat(mark).isGreaterThanOrEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL
+ ]);
+ assertThat(mark).isNotGreaterThanOrEqualTo([levels.OFF]);
+ },
+ 'should only be equal to MARK': function(mark) {
+ assertThat(mark).isEqualTo([levels.toLevel("MARK")]);
+ assertThat(mark).isNotEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.OFF
+ ]);
+ }
+ },
+ 'OFF': {
+ topic: levels.OFF,
+ 'should not be less than anything': function(off) {
+ assertThat(off).isNotLessThanOrEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.MARK
+ ]);
+ },
+ 'should be greater than everything': function(off) {
+ assertThat(off).isGreaterThanOrEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.MARK
+ ]);
+ },
+ 'should only be equal to OFF': function(off) {
+ assertThat(off).isEqualTo([levels.toLevel("OFF")]);
+ assertThat(off).isNotEqualTo([
+ levels.ALL,
+ levels.TRACE,
+ levels.DEBUG,
+ levels.INFO,
+ levels.WARN,
+ levels.ERROR,
+ levels.FATAL,
+ levels.MARK
+ ]);
+ }
+ }
+ },
+ 'isGreaterThanOrEqualTo': {
+ topic: levels.INFO,
+ 'should handle string arguments': function(info) {
+ assertThat(info).isGreaterThanOrEqualTo(["all", "trace", "debug"]);
+ assertThat(info).isNotGreaterThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'MARK', 'off']);
+ }
+ },
+ 'isLessThanOrEqualTo': {
+ topic: levels.INFO,
+ 'should handle string arguments': function(info) {
+ assertThat(info).isNotLessThanOrEqualTo(["all", "trace", "debug"]);
+ assertThat(info).isLessThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'MARK', 'off']);
+ }
+ },
+ 'isEqualTo': {
+ topic: levels.INFO,
+ 'should handle string arguments': function(info) {
+ assertThat(info).isEqualTo(["info", "INFO", "iNfO"]);
+ }
+ },
+ 'toLevel': {
+ 'with lowercase argument': {
+ topic: levels.toLevel("debug"),
+ 'should take the string and return the corresponding level': function(level) {
+ assert.equal(level, levels.DEBUG);
+ }
+ },
+ 'with uppercase argument': {
+ topic: levels.toLevel("DEBUG"),
+ 'should take the string and return the corresponding level': function(level) {
+ assert.equal(level, levels.DEBUG);
+ }
+ },
+ 'with varying case': {
+ topic: levels.toLevel("DeBuG"),
+ 'should take the string and return the corresponding level': function(level) {
+ assert.equal(level, levels.DEBUG);
+ }
+ },
+ 'with unrecognised argument': {
+ topic: levels.toLevel("cheese"),
+ 'should return undefined': function(level) {
+ assert.isUndefined(level);
+ }
+ },
+ 'with unrecognised argument and default value': {
+ topic: levels.toLevel("cheese", levels.DEBUG),
+ 'should return default value': function(level) {
+ assert.equal(level, levels.DEBUG);
+ }
+ }
+ }
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/log-abspath-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/log-abspath-test.js
new file mode 100644
index 00000000..5bb64d35
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/log-abspath-test.js
@@ -0,0 +1,77 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, path = require('path')
+, sandbox = require('sandboxed-module');
+
+vows.describe('log4js-abspath').addBatch({
+ 'options': {
+ topic: function() {
+ var appenderOptions,
+ log4js = sandbox.require(
+ '../lib/log4js',
+ { requires:
+ { './appenders/fake':
+ { name: "fake",
+ appender: function() {},
+ configure: function(configuration, options) {
+ appenderOptions = options;
+ return function() {};
+ }
+ }
+ }
+ }
+ ),
+ config = {
+ "appenders": [
+ {
+ "type" : "fake",
+ "filename" : "cheesy-wotsits.log"
+ }
+ ]
+ };
+
+ log4js.configure(config, {
+ cwd: '/absolute/path/to'
+ });
+ return appenderOptions;
+ },
+ 'should be passed to appenders during configuration': function(options) {
+ assert.equal(options.cwd, '/absolute/path/to');
+ }
+ },
+
+ 'file appender': {
+ topic: function() {
+ var fileOpened,
+ fileAppender = sandbox.require(
+ '../lib/appenders/file',
+ { requires:
+ { '../streams':
+ { RollingFileStream:
+ function(file) {
+ fileOpened = file;
+ return {
+ on: function() {},
+ end: function() {}
+ };
+ }
+ }
+ }
+ }
+ );
+ fileAppender.configure(
+ {
+ filename: "whatever.log",
+ maxLogSize: 10
+ },
+ { cwd: '/absolute/path/to' }
+ );
+ return fileOpened;
+ },
+ 'should prepend options.cwd to config.filename': function(fileOpened) {
+ var expected = path.sep + path.join("absolute", "path", "to", "whatever.log");
+ assert.equal(fileOpened, expected);
+ }
+ },
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/log4js.json b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/log4js.json
new file mode 100644
index 00000000..3a4e54a9
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/log4js.json
@@ -0,0 +1,16 @@
+{
+ "appenders": [
+ {
+ "category": "tests",
+ "type": "file",
+ "filename": "tmp-tests.log",
+ "layout": {
+ "type": "messagePassThrough"
+ }
+ }
+ ],
+
+ "levels": {
+ "tests": "WARN"
+ }
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logFacesAppender-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logFacesAppender-test.js
new file mode 100644
index 00000000..3c2d62cd
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logFacesAppender-test.js
@@ -0,0 +1,96 @@
+"use strict";
+var vows = require('vows'),
+ assert = require('assert'),
+ log4js = require('../lib/log4js'),
+ sandbox = require('sandboxed-module');
+
+function setupLogging(category, options) {
+ var udpSent = {};
+
+ var fakeDgram = {
+ createSocket: function (type) {
+ return {
+ send: function(buffer, offset, length, port, host, callback) {
+ udpSent.date = new Date();
+ udpSent.host = host;
+ udpSent.port = port;
+ udpSent.length = length;
+ udpSent.offset = 0;
+ udpSent.buffer = buffer;
+ callback(undefined, length);
+ }
+ };
+ }
+ };
+
+ var lfsModule = sandbox.require('../lib/appenders/logFacesAppender', {
+ requires: {
+ 'dgram': fakeDgram
+ }
+ });
+ log4js.clearAppenders();
+ log4js.addAppender(lfsModule.configure(options), category);
+
+ return {
+ logger: log4js.getLogger(category),
+ results: udpSent
+ };
+}
+
+vows.describe('logFaces UDP appender').addBatch({
+ 'when logging to logFaces UDP receiver': {
+ topic: function() {
+ var setup = setupLogging('myCategory', {
+ "type": "logFacesAppender",
+ "application": "LFS-TEST",
+ "remoteHost": "127.0.0.1",
+ "port": 55201,
+ "layout": {
+ "type": "pattern",
+ "pattern": "%m"
+ }
+ });
+
+ setup.logger.warn('Log event #1');
+ return setup;
+ },
+ 'an UDP packet should be sent': function (topic) {
+ assert.equal(topic.results.host, "127.0.0.1");
+ assert.equal(topic.results.port, 55201);
+ assert.equal(topic.results.offset, 0);
+ var json = JSON.parse(topic.results.buffer.toString());
+ assert.equal(json.a, 'LFS-TEST');
+ assert.equal(json.m, 'Log event #1');
+ assert.equal(json.g, 'myCategory');
+ assert.equal(json.p, 'WARN');
+
+ // Assert timestamp, up to hours resolution.
+ var date = new Date(json.t);
+ assert.equal(
+ date.toISOString().substring(0, 14),
+ topic.results.date.toISOString().substring(0, 14)
+ );
+ }
+ },
+
+ 'when missing options': {
+ topic: function() {
+ var setup = setupLogging('myLogger', {
+ "type": "logFacesAppender",
+ });
+ setup.logger.error('Log event #2');
+ return setup;
+ },
+ 'it sets some defaults': function (topic) {
+ assert.equal(topic.results.host, "127.0.0.1");
+ assert.equal(topic.results.port, 55201);
+
+ var json = JSON.parse(topic.results.buffer.toString());
+ assert.equal(json.a, "");
+ assert.equal(json.m, 'Log event #2');
+ assert.equal(json.g, 'myLogger');
+ assert.equal(json.p, 'ERROR');
+ }
+ }
+
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logLevelFilter-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logLevelFilter-test.js
new file mode 100644
index 00000000..b3deb058
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logLevelFilter-test.js
@@ -0,0 +1,93 @@
+"use strict";
+var vows = require('vows')
+, fs = require('fs')
+, assert = require('assert')
+, os = require('os')
+, EOL = require('os').EOL || '\n';
+
+function remove(filename) {
+ try {
+ fs.unlinkSync(filename);
+ } catch (e) {
+ //doesn't really matter if it failed
+ }
+}
+
+vows.describe('log4js logLevelFilter').addBatch({
+ 'appender': {
+ topic: function() {
+ var log4js = require('../lib/log4js'), logEvents = [], logger;
+ log4js.clearAppenders();
+ log4js.addAppender(
+ require('../lib/appenders/logLevelFilter')
+ .appender(
+ 'ERROR',
+ undefined,
+ function(evt) { logEvents.push(evt); }
+ ),
+ "logLevelTest"
+ );
+
+ logger = log4js.getLogger("logLevelTest");
+ logger.debug('this should not trigger an event');
+ logger.warn('neither should this');
+ logger.error('this should, though');
+ logger.fatal('so should this');
+ return logEvents;
+ },
+ 'should only pass log events greater than or equal to its own level' : function(logEvents) {
+ assert.equal(logEvents.length, 2);
+ assert.equal(logEvents[0].data[0], 'this should, though');
+ assert.equal(logEvents[1].data[0], 'so should this');
+ }
+ },
+
+ 'configure': {
+ topic: function() {
+ var log4js = require('../lib/log4js')
+ , logger;
+
+ remove(__dirname + '/logLevelFilter.log');
+ remove(__dirname + '/logLevelFilter-warnings.log');
+ remove(__dirname + '/logLevelFilter-debugs.log');
+
+ log4js.configure('test/with-logLevelFilter.json');
+ logger = log4js.getLogger("tests");
+ logger.debug('debug');
+ logger.info('info');
+ logger.error('error');
+ logger.warn('warn');
+ logger.debug('debug');
+ logger.trace('trace');
+ //wait for the file system to catch up
+ setTimeout(this.callback, 500);
+ },
+ 'tmp-tests.log': {
+ topic: function() {
+ fs.readFile(__dirname + '/logLevelFilter.log', 'utf8', this.callback);
+ },
+ 'should contain all log messages': function (contents) {
+ var messages = contents.trim().split(EOL);
+ assert.deepEqual(messages, ['debug','info','error','warn','debug','trace']);
+ }
+ },
+ 'tmp-tests-warnings.log': {
+ topic: function() {
+ fs.readFile(__dirname + '/logLevelFilter-warnings.log','utf8',this.callback);
+ },
+ 'should contain only error and warning log messages': function(contents) {
+ var messages = contents.trim().split(EOL);
+ assert.deepEqual(messages, ['error','warn']);
+ }
+ },
+ 'tmp-tests-debugs.log': {
+ topic: function() {
+ fs.readFile(__dirname + '/logLevelFilter-debugs.log','utf8',this.callback);
+ },
+ 'should contain only trace and debug log messages': function(contents) {
+ var messages = contents.trim().split(EOL);
+ assert.deepEqual(messages, ['debug','debug','trace']);
+ }
+ }
+ }
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logger-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logger-test.js
new file mode 100644
index 00000000..0bd29e1c
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logger-test.js
@@ -0,0 +1,81 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, levels = require('../lib/levels')
+, loggerModule = require('../lib/logger')
+, Logger = loggerModule.Logger;
+
+vows.describe('../lib/logger').addBatch({
+ 'constructor with no parameters': {
+ topic: new Logger(),
+ 'should use default category': function(logger) {
+ assert.equal(logger.category, Logger.DEFAULT_CATEGORY);
+ },
+ 'should use TRACE log level': function(logger) {
+ assert.equal(logger.level, levels.TRACE);
+ }
+ },
+
+ 'constructor with category': {
+ topic: new Logger('cheese'),
+ 'should use category': function(logger) {
+ assert.equal(logger.category, 'cheese');
+ },
+ 'should use TRACE log level': function(logger) {
+ assert.equal(logger.level, levels.TRACE);
+ }
+ },
+
+ 'constructor with category and level': {
+ topic: new Logger('cheese', 'debug'),
+ 'should use category': function(logger) {
+ assert.equal(logger.category, 'cheese');
+ },
+ 'should use level': function(logger) {
+ assert.equal(logger.level, levels.DEBUG);
+ }
+ },
+
+ 'isLevelEnabled': {
+ topic: new Logger('cheese', 'info'),
+ 'should provide a level enabled function for all levels': function(logger) {
+ assert.isFunction(logger.isTraceEnabled);
+ assert.isFunction(logger.isDebugEnabled);
+ assert.isFunction(logger.isInfoEnabled);
+ assert.isFunction(logger.isWarnEnabled);
+ assert.isFunction(logger.isErrorEnabled);
+ assert.isFunction(logger.isFatalEnabled);
+ },
+ 'should return the right values': function(logger) {
+ assert.isFalse(logger.isTraceEnabled());
+ assert.isFalse(logger.isDebugEnabled());
+ assert.isTrue(logger.isInfoEnabled());
+ assert.isTrue(logger.isWarnEnabled());
+ assert.isTrue(logger.isErrorEnabled());
+ assert.isTrue(logger.isFatalEnabled());
+ }
+ },
+
+ 'should emit log events': {
+ topic: function() {
+ var events = [],
+ logger = new Logger();
+ logger.addListener('log', function (logEvent) { events.push(logEvent); });
+ logger.debug('Event 1');
+ loggerModule.disableAllLogWrites();
+ logger.debug('Event 2');
+ loggerModule.enableAllLogWrites();
+ logger.debug('Event 3');
+ return events;
+ },
+
+ 'when log writes are enabled': function(events) {
+ assert.equal(events[0].data[0], 'Event 1');
+ },
+
+ 'but not when log writes are disabled': function(events) {
+ assert.equal(events.length, 2);
+ assert.equal(events[1].data[0], 'Event 3');
+ }
+ }
+}).exportTo(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logging-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logging-test.js
new file mode 100644
index 00000000..2d71d426
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logging-test.js
@@ -0,0 +1,636 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, sandbox = require('sandboxed-module');
+
+function setupConsoleTest() {
+ var fakeConsole = {}
+ , logEvents = []
+ , log4js;
+
+ ['trace','debug','log','info','warn','error'].forEach(function(fn) {
+ fakeConsole[fn] = function() {
+ throw new Error("this should not be called.");
+ };
+ });
+
+ log4js = sandbox.require(
+ '../lib/log4js',
+ {
+ globals: {
+ console: fakeConsole
+ }
+ }
+ );
+
+ log4js.clearAppenders();
+ log4js.addAppender(function(evt) {
+ logEvents.push(evt);
+ });
+
+ return { log4js: log4js, logEvents: logEvents, fakeConsole: fakeConsole };
+}
+
+vows.describe('log4js').addBatch({
+
+ 'getBufferedLogger': {
+ topic: function () {
+ var log4js = require('../lib/log4js');
+ log4js.clearAppenders();
+ var logger = log4js.getBufferedLogger('tests');
+ return logger;
+ },
+
+ 'should take a category and return a logger': function (logger) {
+ assert.equal(logger.target.category, 'tests');
+ assert.isFunction(logger.flush);
+ assert.isFunction(logger.trace);
+ assert.isFunction(logger.debug);
+ assert.isFunction(logger.info);
+ assert.isFunction(logger.warn);
+ assert.isFunction(logger.error);
+ assert.isFunction(logger.fatal);
+ },
+
+ 'cache events': {
+ topic: function () {
+ var log4js = require('../lib/log4js');
+ log4js.clearAppenders();
+ var logger = log4js.getBufferedLogger('tests1');
+ var events = [];
+ logger.target.addListener("log", function (logEvent) { events.push(logEvent); });
+ logger.debug("Debug event");
+ logger.trace("Trace event 1");
+ logger.trace("Trace event 2");
+ logger.warn("Warning event");
+ logger.error("Aargh!", new Error("Pants are on fire!"));
+ logger.error(
+ "Simulated CouchDB problem",
+ { err: 127, cause: "incendiary underwear" }
+ );
+ return events;
+ },
+
+ 'should not emit log events if .flush() is not called.': function (events) {
+ assert.equal(events.length, 0);
+ }
+ },
+
+ 'log events after flush() is called': {
+ topic: function () {
+ var log4js = require('../lib/log4js');
+ log4js.clearAppenders();
+ var logger = log4js.getBufferedLogger('tests2');
+ logger.target.setLevel("TRACE");
+ var events = [];
+ logger.target.addListener("log", function (logEvent) { events.push(logEvent); });
+ logger.debug("Debug event");
+ logger.trace("Trace event 1");
+ logger.trace("Trace event 2");
+ logger.warn("Warning event");
+ logger.error("Aargh!", new Error("Pants are on fire!"));
+ logger.error(
+ "Simulated CouchDB problem",
+ { err: 127, cause: "incendiary underwear" }
+ );
+ logger.flush();
+ return events;
+ },
+
+ 'should emit log events when .flush() is called.': function (events) {
+ assert.equal(events.length, 6);
+ }
+ }
+ },
+
+
+ 'getLogger': {
+ topic: function() {
+ var log4js = require('../lib/log4js');
+ log4js.clearAppenders();
+ var logger = log4js.getLogger('tests');
+ logger.setLevel("DEBUG");
+ return logger;
+ },
+
+ 'should take a category and return a logger': function(logger) {
+ assert.equal(logger.category, 'tests');
+ assert.equal(logger.level.toString(), "DEBUG");
+ assert.isFunction(logger.debug);
+ assert.isFunction(logger.info);
+ assert.isFunction(logger.warn);
+ assert.isFunction(logger.error);
+ assert.isFunction(logger.fatal);
+ },
+
+ 'log events' : {
+ topic: function(logger) {
+ var events = [];
+ logger.addListener("log", function (logEvent) { events.push(logEvent); });
+ logger.debug("Debug event");
+ logger.trace("Trace event 1");
+ logger.trace("Trace event 2");
+ logger.warn("Warning event");
+ logger.error("Aargh!", new Error("Pants are on fire!"));
+ logger.error("Simulated CouchDB problem", { err: 127, cause: "incendiary underwear" });
+ return events;
+ },
+
+ 'should emit log events': function(events) {
+ assert.equal(events[0].level.toString(), 'DEBUG');
+ assert.equal(events[0].data[0], 'Debug event');
+ assert.instanceOf(events[0].startTime, Date);
+ },
+
+ 'should not emit events of a lower level': function(events) {
+ assert.equal(events.length, 4);
+ assert.equal(events[1].level.toString(), 'WARN');
+ },
+
+ 'should include the error if passed in': function(events) {
+ assert.instanceOf(events[2].data[1], Error);
+ assert.equal(events[2].data[1].message, 'Pants are on fire!');
+ }
+ }
+ },
+
+ 'when shutdown is called': {
+ topic: function() {
+ var callback = this.callback;
+ var events = {
+ appenderShutdownCalled: false,
+ shutdownCallbackCalled: false
+ },
+ log4js = sandbox.require(
+ '../lib/log4js',
+ {
+ requires: {
+ './appenders/file':
+ {
+ name: "file",
+ appender: function() {},
+ configure: function(configuration) {
+ return function() {};
+ },
+ shutdown: function(cb) {
+ events.appenderShutdownCalled = true;
+ cb();
+ }
+ }
+ }
+ }
+ ),
+ config = { appenders:
+ [ { "type" : "file",
+ "filename" : "cheesy-wotsits.log",
+ "maxLogSize" : 1024,
+ "backups" : 3
+ }
+ ]
+ };
+
+ log4js.configure(config);
+ log4js.shutdown(function shutdownCallback() {
+ events.shutdownCallbackCalled = true;
+ // Re-enable log writing so other tests that use logger are not
+ // affected.
+ require('../lib/logger').enableAllLogWrites();
+ callback(null, events);
+ });
+ },
+
+ 'should invoke appender shutdowns': function(events) {
+ assert.ok(events.appenderShutdownCalled);
+ },
+
+ 'should call callback': function(events) {
+ assert.ok(events.shutdownCallbackCalled);
+ }
+ },
+
+ 'invalid configuration': {
+ 'should throw an exception': function() {
+ assert.throws(function() {
+ require('log4js').configure({ "type": "invalid" });
+ });
+ }
+ },
+
+ 'configuration when passed as object': {
+ topic: function() {
+ var appenderConfig,
+ log4js = sandbox.require(
+ '../lib/log4js',
+ {
+ requires: {
+ './appenders/file':
+ {
+ name: "file",
+ appender: function() {},
+ configure: function(configuration) {
+ appenderConfig = configuration;
+ return function() {};
+ }
+ }
+ }
+ }
+ ),
+ config = { appenders:
+ [ { "type" : "file",
+ "filename" : "cheesy-wotsits.log",
+ "maxLogSize" : 1024,
+ "backups" : 3
+ }
+ ]
+ };
+ log4js.configure(config);
+ return appenderConfig;
+ },
+ 'should be passed to appender config': function(configuration) {
+ assert.equal(configuration.filename, 'cheesy-wotsits.log');
+ }
+ },
+
+ 'configuration that causes an error': {
+ topic: function() {
+ var log4js = sandbox.require(
+ '../lib/log4js',
+ {
+ requires: {
+ './appenders/file':
+ {
+ name: "file",
+ appender: function() {},
+ configure: function(configuration) {
+ throw new Error("oh noes");
+ }
+ }
+ }
+ }
+ ),
+ config = { appenders:
+ [ { "type" : "file",
+ "filename" : "cheesy-wotsits.log",
+ "maxLogSize" : 1024,
+ "backups" : 3
+ }
+ ]
+ };
+ try {
+ log4js.configure(config);
+ } catch (e) {
+ return e;
+ }
+ },
+ 'should wrap error in a meaningful message': function(e) {
+ assert.ok(e.message.indexOf('log4js configuration problem for') > -1);
+ }
+ },
+
+ 'configuration when passed as filename': {
+ topic: function() {
+ var appenderConfig,
+ configFilename,
+ log4js = sandbox.require(
+ '../lib/log4js',
+ { requires:
+ { 'fs':
+ { statSync:
+ function() {
+ return { mtime: Date.now() };
+ },
+ readFileSync:
+ function(filename) {
+ configFilename = filename;
+ return JSON.stringify({
+ appenders: [
+ { type: "file"
+ , filename: "whatever.log"
+ }
+ ]
+ });
+ },
+ readdirSync:
+ function() {
+ return ['file'];
+ }
+ },
+ './appenders/file':
+ { name: "file",
+ appender: function() {},
+ configure: function(configuration) {
+ appenderConfig = configuration;
+ return function() {};
+ }
+ }
+ }
+ }
+ );
+ log4js.configure("/path/to/cheese.json");
+ return [ configFilename, appenderConfig ];
+ },
+ 'should read the config from a file': function(args) {
+ assert.equal(args[0], '/path/to/cheese.json');
+ },
+ 'should pass config to appender': function(args) {
+ assert.equal(args[1].filename, "whatever.log");
+ }
+ },
+
+ 'with no appenders defined' : {
+ topic: function() {
+ var logger,
+ that = this,
+ fakeConsoleAppender = {
+ name: "console",
+ appender: function() {
+ return function(evt) {
+ that.callback(null, evt);
+ };
+ },
+ configure: function() {
+ return fakeConsoleAppender.appender();
+ }
+ },
+ log4js = sandbox.require(
+ '../lib/log4js',
+ {
+ requires: {
+ './appenders/console': fakeConsoleAppender
+ }
+ }
+ );
+ logger = log4js.getLogger("some-logger");
+ logger.debug("This is a test");
+ },
+ 'should default to the console appender': function(evt) {
+ assert.equal(evt.data[0], "This is a test");
+ }
+ },
+
+ 'addAppender' : {
+ topic: function() {
+ var log4js = require('../lib/log4js');
+ log4js.clearAppenders();
+ return log4js;
+ },
+ 'without a category': {
+ 'should register the function as a listener for all loggers': function (log4js) {
+ var appenderEvent,
+ appender = function(evt) { appenderEvent = evt; },
+ logger = log4js.getLogger("tests");
+
+ log4js.addAppender(appender);
+ logger.debug("This is a test");
+ assert.equal(appenderEvent.data[0], "This is a test");
+ assert.equal(appenderEvent.categoryName, "tests");
+ assert.equal(appenderEvent.level.toString(), "DEBUG");
+ },
+ 'if an appender for a category is defined': {
+ 'should register for that category': function (log4js) {
+ var otherEvent,
+ appenderEvent,
+ cheeseLogger;
+
+ log4js.addAppender(function (evt) { appenderEvent = evt; });
+ log4js.addAppender(function (evt) { otherEvent = evt; }, 'cheese');
+
+ cheeseLogger = log4js.getLogger('cheese');
+ cheeseLogger.debug('This is a test');
+ assert.deepEqual(appenderEvent, otherEvent);
+ assert.equal(otherEvent.data[0], 'This is a test');
+ assert.equal(otherEvent.categoryName, 'cheese');
+
+ otherEvent = undefined;
+ appenderEvent = undefined;
+ log4js.getLogger('pants').debug("this should not be propagated to otherEvent");
+ assert.isUndefined(otherEvent);
+ assert.equal(appenderEvent.data[0], "this should not be propagated to otherEvent");
+ }
+ }
+ },
+
+ 'with a category': {
+ 'should only register the function as a listener for that category': function(log4js) {
+ var appenderEvent,
+ appender = function(evt) { appenderEvent = evt; },
+ logger = log4js.getLogger("tests");
+
+ log4js.addAppender(appender, 'tests');
+ logger.debug('this is a category test');
+ assert.equal(appenderEvent.data[0], 'this is a category test');
+
+ appenderEvent = undefined;
+ log4js.getLogger('some other category').debug('Cheese');
+ assert.isUndefined(appenderEvent);
+ }
+ },
+
+ 'with multiple categories': {
+ 'should register the function as a listener for all the categories': function(log4js) {
+ var appenderEvent,
+ appender = function(evt) { appenderEvent = evt; },
+ logger = log4js.getLogger('tests');
+
+ log4js.addAppender(appender, 'tests', 'biscuits');
+
+ logger.debug('this is a test');
+ assert.equal(appenderEvent.data[0], 'this is a test');
+ appenderEvent = undefined;
+
+ var otherLogger = log4js.getLogger('biscuits');
+ otherLogger.debug("mmm... garibaldis");
+ assert.equal(appenderEvent.data[0], "mmm... garibaldis");
+
+ appenderEvent = undefined;
+
+ log4js.getLogger("something else").debug("pants");
+ assert.isUndefined(appenderEvent);
+ },
+ 'should register the function when the list of categories is an array': function(log4js) {
+ var appenderEvent,
+ appender = function(evt) { appenderEvent = evt; };
+
+ log4js.addAppender(appender, ['tests', 'pants']);
+
+ log4js.getLogger('tests').debug('this is a test');
+ assert.equal(appenderEvent.data[0], 'this is a test');
+
+ appenderEvent = undefined;
+
+ log4js.getLogger('pants').debug("big pants");
+ assert.equal(appenderEvent.data[0], "big pants");
+
+ appenderEvent = undefined;
+
+ log4js.getLogger("something else").debug("pants");
+ assert.isUndefined(appenderEvent);
+ }
+ }
+ },
+
+ 'default setup': {
+ topic: function() {
+ var appenderEvents = [],
+ fakeConsole = {
+ 'name': 'console',
+ 'appender': function () {
+ return function(evt) {
+ appenderEvents.push(evt);
+ };
+ },
+ 'configure': function (config) {
+ return fakeConsole.appender();
+ }
+ },
+ globalConsole = {
+ log: function() { }
+ },
+ log4js = sandbox.require(
+ '../lib/log4js',
+ {
+ requires: {
+ './appenders/console': fakeConsole
+ },
+ globals: {
+ console: globalConsole
+ }
+ }
+ ),
+ logger = log4js.getLogger('a-test');
+
+ logger.debug("this is a test");
+ globalConsole.log("this should not be logged");
+
+ return appenderEvents;
+ },
+
+ 'should configure a console appender': function(appenderEvents) {
+ assert.equal(appenderEvents[0].data[0], 'this is a test');
+ },
+
+ 'should not replace console.log with log4js version': function(appenderEvents) {
+ assert.equal(appenderEvents.length, 1);
+ }
+ },
+
+ 'console' : {
+ topic: setupConsoleTest,
+
+ 'when replaceConsole called': {
+ topic: function(test) {
+ test.log4js.replaceConsole();
+
+ test.fakeConsole.log("Some debug message someone put in a module");
+ test.fakeConsole.debug("Some debug");
+ test.fakeConsole.error("An error");
+ test.fakeConsole.info("some info");
+ test.fakeConsole.warn("a warning");
+
+ test.fakeConsole.log("cheese (%s) and biscuits (%s)", "gouda", "garibaldis");
+ test.fakeConsole.log({ lumpy: "tapioca" });
+ test.fakeConsole.log("count %d", 123);
+ test.fakeConsole.log("stringify %j", { lumpy: "tapioca" });
+
+ return test.logEvents;
+ },
+
+ 'should replace console.log methods with log4js ones': function(logEvents) {
+ assert.equal(logEvents.length, 9);
+ assert.equal(logEvents[0].data[0], "Some debug message someone put in a module");
+ assert.equal(logEvents[0].level.toString(), "INFO");
+ assert.equal(logEvents[1].data[0], "Some debug");
+ assert.equal(logEvents[1].level.toString(), "DEBUG");
+ assert.equal(logEvents[2].data[0], "An error");
+ assert.equal(logEvents[2].level.toString(), "ERROR");
+ assert.equal(logEvents[3].data[0], "some info");
+ assert.equal(logEvents[3].level.toString(), "INFO");
+ assert.equal(logEvents[4].data[0], "a warning");
+ assert.equal(logEvents[4].level.toString(), "WARN");
+ assert.equal(logEvents[5].data[0], "cheese (%s) and biscuits (%s)");
+ assert.equal(logEvents[5].data[1], "gouda");
+ assert.equal(logEvents[5].data[2], "garibaldis");
+ }
+ },
+ 'when turned off': {
+ topic: function(test) {
+ test.log4js.restoreConsole();
+ try {
+ test.fakeConsole.log("This should cause the error described in the setup");
+ } catch (e) {
+ return e;
+ }
+ },
+ 'should call the original console methods': function (err) {
+ assert.instanceOf(err, Error);
+ assert.equal(err.message, "this should not be called.");
+ }
+ }
+ },
+ 'console configuration': {
+ topic: setupConsoleTest,
+ 'when disabled': {
+ topic: function(test) {
+ test.log4js.replaceConsole();
+ test.log4js.configure({ replaceConsole: false });
+ try {
+ test.fakeConsole.log("This should cause the error described in the setup");
+ } catch (e) {
+ return e;
+ }
+ },
+ 'should allow for turning off console replacement': function (err) {
+ assert.instanceOf(err, Error);
+ assert.equal(err.message, 'this should not be called.');
+ }
+ },
+ 'when enabled': {
+ topic: function(test) {
+ test.log4js.restoreConsole();
+ test.log4js.configure({ replaceConsole: true });
+ //log4js.configure clears all appenders
+ test.log4js.addAppender(function(evt) {
+ test.logEvents.push(evt);
+ });
+
+ test.fakeConsole.debug("Some debug");
+ return test.logEvents;
+ },
+
+ 'should allow for turning on console replacement': function (logEvents) {
+ assert.equal(logEvents.length, 1);
+ assert.equal(logEvents[0].level.toString(), "DEBUG");
+ assert.equal(logEvents[0].data[0], "Some debug");
+ }
+ }
+ },
+ 'configuration persistence' : {
+ topic: function() {
+ var logEvent,
+ firstLog4js = require('../lib/log4js'),
+ secondLog4js;
+
+ firstLog4js.clearAppenders();
+ firstLog4js.addAppender(function(evt) { logEvent = evt; });
+
+ secondLog4js = require('../lib/log4js');
+ secondLog4js.getLogger().info("This should go to the appender defined in firstLog4js");
+
+ return logEvent;
+ },
+ 'should maintain appenders between requires': function (logEvent) {
+ assert.equal(logEvent.data[0], "This should go to the appender defined in firstLog4js");
+ }
+ },
+
+ 'getDefaultLogger': {
+ topic: function() {
+ return require('../lib/log4js').getDefaultLogger();
+ },
+ 'should return a logger': function(logger) {
+ assert.ok(logger.info);
+ assert.ok(logger.debug);
+ assert.ok(logger.error);
+ }
+ }
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logglyAppender-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logglyAppender-test.js
new file mode 100644
index 00000000..688e43ee
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logglyAppender-test.js
@@ -0,0 +1,110 @@
+"use strict";
+var vows = require('vows')
+ , assert = require('assert')
+ , log4js = require('../lib/log4js')
+ , sandbox = require('sandboxed-module')
+ ;
+
+function setupLogging(category, options) {
+ var msgs = [];
+
+ var fakeLoggly = {
+ createClient: function(options) {
+ return {
+ config: options,
+ log: function(msg, tags) {
+ msgs.push({
+ msg: msg,
+ tags: tags
+ });
+ }
+ };
+ }
+ };
+
+ var fakeLayouts = {
+ layout: function(type, config) {
+ this.type = type;
+ this.config = config;
+ return log4js.layouts.messagePassThroughLayout;
+ },
+ basicLayout: log4js.layouts.basicLayout,
+ messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
+ };
+
+ var fakeConsole = {
+ errors: [],
+ error: function(msg, value) {
+ this.errors.push({ msg: msg, value: value });
+ }
+ };
+
+ var logglyModule = sandbox.require('../lib/appenders/loggly', {
+ requires: {
+ 'loggly': fakeLoggly,
+ '../layouts': fakeLayouts
+ },
+ globals: {
+ console: fakeConsole
+ }
+ });
+
+ log4js.addAppender(logglyModule.configure(options), category);
+
+ return {
+ logger: log4js.getLogger(category),
+ loggly: fakeLoggly,
+ layouts: fakeLayouts,
+ console: fakeConsole,
+ results: msgs
+ };
+}
+
+log4js.clearAppenders();
+
+function setupTaggedLogging() {
+ return setupLogging('loggly', {
+ token: 'your-really-long-input-token',
+ subdomain: 'your-subdomain',
+ tags: ['loggly-tag1', 'loggly-tag2', 'loggly-tagn']
+ });
+}
+
+vows.describe('log4js logglyAppender').addBatch({
+ 'with minimal config': {
+ topic: function() {
+ var setup = setupTaggedLogging();
+ setup.logger.log('trace', 'Log event #1', 'Log 2', { tags: ['tag1', 'tag2'] });
+ return setup;
+ },
+ 'has a results.length of 1': function(topic) {
+ assert.equal(topic.results.length, 1);
+ },
+ 'has a result msg with both args concatenated': function(topic) {
+ assert.equal(topic.results[0].msg.msg, 'Log event #1 Log 2');
+ },
+ 'has a result tags with the arg that contains tags': function(topic) {
+ assert.deepEqual(topic.results[0].tags, ['tag1', 'tag2']);
+ }
+ }
+}).addBatch({
+ 'config with object with tags and other keys': {
+ topic: function() {
+ var setup = setupTaggedLogging();
+
+ // ignore this tags object b/c there are 2 keys
+ setup.logger.log('trace', 'Log event #1', { other: 'other', tags: ['tag1', 'tag2'] });
+ return setup;
+ },
+ 'has a results.length of 1': function(topic) {
+ assert.equal(topic.results.length, 1);
+ },
+ 'has a result msg with the args concatenated': function(topic) {
+ assert.equal(topic.results[0].msg.msg,
+ 'Log event #1 { other: \'other\', tags: [ \'tag1\', \'tag2\' ] }');
+ },
+ 'has a result tags with the arg that contains no tags': function(topic) {
+ assert.deepEqual(topic.results[0].tags, []);
+ }
+ }
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logstashUDP-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logstashUDP-test.js
new file mode 100644
index 00000000..25d356c9
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/logstashUDP-test.js
@@ -0,0 +1,126 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, log4js = require('../lib/log4js')
+, sandbox = require('sandboxed-module')
+;
+
+function setupLogging(category, options) {
+ var udpSent = {};
+
+ var fakeDgram = {
+ createSocket: function (type) {
+ return {
+ send: function(buffer, offset, length, port, host, callback) {
+ udpSent.date = new Date();
+ udpSent.host = host;
+ udpSent.port = port;
+ udpSent.length = length;
+ udpSent.offset = 0;
+ udpSent.buffer = buffer;
+ callback(undefined, length);
+ }
+ };
+ }
+ };
+
+ var logstashModule = sandbox.require('../lib/appenders/logstashUDP', {
+ requires: {
+ 'dgram': fakeDgram
+ }
+ });
+ log4js.clearAppenders();
+ log4js.addAppender(logstashModule.configure(options), category);
+
+ return {
+ logger: log4js.getLogger(category),
+ results: udpSent
+ };
+}
+
+vows.describe('logstashUDP appender').addBatch({
+ 'when logging with logstash via UDP': {
+ topic: function() {
+ var setup = setupLogging('myCategory', {
+ "host": "127.0.0.1",
+ "port": 10001,
+ "type": "logstashUDP",
+ "logType": "myAppType",
+ "category": "myLogger",
+ "fields": {
+ "field1": "value1",
+ "field2": "value2"
+ },
+ "layout": {
+ "type": "pattern",
+ "pattern": "%m"
+ }
+ });
+ setup.logger.log('trace', 'Log event #1');
+ return setup;
+ },
+ 'an UDP packet should be sent': function (topic) {
+ assert.equal(topic.results.host, "127.0.0.1");
+ assert.equal(topic.results.port, 10001);
+ assert.equal(topic.results.offset, 0);
+ var json = JSON.parse(topic.results.buffer.toString());
+ assert.equal(json.type, 'myAppType');
+ var fields = {
+ field1: 'value1',
+ field2: 'value2',
+ level: 'TRACE'
+ };
+ assert.equal(JSON.stringify(json.fields), JSON.stringify(fields));
+ assert.equal(json.message, 'Log event #1');
+ // Assert timestamp, up to hours resolution.
+ var date = new Date(json['@timestamp']);
+ assert.equal(
+ date.toISOString().substring(0, 14),
+ topic.results.date.toISOString().substring(0, 14)
+ );
+ }
+ },
+
+ 'when missing some options': {
+ topic: function() {
+ var setup = setupLogging('myLogger', {
+ "host": "127.0.0.1",
+ "port": 10001,
+ "type": "logstashUDP",
+ "category": "myLogger",
+ "layout": {
+ "type": "pattern",
+ "pattern": "%m"
+ }
+ });
+ setup.logger.log('trace', 'Log event #1');
+ return setup;
+ },
+ 'it sets some defaults': function (topic) {
+ var json = JSON.parse(topic.results.buffer.toString());
+ assert.equal(json.type, 'myLogger');
+ assert.equal(JSON.stringify(json.fields), JSON.stringify({'level': 'TRACE'}));
+ }
+ },
+
+ 'when extra fields provided': {
+ topic: function() {
+ var setup = setupLogging('myLogger', {
+ "host": "127.0.0.1",
+ "port": 10001,
+ "type": "logstashUDP",
+ "category": "myLogger",
+ "layout": {
+ "type": "dummy"
+ }
+ });
+ setup.logger.log('trace', 'Log event #1', {'extra1': 'value1', 'extra2': 'value2'});
+ return setup;
+ },'they should be added to fields structure': function (topic) {
+ var json = JSON.parse(topic.results.buffer.toString());
+ var fields = {'extra1': 'value1', 'extra2': 'value2', 'level': 'TRACE'};
+ assert.equal(JSON.stringify(json.fields), JSON.stringify(fields));
+ }
+ }
+
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/mailgunAppender-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/mailgunAppender-test.js
new file mode 100644
index 00000000..fa3842d8
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/mailgunAppender-test.js
@@ -0,0 +1,190 @@
+"use strict";
+var vows = require('vows');
+var assert = require('assert');
+var log4js = require('../lib/log4js');
+var sandbox = require('sandboxed-module');
+
+function setupLogging(category, options) {
+ var msgs = [];
+
+ var mailgunCredentials = {
+ apiKey: options.apikey,
+ domain: options.domain
+ };
+
+ var fakeMailgun = function (conf) {
+ return {
+ messages: function () {
+ return {
+ config: options,
+ send: function (data, callback) {
+ msgs.push(data);
+ callback(false, {status:"OK"});
+ }
+ };
+ }
+ };
+ };
+
+ var fakeLayouts = {
+ layout: function (type, config) {
+ this.type = type;
+ this.config = config;
+ return log4js.layouts.messagePassThroughLayout;
+ },
+ basicLayout: log4js.layouts.basicLayout,
+ messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
+ };
+
+ var fakeConsole = {
+ errors: [],
+ logs: [],
+ error: function (msg, value) {
+ this.errors.push({msg: msg, value: value});
+ },
+ log: function (msg, value) {
+ this.logs.push({msg: msg, value: value});
+ }
+ };
+
+
+ var mailgunModule = sandbox.require('../lib/appenders/mailgun', {
+ requires: {
+ 'mailgun-js': fakeMailgun,
+ '../layouts': fakeLayouts
+ },
+ globals: {
+ console: fakeConsole
+ }
+ });
+
+
+ log4js.addAppender(mailgunModule.configure(options), category);
+
+ return {
+ logger: log4js.getLogger(category),
+ mailer: fakeMailgun,
+ layouts: fakeLayouts,
+ console: fakeConsole,
+ mails: msgs,
+ credentials: mailgunCredentials
+ };
+}
+
+function checkMessages(result) {
+ for (var i = 0; i < result.mails.length; ++i) {
+ assert.equal(result.mails[i].from, 'sender@domain.com');
+ assert.equal(result.mails[i].to, 'recepient@domain.com');
+ assert.equal(result.mails[i].subject, 'This is subject');
+ assert.ok(new RegExp('.+Log event #' + (i + 1)).test(result.mails[i].text));
+ }
+}
+
+log4js.clearAppenders();
+
+vows.describe('log4js mailgunAppender').addBatch({
+ 'mailgun setup': {
+ topic: setupLogging('mailgun setup', {
+ apikey: 'APIKEY',
+ domain: 'DOMAIN',
+ from: 'sender@domain.com',
+ to: 'recepient@domain.com',
+ subject: 'This is subject'
+ }),
+ 'mailgun credentials should match': function(result){
+ assert.equal(result.credentials.apiKey, 'APIKEY');
+ assert.equal(result.credentials.domain, 'DOMAIN');
+ }
+ },
+
+ 'basic usage': {
+ topic: function(){
+ var setup = setupLogging('basic usage', {
+ apikey: 'APIKEY',
+ domain: 'DOMAIN',
+ from: 'sender@domain.com',
+ to: 'recepient@domain.com',
+ subject: 'This is subject'
+ });
+
+ setup.logger.info("Log event #1");
+ return setup;
+ },
+ 'there should be one message only': function (result) {
+ assert.equal(result.mails.length, 1);
+ },
+ 'message should contain proper data': function (result) {
+ checkMessages(result);
+ }
+ },
+ 'config with layout': {
+ topic: function () {
+ var setup = setupLogging('config with layout', {
+ layout: {
+ type: "tester"
+ }
+ });
+ return setup;
+ },
+ 'should configure layout': function (result) {
+ assert.equal(result.layouts.type, 'tester');
+ }
+ },
+ 'error when sending email': {
+ topic: function () {
+ var setup = setupLogging('separate email for each event', {
+ apikey: 'APIKEY',
+ domain: 'DOMAIN',
+ from: 'sender@domain.com',
+ to: 'recepient@domain.com',
+ subject: 'This is subject'
+ });
+
+ setup.mailer.messages = function () {
+ return {
+ send: function (msg, cb) {
+ cb({msg: "log4js.mailgunAppender - Error happened"}, null);
+ }
+ };
+ };
+
+ setup.logger.info("This will break");
+ return setup.console;
+ },
+ 'should be logged to console': function (cons) {
+ assert.equal(cons.errors.length, 1);
+ assert.equal(cons.errors[0].msg, 'log4js.mailgunAppender - Error happened');
+ }
+ },
+ 'separate email for each event': {
+ topic: function () {
+ var self = this;
+ var setup = setupLogging('separate email for each event', {
+ apikey: 'APIKEY',
+ domain: 'DOMAIN',
+ from: 'sender@domain.com',
+ to: 'recepient@domain.com',
+ subject: 'This is subject'
+ });
+ setTimeout(function () {
+ setup.logger.info('Log event #1');
+ }, 0);
+ setTimeout(function () {
+ setup.logger.info('Log event #2');
+ }, 500);
+ setTimeout(function () {
+ setup.logger.info('Log event #3');
+ }, 1100);
+ setTimeout(function () {
+ self.callback(null, setup);
+ }, 3000);
+ },
+ 'there should be three messages': function (result) {
+ assert.equal(result.mails.length, 3);
+ },
+ 'messages should contain proper data': function (result) {
+ checkMessages(result);
+ }
+ }
+
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/multiprocess-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/multiprocess-test.js
new file mode 100644
index 00000000..d193e836
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/multiprocess-test.js
@@ -0,0 +1,317 @@
+"use strict";
+var vows = require('vows')
+, sandbox = require('sandboxed-module')
+, assert = require('assert')
+;
+
+function makeFakeNet() {
+ return {
+ logEvents: [],
+ data: [],
+ cbs: {},
+ createConnectionCalled: 0,
+ fakeAppender: function(logEvent) {
+ this.logEvents.push(logEvent);
+ },
+ createConnection: function(port, host) {
+ var fakeNet = this;
+ this.port = port;
+ this.host = host;
+ this.createConnectionCalled += 1;
+ return {
+ on: function(evt, cb) {
+ fakeNet.cbs[evt] = cb;
+ },
+ write: function(data, encoding) {
+ fakeNet.data.push(data);
+ fakeNet.encoding = encoding;
+ },
+ end: function() {
+ fakeNet.closeCalled = true;
+ }
+ };
+ },
+ createServer: function(cb) {
+ var fakeNet = this;
+ cb({
+ remoteAddress: '1.2.3.4',
+ remotePort: '1234',
+ setEncoding: function(encoding) {
+ fakeNet.encoding = encoding;
+ },
+ on: function(event, cb) {
+ fakeNet.cbs[event] = cb;
+ }
+ });
+
+ return {
+ listen: function(port, host) {
+ fakeNet.port = port;
+ fakeNet.host = host;
+ }
+ };
+ }
+ };
+}
+
+vows.describe('Multiprocess Appender').addBatch({
+ 'worker': {
+ topic: function() {
+ var fakeNet = makeFakeNet(),
+ appender = sandbox.require(
+ '../lib/appenders/multiprocess',
+ {
+ requires: {
+ 'net': fakeNet
+ }
+ }
+ ).appender({ mode: 'worker', loggerPort: 1234, loggerHost: 'pants' });
+
+ //don't need a proper log event for the worker tests
+ appender('before connect');
+ fakeNet.cbs.connect();
+ appender('after connect');
+ fakeNet.cbs.close(true);
+ appender('after error, before connect');
+ fakeNet.cbs.connect();
+ appender('after error, after connect');
+ appender(new Error('Error test'));
+
+ return fakeNet;
+ },
+ 'should open a socket to the loggerPort and loggerHost': function(net) {
+ assert.equal(net.port, 1234);
+ assert.equal(net.host, 'pants');
+ },
+ 'should buffer messages written before socket is connected': function(net) {
+ assert.equal(net.data[0], JSON.stringify('before connect'));
+ },
+ 'should write log messages to socket as json strings with a terminator string': function(net) {
+ assert.equal(net.data[0], JSON.stringify('before connect'));
+ assert.equal(net.data[1], '__LOG4JS__');
+ assert.equal(net.data[2], JSON.stringify('after connect'));
+ assert.equal(net.data[3], '__LOG4JS__');
+ assert.equal(net.encoding, 'utf8');
+ },
+ 'should attempt to re-open the socket on error': function(net) {
+ assert.equal(net.data[4], JSON.stringify('after error, before connect'));
+ assert.equal(net.data[5], '__LOG4JS__');
+ assert.equal(net.data[6], JSON.stringify('after error, after connect'));
+ assert.equal(net.data[7], '__LOG4JS__');
+ assert.equal(net.createConnectionCalled, 2);
+ },
+ 'should serialize an Error correctly': function(net) {
+ assert(
+ JSON.parse(net.data[8]).stack,
+ "Expected:\n\n" + net.data[8] + "\n\n to have a 'stack' property"
+ );
+ var actual = JSON.parse(net.data[8]).stack;
+ var expectedRegex = /^Error: Error test/;
+ assert(
+ actual.match(expectedRegex),
+ "Expected: \n\n " + actual + "\n\n to match " + expectedRegex
+ );
+
+ }
+ },
+ 'worker with timeout': {
+ topic: function() {
+ var fakeNet = makeFakeNet(),
+ appender = sandbox.require(
+ '../lib/appenders/multiprocess',
+ {
+ requires: {
+ 'net': fakeNet
+ }
+ }
+ ).appender({ mode: 'worker' });
+
+ //don't need a proper log event for the worker tests
+ appender('before connect');
+ fakeNet.cbs.connect();
+ appender('after connect');
+ fakeNet.cbs.timeout();
+ appender('after timeout, before close');
+ fakeNet.cbs.close();
+ appender('after close, before connect');
+ fakeNet.cbs.connect();
+ appender('after close, after connect');
+
+ return fakeNet;
+ },
+ 'should attempt to re-open the socket': function(net) {
+ //skipping the __LOG4JS__ separators
+ assert.equal(net.data[0], JSON.stringify('before connect'));
+ assert.equal(net.data[2], JSON.stringify('after connect'));
+ assert.equal(net.data[4], JSON.stringify('after timeout, before close'));
+ assert.equal(net.data[6], JSON.stringify('after close, before connect'));
+ assert.equal(net.data[8], JSON.stringify('after close, after connect'));
+ assert.equal(net.createConnectionCalled, 2);
+ }
+ },
+ 'worker defaults': {
+ topic: function() {
+ var fakeNet = makeFakeNet(),
+ appender = sandbox.require(
+ '../lib/appenders/multiprocess',
+ {
+ requires: {
+ 'net': fakeNet
+ }
+ }
+ ).appender({ mode: 'worker' });
+
+ return fakeNet;
+ },
+ 'should open a socket to localhost:5000': function(net) {
+ assert.equal(net.port, 5000);
+ assert.equal(net.host, 'localhost');
+ }
+ },
+ 'master': {
+ topic: function() {
+ var fakeNet = makeFakeNet(),
+ appender = sandbox.require(
+ '../lib/appenders/multiprocess',
+ {
+ requires: {
+ 'net': fakeNet
+ }
+ }
+ ).appender({ mode: 'master',
+ loggerHost: 'server',
+ loggerPort: 1234,
+ actualAppender: fakeNet.fakeAppender.bind(fakeNet)
+ });
+
+ appender('this should be sent to the actual appender directly');
+
+ return fakeNet;
+ },
+ 'should listen for log messages on loggerPort and loggerHost': function(net) {
+ assert.equal(net.port, 1234);
+ assert.equal(net.host, 'server');
+ },
+ 'should return the underlying appender': function(net) {
+ assert.equal(net.logEvents[0], 'this should be sent to the actual appender directly');
+ },
+ 'when a client connects': {
+ topic: function(net) {
+ var logString = JSON.stringify(
+ { level: { level: 10000, levelStr: 'DEBUG' }
+ , data: ['some debug']}
+ ) + '__LOG4JS__';
+
+ net.cbs.data(
+ JSON.stringify(
+ { level: { level: 40000, levelStr: 'ERROR' }
+ , data: ['an error message'] }
+ ) + '__LOG4JS__'
+ );
+ net.cbs.data(logString.substring(0, 10));
+ net.cbs.data(logString.substring(10));
+ net.cbs.data(logString + logString + logString);
+ net.cbs.end(
+ JSON.stringify(
+ { level: { level: 50000, levelStr: 'FATAL' }
+ , data: ["that's all folks"] }
+ ) + '__LOG4JS__'
+ );
+ net.cbs.data('bad message__LOG4JS__');
+ return net;
+ },
+ 'should parse log messages into log events and send to appender': function(net) {
+ assert.equal(net.logEvents[1].level.toString(), 'ERROR');
+ assert.equal(net.logEvents[1].data[0], 'an error message');
+ assert.equal(net.logEvents[1].remoteAddress, '1.2.3.4');
+ assert.equal(net.logEvents[1].remotePort, '1234');
+ },
+ 'should parse log messages split into multiple chunks': function(net) {
+ assert.equal(net.logEvents[2].level.toString(), 'DEBUG');
+ assert.equal(net.logEvents[2].data[0], 'some debug');
+ assert.equal(net.logEvents[2].remoteAddress, '1.2.3.4');
+ assert.equal(net.logEvents[2].remotePort, '1234');
+ },
+ 'should parse multiple log messages in a single chunk': function(net) {
+ assert.equal(net.logEvents[3].data[0], 'some debug');
+ assert.equal(net.logEvents[4].data[0], 'some debug');
+ assert.equal(net.logEvents[5].data[0], 'some debug');
+ },
+ 'should handle log messages sent as part of end event': function(net) {
+ assert.equal(net.logEvents[6].data[0], "that's all folks");
+ },
+ 'should handle unparseable log messages': function(net) {
+ assert.equal(net.logEvents[7].level.toString(), 'ERROR');
+ assert.equal(net.logEvents[7].categoryName, 'log4js');
+ assert.equal(net.logEvents[7].data[0], 'Unable to parse log:');
+ assert.equal(net.logEvents[7].data[1], 'bad message');
+ }
+ }
+ },
+ 'master defaults': {
+ topic: function() {
+ var fakeNet = makeFakeNet(),
+ appender = sandbox.require(
+ '../lib/appenders/multiprocess',
+ {
+ requires: {
+ 'net': fakeNet
+ }
+ }
+ ).appender({ mode: 'master' });
+
+ return fakeNet;
+ },
+ 'should listen for log messages on localhost:5000': function(net) {
+ assert.equal(net.port, 5000);
+ assert.equal(net.host, 'localhost');
+ }
+ }
+}).addBatch({
+ 'configure': {
+ topic: function() {
+ var results = {}
+ , fakeNet = makeFakeNet()
+ , appender = sandbox.require(
+ '../lib/appenders/multiprocess',
+ {
+ requires: {
+ 'net': fakeNet,
+ '../log4js': {
+ loadAppender: function(app) {
+ results.appenderLoaded = app;
+ },
+ appenderMakers: {
+ 'madeupappender': function(config, options) {
+ results.config = config;
+ results.options = options;
+ }
+ }
+ }
+ }
+ }
+ ).configure(
+ {
+ mode: 'master',
+ appender: {
+ type: 'madeupappender',
+ cheese: 'gouda'
+ }
+ },
+ { crackers: 'jacobs' }
+ );
+
+ return results;
+
+ },
+ 'should load underlying appender for master': function(results) {
+ assert.equal(results.appenderLoaded, 'madeupappender');
+ },
+ 'should pass config to underlying appender': function(results) {
+ assert.equal(results.config.cheese, 'gouda');
+ },
+ 'should pass options to underlying appender': function(results) {
+ assert.equal(results.options.crackers, 'jacobs');
+ }
+ }
+}).exportTo(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/newLevel-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/newLevel-test.js
new file mode 100644
index 00000000..72dece96
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/newLevel-test.js
@@ -0,0 +1,138 @@
+"use strict";
+var vows = require('vows')
+ , assert = require('assert')
+ , Level = require('../lib/levels')
+ , log4js = require('../lib/log4js')
+ , loggerModule = require('../lib/logger')
+ , Logger = loggerModule.Logger;
+
+vows.describe('../lib/logger').addBatch({
+ 'creating a new log level': {
+ topic: function () {
+ Level.forName("DIAG", 6000);
+ return new Logger();
+ },
+
+ 'should export new log level in levels module': function (logger) {
+ assert.isDefined(Level.DIAG);
+ assert.equal(Level.DIAG.levelStr, "DIAG");
+ assert.equal(Level.DIAG.level, 6000);
+ },
+
+ 'should create named function on logger prototype': function(logger) {
+ assert.isFunction(logger.diag);
+ },
+
+ 'should create isLevelEnabled function on logger prototype': function(logger) {
+ assert.isFunction(logger.isDiagEnabled);
+ },
+ },
+
+ 'creating a new log level with underscores': {
+ topic: function () {
+ Level.forName("NEW_LEVEL_OTHER", 6000);
+ return new Logger();
+ },
+
+ 'should export new log level to levels module': function (logger) {
+ assert.isDefined(Level.NEW_LEVEL_OTHER);
+ assert.equal(Level.NEW_LEVEL_OTHER.levelStr, "NEW_LEVEL_OTHER");
+ assert.equal(Level.NEW_LEVEL_OTHER.level, 6000);
+ },
+
+ 'should create named function on logger prototype in camel case': function(logger) {
+ assert.isFunction(logger.newLevelOther);
+ },
+
+ 'should create named isLevelEnabled function on logger prototype in camel case':
+ function(logger) {
+ assert.isFunction(logger.isNewLevelOtherEnabled);
+ }
+ },
+
+ 'creating log events containing newly created log level': {
+ topic: function() {
+ var events = [],
+ logger = new Logger();
+ logger.addListener("log", function (logEvent) { events.push(logEvent); });
+
+ logger.log(Level.forName("LVL1", 6000), "Event 1");
+ logger.log(Level.getLevel("LVL1"), "Event 2");
+ logger.log("LVL1", "Event 3");
+ logger.lvl1("Event 4");
+
+ logger.setLevel(Level.forName("LVL2", 7000));
+ logger.lvl1("Event 5");
+
+ return events;
+ },
+
+ 'should show log events with new log level': function(events) {
+ assert.equal(events[0].level.toString(), "LVL1");
+ assert.equal(events[0].data[0], "Event 1");
+
+ assert.equal(events[1].level.toString(), "LVL1");
+ assert.equal(events[1].data[0], "Event 2");
+
+ assert.equal(events[2].level.toString(), "LVL1");
+ assert.equal(events[2].data[0], "Event 3");
+
+ assert.equal(events[3].level.toString(), "LVL1");
+ assert.equal(events[3].data[0], "Event 4");
+ },
+
+ 'should not be present if min log level is greater than newly created level':
+ function(events) {
+ assert.equal(events.length, 4);
+ }
+ },
+
+ 'creating a new log level with incorrect parameters': {
+ topic: function() {
+ log4js.levels.forName(9000, "FAIL_LEVEL_1");
+ log4js.levels.forName("FAIL_LEVEL_2");
+ return new Logger();
+ },
+
+ 'should fail to create the level': function(logger) {
+ assert.isUndefined(Level.FAIL_LEVEL_1);
+ assert.isUndefined(Level.FAIL_LEVEL_2);
+ }
+ },
+
+ 'calling log with an undefined log level': {
+ topic: function() {
+ var events = [],
+ logger = new Logger();
+ logger.addListener("log", function (logEvent) { events.push(logEvent); });
+
+ logger.log("LEVEL_DOES_NEXT_EXIST", "Event 1");
+ logger.log(Level.forName("LEVEL_DOES_NEXT_EXIST"), "Event 2");
+
+ return events;
+ },
+
+ 'should fallback to the default log level (INFO)': function(events) {
+ assert.equal(events[0].level.toString(), "INFO");
+ assert.equal(events[1].level.toString(), "INFO");
+ }
+ },
+
+ 'creating a new level with an existing level name': {
+ topic: function() {
+ var events = [],
+ logger = new Logger();
+ logger.addListener("log", function (logEvent) { events.push(logEvent); });
+
+ logger.log(log4js.levels.forName("MY_LEVEL", 9000), "Event 1");
+ logger.log(log4js.levels.forName("MY_LEVEL", 8000), "Event 1");
+
+ return events;
+ },
+
+ 'should override the existing log level': function(events) {
+ assert.equal(events[0].level.level, 9000);
+ assert.equal(events[1].level.level, 8000);
+ }
+ }
+}).exportTo(module); \ No newline at end of file
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/nolog-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/nolog-test.js
new file mode 100644
index 00000000..80c3c184
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/nolog-test.js
@@ -0,0 +1,297 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, util = require('util')
+, EE = require('events').EventEmitter
+, levels = require('../lib/levels');
+
+function MockLogger() {
+
+ var that = this;
+ this.messages = [];
+
+ this.log = function(level, message, exception) {
+ that.messages.push({ level: level, message: message });
+ };
+
+ this.isLevelEnabled = function(level) {
+ return level.isGreaterThanOrEqualTo(that.level);
+ };
+
+ this.level = levels.TRACE;
+
+}
+
+function MockRequest(remoteAddr, method, originalUrl) {
+
+ this.socket = { remoteAddress: remoteAddr };
+ this.originalUrl = originalUrl;
+ this.method = method;
+ this.httpVersionMajor = '5';
+ this.httpVersionMinor = '0';
+ this.headers = {};
+}
+
+function MockResponse(statusCode) {
+ var r = this;
+ this.statusCode = statusCode;
+
+ this.end = function(chunk, encoding) {
+ r.emit('finish');
+ };
+}
+util.inherits(MockResponse, EE);
+
+vows.describe('log4js connect logger').addBatch({
+ 'getConnectLoggerModule': {
+ topic: function() {
+ var clm = require('../lib/connect-logger');
+ return clm;
+ },
+
+ 'should return a "connect logger" factory' : function(clm) {
+ assert.isObject(clm);
+ },
+
+ 'nolog String' : {
+ topic: function(clm) {
+ var ml = new MockLogger();
+ var cl = clm.connectLogger(ml, { nolog: "\\.gif" });
+ return {cl: cl, ml: ml};
+ },
+
+ 'check unmatch url request': {
+ topic: function(d){
+ var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
+ var res = new MockResponse(200);
+ var cb = this.callback;
+ d.cl(req, res, function() { });
+ res.end('chunk', 'encoding');
+ setTimeout(function() {
+ cb(null, d.ml.messages);
+ },10);
+ },
+ 'check message': function(messages){
+ assert.isArray(messages);
+ assert.equal(messages.length, 1);
+ assert.ok(levels.INFO.isEqualTo(messages[0].level));
+ assert.include(messages[0].message, 'GET');
+ assert.include(messages[0].message, 'http://url');
+ assert.include(messages[0].message, 'my.remote.addr');
+ assert.include(messages[0].message, '200');
+ messages.pop();
+ }
+ },
+
+ 'check match url request': {
+ topic: function(d) {
+ var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
+ var res = new MockResponse(200);
+ var cb = this.callback;
+ d.cl(req, res, function() { });
+ res.end('chunk', 'encoding');
+ setTimeout(function() {
+ cb(null, d.ml.messages);
+ },10);
+ },
+ 'check message': function(messages) {
+ assert.isArray(messages);
+ assert.equal(messages.length, 0);
+ }
+ }
+ },
+
+ 'nolog Strings' : {
+ topic: function(clm) {
+ var ml = new MockLogger();
+ var cl = clm.connectLogger(ml, {nolog: "\\.gif|\\.jpe?g"});
+ return {cl: cl, ml: ml};
+ },
+
+ 'check unmatch url request (png)': {
+ topic: function(d){
+ var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
+ var res = new MockResponse(200);
+ var cb = this.callback;
+ d.cl(req, res, function() { });
+ res.end('chunk', 'encoding');
+ setTimeout(function() {
+ cb(null, d.ml.messages);
+ }, 10);
+ },
+ 'check message': function(messages){
+ assert.isArray(messages);
+ assert.equal(messages.length, 1);
+ assert.ok(levels.INFO.isEqualTo(messages[0].level));
+ assert.include(messages[0].message, 'GET');
+ assert.include(messages[0].message, 'http://url');
+ assert.include(messages[0].message, 'my.remote.addr');
+ assert.include(messages[0].message, '200');
+ messages.pop();
+ }
+ },
+
+ 'check match url request (gif)': {
+ topic: function(d) {
+ var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
+ var res = new MockResponse(200);
+ var cb = this.callback;
+ d.cl(req, res, function() { });
+ res.end('chunk', 'encoding');
+ setTimeout(function() {
+ cb(null, d.ml.messages);
+ }, 10);
+ },
+ 'check message': function(messages) {
+ assert.isArray(messages);
+ assert.equal(messages.length, 0);
+ }
+ },
+ 'check match url request (jpeg)': {
+ topic: function(d) {
+ var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // gif
+ var res = new MockResponse(200);
+ var cb = this.callback;
+ d.cl(req, res, function() { });
+ res.end('chunk', 'encoding');
+ setTimeout(function() {
+ cb(null, d.ml.messages);
+ }, 10);
+ },
+ 'check message': function(messages) {
+ assert.isArray(messages);
+ assert.equal(messages.length, 0);
+ }
+ }
+ },
+ 'nolog Array<String>' : {
+ topic: function(clm) {
+ var ml = new MockLogger();
+ var cl = clm.connectLogger(ml, {nolog: ["\\.gif", "\\.jpe?g"]});
+ return {cl: cl, ml: ml};
+ },
+
+ 'check unmatch url request (png)': {
+ topic: function(d){
+ var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
+ var res = new MockResponse(200);
+ var cb = this.callback;
+ d.cl(req, res, function() { });
+ res.end('chunk', 'encoding');
+ setTimeout(function() {
+ cb(null, d.ml.messages);
+ }, 10);
+ },
+ 'check message': function(messages){
+ assert.isArray(messages);
+ assert.equal(messages.length, 1);
+ assert.ok(levels.INFO.isEqualTo(messages[0].level));
+ assert.include(messages[0].message, 'GET');
+ assert.include(messages[0].message, 'http://url');
+ assert.include(messages[0].message, 'my.remote.addr');
+ assert.include(messages[0].message, '200');
+ messages.pop();
+ }
+ },
+
+ 'check match url request (gif)': {
+ topic: function(d) {
+ var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
+ var res = new MockResponse(200);
+ var cb = this.callback;
+ d.cl(req, res, function() { });
+ res.end('chunk', 'encoding');
+ setTimeout(function() {
+ cb(null, d.ml.messages);
+ }, 10);
+ },
+ 'check message': function(messages) {
+ assert.isArray(messages);
+ assert.equal(messages.length, 0);
+ }
+ },
+
+ 'check match url request (jpeg)': {
+ topic: function(d) {
+ var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // gif
+ var res = new MockResponse(200);
+ var cb = this.callback;
+ d.cl(req, res, function() { });
+ res.end('chunk', 'encoding');
+ setTimeout(function() {
+ cb(null, d.ml.messages);
+ }, 10);
+ },
+ 'check message': function(messages) {
+ assert.isArray(messages);
+ assert.equal(messages.length, 0);
+ }
+ },
+ },
+ 'nolog RegExp' : {
+ topic: function(clm) {
+ var ml = new MockLogger();
+ var cl = clm.connectLogger(ml, {nolog: /\.gif|\.jpe?g/});
+ return {cl: cl, ml: ml};
+ },
+
+ 'check unmatch url request (png)': {
+ topic: function(d){
+ var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
+ var res = new MockResponse(200);
+ var cb = this.callback;
+ d.cl(req, res, function() { });
+ res.end('chunk', 'encoding');
+ setTimeout(function() {
+ cb(null, d.ml.messages);
+ }, 10);
+ },
+ 'check message': function(messages){
+ assert.isArray(messages);
+ assert.equal(messages.length, 1);
+ assert.ok(levels.INFO.isEqualTo(messages[0].level));
+ assert.include(messages[0].message, 'GET');
+ assert.include(messages[0].message, 'http://url');
+ assert.include(messages[0].message, 'my.remote.addr');
+ assert.include(messages[0].message, '200');
+ messages.pop();
+ }
+ },
+
+ 'check match url request (gif)': {
+ topic: function(d) {
+ var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
+ var res = new MockResponse(200);
+ var cb = this.callback;
+ d.cl(req, res, function() { });
+ res.end('chunk', 'encoding');
+ setTimeout(function() {
+ cb(null, d.ml.messages);
+ }, 10);
+ },
+ 'check message': function(messages) {
+ assert.isArray(messages);
+ assert.equal(messages.length, 0);
+ }
+ },
+
+ 'check match url request (jpeg)': {
+ topic: function(d) {
+ var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // gif
+ var res = new MockResponse(200);
+ var cb = this.callback;
+ d.cl(req, res, function() { });
+ res.end('chunk', 'encoding');
+ setTimeout(function() {
+ cb(null, d.ml.messages);
+ }, 10);
+ },
+ 'check message': function(messages) {
+ assert.isArray(messages);
+ assert.equal(messages.length, 0);
+ }
+ }
+ }
+ }
+
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/reloadConfiguration-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/reloadConfiguration-test.js
new file mode 100644
index 00000000..060f0895
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/reloadConfiguration-test.js
@@ -0,0 +1,340 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, sandbox = require('sandboxed-module');
+
+function setupConsoleTest() {
+ var fakeConsole = {}
+ , logEvents = []
+ , log4js;
+
+ ['trace','debug','log','info','warn','error'].forEach(function(fn) {
+ fakeConsole[fn] = function() {
+ throw new Error("this should not be called.");
+ };
+ });
+
+ log4js = sandbox.require(
+ '../lib/log4js',
+ {
+ globals: {
+ console: fakeConsole
+ }
+ }
+ );
+
+ log4js.clearAppenders();
+ log4js.addAppender(function(evt) {
+ logEvents.push(evt);
+ });
+
+ return { log4js: log4js, logEvents: logEvents, fakeConsole: fakeConsole };
+}
+
+vows.describe('reload configuration').addBatch({
+ 'with config file changing' : {
+ topic: function() {
+ var pathsChecked = [],
+ logEvents = [],
+ logger,
+ modulePath = 'path/to/log4js.json',
+ fakeFS = {
+ lastMtime: Date.now(),
+ config: {
+ appenders: [
+ { type: 'console', layout: { type: 'messagePassThrough' } }
+ ],
+ levels: { 'a-test' : 'INFO' }
+ },
+ readFileSync: function (file, encoding) {
+ assert.equal(file, modulePath);
+ assert.equal(encoding, 'utf8');
+ return JSON.stringify(fakeFS.config);
+ },
+ statSync: function (path) {
+ pathsChecked.push(path);
+ if (path === modulePath) {
+ fakeFS.lastMtime += 1;
+ return { mtime: new Date(fakeFS.lastMtime) };
+ } else {
+ throw new Error("no such file");
+ }
+ }
+ },
+ fakeConsole = {
+ 'name': 'console',
+ 'appender': function () {
+ return function(evt) { logEvents.push(evt); };
+ },
+ 'configure': function (config) {
+ return fakeConsole.appender();
+ }
+ },
+ setIntervalCallback,
+ fakeSetInterval = function(cb, timeout) {
+ setIntervalCallback = cb;
+ },
+ log4js = sandbox.require(
+ '../lib/log4js',
+ {
+ requires: {
+ 'fs': fakeFS,
+ './appenders/console': fakeConsole
+ },
+ globals: {
+ 'console': fakeConsole,
+ 'setInterval' : fakeSetInterval,
+ }
+ }
+ );
+
+ log4js.configure('path/to/log4js.json', { reloadSecs: 30 });
+ logger = log4js.getLogger('a-test');
+ logger.info("info1");
+ logger.debug("debug2 - should be ignored");
+ fakeFS.config.levels['a-test'] = "DEBUG";
+ setIntervalCallback();
+ logger.info("info3");
+ logger.debug("debug4");
+
+ return logEvents;
+ },
+ 'should configure log4js from first log4js.json found': function(logEvents) {
+ assert.equal(logEvents[0].data[0], 'info1');
+ assert.equal(logEvents[1].data[0], 'info3');
+ assert.equal(logEvents[2].data[0], 'debug4');
+ assert.equal(logEvents.length, 3);
+ }
+ },
+
+ 'with config file staying the same' : {
+ topic: function() {
+ var pathsChecked = [],
+ fileRead = 0,
+ logEvents = [],
+ logger,
+ modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
+ mtime = new Date(),
+ fakeFS = {
+ config: {
+ appenders: [
+ { type: 'console', layout: { type: 'messagePassThrough' } }
+ ],
+ levels: { 'a-test' : 'INFO' }
+ },
+ readFileSync: function (file, encoding) {
+ fileRead += 1;
+ assert.isString(file);
+ assert.equal(file, modulePath);
+ assert.equal(encoding, 'utf8');
+ return JSON.stringify(fakeFS.config);
+ },
+ statSync: function (path) {
+ pathsChecked.push(path);
+ if (path === modulePath) {
+ return { mtime: mtime };
+ } else {
+ throw new Error("no such file");
+ }
+ }
+ },
+ fakeConsole = {
+ 'name': 'console',
+ 'appender': function () {
+ return function(evt) { logEvents.push(evt); };
+ },
+ 'configure': function (config) {
+ return fakeConsole.appender();
+ }
+ },
+ setIntervalCallback,
+ fakeSetInterval = function(cb, timeout) {
+ setIntervalCallback = cb;
+ },
+ log4js = sandbox.require(
+ '../lib/log4js',
+ {
+ requires: {
+ 'fs': fakeFS,
+ './appenders/console': fakeConsole
+ },
+ globals: {
+ 'console': fakeConsole,
+ 'setInterval' : fakeSetInterval,
+ }
+ }
+ );
+
+ log4js.configure(modulePath, { reloadSecs: 3 });
+ logger = log4js.getLogger('a-test');
+ logger.info("info1");
+ logger.debug("debug2 - should be ignored");
+ setIntervalCallback();
+ logger.info("info3");
+ logger.debug("debug4");
+
+ return [ pathsChecked, logEvents, modulePath, fileRead ];
+ },
+ 'should only read the configuration file once': function(args) {
+ var fileRead = args[3];
+ assert.equal(fileRead, 1);
+ },
+ 'should configure log4js from first log4js.json found': function(args) {
+ var logEvents = args[1];
+ assert.equal(logEvents.length, 2);
+ assert.equal(logEvents[0].data[0], 'info1');
+ assert.equal(logEvents[1].data[0], 'info3');
+ }
+ },
+
+ 'when config file is removed': {
+ topic: function() {
+ var pathsChecked = [],
+ fileRead = 0,
+ logEvents = [],
+ logger,
+ modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
+ mtime = new Date(),
+ fakeFS = {
+ config: {
+ appenders: [
+ { type: 'console', layout: { type: 'messagePassThrough' } }
+ ],
+ levels: { 'a-test' : 'INFO' }
+ },
+ readFileSync: function (file, encoding) {
+ fileRead += 1;
+ assert.isString(file);
+ assert.equal(file, modulePath);
+ assert.equal(encoding, 'utf8');
+ return JSON.stringify(fakeFS.config);
+ },
+ statSync: function (path) {
+ this.statSync = function() {
+ throw new Error("no such file");
+ };
+ return { mtime: new Date() };
+ }
+ },
+ fakeConsole = {
+ 'name': 'console',
+ 'appender': function () {
+ return function(evt) { logEvents.push(evt); };
+ },
+ 'configure': function (config) {
+ return fakeConsole.appender();
+ }
+ },
+ setIntervalCallback,
+ fakeSetInterval = function(cb, timeout) {
+ setIntervalCallback = cb;
+ },
+ log4js = sandbox.require(
+ '../lib/log4js',
+ {
+ requires: {
+ 'fs': fakeFS,
+ './appenders/console': fakeConsole
+ },
+ globals: {
+ 'console': fakeConsole,
+ 'setInterval' : fakeSetInterval,
+ }
+ }
+ );
+
+ log4js.configure(modulePath, { reloadSecs: 3 });
+ logger = log4js.getLogger('a-test');
+ logger.info("info1");
+ logger.debug("debug2 - should be ignored");
+ setIntervalCallback();
+ logger.info("info3");
+ logger.debug("debug4");
+
+ return [ pathsChecked, logEvents, modulePath, fileRead ];
+ },
+ 'should only read the configuration file once': function(args) {
+ var fileRead = args[3];
+ assert.equal(fileRead, 1);
+ },
+ 'should not clear configuration when config file not found': function(args) {
+ var logEvents = args[1];
+ assert.equal(logEvents.length, 3);
+ assert.equal(logEvents[0].data[0], 'info1');
+ assert.equal(logEvents[1].level.toString(), 'WARN');
+ assert.include(logEvents[1].data[0], 'Failed to load configuration file');
+ assert.equal(logEvents[2].data[0], 'info3');
+ }
+ },
+
+ 'when passed an object': {
+ topic: function() {
+ var test = setupConsoleTest();
+ test.log4js.configure({}, { reloadSecs: 30 });
+ return test.logEvents;
+ },
+ 'should log a warning': function(events) {
+ assert.equal(events[0].level.toString(), 'WARN');
+ assert.equal(
+ events[0].data[0],
+ 'Ignoring configuration reload parameter for "object" configuration.'
+ );
+ }
+ },
+
+ 'when called twice with reload options': {
+ topic: function() {
+ var modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
+ fakeFS = {
+ readFileSync: function (file, encoding) {
+ return JSON.stringify({});
+ },
+ statSync: function (path) {
+ return { mtime: new Date() };
+ }
+ },
+ fakeConsole = {
+ 'name': 'console',
+ 'appender': function () {
+ return function(evt) { };
+ },
+ 'configure': function (config) {
+ return fakeConsole.appender();
+ }
+ },
+ setIntervalCallback,
+ intervalCleared = false,
+ clearedId,
+ fakeSetInterval = function(cb, timeout) {
+ setIntervalCallback = cb;
+ return 1234;
+ },
+ log4js = sandbox.require(
+ '../lib/log4js',
+ {
+ requires: {
+ 'fs': fakeFS,
+ './appenders/console': fakeConsole
+ },
+ globals: {
+ 'console': fakeConsole,
+ 'setInterval' : fakeSetInterval,
+ 'clearInterval': function(interval) {
+ intervalCleared = true;
+ clearedId = interval;
+ }
+ }
+ }
+ );
+
+ log4js.configure(modulePath, { reloadSecs: 3 });
+ log4js.configure(modulePath, { reloadSecs: 15 });
+
+ return { cleared: intervalCleared, id: clearedId };
+ },
+ 'should clear the previous interval': function(result) {
+ assert.isTrue(result.cleared);
+ assert.equal(result.id, 1234);
+ }
+ }
+}).exportTo(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/setLevel-asymmetry-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/setLevel-asymmetry-test.js
new file mode 100644
index 00000000..95ba84b4
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/setLevel-asymmetry-test.js
@@ -0,0 +1,100 @@
+"use strict";
+/* jshint loopfunc: true */
+// This test shows an asymmetry between setLevel and isLevelEnabled
+// (in log4js-node@0.4.3 and earlier):
+// 1) setLevel("foo") works, but setLevel(log4js.levels.foo) silently
+// does not (sets the level to TRACE).
+// 2) isLevelEnabled("foo") works as does isLevelEnabled(log4js.levels.foo).
+//
+
+// Basic set up
+var vows = require('vows');
+var assert = require('assert');
+var log4js = require('../lib/log4js');
+var logger = log4js.getLogger('test-setLevel-asymmetry');
+
+// uncomment one or other of the following to see progress (or not) while running the tests
+// var showProgress = console.log;
+var showProgress = function() {};
+
+
+// Define the array of levels as string to iterate over.
+var strLevels= ['Trace','Debug','Info','Warn','Error','Fatal'];
+
+var log4jsLevels =[];
+// populate an array with the log4js.levels that match the strLevels.
+// Would be nice if we could iterate over log4js.levels instead,
+// but log4js.levels.toLevel prevents that for now.
+strLevels.forEach(function(l) {
+ log4jsLevels.push(log4js.levels.toLevel(l));
+});
+
+
+// We are going to iterate over this object's properties to define an exhaustive list of vows.
+var levelTypes = {
+ 'string': strLevels,
+ 'log4js.levels.level': log4jsLevels,
+};
+
+// Set up the basic vows batch for this test
+var batch = {
+ setLevel: {
+ }
+};
+
+showProgress('Populating batch object...');
+
+// Populating the batch object programmatically,
+// as I don't have the patience to manually populate it with
+// the (strLevels.length x levelTypes.length) ^ 2 = 144 possible test combinations
+for (var type in levelTypes) {
+ var context = 'is called with a '+type;
+ var levelsToTest = levelTypes[type];
+ showProgress('Setting up the vows context for '+context);
+
+ batch.setLevel[context]= {};
+ levelsToTest.forEach( function(level) {
+ var subContext = 'of '+level;
+ var log4jsLevel=log4js.levels.toLevel(level.toString());
+
+ showProgress('Setting up the vows sub-context for '+subContext);
+ batch.setLevel[context][subContext] = {topic: level};
+ for (var comparisonType in levelTypes) {
+ levelTypes[comparisonType].forEach(function(comparisonLevel) {
+ var t = type;
+ var ct = comparisonType;
+ var expectedResult = log4jsLevel.isLessThanOrEqualTo(comparisonLevel);
+ var vow = 'isLevelEnabled(' + comparisonLevel +
+ ') called with a ' + comparisonType +
+ ' should return ' + expectedResult;
+ showProgress('Setting up the vows vow for '+vow);
+
+ batch.setLevel[context][subContext][vow] = function(levelToSet) {
+ logger.setLevel(levelToSet);
+ showProgress(
+ '*** Checking setLevel( ' + level +
+ ' ) of type ' + t +
+ ', and isLevelEnabled( ' + comparisonLevel +
+ ' ) of type ' + ct + '. Expecting: ' + expectedResult
+ );
+ assert.equal(
+ logger.isLevelEnabled(comparisonLevel),
+ expectedResult,
+ 'Failed: calling setLevel( ' + level +
+ ' ) with type ' + type +
+ ', isLevelEnabled( ' + comparisonLevel +
+ ' ) of type ' + comparisonType +
+ ' did not return ' + expectedResult
+ );
+ };
+ });
+ }
+ });
+
+}
+
+showProgress('Running tests...');
+
+vows.describe('log4js setLevel asymmetry fix').addBatch(batch).export(module);
+
+
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/slackAppender-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/slackAppender-test.js
new file mode 100644
index 00000000..366bfcd4
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/slackAppender-test.js
@@ -0,0 +1,168 @@
+"use strict";
+var vows = require('vows');
+var assert = require('assert');
+var log4js = require('../lib/log4js');
+var sandbox = require('sandboxed-module');
+
+function setupLogging(category, options) {
+ var msgs = [];
+
+ var slackCredentials = {
+ token: options.token,
+ channel_id: options.channel_id,
+ username: options.username,
+ format: options.format,
+ icon_url: options.icon_url
+ };
+ var fakeSlack = (function (key) {
+ function constructor() {
+ return {
+ options: key,
+ api: function (action, data, callback) {
+ msgs.push(data);
+ callback(false, {status: "sent"});
+ }
+ };
+ }
+
+ return constructor(key);
+ });
+
+ var fakeLayouts = {
+ layout: function (type, config) {
+ this.type = type;
+ this.config = config;
+ return log4js.layouts.messagePassThroughLayout;
+ },
+ basicLayout: log4js.layouts.basicLayout,
+ coloredLayout: log4js.layouts.coloredLayout,
+ messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
+ };
+
+ var fakeConsole = {
+ errors: [],
+ logs: [],
+ error: function (msg, value) {
+ this.errors.push({msg: msg, value: value});
+ },
+ log: function (msg, value) {
+ this.logs.push({msg: msg, value: value});
+ }
+ };
+
+
+ var slackModule = sandbox.require('../lib/appenders/slack', {
+ requires: {
+ 'slack-node': fakeSlack,
+ '../layouts': fakeLayouts
+ },
+ globals: {
+ console: fakeConsole
+ }
+ });
+
+
+ log4js.addAppender(slackModule.configure(options), category);
+
+ return {
+ logger: log4js.getLogger(category),
+ mailer: fakeSlack,
+ layouts: fakeLayouts,
+ console: fakeConsole,
+ messages: msgs,
+ credentials: slackCredentials
+ };
+}
+
+function checkMessages(result) {
+ for (var i = 0; i < result.messages.length; ++i) {
+ assert.equal(result.messages[i].channel, '#CHANNEL');
+ assert.equal(result.messages[i].username, 'USERNAME');
+ assert.ok(new RegExp('.+Log event #' + (i + 1)).test(result.messages[i].text));
+ }
+}
+
+log4js.clearAppenders();
+
+vows.describe('log4js slackAppender').addBatch({
+ 'slack setup': {
+ topic: setupLogging('slack setup', {
+ token: 'TOKEN',
+ channel_id: "#CHANNEL",
+ username: "USERNAME",
+ format: "FORMAT",
+ icon_url: "ICON_URL"
+ }),
+ 'slack credentials should match': function (result) {
+ assert.equal(result.credentials.token, 'TOKEN');
+ assert.equal(result.credentials.channel_id, '#CHANNEL');
+ assert.equal(result.credentials.username, 'USERNAME');
+ assert.equal(result.credentials.format, 'FORMAT');
+ assert.equal(result.credentials.icon_url, 'ICON_URL');
+ }
+ },
+
+ 'basic usage': {
+ topic: function () {
+ var setup = setupLogging('basic usage', {
+ token: 'TOKEN',
+ channel_id: "#CHANNEL",
+ username: "USERNAME",
+ format: "FORMAT",
+ icon_url: "ICON_URL",
+ });
+
+ setup.logger.info("Log event #1");
+ return setup;
+ },
+ 'there should be one message only': function (result) {
+ assert.equal(result.messages.length, 1);
+ },
+ 'message should contain proper data': function (result) {
+ checkMessages(result);
+ }
+ },
+ 'config with layout': {
+ topic: function () {
+ var setup = setupLogging('config with layout', {
+ layout: {
+ type: "tester"
+ }
+ });
+ return setup;
+ },
+ 'should configure layout': function (result) {
+ assert.equal(result.layouts.type, 'tester');
+ }
+ },
+ 'separate notification for each event': {
+ topic: function () {
+ var self = this;
+ var setup = setupLogging('separate notification for each event', {
+ token: 'TOKEN',
+ channel_id: "#CHANNEL",
+ username: "USERNAME",
+ format: "FORMAT",
+ icon_url: "ICON_URL",
+ });
+ setTimeout(function () {
+ setup.logger.info('Log event #1');
+ }, 0);
+ setTimeout(function () {
+ setup.logger.info('Log event #2');
+ }, 500);
+ setTimeout(function () {
+ setup.logger.info('Log event #3');
+ }, 1100);
+ setTimeout(function () {
+ self.callback(null, setup);
+ }, 3000);
+ },
+ 'there should be three messages': function (result) {
+ assert.equal(result.messages.length, 3);
+ },
+ 'messages should contain proper data': function (result) {
+ checkMessages(result);
+ }
+ }
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/smtpAppender-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/smtpAppender-test.js
new file mode 100644
index 00000000..5ebda98a
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/smtpAppender-test.js
@@ -0,0 +1,318 @@
+"use strict";
+var vows = require('vows');
+var assert = require('assert');
+var log4js = require('../lib/log4js');
+var sandbox = require('sandboxed-module');
+
+function setupLogging(category, options) {
+ var msgs = [];
+
+ var fakeMailer = {
+ createTransport: function (name, options) {
+ return {
+ config: options,
+ sendMail: function (msg, callback) {
+ msgs.push(msg);
+ callback(null, true);
+ },
+ close: function () {
+ }
+ };
+ }
+ };
+
+ var fakeLayouts = {
+ layout: function (type, config) {
+ this.type = type;
+ this.config = config;
+ return log4js.layouts.messagePassThroughLayout;
+ },
+ basicLayout: log4js.layouts.basicLayout,
+ messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
+ };
+
+ var fakeConsole = {
+ errors: [],
+ error: function (msg, value) {
+ this.errors.push({msg: msg, value: value});
+ }
+ };
+
+ var fakeTransportPlugin = function () {
+ };
+
+ var smtpModule = sandbox.require('../lib/appenders/smtp', {
+ requires: {
+ 'nodemailer': fakeMailer,
+ 'nodemailer-sendmail-transport': fakeTransportPlugin,
+ 'nodemailer-smtp-transport': fakeTransportPlugin,
+ '../layouts': fakeLayouts
+ },
+ globals: {
+ console: fakeConsole
+ }
+ });
+
+ log4js.addAppender(smtpModule.configure(options), category);
+
+ return {
+ logger: log4js.getLogger(category),
+ mailer: fakeMailer,
+ layouts: fakeLayouts,
+ console: fakeConsole,
+ results: msgs
+ };
+}
+
+function checkMessages(result, sender, subject) {
+ for (var i = 0; i < result.results.length; ++i) {
+ assert.equal(result.results[i].from, sender);
+ assert.equal(result.results[i].to, 'recipient@domain.com');
+ assert.equal(result.results[i].subject, subject ? subject : 'Log event #' + (i + 1));
+ assert.ok(new RegExp('.+Log event #' + (i + 1) + '\n$').test(result.results[i].text));
+ }
+}
+
+log4js.clearAppenders();
+vows.describe('log4js smtpAppender').addBatch({
+ 'minimal config': {
+ topic: function () {
+ var setup = setupLogging('minimal config', {
+ recipients: 'recipient@domain.com',
+ SMTP: {
+ port: 25,
+ auth: {
+ user: 'user@domain.com'
+ }
+ }
+ });
+ setup.logger.info('Log event #1');
+ return setup;
+ },
+ 'there should be one message only': function (result) {
+ assert.equal(result.results.length, 1);
+ },
+ 'message should contain proper data': function (result) {
+ checkMessages(result);
+ }
+ },
+ 'fancy config': {
+ topic: function () {
+ var setup = setupLogging('fancy config', {
+ recipients: 'recipient@domain.com',
+ sender: 'sender@domain.com',
+ subject: 'This is subject',
+ SMTP: {
+ port: 25,
+ auth: {
+ user: 'user@domain.com'
+ }
+ }
+ });
+ setup.logger.info('Log event #1');
+ return setup;
+ },
+ 'there should be one message only': function (result) {
+ assert.equal(result.results.length, 1);
+ },
+ 'message should contain proper data': function (result) {
+ checkMessages(result, 'sender@domain.com', 'This is subject');
+ }
+ },
+ 'config with layout': {
+ topic: function () {
+ var setup = setupLogging('config with layout', {
+ layout: {
+ type: "tester"
+ }
+ });
+ return setup;
+ },
+ 'should configure layout': function (result) {
+ assert.equal(result.layouts.type, 'tester');
+ }
+ },
+ 'separate email for each event': {
+ topic: function () {
+ var self = this;
+ var setup = setupLogging('separate email for each event', {
+ recipients: 'recipient@domain.com',
+ SMTP: {
+ port: 25,
+ auth: {
+ user: 'user@domain.com'
+ }
+ }
+ });
+ setTimeout(function () {
+ setup.logger.info('Log event #1');
+ }, 0);
+ setTimeout(function () {
+ setup.logger.info('Log event #2');
+ }, 500);
+ setTimeout(function () {
+ setup.logger.info('Log event #3');
+ }, 1100);
+ setTimeout(function () {
+ self.callback(null, setup);
+ }, 3000);
+ },
+ 'there should be three messages': function (result) {
+ assert.equal(result.results.length, 3);
+ },
+ 'messages should contain proper data': function (result) {
+ checkMessages(result);
+ }
+ },
+ 'multiple events in one email': {
+ topic: function () {
+ var self = this;
+ var setup = setupLogging('multiple events in one email', {
+ recipients: 'recipient@domain.com',
+ sendInterval: 1,
+ SMTP: {
+ port: 25,
+ auth: {
+ user: 'user@domain.com'
+ }
+ }
+ });
+ setTimeout(function () {
+ setup.logger.info('Log event #1');
+ }, 0);
+ setTimeout(function () {
+ setup.logger.info('Log event #2');
+ }, 100);
+ setTimeout(function () {
+ setup.logger.info('Log event #3');
+ }, 1500);
+ setTimeout(function () {
+ self.callback(null, setup);
+ }, 3000);
+ },
+ 'there should be two messages': function (result) {
+ assert.equal(result.results.length, 2);
+ },
+ 'messages should contain proper data': function (result) {
+ assert.equal(result.results[0].to, 'recipient@domain.com');
+ assert.equal(result.results[0].subject, 'Log event #1');
+ assert.equal(
+ result.results[0].text.match(new RegExp('.+Log event #[1-2]$', 'gm')).length,
+ 2
+ );
+ assert.equal(result.results[1].to, 'recipient@domain.com');
+ assert.equal(result.results[1].subject, 'Log event #3');
+ assert.ok(new RegExp('.+Log event #3\n$').test(result.results[1].text));
+ }
+ },
+ 'error when sending email': {
+ topic: function () {
+ var setup = setupLogging('error when sending email', {
+ recipients: 'recipient@domain.com',
+ sendInterval: 0,
+ SMTP: {port: 25, auth: {user: 'user@domain.com'}}
+ });
+
+ setup.mailer.createTransport = function () {
+ return {
+ sendMail: function (msg, cb) {
+ cb({message: "oh noes"});
+ },
+ close: function () {
+ }
+ };
+ };
+
+ setup.logger.info("This will break");
+ return setup.console;
+ },
+ 'should be logged to console': function (cons) {
+ assert.equal(cons.errors.length, 1);
+ assert.equal(cons.errors[0].msg, "log4js.smtpAppender - Error happened");
+ assert.equal(cons.errors[0].value.message, 'oh noes');
+ }
+ },
+ 'transport full config': {
+ topic: function () {
+ var setup = setupLogging('transport full config', {
+ recipients: 'recipient@domain.com',
+ transport: {
+ plugin: 'sendmail',
+ options: {
+ path: '/usr/sbin/sendmail'
+ }
+ }
+ });
+ setup.logger.info('Log event #1');
+ return setup;
+ },
+ 'there should be one message only': function (result) {
+ assert.equal(result.results.length, 1);
+ },
+ 'message should contain proper data': function (result) {
+ checkMessages(result);
+ }
+ },
+ 'transport no-options config': {
+ topic: function () {
+ var setup = setupLogging('transport no-options config', {
+ recipients: 'recipient@domain.com',
+ transport: {
+ plugin: 'sendmail'
+ }
+ });
+ setup.logger.info('Log event #1');
+ return setup;
+ },
+ 'there should be one message only': function (result) {
+ assert.equal(result.results.length, 1);
+ },
+ 'message should contain proper data': function (result) {
+ checkMessages(result);
+ }
+ },
+ 'transport no-plugin config': {
+ topic: function () {
+ var setup = setupLogging('transport no-plugin config', {
+ recipients: 'recipient@domain.com',
+ transport: {
+ }
+ });
+ setup.logger.info('Log event #1');
+ return setup;
+ },
+ 'there should be one message only': function (result) {
+ assert.equal(result.results.length, 1);
+ },
+ 'message should contain proper data': function (result) {
+ checkMessages(result);
+ }
+ },
+ 'attachment config': {
+ topic: function () {
+ var setup = setupLogging('attachment config', {
+ recipients: 'recipient@domain.com',
+ attachment: {
+ enable: true
+ },
+ SMTP: {
+ port: 25,
+ auth: {
+ user: 'user@domain.com'
+ }
+ }
+ });
+ setup.logger.info('Log event #1');
+ return setup;
+ },
+ 'message should contain proper data': function (result) {
+ assert.equal(result.results.length, 1);
+ assert.equal(result.results[0].attachments.length, 1);
+ var attachment = result.results[0].attachments[0];
+ assert.equal(result.results[0].text, "See logs as attachment");
+ assert.equal(attachment.filename, "default.log");
+ assert.equal(attachment.contentType, "text/x-log");
+ assert.ok(new RegExp('.+Log event #' + 1 + '\n$').test(attachment.content));
+ }
+ }
+}).export(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/stderrAppender-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/stderrAppender-test.js
new file mode 100644
index 00000000..c4244d13
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/stderrAppender-test.js
@@ -0,0 +1,35 @@
+"use strict";
+var assert = require('assert')
+, vows = require('vows')
+, layouts = require('../lib/layouts')
+, sandbox = require('sandboxed-module');
+
+vows.describe('../lib/appenders/stderr').addBatch({
+ 'appender': {
+ topic: function() {
+ var messages = []
+ , fakeProcess = {
+ stderr: {
+ write: function(msg) { messages.push(msg); }
+ }
+ }
+ , appenderModule = sandbox.require(
+ '../lib/appenders/stderr',
+ {
+ globals: {
+ 'process': fakeProcess
+ }
+ }
+ )
+ , appender = appenderModule.appender(layouts.messagePassThroughLayout);
+
+ appender({ data: ["blah"] });
+ return messages;
+ },
+
+ 'should output to stderr': function(messages) {
+ assert.equal(messages[0], 'blah\n');
+ }
+ }
+
+}).exportTo(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/streams/BaseRollingFileStream-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/streams/BaseRollingFileStream-test.js
new file mode 100644
index 00000000..a414d5a5
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/streams/BaseRollingFileStream-test.js
@@ -0,0 +1,93 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, fs = require('fs')
+, sandbox = require('sandboxed-module');
+
+vows.describe('../../lib/streams/BaseRollingFileStream').addBatch({
+ 'when node version < 0.10.0': {
+ topic: function() {
+ var streamLib = sandbox.load(
+ '../../lib/streams/BaseRollingFileStream',
+ {
+ globals: {
+ process: {
+ version: '0.8.11'
+ }
+ },
+ requires: {
+ 'readable-stream': {
+ Writable: function() {}
+ }
+ }
+ }
+ );
+ return streamLib.required;
+ },
+ 'it should use readable-stream to maintain compatibility': function(required) {
+ assert.ok(required['readable-stream']);
+ assert.ok(!required.stream);
+ }
+ },
+
+ 'when node version > 0.10.0': {
+ topic: function() {
+ var streamLib = sandbox.load(
+ '../../lib/streams/BaseRollingFileStream',
+ {
+ globals: {
+ process: {
+ version: '0.10.1'
+ }
+ },
+ requires: {
+ 'stream': {
+ Writable: function() {}
+ }
+ }
+ }
+ );
+ return streamLib.required;
+ },
+ 'it should use the core stream module': function(required) {
+ assert.ok(required.stream);
+ assert.ok(!required['readable-stream']);
+ }
+ },
+
+ 'when no filename is passed': {
+ topic: require('../../lib/streams/BaseRollingFileStream'),
+ 'it should throw an error': function(BaseRollingFileStream) {
+ try {
+ new BaseRollingFileStream();
+ assert.fail('should not get here');
+ } catch (e) {
+ assert.ok(e);
+ }
+ }
+ },
+
+ 'default behaviour': {
+ topic: function() {
+ var BaseRollingFileStream = require('../../lib/streams/BaseRollingFileStream')
+ , stream = new BaseRollingFileStream('basetest.log');
+ return stream;
+ },
+ teardown: function() {
+ try {
+ fs.unlink('basetest.log');
+ } catch (e) {
+ console.error("could not remove basetest.log", e);
+ }
+ },
+ 'it should not want to roll': function(stream) {
+ assert.isFalse(stream.shouldRoll());
+ },
+ 'it should not roll': function(stream) {
+ var cbCalled = false;
+ //just calls the callback straight away, no async calls
+ stream.roll('basetest.log', function() { cbCalled = true; });
+ assert.isTrue(cbCalled);
+ }
+ }
+}).exportTo(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/streams/DateRollingFileStream-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/streams/DateRollingFileStream-test.js
new file mode 100644
index 00000000..33f014b2
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/streams/DateRollingFileStream-test.js
@@ -0,0 +1,227 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, fs = require('fs')
+, semver = require('semver')
+, streams
+, DateRollingFileStream
+, testTime = new Date(2012, 8, 12, 10, 37, 11);
+
+if (semver.satisfies(process.version, '>=0.10.0')) {
+ streams = require('stream');
+} else {
+ streams = require('readable-stream');
+}
+DateRollingFileStream = require('../../lib/streams').DateRollingFileStream;
+
+function cleanUp(filename) {
+ return function() {
+ fs.unlink(filename);
+ };
+}
+
+function now() {
+ return testTime.getTime();
+}
+
+vows.describe('DateRollingFileStream').addBatch({
+ 'arguments': {
+ topic: new DateRollingFileStream(
+ __dirname + '/test-date-rolling-file-stream-1',
+ 'yyyy-mm-dd.hh'
+ ),
+ teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'),
+
+ 'should take a filename and a pattern and return a WritableStream': function(stream) {
+ assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1');
+ assert.equal(stream.pattern, 'yyyy-mm-dd.hh');
+ assert.instanceOf(stream, streams.Writable);
+ },
+ 'with default settings for the underlying stream': function(stream) {
+ assert.equal(stream.theStream.mode, 420);
+ assert.equal(stream.theStream.flags, 'a');
+ //encoding is not available on the underlying stream
+ //assert.equal(stream.encoding, 'utf8');
+ }
+ },
+
+ 'default arguments': {
+ topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-2'),
+ teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-2'),
+
+ 'pattern should be .yyyy-MM-dd': function(stream) {
+ assert.equal(stream.pattern, '.yyyy-MM-dd');
+ }
+ },
+
+ 'with stream arguments': {
+ topic: new DateRollingFileStream(
+ __dirname + '/test-date-rolling-file-stream-3',
+ 'yyyy-MM-dd',
+ { mode: parseInt('0666', 8) }
+ ),
+ teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'),
+
+ 'should pass them to the underlying stream': function(stream) {
+ assert.equal(stream.theStream.mode, parseInt('0666', 8));
+ }
+ },
+
+ 'with stream arguments but no pattern': {
+ topic: new DateRollingFileStream(
+ __dirname + '/test-date-rolling-file-stream-4',
+ { mode: parseInt('0666', 8) }
+ ),
+ teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'),
+
+ 'should pass them to the underlying stream': function(stream) {
+ assert.equal(stream.theStream.mode, parseInt('0666', 8));
+ },
+ 'should use default pattern': function(stream) {
+ assert.equal(stream.pattern, '.yyyy-MM-dd');
+ }
+ },
+
+ 'with a pattern of .yyyy-MM-dd': {
+ topic: function() {
+ var that = this,
+ stream = new DateRollingFileStream(
+ __dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd',
+ null,
+ now
+ );
+ stream.write("First message\n", 'utf8', function() {
+ that.callback(null, stream);
+ });
+ },
+ teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5'),
+
+ 'should create a file with the base name': {
+ topic: function(stream) {
+ fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
+ },
+ 'file should contain first message': function(result) {
+ assert.equal(result.toString(), "First message\n");
+ }
+ },
+
+ 'when the day changes': {
+ topic: function(stream) {
+ testTime = new Date(2012, 8, 13, 0, 10, 12);
+ stream.write("Second message\n", 'utf8', this.callback);
+ },
+ teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5.2012-09-12'),
+
+
+ 'the number of files': {
+ topic: function() {
+ fs.readdir(__dirname, this.callback);
+ },
+ 'should be two': function(files) {
+ assert.equal(
+ files.filter(
+ function(file) {
+ return file.indexOf('test-date-rolling-file-stream-5') > -1;
+ }
+ ).length,
+ 2
+ );
+ }
+ },
+
+ 'the file without a date': {
+ topic: function() {
+ fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
+ },
+ 'should contain the second message': function(contents) {
+ assert.equal(contents.toString(), "Second message\n");
+ }
+ },
+
+ 'the file with the date': {
+ topic: function() {
+ fs.readFile(__dirname + '/test-date-rolling-file-stream-5.2012-09-12', this.callback);
+ },
+ 'should contain the first message': function(contents) {
+ assert.equal(contents.toString(), "First message\n");
+ }
+ }
+ }
+ },
+
+ 'with alwaysIncludePattern': {
+ topic: function() {
+ var that = this,
+ testTime = new Date(2012, 8, 12, 0, 10, 12),
+ stream = new DateRollingFileStream(
+ __dirname + '/test-date-rolling-file-stream-pattern',
+ '.yyyy-MM-dd',
+ {alwaysIncludePattern: true},
+ now
+ );
+ stream.write("First message\n", 'utf8', function() {
+ that.callback(null, stream);
+ });
+ },
+ teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12'),
+
+ 'should create a file with the pattern set': {
+ topic: function(stream) {
+ fs.readFile(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', this.callback);
+ },
+ 'file should contain first message': function(result) {
+ assert.equal(result.toString(), "First message\n");
+ }
+ },
+
+ 'when the day changes': {
+ topic: function(stream) {
+ testTime = new Date(2012, 8, 13, 0, 10, 12);
+ stream.write("Second message\n", 'utf8', this.callback);
+ },
+ teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-13'),
+
+
+ 'the number of files': {
+ topic: function() {
+ fs.readdir(__dirname, this.callback);
+ },
+ 'should be two': function(files) {
+ assert.equal(
+ files.filter(
+ function(file) {
+ return file.indexOf('test-date-rolling-file-stream-pattern') > -1;
+ }
+ ).length,
+ 2
+ );
+ }
+ },
+
+ 'the file with the later date': {
+ topic: function() {
+ fs.readFile(
+ __dirname + '/test-date-rolling-file-stream-pattern.2012-09-13',
+ this.callback
+ );
+ },
+ 'should contain the second message': function(contents) {
+ assert.equal(contents.toString(), "Second message\n");
+ }
+ },
+
+ 'the file with the date': {
+ topic: function() {
+ fs.readFile(
+ __dirname + '/test-date-rolling-file-stream-pattern.2012-09-12',
+ this.callback
+ );
+ },
+ 'should contain the first message': function(contents) {
+ assert.equal(contents.toString(), "First message\n");
+ }
+ }
+ }
+ }
+
+}).exportTo(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/streams/rollingFileStream-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/streams/rollingFileStream-test.js
new file mode 100644
index 00000000..c3d9fc32
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/streams/rollingFileStream-test.js
@@ -0,0 +1,207 @@
+"use strict";
+var vows = require('vows')
+, assert = require('assert')
+, events = require('events')
+, fs = require('fs')
+, semver = require('semver')
+, streams
+, RollingFileStream;
+
+if (semver.satisfies(process.version, '>=0.10.0')) {
+ streams = require('stream');
+} else {
+ streams = require('readable-stream');
+}
+RollingFileStream = require('../../lib/streams').RollingFileStream;
+
+function remove(filename) {
+ try {
+ fs.unlinkSync(filename);
+ } catch (e) {
+ //doesn't really matter if it failed
+ }
+}
+
+function create(filename) {
+ fs.writeFileSync(filename, "test file");
+}
+
+vows.describe('RollingFileStream').addBatch({
+ 'arguments': {
+ topic: function() {
+ remove(__dirname + "/test-rolling-file-stream");
+ return new RollingFileStream("test-rolling-file-stream", 1024, 5);
+ },
+ 'should take a filename, file size (bytes), no. backups, return Writable': function(stream) {
+ assert.instanceOf(stream, streams.Writable);
+ assert.equal(stream.filename, "test-rolling-file-stream");
+ assert.equal(stream.size, 1024);
+ assert.equal(stream.backups, 5);
+ },
+ 'with default settings for the underlying stream': function(stream) {
+ assert.equal(stream.theStream.mode, 420);
+ assert.equal(stream.theStream.flags, 'a');
+ //encoding isn't a property on the underlying stream
+ //assert.equal(stream.theStream.encoding, 'utf8');
+ }
+ },
+ 'with stream arguments': {
+ topic: function() {
+ remove(__dirname + '/test-rolling-file-stream');
+ return new RollingFileStream(
+ 'test-rolling-file-stream',
+ 1024,
+ 5,
+ { mode: parseInt('0666', 8) }
+ );
+ },
+ 'should pass them to the underlying stream': function(stream) {
+ assert.equal(stream.theStream.mode, parseInt('0666', 8));
+ }
+ },
+ 'without size': {
+ topic: function() {
+ try {
+ new RollingFileStream(__dirname + "/test-rolling-file-stream");
+ } catch (e) {
+ return e;
+ }
+ },
+ 'should throw an error': function(err) {
+ assert.instanceOf(err, Error);
+ }
+ },
+ 'without number of backups': {
+ topic: function() {
+ remove('test-rolling-file-stream');
+ return new RollingFileStream(__dirname + "/test-rolling-file-stream", 1024);
+ },
+ 'should default to 1 backup': function(stream) {
+ assert.equal(stream.backups, 1);
+ }
+ },
+ 'writing less than the file size': {
+ topic: function() {
+ remove(__dirname + "/test-rolling-file-stream-write-less");
+ var that = this
+ , stream = new RollingFileStream(
+ __dirname + "/test-rolling-file-stream-write-less",
+ 100
+ );
+ stream.write("cheese", "utf8", function() {
+ stream.end();
+ fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback);
+ });
+ },
+ 'should write to the file': function(contents) {
+ assert.equal(contents, "cheese");
+ },
+ 'the number of files': {
+ topic: function() {
+ fs.readdir(__dirname, this.callback);
+ },
+ 'should be one': function(files) {
+ assert.equal(
+ files.filter(
+ function(file) {
+ return file.indexOf('test-rolling-file-stream-write-less') > -1;
+ }
+ ).length,
+ 1
+ );
+ }
+ }
+ },
+ 'writing more than the file size': {
+ topic: function() {
+ remove(__dirname + "/test-rolling-file-stream-write-more");
+ remove(__dirname + "/test-rolling-file-stream-write-more.1");
+ var that = this
+ , stream = new RollingFileStream(
+ __dirname + "/test-rolling-file-stream-write-more",
+ 45
+ );
+
+ write7Cheese(that, stream);
+ },
+ 'the number of files': {
+ topic: function() {
+ fs.readdir(__dirname, this.callback);
+ },
+ 'should be two': function(files) {
+ assert.equal(files.filter(
+ function(file) {
+ return file.indexOf('test-rolling-file-stream-write-more') > -1;
+ }
+ ).length, 2);
+ }
+ },
+ 'the first file': {
+ topic: function() {
+ fs.readFile(__dirname + "/test-rolling-file-stream-write-more", "utf8", this.callback);
+ },
+ 'should contain the last two log messages': function(contents) {
+ assert.equal(contents, '5.cheese\n6.cheese\n');
+ }
+ },
+ 'the second file': {
+ topic: function() {
+ fs.readFile(__dirname + '/test-rolling-file-stream-write-more.1', "utf8", this.callback);
+ },
+ 'should contain the first five log messages': function(contents) {
+ assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n');
+ }
+ }
+ },
+ 'when many files already exist': {
+ topic: function() {
+ remove(__dirname + '/test-rolling-stream-with-existing-files.11');
+ remove(__dirname + '/test-rolling-stream-with-existing-files.20');
+ remove(__dirname + '/test-rolling-stream-with-existing-files.-1');
+ remove(__dirname + '/test-rolling-stream-with-existing-files.1.1');
+ remove(__dirname + '/test-rolling-stream-with-existing-files.1');
+
+
+ create(__dirname + '/test-rolling-stream-with-existing-files.11');
+ create(__dirname + '/test-rolling-stream-with-existing-files.20');
+ create(__dirname + '/test-rolling-stream-with-existing-files.-1');
+ create(__dirname + '/test-rolling-stream-with-existing-files.1.1');
+ create(__dirname + '/test-rolling-stream-with-existing-files.1');
+
+ var that = this
+ , stream = new RollingFileStream(
+ __dirname + "/test-rolling-stream-with-existing-files",
+ 45,
+ 5
+ );
+
+ write7Cheese(that, stream);
+ },
+ 'the files': {
+ topic: function() {
+ fs.readdir(__dirname, this.callback);
+ },
+ 'should be rolled': function(files) {
+ assert.include(files, 'test-rolling-stream-with-existing-files');
+ assert.include(files, 'test-rolling-stream-with-existing-files.1');
+ assert.include(files, 'test-rolling-stream-with-existing-files.2');
+ assert.include(files, 'test-rolling-stream-with-existing-files.11');
+ assert.include(files, 'test-rolling-stream-with-existing-files.20');
+ }
+ }
+ }
+}).exportTo(module);
+
+function write7Cheese(that, stream) {
+ var streamed = 0;
+ [0, 1, 2, 3, 4, 5, 6].forEach(function(i) {
+ stream.write(i +".cheese\n", "utf8", function(e) {
+ streamed++;
+ if (e) { return that.callback(e); }
+ if (streamed === 7) {
+ stream.end();
+ that.callback();
+ }
+ });
+ });
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/subcategories-test.js b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/subcategories-test.js
new file mode 100644
index 00000000..8570f0e5
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/subcategories-test.js
@@ -0,0 +1,86 @@
+"use strict";
+var assert = require('assert')
+, vows = require('vows')
+, sandbox = require('sandboxed-module')
+, log4js = require('../lib/log4js')
+, levels = require('../lib/levels');
+
+vows.describe('subcategories').addBatch({
+ 'loggers created after levels configuration is loaded': {
+ topic: function() {
+
+ log4js.configure({
+ "levels": {
+ "sub1": "WARN",
+ "sub1.sub11": "TRACE",
+ "sub1.sub11.sub111": "WARN",
+ "sub1.sub12": "INFO"
+ }
+ }, { reloadSecs: 30 });
+
+ return {
+ "sub1": log4js.getLogger('sub1'), // WARN
+ "sub11": log4js.getLogger('sub1.sub11'), // TRACE
+ "sub111": log4js.getLogger('sub1.sub11.sub111'), // WARN
+ "sub12": log4js.getLogger('sub1.sub12'), // INFO
+
+ "sub13": log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
+ "sub112": log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
+ "sub121": log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
+ "sub0": log4js.getLogger('sub0') // Not defined, not inherited: TRACE
+ };
+ },
+ 'check logger levels': function(loggers) {
+ assert.equal(loggers.sub1.level, levels.WARN);
+ assert.equal(loggers.sub11.level, levels.TRACE);
+ assert.equal(loggers.sub111.level, levels.WARN);
+ assert.equal(loggers.sub12.level, levels.INFO);
+
+ assert.equal(loggers.sub13.level, levels.WARN);
+ assert.equal(loggers.sub112.level, levels.TRACE);
+ assert.equal(loggers.sub121.level, levels.INFO);
+ assert.equal(loggers.sub0.level, levels.TRACE);
+ }
+ },
+ 'loggers created before levels configuration is loaded': {
+ topic: function() {
+
+ var loggers = {
+ "sub1": log4js.getLogger('sub1'), // WARN
+ "sub11": log4js.getLogger('sub1.sub11'), // TRACE
+ "sub111": log4js.getLogger('sub1.sub11.sub111'), // WARN
+ "sub12": log4js.getLogger('sub1.sub12'), // INFO
+
+ "sub13": log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
+ "sub112": log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
+ "sub121": log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
+ "sub0": log4js.getLogger('sub0') // Not defined, not inherited: TRACE
+ };
+
+
+ log4js.configure({
+ "levels": {
+ "sub1": "WARN",
+ "sub1.sub11": "TRACE",
+ "sub1.sub11.sub111": "WARN",
+ "sub1.sub12": "INFO"
+ }
+ }, { reloadSecs: 30 });
+
+ return loggers;
+
+
+ },
+ 'check logger levels': function(loggers) {
+ assert.equal(loggers.sub1.level, levels.WARN);
+ assert.equal(loggers.sub11.level, levels.TRACE);
+ assert.equal(loggers.sub111.level, levels.WARN);
+ assert.equal(loggers.sub12.level, levels.INFO);
+
+ assert.equal(loggers.sub13.level, levels.WARN);
+ assert.equal(loggers.sub112.level, levels.TRACE);
+ assert.equal(loggers.sub121.level, levels.INFO);
+ assert.equal(loggers.sub0.level, levels.TRACE);
+ }
+ }
+}).exportTo(module);
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-categoryFilter.json b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-categoryFilter.json
new file mode 100644
index 00000000..7998cc85
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-categoryFilter.json
@@ -0,0 +1,23 @@
+{
+ "appenders": [
+ {
+ "type": "categoryFilter",
+ "exclude": "web",
+ "appender": {
+ "type": "file",
+ "filename": "test/categoryFilter-noweb.log",
+ "layout": {
+ "type": "messagePassThrough"
+ }
+ }
+ },
+ {
+ "category": "web",
+ "type": "file",
+ "filename": "test/categoryFilter-web.log",
+ "layout": {
+ "type": "messagePassThrough"
+ }
+ }
+ ]
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-dateFile.json b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-dateFile.json
new file mode 100644
index 00000000..18727433
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-dateFile.json
@@ -0,0 +1,17 @@
+{
+ "appenders": [
+ {
+ "category": "tests",
+ "type": "dateFile",
+ "filename": "test/date-file-test.log",
+ "pattern": "-from-MM-dd",
+ "layout": {
+ "type": "messagePassThrough"
+ }
+ }
+ ],
+
+ "levels": {
+ "tests": "WARN"
+ }
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-log-rolling.json b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-log-rolling.json
new file mode 100644
index 00000000..e946f313
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-log-rolling.json
@@ -0,0 +1,10 @@
+{
+ "appenders": [
+ {
+ "type": "file",
+ "filename": "tmp-test.log",
+ "maxLogSize": 1024,
+ "backups": 3
+ }
+ ]
+}
diff --git a/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-logLevelFilter.json b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-logLevelFilter.json
new file mode 100644
index 00000000..7bcd8ad4
--- /dev/null
+++ b/vnfmarket/src/main/webapp/vnfmarket/node_modules/log4js/test/with-logLevelFilter.json
@@ -0,0 +1,41 @@
+{
+ "appenders": [
+ {
+ "category": "tests",
+ "type": "logLevelFilter",
+ "level": "WARN",
+ "appender": {
+ "type": "file",
+ "filename": "test/logLevelFilter-warnings.log",
+ "layout": {
+ "type": "messagePassThrough"
+ }
+ }
+ },
+ {
+ "category": "tests",
+ "type": "logLevelFilter",
+ "level": "TRACE",
+ "maxLevel": "DEBUG",
+ "appender": {
+ "type": "file",
+ "filename": "test/logLevelFilter-debugs.log",
+ "layout": {
+ "type": "messagePassThrough"
+ }
+ }
+ },
+ {
+ "category": "tests",
+ "type": "file",
+ "filename": "test/logLevelFilter.log",
+ "layout": {
+ "type": "messagePassThrough"
+ }
+ }
+ ],
+
+ "levels": {
+ "tests": "TRACE"
+ }
+}