diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..23aeb97 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,12 @@ +; EditorConfig file: http://EditorConfig.org +; Install the "EditorConfig" plugin into your editor to use + +root = true + +[*] +charset = utf-8 +insert_final_newline = true +indent_style = space +indent_size = 2 +trim_trailing_whitespace = true + diff --git a/.eslintrc.js b/.eslintrc.js new file mode 100644 index 0000000..b07f377 --- /dev/null +++ b/.eslintrc.js @@ -0,0 +1,19 @@ +module.exports = { + 'extends': 'airbnb', + 'env': { + 'node': true, + 'mocha': true, + }, + 'rules': { + 'no-console': 0, + 'import/no-amd': 0, + 'import/no-extraneous-dependencies': 0, + 'comma-dangle': ['error', { + arrays: 'always-multiline', + objects: 'always-multiline', + imports: 'always-multiline', + exports: 'always-multiline', + functions: 'never', // This is not supported in Node without Babel transform + }], + }, +}; diff --git a/.gitignore b/.gitignore index b7cda54..3d1f9b0 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,6 @@ node_modules /npm-debug.log *.DS_Store -lib/ *.log !test/fixtures/config.json config.json diff --git a/.npmignore b/.npmignore index 74b0c5f..ad52e86 100644 --- a/.npmignore +++ b/.npmignore @@ -1,9 +1,7 @@ -src scripts test cov.info README.md -example.coffee config.json -coffeelint.json .travis.yml +example.js diff --git a/.travis.yml b/.travis.yml index abfeede..d3ff175 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,5 @@ language: node_js node_js: - - "5" - "6" - node before_script: @@ -10,7 +9,7 @@ before_install: # so 'conventional-changelog-lint' could compare commits and lint them: marionebl/conventional-changelog-lint#7 - "git remote set-branches origin master && git fetch && git checkout master && git checkout -" after_success: - - "npm run coveralls || true" +# - "npm run coveralls || true"; @fixme: temporarily commented out until the coveralls flow for JS is solved - "npm run semantic-release || true" env: global: diff --git a/README.md b/README.md index 218b601..0138109 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,6 @@ # NodeJS supported versions -- 5.x - 6.x (LTS) # Pager Duty Overrides Checker diff --git a/bin/pdoverrides b/bin/pdoverrides index afa3b8b..994caea 100755 --- a/bin/pdoverrides +++ b/bin/pdoverrides @@ -1,39 +1,33 @@ #!/usr/bin/env node -var program = require('commander'); -var fs = require('fs'); -var pjson = require('../package.json'); -var config = require('../lib/config'); -var fsAccess = require('fs-access'); - -program - .command('check') - .description('Check PagerDuty Overlaps') - .action(runCheck); +const program = require('commander'); +const pjson = require('../package.json'); +const config = require('../src/config'); +const fsAccess = require('fs-access'); +const pd = require('../src/pagerduty'); function runCheck(configFile) { - fsAccess(configFile, function (err) { + fsAccess(configFile, (err) => { if (err) throw err; - config.setupConfig(configFile, function(err, res){ - if (err) { - console.error(err); + config.setupConfig(configFile, (configErr) => { + if (configErr) { + console.error(configErr); process.exit(1); } - var pd = require('../lib/pagerduty'); - pd.checkSchedulesIds(function(error, res) { - if (error) { - console.error("Check failed with error:", error) + pd.checkSchedulesIds((checkError, res) => { + if (checkError) { + console.error('Check failed with error:', checkError); process.exit(1); } if (!res) { - console.error("Check failed - empty response"); +
console.error('Check failed - empty response'); process.exit(1); } else { - console.log("Config schedule IDs passed."); - pd.processSchedulesFromConfig(function(error, msg) { + console.log('Config schedule IDs passed.'); + pd.processSchedulesFromConfig((error, msg) => { if (error) { - console.error("Error while processing schedules from config", error); - process.exit(1) + console.error('Error while processing schedules from config', error); + process.exit(1); } console.log(msg); process.exit(0); @@ -42,13 +36,18 @@ function runCheck(configFile) { }); }); }); -}; +} + +program + .command('check') + .description('Check PagerDuty Overlaps') + .action(runCheck); program - .version(pjson.version) - .usage('check --config ') - .option('-c, --config', 'Path to config.json') - .parse(process.argv); + .version(pjson.version) + .usage('check --config ') + .option('-c, --config', 'Path to config.json') + .parse(process.argv); // default help if (!program.args.length) program.help(); diff --git a/coffeelint.json b/coffeelint.json deleted file mode 100644 index fed5713..0000000 --- a/coffeelint.json +++ /dev/null @@ -1,103 +0,0 @@ -{ - "arrow_spacing": { - "level": "ignore" - }, - "camel_case_classes": { - "level": "error" - }, - "coffeescript_error": { - "level": "error" - }, - "colon_assignment_spacing": { - "level": "ignore", - "spacing": { - "left": 0, - "right": 0 - } - }, - "cyclomatic_complexity": { - "value": 10, - "level": "ignore" - }, - "duplicate_key": { - "level": "error" - }, - "empty_constructor_needs_parens": { - "level": "ignore" - }, - "indentation": { - "value": 2, - "level": "error" - }, - "line_endings": { - "level": "ignore", - "value": "unix" - }, - "max_line_length": { - "value": 500, - "level": "warn", - "limitComments": true - }, - "missing_fat_arrows": { - "level": "ignore" - }, - "newlines_after_classes": { - "value": 3, - "level": "ignore" - }, - "no_backticks": { - "level": "error" - }, - "no_debugger": { - "level": "warn" - }, - "no_empty_functions": { - "level": "ignore" - }, - "no_empty_param_list": { - "level": "ignore" - }, - "no_implicit_braces": { - "level": "ignore", - "strict": true - }, - "no_implicit_parens": { - "strict": true, - "level": "ignore" - }, - "no_interpolation_in_single_quotes": { - "level": "ignore" - }, - "no_plusplus": { - "level": "ignore" - }, - "no_stand_alone_at": { - "level": "ignore" - }, - "no_tabs": { - "level": "error" - }, - "no_throwing_strings": { - "level": "error" - }, - "no_trailing_semicolons": { - "level": "error" - }, - "no_trailing_whitespace": { - "level": "error", - "allowed_in_comments": false, - "allowed_in_empty_lines": true - }, - "no_unnecessary_double_quotes": { - "level": "ignore" - }, - "no_unnecessary_fat_arrows": { - "level": "warn" - }, - "non_empty_constructor_needs_parens": { - "level": "ignore" - }, - "space_operators": { - "level": "ignore" - } -} diff --git a/example.coffee b/example.coffee deleted file mode 100644 index b5541c6..0000000 --- a/example.coffee +++ /dev/null @@ -1,18 +0,0 @@ -config = require './src/config' -nconf = require 'nconf' -pd = require './src/pagerduty' - -configPath = __dirname + '/config.json' - -config.setupConfig configPath, (err) -> - if err then console.error err - pd.checkSchedulesIds (err, res) -> - if err then console.error err - unless res - console.error "Check failed" - else - pd.processSchedulesFromConfig (err, msg) -> - if err - console.error(err) - else - console.log(msg) diff --git a/example.js b/example.js new file mode 100644 index 0000000..08de165 --- /dev/null +++ 
b/example.js @@ -0,0 +1,20 @@ +const config = require('./src/config'); +const pd = require('./src/pagerduty'); + +const configPath = `${__dirname}/config.json`; + +config.setupConfig(configPath, (configErr) => { + if (configErr) { console.error(configErr); } + return pd.checkSchedulesIds((checkSchedulesErr, res) => { + if (checkSchedulesErr) { console.error(checkSchedulesErr); } + if (!res) { + return console.error('Check failed'); + } + return pd.processSchedulesFromConfig((err, msg) => { + if (err) { + return console.error(err); + } + return console.log(msg); + }); + }); +}); diff --git a/package.json b/package.json index ab7762a..f093de7 100644 --- a/package.json +++ b/package.json @@ -2,17 +2,13 @@ "name": "pagerduty-overlap-checker", "version": "0.0.0-semantically-released", "description": "PagerDuty Overlap Duties Checker", - "main": "lib/", + "main": "src/", "bin": { "pdoverrides": "bin/pdoverrides" }, "scripts": { - "test": "./node_modules/.bin/mocha --compilers \"coffee:coffee-script/register\"", - "integration": "./node_modules/.bin/mocha --compilers \"coffee:coffee-script/register\" --recursive", - "lint": "conventional-changelog-lint --from=master && coffeelint ./src", - "compile": "coffee -b -c -o lib/ src/", - "pretest": "npm run compile", - "prepublish": "npm run compile", + "test": "./node_modules/.bin/mocha", + "lint": "conventional-changelog-lint --from=master && eslint ./src", "coverage": "./scripts/cov", "coveralls": "npm run coverage && cat ./cov.info | ./node_modules/coveralls/bin/coveralls.js", "semantic-release": "semantic-release pre && npm publish && semantic-release post" @@ -40,11 +36,13 @@ }, "devDependencies": { "chai": "", - "coffee-coverage": "^0.7.0", - "coffee-script": "1.10.0", - "coffeelint": "^1.9.2", "conventional-changelog-lint": "^1.1.9", "coveralls": "~2.11.2", + "eslint": "^4.6.1", + "eslint-config-airbnb": "^15.1.0", + "eslint-plugin-import": "^2.7.0", + "eslint-plugin-jsx-a11y": "^5.1.1", + "eslint-plugin-react": "^7.3.0", "mocha": "", "mocha-lcov-reporter": "1.0.0", "nock": "7.2.2", diff --git a/scripts/build b/scripts/build deleted file mode 100755 index 66814d8..0000000 --- a/scripts/build +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh - -if [ -d "src/" ]; then - rm -fr lib/ - node_modules/.bin/coffee -b -c -o lib/ src/ -fi diff --git a/src/config.coffee b/src/config.coffee deleted file mode 100644 index 6b00682..0000000 --- a/src/config.coffee +++ /dev/null @@ -1,18 +0,0 @@ -fs = require 'fs' -nconf = require 'nconf' - -setupConfig = (configPath, cb) -> - if fs.existsSync(configPath) - console.log 'Loading config from :', configPath - # Priority order argv before ENV and file as defaults - nconf.argv() - .env() - .file({ file: configPath }) - process.env.DEBUG = nconf.get('DEBUG') - cb() - else - cb new Error "Config does not exist: #{configPath}" - -module.exports = { - setupConfig -} diff --git a/src/config.js b/src/config.js new file mode 100644 index 0000000..8febd82 --- /dev/null +++ b/src/config.js @@ -0,0 +1,19 @@ +const fs = require('fs'); +const nconf = require('nconf'); + +function setupConfig(configPath, cb) { + if (fs.existsSync(configPath)) { + console.log('Loading config from :', configPath); + // Priority order argv before ENV and file as defaults + nconf.argv() + .env() + .file({ file: configPath }); + process.env.DEBUG = nconf.get('DEBUG'); + return cb(); + } + return cb(new Error(`Config does not exist: ${configPath}`)); +} + +module.exports = { + setupConfig, +}; diff --git a/src/index.coffee b/src/index.coffee deleted file mode 
100644 index b47706a..0000000 --- a/src/index.coffee +++ /dev/null @@ -1,5 +0,0 @@ -module.exports = { - checker: require './pagerduty' - config: require './config' - notify: require './notify' -} diff --git a/src/index.js b/src/index.js new file mode 100644 index 0000000..5e8283c --- /dev/null +++ b/src/index.js @@ -0,0 +1,9 @@ +const checker = require('./pagerduty'); +const config = require('./config'); +const notify = require('./notify'); + +module.exports = { + checker, + config, + notify, +}; diff --git a/src/notify.coffee b/src/notify.coffee deleted file mode 100644 index 2c4ee2d..0000000 --- a/src/notify.coffee +++ /dev/null @@ -1,138 +0,0 @@ -Slack = require 'node-slackr' -nconf = require 'nconf' -async = require 'async' -request = require 'request' -debug = require('debug')('pagerduty-overrides:notifications') -pdApi = require './pagerduty-api' - -createPagerDutyIncident = (options, message, cb) -> - debug("Creating PD incident #{JSON.stringify(message)} with options #{JSON.stringify(options)}") - - unless options.pdToken and options.serviceId and options.from - cb new Error "Missing PAGERDUTY settings (you'll need PAGERDUTY_TOKEN, PAGERDUTY_SERVICE_ID and PAGERDUTY_FROM)" - - unless message.userId or options.escalationPolicyId - cb new Error "No userId or escalation policy specified" - - else - incident = - type: "incident" - title: "On-call overlap found!" - service: - id: options.serviceId - type: "service_reference" - body: - type: "incident_body" - details: message.messages.join('\n') - - if options.escalationPolicyId - incident.escalationPolicy = - id: options.escalationPolicyId - type: "escalation_policy_reference" - else - incident.assignments = [ - assignee : - id: message.userId - type: "user_reference" - ] - - incidentOptions = - method: "POST" - json: - incident: incident - headers: - From: options.from - Authorization: 'Token token=' + options.pdToken - - pdApi.send '/incidents', incidentOptions, (err, res, body) -> - if body?.errors?.length > 0 - err ?= new Error "INCIDENT_CREATION_FAILED Errors: #{JSON.stringify body.errors}" - if res?.statusCode isnt 200 and res?.statusCode isnt 201 - err ?= new Error "INCIDENT_CREATION_FAILED Creating incident failed with status #{res.statusCode}. Returned body: #{JSON.stringify body}" - if err - debug("INCIDENT_CREATION_FAILED: ", err) - cb err - -# https://www.npmjs.com/package/node-slackr -createSlackMessage = (options, message, cb) -> - if options.webhookUrl - slack = new Slack(options.webhookUrl) - slack.notify message, (err, result) -> - if err - console.error("SLACK_SEND_MESSAGE_FAILED:", err) - return cb err - cb null, result - else - cb new Error "Missing Slack webhook URL." 
- -# Input is array of messages is we have more overlaps -formatMessage = (messages, option = 'plain') -> - if typeof messages is 'string' - return messages - else - switch option - when 'plain' - outputMessage = "_Following overlaps found:_\n" - for message in messages - outputMessage += """#{message.user}: #{message.schedules[0]} and #{message.schedules[1]} (the first starting on #{message.date.toUTCString()}, the second on #{message.crossDate.toUTCString()})\n""" - when 'markdown' - outputMessage = "Following overlaps found:\n" - for message in messages - outputMessage += """*#{message.user}:* `#{message.schedules[0]}` and `#{message.schedules[1]}` (the first starting on #{message.date.toUTCString()}, the second on #{message.crossDate.toUTCString()})\n""" - when 'json' - outputMessage = messages.reduce((acc, curr)-> - acc[curr.userId] ?= {} - acc[curr.userId].userId ?= curr.userId - acc[curr.userId].user ?= curr.user - acc[curr.userId].messages ?= [] - acc[curr.userId].messages.push("#{curr.schedules[0]} and #{curr.schedules[1]} (the first starting on #{curr.date.toUTCString()}, the second on #{curr.crossDate.toUTCString()})") - return acc - , {}) - - debug('Notification - formatMessage option: ', option) - debug('Notification - formatMessage: ', outputMessage) - return outputMessage - -send = (options, message, cb) -> - debug('send:', options, message) - - async.parallel [ - (next) -> - if options['SLACK'] or options['SLACK_WEBHOOK_URL']? - debug('Found Slack webhook, sending a notification') - slackMessage = {} - slackMessage.text = formatMessage(message, 'markdown') - slackMessage.channel = options['SLACK']?['CHANNEL'] - slackOptions = {} - slackOptions.webhookUrl = options['SLACK']?['SLACK_WEBHOOK_URL'] or options['SLACK_WEBHOOK_URL'] - createSlackMessage slackOptions, slackMessage, next - else - debug('No Slack webhook defined') - next() - (next) -> - if not options['PAGERDUTY'] and not options['PAGERDUTY_TOKEN'] - debug('No PAGERDUTY options defined') - else if (options['PAGERDUTY']['PAGERDUTY_TOKEN'] or options['PAGERDUTY_TOKEN']) and options['PAGERDUTY']['PAGERDUTY_SERVICE_ID'] and options['PAGERDUTY']['PAGERDUTY_FROM'] - debug('Found PD token - creating an incident') - pdOptions = {} - pdOptions.pdToken = options['PAGERDUTY']['PAGERDUTY_TOKEN'] or options['PAGERDUTY_TOKEN'] - pdOptions.serviceId = options['PAGERDUTY']['PAGERDUTY_SERVICE_ID'] - pdOptions.escalationPolicyId = options['PAGERDUTY']['PAGERDUTY_ESCALATION_POLICY_ID'] - pdOptions.from = options['PAGERDUTY']?['PAGERDUTY_FROM'] - messagesByUser = formatMessage(message, 'json') - async.each(messagesByUser, - (item, cb) -> - createPagerDutyIncident pdOptions, item, cb - (err) -> - next err) - else - console.log("No PD options defined or defined incorrectly (#{JSON.stringify(options['PAGERDUTY'])})") - next() - ], (err, results) -> - if err then return cb err - output = results.filter (n) -> n isnt undefined - cb null, output - -module.exports = { - send -} diff --git a/src/notify.js b/src/notify.js new file mode 100644 index 0000000..670d75f --- /dev/null +++ b/src/notify.js @@ -0,0 +1,171 @@ +const Slack = require('node-slackr'); +const async = require('async'); +const debug = require('debug')('pagerduty-overrides:notifications'); +const pdApi = require('./pagerduty-api'); + +function createPagerDutyIncident(options, message, cb) { + debug(`Creating PD incident ${JSON.stringify(message)} with options ${JSON.stringify(options)}`); + + if (!options.pdToken || !options.serviceId || !options.from) { + return cb(new 
Error("Missing PAGERDUTY settings (you'll need PAGERDUTY_TOKEN, PAGERDUTY_SERVICE_ID and PAGERDUTY_FROM)")); + } + + if (!message.userId && !options.escalationPolicyId) { + return cb(new Error('No userId or escalation policy specified')); + } + const incident = { + type: 'incident', + title: 'On-call overlap found!', + service: { + id: options.serviceId, + type: 'service_reference', + }, + body: { + type: 'incident_body', + details: message.messages.join('\n'), + }, + }; + + if (options.escalationPolicyId) { + incident.escalationPolicy = { + id: options.escalationPolicyId, + type: 'escalation_policy_reference', + }; + } else { + incident.assignments = [{ + assignee: { + id: message.userId, + type: 'user_reference', + }, + }, + ]; + } + + const incidentOptions = { + method: 'POST', + json: { + incident, + }, + headers: { + From: options.from, + Authorization: `Token token=${options.pdToken}`, + }, + }; + + return pdApi.send('/incidents', incidentOptions, (err, res, body) => { + let error = err; + if (!error && body && body.errors && body.errors.length > 0) { + error = new Error(`INCIDENT_CREATION_FAILED Errors: ${JSON.stringify(body.errors)}`); + } else if (!error && res && !(res.statusCode === 200 || res.statusCode === 201)) { + error = new Error(`INCIDENT_CREATION_FAILED Creating incident failed with status ${res.statusCode}. Returned body: ${JSON.stringify(body)}`); + } + if (error) { + debug('INCIDENT_CREATION_FAILED: ', error); + } + return cb(error); + }); +} + +// https://www.npmjs.com/package/node-slackr +function createSlackMessage(options, message, cb) { + if (options.webhookUrl) { + const slack = new Slack(options.webhookUrl); + return slack.notify(message, (err, result) => { + if (err) { + console.error('SLACK_SEND_MESSAGE_FAILED:', err); + return cb(err); + } + return cb(null, result); + }); + } + return cb(new Error('Missing Slack webhook URL.')); +} + +// Input is array of messages is we have more overlaps +function formatMessage(messages, option = 'plain') { + let outputMessage; + if (typeof messages === 'string') { + return messages; + } + switch (option) { + case 'plain': + outputMessage = '_Following overlaps found:_\n'; + messages.forEach((message) => { + outputMessage += `${message.user}: ${message.schedules[0]} and ${message.schedules[1]} (the first starting on ${message.date.toUTCString()}, the second on ${message.crossDate.toUTCString()})\n`; + }); + break; + case 'markdown': + outputMessage = 'Following overlaps found:\n'; + messages.forEach((message) => { + outputMessage += `*${message.user}:* \`${message.schedules[0]}\` and \`${message.schedules[1]}\` (the first starting on ${message.date.toUTCString()}, the second on ${message.crossDate.toUTCString()})\n`; + }); + break; + case 'json': + outputMessage = messages.reduce((acc, curr) => { + if (acc[curr.userId] == null) { acc[curr.userId] = {}; } + if (acc[curr.userId].userId == null) { acc[curr.userId].userId = curr.userId; } + if (acc[curr.userId].user == null) { acc[curr.userId].user = curr.user; } + if (acc[curr.userId].messages == null) { acc[curr.userId].messages = []; } + acc[curr.userId].messages.push(`${curr.schedules[0]} and ${curr.schedules[1]} (the first starting on ${curr.date.toUTCString()}, the second on ${curr.crossDate.toUTCString()})`); + return acc; + } + , {}); + break; + default: + console.error(`Unsupported option ${option} used.`); + } + + + debug('Notification - formatMessage option: ', option); + debug('Notification - formatMessage: ', outputMessage); + return outputMessage; +} + +function 
send(options, message, cb) { + debug('send:', options, message); + + return async.parallel([ + function sendSlack(next) { + if (options.SLACK || (options.SLACK_WEBHOOK_URL)) { + debug('Found Slack webhook, sending a notification'); + const slackMessage = {}; + const slackOptions = {}; + slackMessage.text = formatMessage(message, 'markdown'); + if (options.SLACK) { + slackMessage.channel = options.SLACK.CHANNEL; + slackOptions.webhookUrl = options.SLACK.SLACK_WEBHOOK_URL; + } + if (!slackOptions.webhookUrl) { slackOptions.webhookUrl = options.SLACK_WEBHOOK_URL; } + return createSlackMessage(slackOptions, slackMessage, next); + } + debug('No Slack webhook defined'); + return next(); + }, + function sendPagerDuty(next) { + if (!((options.PAGERDUTY && options.PAGERDUTY.PAGERDUTY_TOKEN) || options.PAGERDUTY_TOKEN)) { + debug('No PAGERDUTY token defined'); + } else if (options.PAGERDUTY.PAGERDUTY_SERVICE_ID && options.PAGERDUTY.PAGERDUTY_FROM) { + debug('Found PD token - creating an incident'); + const pdOptions = {}; + pdOptions.pdToken = options.PAGERDUTY.PAGERDUTY_TOKEN || options.PAGERDUTY_TOKEN; + pdOptions.serviceId = options.PAGERDUTY.PAGERDUTY_SERVICE_ID; + pdOptions.escalationPolicyId = options.PAGERDUTY.PAGERDUTY_ESCALATION_POLICY_ID; + pdOptions.from = options.PAGERDUTY != null ? options.PAGERDUTY.PAGERDUTY_FROM : undefined; + const messagesByUser = formatMessage(message, 'json'); + return async.each(messagesByUser, + (item, pdCb) => createPagerDutyIncident(pdOptions, item, pdCb), + err => next(err)); + } + console.log(`No PD options defined or defined incorrectly (${JSON.stringify(options.PAGERDUTY)})`); + return next(); + }, + ], (err, results) => { + if (err) { return cb(err); } + const output = results.filter(n => n !== undefined); + return cb(null, output); + }); +} + +module.exports = { + send, +}; diff --git a/src/pagerduty-api.coffee b/src/pagerduty-api.coffee deleted file mode 100644 index b59468e..0000000 --- a/src/pagerduty-api.coffee +++ /dev/null @@ -1,35 +0,0 @@ -_ = require 'underscore' -debug = require('debug')('pagerduty-overrides') -request = require 'request' -nconf = require 'nconf' -url = require 'url' - -# Factory for sending request to PD API -send = (endpointPath, overrideOptions, cb) -> - debug("Calling #{endpointPath} with options:", overrideOptions) - defaultOptions = - uri: url.resolve 'https://api.pagerduty.com', endpointPath - method: 'GET' - json: true - - if typeof overrideOptions is 'function' - cb = overrideOptions - overrideOptions = {} - - _.extend defaultOptions, overrideOptions - - defaultOptions.headers ?= [] - defaultOptions.headers.Authorization ?= 'Token token=' + nconf.get('PAGERDUTY_READ_ONLY_TOKEN') - defaultOptions.headers.Accept = 'application/vnd.pagerduty+json;version=2' - defaultOptions.headers['Content-Type'] = 'application/json' - - defaultOptions.qs ?= [] - defaultOptions.qs.limit = 100 - defaultOptions.qs.timezone = 'UTC' - - debug('Calling request with: ', defaultOptions) - request defaultOptions, cb - -module.exports = { - send -} diff --git a/src/pagerduty-api.js b/src/pagerduty-api.js new file mode 100644 index 0000000..d2c48ab --- /dev/null +++ b/src/pagerduty-api.js @@ -0,0 +1,41 @@ +const _ = require('underscore'); +const debug = require('debug')('pagerduty-overrides'); +const request = require('request'); +const nconf = require('nconf'); +const url = require('url'); + +// Factory for sending request to PD API +function send(endpointPath, overrideOptions, cb) { + let callback = cb; + let options = overrideOptions; + + 
debug(`Calling ${endpointPath} with options:`, options); + const defaultOptions = { + uri: url.resolve('https://api.pagerduty.com', endpointPath), + method: 'GET', + json: true, + }; + + if (typeof options === 'function') { + callback = options; + options = {}; + } + + _.extend(defaultOptions, options); + + if (!defaultOptions.headers) { defaultOptions.headers = []; } + if (!defaultOptions.headers.Authorization) { defaultOptions.headers.Authorization = `Token token=${nconf.get('PAGERDUTY_READ_ONLY_TOKEN')}`; } + defaultOptions.headers.Accept = 'application/vnd.pagerduty+json;version=2'; + defaultOptions.headers['Content-Type'] = 'application/json'; + + if (!defaultOptions.qs) { defaultOptions.qs = []; } + defaultOptions.qs.limit = 100; + defaultOptions.qs.timezone = 'UTC'; + + debug('Calling request with: ', defaultOptions); + return request(defaultOptions, callback); +} + +module.exports = { + send, +}; diff --git a/src/pagerduty.coffee b/src/pagerduty.coffee deleted file mode 100644 index 97300e2..0000000 --- a/src/pagerduty.coffee +++ /dev/null @@ -1,172 +0,0 @@ -async = require 'async' -nconf = require 'nconf' -_ = require 'underscore' -debug = require('debug')('pagerduty-overrides') -notify = require './notify' -pdApi = require './pagerduty-api' - -# Get schedule for ID and 2 weeks -getSchedule = (id, cb) -> - if nconf.get('WEEKS_TO_CHECK') > 0 - week = 7 * 86400 * 1000 - timeNow = new Date() - timeUntil = new Date(timeNow.getTime() + nconf.get('WEEKS_TO_CHECK') * week) - - scheduleOpts = - qs: - 'schedule_ids[]': id - until: timeUntil.toISOString() - since: timeNow.toISOString() - - - pdApi.send "/oncalls", scheduleOpts, (err, res, body) -> - if err - console.log "Request send error:", err - return cb err - - if res.statusCode isnt 200 then return cb new Error("Entries returned status code #{res.statusCode}") - cb err, id: id, entries: body.oncalls - else - cb new Error "Missing WEEKS_TO_CHECK settings" - -# Get all schedules and returns their ids -getSchedulesIds = (cb) -> - debug("Getting schedules from PD") - pdApi.send "/schedules", {}, (err, res, body) -> - if err - console.log "Request send error:", err - return cb err - - debug('Returned status code:', res.statusCode) - schedulesIds = [] - - for schedule in body.schedules - schedulesIds.push(schedule.id) - # UWAGA UWAGA - side effect follows! 
- # it's easier, cheaper and more comprehensive to load schedule names here and temporarily store them using nconf - nconf.set("schedulesNames:#{schedule.id}", schedule.name) - - debug("Schedules Ids from PD: ", schedulesIds) - debug("Schedules Names from PD: ", nconf.get("schedulesNames")) - cb null, schedulesIds - -# Check if all schedules defined in config are available in PD -checkSchedulesIds = (cb) -> - configSchedules = nconf.get('SCHEDULES') - listIds = [] - for ids in configSchedules - listIds.push ids['SCHEDULE'] - configSchedulesIds = _.uniq(_.flatten(listIds)) - debug("Schedules Ids from config: ", configSchedulesIds) - getSchedulesIds (err, schedulesIds) -> - if err then return cb err - debug('intersection: ', _.intersection(configSchedulesIds, schedulesIds).length) - debug('config: ', configSchedulesIds.length) - if (_.intersection(configSchedulesIds, schedulesIds).length) is configSchedulesIds.length - cb null, true - else - cb null, false - -processSchedulesFromConfig = (done) -> - messages = [] - configSchedules = nconf.get('SCHEDULES') - debug('configSchedules:', configSchedules.length) - async.forEach configSchedules, (processedConfig, cb) -> - debug('Process schedule:', ) - async.mapSeries processedConfig['SCHEDULE'], (i, next) -> - getSchedule i, next - , (err, results) -> - if err then return cb err - if results - processSchedules results, processedConfig['EXCLUSION_DAYS'], (err, message) -> - debug('processSchedules:', processedConfig) - if message isnt "OK" - messages = messages.concat(message) - if processedConfig['NOTIFICATIONS'] - debug('Sending notifications.') - return sendNotification processedConfig['NOTIFICATIONS'], message, cb - return cb() - else - return cb new Error "No schedule to process." - , (err) -> - if err then return done err - done null, messages - -sendNotification = (options, message, cb) -> - debug("NOTIFICATIONS:", message) - debug("NOTIFICATIONS-OPTIONS:", options) - notify.send options, message, (err) -> - cb err - -processSchedules = (allSchedules, days = [], cb) -> - if typeof days is 'function' - [cb, days] = [days, []] - messages = [] - duplicities = {} - debug('allSchedules:', allSchedules) - for schedule in allSchedules - debug('schedule:', JSON.stringify(schedule)) - otherSchedules = _.without(allSchedules, schedule) - debug('otherSchedules:',JSON.stringify(otherSchedules)) - for entry in schedule.entries - debug('checking entry: ', JSON.stringify(entry)) - myStart = entry.start - myEnd = entry.end - myUserId = entry.user.id - myUserName = entry.user.summary - duplicities.myUserName ?= [] - for crossSchedule in otherSchedules - for crossCheckEntry in crossSchedule.entries - overlap = false - startDate = new Date(myStart) - day = getDayAbbrev(startDate.getUTCDay()) - - scheduleId = nconf.get("schedulesNames:#{schedule.id}") - crossScheduleId = nconf.get("schedulesNames:#{crossSchedule.id}") - - message = {user: myUserName, userId: myUserId, schedules: [scheduleId, crossScheduleId], date: startDate, crossDate: new Date(crossCheckEntry.start)} - - if myStart <= crossCheckEntry.start < myEnd and - crossCheckEntry.user.id == myUserId - overlap = true - - if day in Object.keys(days) - - if days[day]?.start? and days[day]?.end? 
- exclusionStartTime = days[day].start.split(':') - exclusionEndTime = days[day].end.split(':') - exclusionStartDate = new Date(myStart) - exclusionStartDate.setUTCHours(exclusionStartTime[0]) - exclusionStartDate.setUTCMinutes(exclusionStartTime[1]) - exclusionEndDate = new Date(myStart) - exclusionEndDate.setUTCHours(exclusionEndTime[0]) - exclusionEndDate.setUTCMinutes(exclusionEndTime[1]) - - - if exclusionStartDate <= startDate < exclusionEndDate - debug('excluded:', message) - overlap = false - else - overlap = false - - if overlap and crossCheckEntry.start not in duplicities.myUserName - duplicities.myUserName.push(crossCheckEntry.start) - messages.push message - debug(_.uniq(messages)) - if messages.length is 0 - cb null, "OK" - else - cb null, _.uniq(messages) - -getDayAbbrev = (utcDay) -> - days = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"] - - return days[utcDay] - -module.exports = { - getSchedulesIds - checkSchedulesIds - processSchedules - processSchedulesFromConfig - sendNotification -} diff --git a/src/pagerduty.js b/src/pagerduty.js new file mode 100644 index 0000000..2161114 --- /dev/null +++ b/src/pagerduty.js @@ -0,0 +1,214 @@ +const async = require('async'); +const nconf = require('nconf'); +const _ = require('underscore'); +const debug = require('debug')('pagerduty-overrides'); +const notify = require('./notify'); +const pdApi = require('./pagerduty-api'); + +// Get schedule for ID and 2 weeks +function getSchedule(id, cb) { + if (nconf.get('WEEKS_TO_CHECK') > 0) { + const week = 7 * 86400 * 1000; + const timeNow = new Date(); + const timeUntil = new Date(timeNow.getTime() + (nconf.get('WEEKS_TO_CHECK') * week)); + + const scheduleOpts = { + qs: { + 'schedule_ids[]': id, + until: timeUntil.toISOString(), + since: timeNow.toISOString(), + }, + }; + + + return pdApi.send('/oncalls', scheduleOpts, (err, res, body) => { + if (err) { + console.log('Request send error:', err); + return cb(err); + } + + if (res.statusCode !== 200) { + return cb(new Error(`Entries returned status code ${res.statusCode}`)); + } + return cb(err, { id, entries: body.oncalls }); + }); + } + return cb(new Error('Missing WEEKS_TO_CHECK settings')); +} + +// Get all schedules and returns their ids +function getSchedulesIds(cb) { + debug('Getting schedules from PD'); + return pdApi.send('/schedules', {}, (err, res, body) => { + if (err) { + console.log('Request send error:', err); + return cb(err); + } + + debug('Returned status code:', res.statusCode); + const schedulesIds = []; + + body.schedules.forEach((schedule) => { + schedulesIds.push(schedule.id); + // UWAGA UWAGA - side effect follows! 
+ // it's easier and more comprehensive to load schedule names here and store them using nconf + nconf.set(`schedulesNames:${schedule.id}`, schedule.name); + }); + + debug('Schedules Ids from PD: ', schedulesIds); + debug('Schedules Names from PD: ', nconf.get('schedulesNames')); + return cb(null, schedulesIds); + }); +} + +// Check if all schedules defined in config are available in PD +function checkSchedulesIds(cb) { + const configSchedules = nconf.get('SCHEDULES'); + const listIds = []; + configSchedules.forEach((ids) => { + listIds.push(ids.SCHEDULE); + }); + const configSchedulesIds = _.uniq(_.flatten(listIds)); + debug('Schedules Ids from config: ', configSchedulesIds); + return getSchedulesIds((err, schedulesIds) => { + if (err) { return cb(err); } + debug('intersection: ', _.intersection(configSchedulesIds, schedulesIds).length); + debug('config: ', configSchedulesIds.length); + if ((_.intersection(configSchedulesIds, schedulesIds).length) === configSchedulesIds.length) { + return cb(null, true); + } + return cb(null, false); + }); +} + +function sendNotification(options, message, cb) { + debug('NOTIFICATIONS:', message); + debug('NOTIFICATIONS-OPTIONS:', options); + return notify.send(options, message, err => cb(err)); +} + +function getDayAbbrev(utcDay) { + const days = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']; + + return days[utcDay]; +} + +function processSchedules(allSchedules, days = [], cb) { + let callback = cb; + let daysArray = days; + + if (typeof daysArray === 'function') { + callback = daysArray; + daysArray = []; + } + + const messages = []; + const duplicities = {}; + debug('allSchedules:', allSchedules); + allSchedules.forEach((schedule) => { + debug('schedule:', JSON.stringify(schedule)); + const otherSchedules = _.without(allSchedules, schedule); + debug('otherSchedules:', JSON.stringify(otherSchedules)); + schedule.entries.forEach((entry) => { + debug('checking entry: ', JSON.stringify(entry)); + const myStart = entry.start; + const myEnd = entry.end; + const myUserId = entry.user.id; + const myUserName = entry.user.summary; + if (duplicities.myUserName == null) { duplicities.myUserName = []; } + otherSchedules.forEach((crossSchedule) => { + crossSchedule.entries.forEach((crossCheckEntry) => { + let overlap = false; + const startDate = new Date(myStart); + const day = getDayAbbrev(startDate.getUTCDay()); + + const scheduleId = nconf.get(`schedulesNames:${schedule.id}`); + const crossScheduleId = nconf.get(`schedulesNames:${crossSchedule.id}`); + + const message = { + user: myUserName, + userId: myUserId, + schedules: [scheduleId, crossScheduleId], + date: startDate, + crossDate: new Date(crossCheckEntry.start), + }; + + if ((myStart <= crossCheckEntry.start && crossCheckEntry.start < myEnd) && + (crossCheckEntry.user.id === myUserId)) { + overlap = true; + + if (Object.keys(daysArray).includes(day)) { + if (daysArray[day].start && daysArray[day].end) { + const exclusionStartTime = daysArray[day].start.split(':'); + const exclusionEndTime = daysArray[day].end.split(':'); + const exclusionStartDate = new Date(myStart); + exclusionStartDate.setUTCHours(exclusionStartTime[0]); + exclusionStartDate.setUTCMinutes(exclusionStartTime[1]); + const exclusionEndDate = new Date(myStart); + exclusionEndDate.setUTCHours(exclusionEndTime[0]); + exclusionEndDate.setUTCMinutes(exclusionEndTime[1]); + + + if (exclusionStartDate <= startDate && startDate < exclusionEndDate) { + debug('excluded:', message); + overlap = false; + } + } else { + overlap = false; + } + } + } + 
+ if (overlap && !duplicities.myUserName.includes(crossCheckEntry.start)) { + duplicities.myUserName.push(crossCheckEntry.start); + messages.push(message); + } + }); + }); + }); + }); + debug(_.uniq(messages)); + if (messages.length === 0) { + return callback(null, 'OK'); + } + return callback(null, _.uniq(messages)); +} + +function processSchedulesFromConfig(done) { + let messages = []; + const configSchedules = nconf.get('SCHEDULES'); + debug('configSchedules:', configSchedules.length); + return async.forEach(configSchedules, (processedConfig, cb) => { + debug('Process schedule:'); + return async.mapSeries(processedConfig.SCHEDULE, (i, next) => getSchedule(i, next) + , (mapErr, results) => { + if (mapErr) { return cb(mapErr); } + if (results) { + return processSchedules(results, processedConfig.EXCLUSION_DAYS, (err, message) => { + debug('processSchedules:', processedConfig); + if (message !== 'OK') { + messages = messages.concat(message); + if (processedConfig.NOTIFICATIONS) { + debug('Sending notifications.'); + return sendNotification(processedConfig.NOTIFICATIONS, message, cb); + } + } + return cb(); + }); + } + return cb(new Error('No schedule to process.')); + }); + } + , (err) => { + if (err) { return done(err); } + return done(null, messages); + }); +} + +module.exports = { + getSchedulesIds, + checkSchedulesIds, + processSchedules, + processSchedulesFromConfig, + sendNotification, +}; diff --git a/test/config-test.coffee b/test/config-test.coffee deleted file mode 100644 index ab889b0..0000000 --- a/test/config-test.coffee +++ /dev/null @@ -1,24 +0,0 @@ -assert = require('chai').assert -config = require '../src/config' -nconf = require 'nconf' -debug = require('debug')('pagerduty-overrides:tests') - -describe 'Get config for PagerDuty Overrides', -> - - it 'NODE_ENV isn\'t set', -> - assert.equal nconf.get('NODE_ENV'), undefined - - it 'PAGERDUTY_READ_ONLY_TOKEN isn\'t set', -> - assert.equal nconf.get('PAGERDUTY_READ_ONLY_TOKEN'), undefined - -describe 'Setup config and get config', -> - - before (done) -> - config.setupConfig __dirname + '/fixtures/config.json', (err) -> - done err - - it 'NODE_ENV isn\'t set', -> - assert.equal nconf.get('NODE_ENV'), undefined - - it 'PAGERDUTY_READ_ONLY_TOKEN is set', -> - assert.equal nconf.get('PAGERDUTY_READ_ONLY_TOKEN'), 'E7px6VVr3PVHZPJq51oa' diff --git a/test/config-test.js b/test/config-test.js new file mode 100644 index 0000000..3aee28f --- /dev/null +++ b/test/config-test.js @@ -0,0 +1,19 @@ +const { assert } = require('chai'); +const config = require('../src/config'); +const nconf = require('nconf'); + +describe('Get config for PagerDuty Overrides', () => { + it('NODE_ENV isn\'t set', () => assert.equal(nconf.get('NODE_ENV'), undefined)); + + return it('PAGERDUTY_READ_ONLY_TOKEN isn\'t set', () => assert.equal(nconf.get('PAGERDUTY_READ_ONLY_TOKEN'), undefined)); +}); + +describe('Setup config and get config', () => { + before(done => + config.setupConfig(`${__dirname}/fixtures/config.json`, err => done(err)) + ); + + it('NODE_ENV isn\'t set', () => assert.equal(nconf.get('NODE_ENV'), undefined)); + + return it('PAGERDUTY_READ_ONLY_TOKEN is set', () => assert.equal(nconf.get('PAGERDUTY_READ_ONLY_TOKEN'), 'E7px6VVr3PVHZPJq51oa')); +}); diff --git a/test/mocha.opts b/test/mocha.opts index 8b574a1..52f04a7 100644 --- a/test/mocha.opts +++ b/test/mocha.opts @@ -1,3 +1,2 @@ ---compilers=coffee:coffee-script/register --reporter=spec --timeout=12000 diff --git a/test/notify-test.coffee b/test/notify-test.coffee deleted file mode 100644 
index abf4df4..0000000 --- a/test/notify-test.coffee +++ /dev/null @@ -1,58 +0,0 @@ -assert = require('chai').assert -nock = require 'nock' -nconf = require 'nconf' -debug = require('debug')('pagerduty-overrides:tests') - -config = require '../src/config' -notify = require '../src/notify' - -configPath = __dirname + '/fixtures/config.json' - -nock.disableNetConnect(); - -# https://github.com/chenka/node-slackr/blob/master/test/index.coffee -describe 'Test send message using notify.send for both', -> - - actual = null - - before (done) -> - overlapDate = new Date() - message = - user: 'Test user' - userId: '1234' - schedules: ['TEST1', 'TEST2'] - date: overlapDate - crossDate: overlapDate - - expectBody = - text:"Following overlaps found:\n*Test user:* `TEST1` and `TEST2` (the first starting on #{overlapDate.toUTCString()}, the second on #{overlapDate.toUTCString()})\n" - channel:"#channel-name" - - config.setupConfig configPath, (err) -> - if err then return done err - nock('https://incomingUrl') - .post("/", expectBody) - .query(true) - .reply(200, 'ok') - - nock('https://api.pagerduty.com/') - .post('/incidents') - .query(true) - .reply(200, 'ok') - - configSchedules = nconf.get('SCHEDULES') - options = configSchedules[0]['NOTIFICATIONS'] - message = - user: 'Test user' - userId: '1234' - schedules: ['TEST1', 'TEST2'] - date: overlapDate - crossDate: overlapDate - - notify.send options, [ message ], (err, result) -> - if err then return done err - actual = result - done() - - it 'Check result from send notification', -> - assert.equal 'ok', actual diff --git a/test/notify-test.js b/test/notify-test.js new file mode 100644 index 0000000..673b3e6 --- /dev/null +++ b/test/notify-test.js @@ -0,0 +1,62 @@ +const { assert } = require('chai'); +const nock = require('nock'); +const nconf = require('nconf'); + +const config = require('../src/config'); +const notify = require('../src/notify'); + +const configPath = `${__dirname}/fixtures/config.json`; + +nock.disableNetConnect(); + +// https://github.com/chenka/node-slackr/blob/master/test/index.coffee +describe('Test send message using notify.send for both', () => { + let actual = null; + + before((done) => { + const overlapDate = new Date(); + let message = { + user: 'Test user', + userId: '1234', + schedules: ['TEST1', 'TEST2'], + date: overlapDate, + crossDate: overlapDate, + }; + + const expectBody = { + text: `Following overlaps found:\n*Test user:* \`TEST1\` and \`TEST2\` (the first starting on ${overlapDate.toUTCString()}, the second on ${overlapDate.toUTCString()})\n`, + channel: '#channel-name', + }; + + return config.setupConfig(configPath, (configErr) => { + if (configErr) { return done(configErr); } + nock('https://incomingUrl') + .post('/', expectBody) + .query(true) + .reply(200, 'ok'); + + nock('https://api.pagerduty.com/') + .post('/incidents') + .query(true) + .reply(200, 'ok'); + + const configSchedules = nconf.get('SCHEDULES'); + const options = configSchedules[0].NOTIFICATIONS; + message = { + user: 'Test user', + userId: '1234', + schedules: ['TEST1', 'TEST2'], + date: overlapDate, + crossDate: overlapDate, + }; + + return notify.send(options, [message], (err, result) => { + if (err) { return done(err); } + actual = result; + return done(); + }); + }); + }); + + return it('Check result from send notification', () => assert.equal('ok', actual)); +}); diff --git a/test/pagerduty-test.coffee b/test/pagerduty-test.coffee deleted file mode 100644 index 420e8c9..0000000 --- a/test/pagerduty-test.coffee +++ /dev/null @@ -1,208 +0,0 
@@ -assert = require('chai').assert -nock = require 'nock' -nconf = require 'nconf' -debug = require('debug')('pagerduty-overrides:tests') - -config = require '../src/config' -pd = require '../src/pagerduty' - -configPath = __dirname + '/fixtures/config.json' -configWithDaysPath = __dirname + '/fixtures/config-days.json' -configWrongPath = __dirname + '/fixtures/config-wrong.json' - -nock.disableNetConnect(); - -describe 'Get schedules Ids', -> - schedules = null - - before (done) -> - config.setupConfig configPath, (err) -> - if err then return done err - nock('https://api.pagerduty.com/') - .get('/schedules') - .query(true) - .replyWithFile(200, __dirname + '/fixtures/schedules.json') - - pd.getSchedulesIds (err, schedulesIds) -> - schedules = schedulesIds - done err - - it 'Check how many schedules', -> - assert.equal schedules.length, 2 - -describe 'Check schedules', -> - schedules = null - - before (done) -> - config.setupConfig configPath, (err) -> - if err then return done err - - nock('https://api.pagerduty.com/') - .get('/schedules') - .query(true) - .replyWithFile(200, __dirname + '/fixtures/schedules.json') - - pd.checkSchedulesIds (err, res) -> - schedules = res - done err - - it 'Check if config ids are in pagerduty schedules', -> - assert.ok schedules - -describe 'Check schedules with wrong config', -> - schedules = null - - before (done) -> - config.setupConfig configWrongPath, (err) -> - if err then return done err - - nock('https://api.pagerduty.com/') - .get('/schedules') - .query(true) - .replyWithFile(200, __dirname + '/fixtures/schedules.json') - - pd.checkSchedulesIds (err, res) -> - schedules = res - done err - - it 'Check if config ids are in pagerduty schedules', -> - assert.notOk schedules - -describe 'Compare schedules', -> - - message = null - - before (done) -> - config.setupConfig configPath, (err) -> - if err then return done err - nock('https://api.pagerduty.com/') - .get('/schedules') - .query(true) - .replyWithFile(200, __dirname + '/fixtures/schedules.json') - - nock('https://api.pagerduty.com/') - .get('/oncalls') - .query(true) - .replyWithFile(200, __dirname + '/fixtures/entries.json') - - nock('https://api.pagerduty.com/') - .get('/oncalls') - .query(true) - .replyWithFile(200, __dirname + '/fixtures/entries.json') - - nock('https://api.pagerduty.com/') - .post('/incidents', require('./fixtures/incident.json')) - .query(true) - .reply(200, 'ok') - - nock('https://api.pagerduty.com/') - .post('/incidents', require('./fixtures/incident2.json')) - .query(true) - .reply(200, 'ok') - - nock('https://incomingUrl/').post("/").reply(200, 'ok') - - pd.checkSchedulesIds (err, res) -> - if err then return done err - unless res - return done new Error("Check failed") - pd.processSchedulesFromConfig (err, msg) -> - if err then return done err - message = msg - done err - - it 'Check if there are 2 returned messages', -> - assert.isArray message - assert.lengthOf message, 2 - - it 'Check returned messages if they contain "Primary and Secondary"', -> - for singleMessage in message - debug(singleMessage) - assert.isObject singleMessage - assert.include singleMessage.schedules, "Primary" - assert.include singleMessage.schedules, "Secondary" - - -describe 'Compare schedules on specific days', -> - - message = null - - before (done) -> - config.setupConfig configWithDaysPath, (err) -> - if err then return done err - nock('https://api.pagerduty.com/') - .get('/schedules') - .query(true) - .replyWithFile(200, __dirname + '/fixtures/schedules.json') - - 
nock('https://api.pagerduty.com/') - .get('/oncalls') - .query(true) - .replyWithFile(200, __dirname + '/fixtures/entries-days.json') - - nock('https://api.pagerduty.com/') - .get('/oncalls') - .query(true) - .replyWithFile(200, __dirname + '/fixtures/entries-days.json') - - nock('https://api.pagerduty.com/') - .post('/incidents', require('./fixtures/incident.json')) - .query(true) - .reply(200, 'ok') - - nock('https://incomingUrl/').post("/").reply(200, 'ok') - - pd.checkSchedulesIds (err, res) -> - if err then return done err - unless res - return done new Error("Check failed") - pd.processSchedulesFromConfig (err, msg) -> - if err then return done err - message = msg - done err - - it 'Check if there is 1 returned message', -> - assert.isArray message - assert.lengthOf message, 1 - - it 'Check if the returned message contains "Primary and Secondary"', -> - for singleMessage in message - debug(singleMessage) - assert.isObject singleMessage - assert.include singleMessage.schedules, "Primary" - assert.include singleMessage.schedules, "Secondary" - -describe 'Compare schedules with no overlap', -> - - message = null - - before (done) -> - config.setupConfig configPath, (err) -> - if err then return done err - nock('https://api.pagerduty.com/') - .get('/schedules') - .query(true) - .replyWithFile(200, __dirname + '/fixtures/schedules.json') - - nock('https://api.pagerduty.com/') - .get('/oncalls') - .query(true) - .replyWithFile(200, __dirname + '/fixtures/entries.json') - - nock('https://api.pagerduty.com/') - .get('/oncalls') - .query(true) - .replyWithFile(200, __dirname + '/fixtures/entries-no-overlap.json') - - pd.checkSchedulesIds (err, res) -> - if err then return done err - unless res - return done new Error("Check failed") - pd.processSchedulesFromConfig (err, msg) -> - if err then return done err - message = msg - done err - - it 'Check that there are no returned messages', -> - assert.isArray message - assert.isEmpty message diff --git a/test/pagerduty-test.js b/test/pagerduty-test.js new file mode 100644 index 0000000..03fbb98 --- /dev/null +++ b/test/pagerduty-test.js @@ -0,0 +1,245 @@ +const { assert } = require('chai'); +const nock = require('nock'); +const debug = require('debug')('pagerduty-overrides:tests'); + +const config = require('../src/config'); +const pd = require('../src/pagerduty'); + +const configPath = `${__dirname}/fixtures/config.json`; +const configWithDaysPath = `${__dirname}/fixtures/config-days.json`; +const configWrongPath = `${__dirname}/fixtures/config-wrong.json`; + +const incident = require('./fixtures/incident.json'); +const incident2 = require('./fixtures/incident2.json'); + +nock.disableNetConnect(); + +describe('Get schedules Ids', () => { + let schedules = null; + + before(done => + config.setupConfig(configPath, (configErr) => { + if (configErr) { return done(configErr); } + nock('https://api.pagerduty.com/') + .get('/schedules') + .query(true) + .replyWithFile(200, `${__dirname}/fixtures/schedules.json`); + + return pd.getSchedulesIds((err, schedulesIds) => { + schedules = schedulesIds; + return done(err); + }); + }) + ); + + return it('Check how many schedules', () => assert.equal(schedules.length, 2)); +}); + +describe('Check schedules', () => { + let schedules = null; + + before(done => + config.setupConfig(configPath, (configErr) => { + if (configErr) { return done(configErr); } + + nock('https://api.pagerduty.com/') + .get('/schedules') + .query(true) + .replyWithFile(200, `${__dirname}/fixtures/schedules.json`); + + return 
pd.checkSchedulesIds((err, res) => { + schedules = res; + return done(err); + }); + }) + ); + + return it('Check if config ids are in pagerduty schedules', () => assert.ok(schedules)); +}); + +describe('Check schedules with wrong config', () => { + let schedules = null; + + before(done => + config.setupConfig(configWrongPath, (configErr) => { + if (configErr) { return done(configErr); } + + nock('https://api.pagerduty.com/') + .get('/schedules') + .query(true) + .replyWithFile(200, `${__dirname}/fixtures/schedules.json`); + + return pd.checkSchedulesIds((err, res) => { + schedules = res; + return done(err); + }); + }) + ); + + return it('Check if config ids are in pagerduty schedules', () => assert.notOk(schedules)); +}); + +describe('Compare schedules', () => { + let message = null; + + before(done => + config.setupConfig(configPath, (configErr) => { + if (configErr) { return done(configErr); } + nock('https://api.pagerduty.com/') + .get('/schedules') + .query(true) + .replyWithFile(200, `${__dirname}/fixtures/schedules.json`); + + nock('https://api.pagerduty.com/') + .get('/oncalls') + .query(true) + .replyWithFile(200, `${__dirname}/fixtures/entries.json`); + + nock('https://api.pagerduty.com/') + .get('/oncalls') + .query(true) + .replyWithFile(200, `${__dirname}/fixtures/entries.json`); + + nock('https://api.pagerduty.com/') + .post('/incidents', incident) + .query(true) + .reply(200, 'ok'); + + nock('https://api.pagerduty.com/') + .post('/incidents', incident2) + .query(true) + .reply(200, 'ok'); + + nock('https://incomingUrl/').post('/').reply(200, 'ok'); + + return pd.checkSchedulesIds((checkErr, res) => { + if (checkErr) { return done(checkErr); } + if (!res) { + return done(new Error('Check failed')); + } + return pd.processSchedulesFromConfig((err, msg) => { + if (err) { return done(err); } + message = msg; + return done(err); + }); + }); + }) + ); + + it('Check if there are 2 returned messages', () => { + assert.isArray(message); + return assert.lengthOf(message, 2); + }); + + return it('Check returned messages if they contain "Primary and Secondary"', () => + (() => { + const result = []; + message.forEach((singleMessage) => { + debug(singleMessage); + assert.isObject(singleMessage); + assert.include(singleMessage.schedules, 'Primary'); + result.push(assert.include(singleMessage.schedules, 'Secondary')); + }); + return result; + })() + ); +}); + + +describe('Compare schedules on specific days', () => { + let message = null; + + before(done => + config.setupConfig(configWithDaysPath, (configErr) => { + if (configErr) { return done(configErr); } + nock('https://api.pagerduty.com/') + .get('/schedules') + .query(true) + .replyWithFile(200, `${__dirname}/fixtures/schedules.json`); + + nock('https://api.pagerduty.com/') + .get('/oncalls') + .query(true) + .replyWithFile(200, `${__dirname}/fixtures/entries-days.json`); + + nock('https://api.pagerduty.com/') + .get('/oncalls') + .query(true) + .replyWithFile(200, `${__dirname}/fixtures/entries-days.json`); + + nock('https://api.pagerduty.com/') + .post('/incidents', incident) + .query(true) + .reply(200, 'ok'); + + nock('https://incomingUrl/').post('/').reply(200, 'ok'); + + return pd.checkSchedulesIds((checkErr, res) => { + if (checkErr) { return done(checkErr); } + if (!res) { + return done(new Error('Check failed')); + } + return pd.processSchedulesFromConfig((err, msg) => { + if (err) { return done(err); } + message = msg; + return done(err); + }); + }); + }) + ); + + it('Check if there is 1 returned message', () => { + 
assert.isArray(message); + return assert.lengthOf(message, 1); + }); + + it('Check if the returned message contains "Primary and Secondary"', () => { + message.forEach((singleMessage) => { + debug(singleMessage); + assert.isObject(singleMessage); + assert.include(singleMessage.schedules, 'Primary'); + assert.include(singleMessage.schedules, 'Secondary'); + }); + }); +}); + +describe('Compare schedules with no overlap', () => { + let message = null; + + before((done) => { + config.setupConfig(configPath, (configErr) => { + if (configErr) { return done(configErr); } + nock('https://api.pagerduty.com/') + .get('/schedules') + .query(true) + .replyWithFile(200, `${__dirname}/fixtures/schedules.json`); + + nock('https://api.pagerduty.com/') + .get('/oncalls') + .query(true) + .replyWithFile(200, `${__dirname}/fixtures/entries.json`); + + nock('https://api.pagerduty.com/') + .get('/oncalls') + .query(true) + .replyWithFile(200, `${__dirname}/fixtures/entries-no-overlap.json`); + + return pd.checkSchedulesIds((checkErr, res) => { + if (checkErr) { return done(checkErr); } + if (!res) { + return done(new Error('Check failed')); + } + return pd.processSchedulesFromConfig((err, msg) => { + if (err) { return done(err); } + message = msg; + return done(err); + }); + }); + }); + }); + + return it('Check that there are no returned messages', () => { + assert.isArray(message); + return assert.isEmpty(message); + }); +});
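For reference, a minimal usage sketch of the migrated JavaScript API, mirroring example.js and bin/pdoverrides above; the package name and exports come from package.json and src/index.js in this diff, while the config path and its contents are assumptions. The CLI equivalent would be `pdoverrides check --config config.json`.

// Minimal usage sketch (not part of the diff), mirroring example.js and bin/pdoverrides.
// Assumptions: the package is consumed as a dependency named 'pagerduty-overlap-checker'
// (per package.json), and a config.json with PAGERDUTY_READ_ONLY_TOKEN, WEEKS_TO_CHECK
// and SCHEDULES sits next to this script.
const { config, checker } = require('pagerduty-overlap-checker'); // "main": "src/" resolves to src/index.js

const configPath = `${__dirname}/config.json`; // hypothetical location

config.setupConfig(configPath, (configErr) => {
  if (configErr) {
    console.error(configErr);
    process.exit(1);
  }
  checker.checkSchedulesIds((checkErr, ok) => {
    if (checkErr || !ok) {
      console.error('Schedule IDs from config not found in PagerDuty', checkErr);
      process.exit(1);
    }
    checker.processSchedulesFromConfig((err, messages) => {
      if (err) {
        console.error(err);
        process.exit(1);
      }
      // `messages` is an array of overlap descriptions; it is empty when no overlaps were found
      console.log(messages);
    });
  });
});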