diff --git a/.all-contributorsrc b/.all-contributorsrc
index 3d50d31c0..149459782 100644
--- a/.all-contributorsrc
+++ b/.all-contributorsrc
@@ -8,7 +8,7 @@
   ],
   "imageSize": 100,
   "commit": false,
-  "commitConvention": "none",
+  "commitConvention": "angular",
   "contributors": [
     {
       "login": "fmvilas",
@@ -134,8 +134,71 @@
       "contributions": [
        "doc"
      ]
+    },
+    {
+      "login": "Florence-Njeri",
+      "name": "Florence Njeri",
+      "avatar_url": "https://avatars.githubusercontent.com/u/40742916?v=4",
+      "profile": "https://github.com/Florence-Njeri",
+      "contributions": [
+        "doc",
+        "review",
+        "infra",
+        "maintenance"
+      ]
+    },
+    {
+      "login": "pratik2315",
+      "name": "Pratik Haldankar",
+      "avatar_url": "https://avatars.githubusercontent.com/u/77961530?v=4",
+      "profile": "https://unruffled-goodall-dd424e.netlify.app/",
+      "contributions": [
+        "doc",
+        "review",
+        "maintenance",
+        "talk"
+      ]
+    },
+    {
+      "login": "swastiksuvam55",
+      "name": "swastik suvam singh",
+      "avatar_url": "https://avatars.githubusercontent.com/u/90003260?v=4",
+      "profile": "https://github.com/swastiksuvam55",
+      "contributions": [
+        "code"
+      ]
+    },
+    {
+      "login": "GavinZhengOI",
+      "name": "GavinZhengOI",
+      "avatar_url": "https://avatars.githubusercontent.com/u/33168669?v=4",
+      "profile": "https://blog.orzzh.icu/",
+      "contributions": [
+        "doc"
+      ]
+    },
+    {
+      "login": "lmgyuan",
+      "name": "lmgyuan",
+      "avatar_url": "https://avatars.githubusercontent.com/u/16447041?v=4",
+      "profile": "https://github.com/lmgyuan",
+      "contributions": [
+        "doc"
+      ]
+    },
+    {
+      "login": "pierrick-boule",
+      "name": "pierrick-boule",
+      "avatar_url": "https://avatars.githubusercontent.com/u/3237116?v=4",
+      "profile": "https://github.com/pierrick-boule",
+      "contributions": [
+        "code",
+        "test",
+        "doc"
+      ]
     }
   ],
   "contributorsPerLine": 3,
-  "skipCi": true
+  "skipCi": false,
+  "commitType": "docs"
 }
diff --git a/.changeset/config.json b/.changeset/config.json
new file mode 100644
index 000000000..2396f0ca7
--- /dev/null
+++ b/.changeset/config.json
@@ -0,0 +1,14 @@
+{
+  "$schema": "https://unpkg.com/@changesets/config@2.3.0/schema.json",
+  "changelog": ["@changesets/changelog-git", { "repo": "asyncapi/generator" }],
+  "commit": false,
+  "fixed": [],
+  "linked": [],
+  "access": "public",
+  "baseBranch": "master",
+  "updateInternalDependencies": "patch",
+  "privatePackages": {
+    "version": true,
+    "tag": true
+  }
+}
diff --git a/.eslintignore b/.eslintignore
index a5981ceaf..8177e20b5 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -1,4 +1,6 @@
-output
-node_modules
-.github/templates-list-validator/dist
-test/temp
\ No newline at end of file
+apps/generator/output/*
+**/node_modules/
+.github/templates-list-validator/dist/*
+apps/generator/test/temp/*
+apps/generator/test/test-templates/react-template/__transpiled/*
+
diff --git a/.eslintrc b/.eslintrc
index 09225e0ad..38ad4e746 100644
--- a/.eslintrc
+++ b/.eslintrc
@@ -6,12 +6,20 @@ env:
 plugins:
   - sonarjs
   - jest
+  - react
 
 extends:
+  - plugin:react/recommended
   - plugin:sonarjs/recommended
 
 parserOptions:
   ecmaVersion: 2018
+  sourceType: module
+  ecmaFeatures:
+    jsx: true
+settings:
+  react:
+    version: detect
 
 rules:
   # Ignore Rules
@@ -96,3 +104,10 @@ rules:
   prefer-const: 2
   prefer-spread: 2
   prefer-template: 2
+
+  # React
+  react/jsx-uses-react: off
+  react/react-in-jsx-scope: off
+  react/display-name: off
+  react/prop-types: off
+  react/jsx-key: off
\ No newline at end of file
diff --git a/.github/templates-list-validator/README.md b/.github/templates-list-validator/README.md
deleted file mode 100644
index 4791da435..000000000
--- a/.github/templates-list-validator/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# Templates List Validator - -Simple action that runs to check with GitHub API official list of templates and inform about the change. -It indicates where lists of templates must be update to have latest state. - -## Development - -To try action locally, run the following: -``` -GITHUB_TOKEN=your-token npm run start -``` - -After every change in dependencies or code you need to generate a new bundle for the GitHub Action: -``` -npm run package -``` \ No newline at end of file diff --git a/.github/templates-list-validator/action.yml b/.github/templates-list-validator/action.yml deleted file mode 100644 index 1b0bad4a8..000000000 --- a/.github/templates-list-validator/action.yml +++ /dev/null @@ -1,9 +0,0 @@ -name: 'Templates list validator' -description: 'Use this action to generate docs or code from your AsyncAPI document. Use default templates or provide your custom ones.' -inputs: - token: - description: 'GitHub Token used to call GitHub API for list of templates' - required: true -runs: - using: 'node12' - main: 'dist/index.js' diff --git a/.github/templates-list-validator/dist/index.js b/.github/templates-list-validator/dist/index.js deleted file mode 100644 index cee2dc858..000000000 --- a/.github/templates-list-validator/dist/index.js +++ /dev/null @@ -1,25435 +0,0 @@ -module.exports = -/******/ (function(modules, runtime) { // webpackBootstrap -/******/ "use strict"; -/******/ // The module cache -/******/ var installedModules = {}; -/******/ -/******/ // The require function -/******/ function __webpack_require__(moduleId) { -/******/ -/******/ // Check if module is in cache -/******/ if(installedModules[moduleId]) { -/******/ return installedModules[moduleId].exports; -/******/ } -/******/ // Create a new module (and put it into the cache) -/******/ var module = installedModules[moduleId] = { -/******/ i: moduleId, -/******/ l: false, -/******/ exports: {} -/******/ }; -/******/ -/******/ // Execute the module function -/******/ var threw = true; -/******/ try { -/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); -/******/ threw = false; -/******/ } finally { -/******/ if(threw) delete installedModules[moduleId]; -/******/ } -/******/ -/******/ // Flag the module as loaded -/******/ module.l = true; -/******/ -/******/ // Return the exports of the module -/******/ return module.exports; -/******/ } -/******/ -/******/ -/******/ __webpack_require__.ab = __dirname + "/"; -/******/ -/******/ // the startup function -/******/ function startup() { -/******/ // Load entry module and return exports -/******/ return __webpack_require__(104); -/******/ }; -/******/ -/******/ // run startup -/******/ return startup(); -/******/ }) -/************************************************************************/ -/******/ ({ - -/***/ 0: -/***/ (function(module, __unusedexports, __webpack_require__) { - -const { requestLog } = __webpack_require__(916); -const { - restEndpointMethods -} = __webpack_require__(842); - -const Core = __webpack_require__(529); - -const CORE_PLUGINS = [ - __webpack_require__(190), - __webpack_require__(19), // deprecated: remove in v17 - requestLog, - __webpack_require__(148), - restEndpointMethods, - __webpack_require__(430), - - __webpack_require__(850) // deprecated: remove in v17 -]; - -const OctokitRest = Core.plugin(CORE_PLUGINS); - -function DeprecatedOctokit(options) { - const warn = - options && options.log && options.log.warn - ? 
options.log.warn - : console.warn; - warn( - '[@octokit/rest] `const Octokit = require("@octokit/rest")` is deprecated. Use `const { Octokit } = require("@octokit/rest")` instead' - ); - return new OctokitRest(options); -} - -const Octokit = Object.assign(DeprecatedOctokit, { - Octokit: OctokitRest -}); - -Object.keys(OctokitRest).forEach(key => { - /* istanbul ignore else */ - if (OctokitRest.hasOwnProperty(key)) { - Octokit[key] = OctokitRest[key]; - } -}); - -module.exports = Octokit; - - -/***/ }), - -/***/ 2: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const os = __webpack_require__(87); -const macosRelease = __webpack_require__(118); -const winRelease = __webpack_require__(49); - -const osName = (platform, release) => { - if (!platform && release) { - throw new Error('You can\'t specify a `release` without specifying `platform`'); - } - - platform = platform || os.platform(); - - let id; - - if (platform === 'darwin') { - if (!release && os.platform() === 'darwin') { - release = os.release(); - } - - const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS'; - id = release ? macosRelease(release).name : ''; - return prefix + (id ? ' ' + id : ''); - } - - if (platform === 'linux') { - if (!release && os.platform() === 'linux') { - release = os.release(); - } - - id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : ''; - return 'Linux' + (id ? ' ' + id : ''); - } - - if (platform === 'win32') { - if (!release && os.platform() === 'win32') { - release = os.release(); - } - - id = release ? winRelease(release) : ''; - return 'Windows' + (id ? ' ' + id : ''); - } - - return platform; -}; - -module.exports = osName; - - -/***/ }), - -/***/ 9: -/***/ (function(module, __unusedexports, __webpack_require__) { - -var once = __webpack_require__(969); - -var noop = function() {}; - -var isRequest = function(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -}; - -var isChildProcess = function(stream) { - return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 -}; - -var eos = function(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - - callback = once(callback || noop); - - var ws = stream._writableState; - var rs = stream._readableState; - var readable = opts.readable || (opts.readable !== false && stream.readable); - var writable = opts.writable || (opts.writable !== false && stream.writable); - var cancelled = false; - - var onlegacyfinish = function() { - if (!stream.writable) onfinish(); - }; - - var onfinish = function() { - writable = false; - if (!readable) callback.call(stream); - }; - - var onend = function() { - readable = false; - if (!writable) callback.call(stream); - }; - - var onexit = function(exitCode) { - callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); - }; - - var onerror = function(err) { - callback.call(stream, err); - }; - - var onclose = function() { - process.nextTick(onclosenexttick); - }; - - var onclosenexttick = function() { - if (cancelled) return; - if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); - if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); - }; - - var onrequest = function() { - stream.req.on('finish', onfinish); - }; - - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest(); - else stream.on('request', onrequest); - } else if (writable && !ws) { // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } - - if (isChildProcess(stream)) stream.on('exit', onexit); - - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); - - return function() { - cancelled = true; - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('exit', onexit); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; -}; - -module.exports = eos; - - -/***/ }), - -/***/ 11: -/***/ (function(module) { - -// Returns a wrapper function that returns a wrapped callback -// The wrapper function should do some stuff, and return a -// presumably different callback function. -// This makes sure that own properties are retained, so that -// decorations and such are not lost along the way. 
-module.exports = wrappy -function wrappy (fn, cb) { - if (fn && cb) return wrappy(fn)(cb) - - if (typeof fn !== 'function') - throw new TypeError('need wrapper function') - - Object.keys(fn).forEach(function (k) { - wrapper[k] = fn[k] - }) - - return wrapper - - function wrapper() { - var args = new Array(arguments.length) - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i] - } - var ret = fn.apply(this, args) - var cb = args[args.length-1] - if (typeof ret === 'function' && ret !== cb) { - Object.keys(cb).forEach(function (k) { - ret[k] = cb[k] - }) - } - return ret - } -} - - -/***/ }), - -/***/ 16: -/***/ (function(module) { - -module.exports = require("tls"); - -/***/ }), - -/***/ 18: -/***/ (function(module) { - -module.exports = eval("require")("encoding"); - - -/***/ }), - -/***/ 19: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = authenticationPlugin; - -const { Deprecation } = __webpack_require__(692); -const once = __webpack_require__(969); - -const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation)); - -const authenticate = __webpack_require__(674); -const beforeRequest = __webpack_require__(471); -const requestError = __webpack_require__(349); - -function authenticationPlugin(octokit, options) { - if (options.auth) { - octokit.authenticate = () => { - deprecateAuthenticate( - octokit.log, - new Deprecation( - '[@octokit/rest] octokit.authenticate() is deprecated and has no effect when "auth" option is set on Octokit constructor' - ) - ); - }; - return; - } - const state = { - octokit, - auth: false - }; - octokit.authenticate = authenticate.bind(null, state); - octokit.hook.before("request", beforeRequest.bind(null, state)); - octokit.hook.error("request", requestError.bind(null, state)); -} - - -/***/ }), - -/***/ 20: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const cp = __webpack_require__(129); -const parse = __webpack_require__(568); -const enoent = __webpack_require__(881); - -function spawn(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); - - // Spawn the child process - const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); - - // Hook into child process "exit" event to emit an error if the command - // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - enoent.hookChildProcess(spawned, parsed); - - return spawned; -} - -function spawnSync(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); - - // Spawn the child process - const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); - - // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); - - return result; -} - -module.exports = spawn; -module.exports.spawn = spawn; -module.exports.sync = spawnSync; - -module.exports._parse = parse; -module.exports._enoent = enoent; - - -/***/ }), - -/***/ 39: -/***/ (function(module) { - -"use strict"; - -module.exports = opts => { - opts = opts || {}; - - const env = opts.env || process.env; - const platform = opts.platform || process.platform; - - if (platform !== 'win32') { - return 'PATH'; - } - - return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path'; -}; - - -/***/ }), - -/***/ 47: -/***/ (function(module, __unusedexports, __webpack_require__) { - 
-module.exports = factory; - -const Octokit = __webpack_require__(402); -const registerPlugin = __webpack_require__(855); - -function factory(plugins) { - const Api = Octokit.bind(null, plugins || []); - Api.plugin = registerPlugin.bind(null, plugins || []); - return Api; -} - - -/***/ }), - -/***/ 49: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const os = __webpack_require__(87); -const execa = __webpack_require__(955); - -// Reference: https://www.gaijin.at/en/lstwinver.php -const names = new Map([ - ['10.0', '10'], - ['6.3', '8.1'], - ['6.2', '8'], - ['6.1', '7'], - ['6.0', 'Vista'], - ['5.2', 'Server 2003'], - ['5.1', 'XP'], - ['5.0', '2000'], - ['4.9', 'ME'], - ['4.1', '98'], - ['4.0', '95'] -]); - -const windowsRelease = release => { - const version = /\d+\.\d/.exec(release || os.release()); - - if (release && !version) { - throw new Error('`release` argument doesn\'t match `n.n`'); - } - - const ver = (version || [])[0]; - - // Server 2008, 2012, 2016, and 2019 versions are ambiguous with desktop versions and must be detected at runtime. - // If `release` is omitted or we're on a Windows system, and the version number is an ambiguous version - // then use `wmic` to get the OS caption: https://msdn.microsoft.com/en-us/library/aa394531(v=vs.85).aspx - // If `wmic` is obsoloete (later versions of Windows 10), use PowerShell instead. - // If the resulting caption contains the year 2008, 2012, 2016 or 2019, it is a server version, so return a server OS name. - if ((!release || release === os.release()) && ['6.1', '6.2', '6.3', '10.0'].includes(ver)) { - let stdout; - try { - stdout = execa.sync('powershell', ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']).stdout || ''; - } catch (_) { - stdout = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || ''; - } - - const year = (stdout.match(/2008|2012|2016|2019/) || [])[0]; - - if (year) { - return `Server ${year}`; - } - } - - return names.get(ver); -}; - -module.exports = windowsRelease; - - -/***/ }), - -/***/ 87: -/***/ (function(module) { - -module.exports = require("os"); - -/***/ }), - -/***/ 104: -/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) { - -const github = __webpack_require__(469); -const core = __webpack_require__(470); -const path = __webpack_require__(622); -const fs = __webpack_require__(747); - -async function run() { - try { - const token = process.env.GITHUB_TOKEN || core.getInput('token', { required: true }); - //TODO we should not only check the content of the readme but also the content of the website listing from https://github.com/asyncapi/website/blob/master/content/docs/tooling.md - const templatesListContent = getReadmeContent(); - const officialTemplates = await searchOfficialTemplates(token); - - const missingTemplates = officialTemplates.filter(str => !templatesListContent.includes(str)); - - if (missingTemplates.length) core.setFailed( - `The following templates are not in the README.md: ${missingTemplates}. 
Make sure missing templates are added to the list in the README of this repository and also to the website to the list of offecial tools: https://github.com/asyncapi/website/blob/master/content/docs/tooling.md` - ); - } catch (error) { - core.setFailed(error.message); - } -} - -/** - * Gets the markdown table from the readme of the Generator - * - * @private - * @return {String} - */ -function getReadmeContent() { - //README.md must be read 3levels above ../../../README.md because the action code is run from the bundle of the action, dist folder - const readmeContent = fs.readFileSync(path.resolve(__dirname,'../../../README.md'), 'utf8'); - const startingTag = ''; - const startOfOpeningTagIndex = readmeContent.indexOf(`${startingTag}START`); - const endOfOpeningTagIndex = readmeContent.indexOf(closingTag, startOfOpeningTagIndex); - const endOfClosingTagIndex = readmeContent.indexOf(`${startingTag}END ${closingTag}`); - - if (endOfOpeningTagIndex === -1 || endOfClosingTagIndex === -1) { - core.setFailed(); - throw new Error( - 'Templates list is missing or someone removed markers that indicate the list. Table with list of templates that is in the README should be wrapped in markers like and ' - ); - } - - return readmeContent.slice(endOfOpeningTagIndex + closingTag.length, endOfClosingTagIndex); -} - -/** - * Gets array with list of names of all the official templates - * - * @private - * @param {String} Token to authorize with GitHub - * @return {Array[String]} - */ -async function searchOfficialTemplates(token) { - const client = new github.GitHub(token); - const searchQuery = ` - query { - search(query: "topic:asyncapi topic:generator topic:template user:asyncapi", type: REPOSITORY, first:100) { - repositoryCount - nodes { - ... on Repository { - name - } - } - } - } - `; - - const searchResult = await client.graphql(searchQuery); - return searchResult.search.nodes.map(obj => obj.name); -} - -run(); - - -/***/ }), - -/***/ 118: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const os = __webpack_require__(87); - -const nameMap = new Map([ - [19, 'Catalina'], - [18, 'Mojave'], - [17, 'High Sierra'], - [16, 'Sierra'], - [15, 'El Capitan'], - [14, 'Yosemite'], - [13, 'Mavericks'], - [12, 'Mountain Lion'], - [11, 'Lion'], - [10, 'Snow Leopard'], - [9, 'Leopard'], - [8, 'Tiger'], - [7, 'Panther'], - [6, 'Jaguar'], - [5, 'Puma'] -]); - -const macosRelease = release => { - release = Number((release || os.release()).split('.')[0]); - return { - name: nameMap.get(release), - version: '10.' + (release - 4) - }; -}; - -module.exports = macosRelease; -// TODO: remove this in the next major version -module.exports.default = macosRelease; - - -/***/ }), - -/***/ 126: -/***/ (function(module) { - -/** - * lodash (Custom Build) - * Build: `lodash modularize exports="npm" -o ./` - * Copyright jQuery Foundation and other contributors - * Released under MIT license - * Based on Underscore.js 1.8.3 - * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors - */ - -/** Used as the size to enable large array optimizations. */ -var LARGE_ARRAY_SIZE = 200; - -/** Used to stand-in for `undefined` hash values. */ -var HASH_UNDEFINED = '__lodash_hash_undefined__'; - -/** Used as references for various `Number` constants. */ -var INFINITY = 1 / 0; - -/** `Object#toString` result references. 
*/ -var funcTag = '[object Function]', - genTag = '[object GeneratorFunction]'; - -/** - * Used to match `RegExp` - * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). - */ -var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; - -/** Used to detect host constructors (Safari). */ -var reIsHostCtor = /^\[object .+?Constructor\]$/; - -/** Detect free variable `global` from Node.js. */ -var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; - -/** Detect free variable `self`. */ -var freeSelf = typeof self == 'object' && self && self.Object === Object && self; - -/** Used as a reference to the global object. */ -var root = freeGlobal || freeSelf || Function('return this')(); - -/** - * A specialized version of `_.includes` for arrays without support for - * specifying an index to search from. - * - * @private - * @param {Array} [array] The array to inspect. - * @param {*} target The value to search for. - * @returns {boolean} Returns `true` if `target` is found, else `false`. - */ -function arrayIncludes(array, value) { - var length = array ? array.length : 0; - return !!length && baseIndexOf(array, value, 0) > -1; -} - -/** - * This function is like `arrayIncludes` except that it accepts a comparator. - * - * @private - * @param {Array} [array] The array to inspect. - * @param {*} target The value to search for. - * @param {Function} comparator The comparator invoked per element. - * @returns {boolean} Returns `true` if `target` is found, else `false`. - */ -function arrayIncludesWith(array, value, comparator) { - var index = -1, - length = array ? array.length : 0; - - while (++index < length) { - if (comparator(value, array[index])) { - return true; - } - } - return false; -} - -/** - * The base implementation of `_.findIndex` and `_.findLastIndex` without - * support for iteratee shorthands. - * - * @private - * @param {Array} array The array to inspect. - * @param {Function} predicate The function invoked per iteration. - * @param {number} fromIndex The index to search from. - * @param {boolean} [fromRight] Specify iterating from right to left. - * @returns {number} Returns the index of the matched value, else `-1`. - */ -function baseFindIndex(array, predicate, fromIndex, fromRight) { - var length = array.length, - index = fromIndex + (fromRight ? 1 : -1); - - while ((fromRight ? index-- : ++index < length)) { - if (predicate(array[index], index, array)) { - return index; - } - } - return -1; -} - -/** - * The base implementation of `_.indexOf` without `fromIndex` bounds checks. - * - * @private - * @param {Array} array The array to inspect. - * @param {*} value The value to search for. - * @param {number} fromIndex The index to search from. - * @returns {number} Returns the index of the matched value, else `-1`. - */ -function baseIndexOf(array, value, fromIndex) { - if (value !== value) { - return baseFindIndex(array, baseIsNaN, fromIndex); - } - var index = fromIndex - 1, - length = array.length; - - while (++index < length) { - if (array[index] === value) { - return index; - } - } - return -1; -} - -/** - * The base implementation of `_.isNaN` without support for number objects. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. - */ -function baseIsNaN(value) { - return value !== value; -} - -/** - * Checks if a cache value for `key` exists. - * - * @private - * @param {Object} cache The cache to query. 
- * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function cacheHas(cache, key) { - return cache.has(key); -} - -/** - * Gets the value at `key` of `object`. - * - * @private - * @param {Object} [object] The object to query. - * @param {string} key The key of the property to get. - * @returns {*} Returns the property value. - */ -function getValue(object, key) { - return object == null ? undefined : object[key]; -} - -/** - * Checks if `value` is a host object in IE < 9. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a host object, else `false`. - */ -function isHostObject(value) { - // Many host objects are `Object` objects that can coerce to strings - // despite having improperly defined `toString` methods. - var result = false; - if (value != null && typeof value.toString != 'function') { - try { - result = !!(value + ''); - } catch (e) {} - } - return result; -} - -/** - * Converts `set` to an array of its values. - * - * @private - * @param {Object} set The set to convert. - * @returns {Array} Returns the values. - */ -function setToArray(set) { - var index = -1, - result = Array(set.size); - - set.forEach(function(value) { - result[++index] = value; - }); - return result; -} - -/** Used for built-in method references. */ -var arrayProto = Array.prototype, - funcProto = Function.prototype, - objectProto = Object.prototype; - -/** Used to detect overreaching core-js shims. */ -var coreJsData = root['__core-js_shared__']; - -/** Used to detect methods masquerading as native. */ -var maskSrcKey = (function() { - var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); - return uid ? ('Symbol(src)_1.' + uid) : ''; -}()); - -/** Used to resolve the decompiled source of functions. */ -var funcToString = funcProto.toString; - -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; - -/** - * Used to resolve the - * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) - * of values. - */ -var objectToString = objectProto.toString; - -/** Used to detect if a method is native. */ -var reIsNative = RegExp('^' + - funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') - .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' -); - -/** Built-in value references. */ -var splice = arrayProto.splice; - -/* Built-in method references that are verified to be native. */ -var Map = getNative(root, 'Map'), - Set = getNative(root, 'Set'), - nativeCreate = getNative(Object, 'create'); - -/** - * Creates a hash object. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function Hash(entries) { - var index = -1, - length = entries ? entries.length : 0; - - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } -} - -/** - * Removes all key-value entries from the hash. - * - * @private - * @name clear - * @memberOf Hash - */ -function hashClear() { - this.__data__ = nativeCreate ? nativeCreate(null) : {}; -} - -/** - * Removes `key` and its value from the hash. - * - * @private - * @name delete - * @memberOf Hash - * @param {Object} hash The hash to modify. - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. 
- */ -function hashDelete(key) { - return this.has(key) && delete this.__data__[key]; -} - -/** - * Gets the hash value for `key`. - * - * @private - * @name get - * @memberOf Hash - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function hashGet(key) { - var data = this.__data__; - if (nativeCreate) { - var result = data[key]; - return result === HASH_UNDEFINED ? undefined : result; - } - return hasOwnProperty.call(data, key) ? data[key] : undefined; -} - -/** - * Checks if a hash value for `key` exists. - * - * @private - * @name has - * @memberOf Hash - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function hashHas(key) { - var data = this.__data__; - return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key); -} - -/** - * Sets the hash `key` to `value`. - * - * @private - * @name set - * @memberOf Hash - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the hash instance. - */ -function hashSet(key, value) { - var data = this.__data__; - data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; - return this; -} - -// Add methods to `Hash`. -Hash.prototype.clear = hashClear; -Hash.prototype['delete'] = hashDelete; -Hash.prototype.get = hashGet; -Hash.prototype.has = hashHas; -Hash.prototype.set = hashSet; - -/** - * Creates an list cache object. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function ListCache(entries) { - var index = -1, - length = entries ? entries.length : 0; - - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } -} - -/** - * Removes all key-value entries from the list cache. - * - * @private - * @name clear - * @memberOf ListCache - */ -function listCacheClear() { - this.__data__ = []; -} - -/** - * Removes `key` and its value from the list cache. - * - * @private - * @name delete - * @memberOf ListCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function listCacheDelete(key) { - var data = this.__data__, - index = assocIndexOf(data, key); - - if (index < 0) { - return false; - } - var lastIndex = data.length - 1; - if (index == lastIndex) { - data.pop(); - } else { - splice.call(data, index, 1); - } - return true; -} - -/** - * Gets the list cache value for `key`. - * - * @private - * @name get - * @memberOf ListCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function listCacheGet(key) { - var data = this.__data__, - index = assocIndexOf(data, key); - - return index < 0 ? undefined : data[index][1]; -} - -/** - * Checks if a list cache value for `key` exists. - * - * @private - * @name has - * @memberOf ListCache - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function listCacheHas(key) { - return assocIndexOf(this.__data__, key) > -1; -} - -/** - * Sets the list cache `key` to `value`. - * - * @private - * @name set - * @memberOf ListCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the list cache instance. 
- */ -function listCacheSet(key, value) { - var data = this.__data__, - index = assocIndexOf(data, key); - - if (index < 0) { - data.push([key, value]); - } else { - data[index][1] = value; - } - return this; -} - -// Add methods to `ListCache`. -ListCache.prototype.clear = listCacheClear; -ListCache.prototype['delete'] = listCacheDelete; -ListCache.prototype.get = listCacheGet; -ListCache.prototype.has = listCacheHas; -ListCache.prototype.set = listCacheSet; - -/** - * Creates a map cache object to store key-value pairs. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function MapCache(entries) { - var index = -1, - length = entries ? entries.length : 0; - - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } -} - -/** - * Removes all key-value entries from the map. - * - * @private - * @name clear - * @memberOf MapCache - */ -function mapCacheClear() { - this.__data__ = { - 'hash': new Hash, - 'map': new (Map || ListCache), - 'string': new Hash - }; -} - -/** - * Removes `key` and its value from the map. - * - * @private - * @name delete - * @memberOf MapCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function mapCacheDelete(key) { - return getMapData(this, key)['delete'](key); -} - -/** - * Gets the map value for `key`. - * - * @private - * @name get - * @memberOf MapCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function mapCacheGet(key) { - return getMapData(this, key).get(key); -} - -/** - * Checks if a map value for `key` exists. - * - * @private - * @name has - * @memberOf MapCache - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function mapCacheHas(key) { - return getMapData(this, key).has(key); -} - -/** - * Sets the map `key` to `value`. - * - * @private - * @name set - * @memberOf MapCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the map cache instance. - */ -function mapCacheSet(key, value) { - getMapData(this, key).set(key, value); - return this; -} - -// Add methods to `MapCache`. -MapCache.prototype.clear = mapCacheClear; -MapCache.prototype['delete'] = mapCacheDelete; -MapCache.prototype.get = mapCacheGet; -MapCache.prototype.has = mapCacheHas; -MapCache.prototype.set = mapCacheSet; - -/** - * - * Creates an array cache object to store unique values. - * - * @private - * @constructor - * @param {Array} [values] The values to cache. - */ -function SetCache(values) { - var index = -1, - length = values ? values.length : 0; - - this.__data__ = new MapCache; - while (++index < length) { - this.add(values[index]); - } -} - -/** - * Adds `value` to the array cache. - * - * @private - * @name add - * @memberOf SetCache - * @alias push - * @param {*} value The value to cache. - * @returns {Object} Returns the cache instance. - */ -function setCacheAdd(value) { - this.__data__.set(value, HASH_UNDEFINED); - return this; -} - -/** - * Checks if `value` is in the array cache. - * - * @private - * @name has - * @memberOf SetCache - * @param {*} value The value to search for. - * @returns {number} Returns `true` if `value` is found, else `false`. - */ -function setCacheHas(value) { - return this.__data__.has(value); -} - -// Add methods to `SetCache`. 
-SetCache.prototype.add = SetCache.prototype.push = setCacheAdd; -SetCache.prototype.has = setCacheHas; - -/** - * Gets the index at which the `key` is found in `array` of key-value pairs. - * - * @private - * @param {Array} array The array to inspect. - * @param {*} key The key to search for. - * @returns {number} Returns the index of the matched value, else `-1`. - */ -function assocIndexOf(array, key) { - var length = array.length; - while (length--) { - if (eq(array[length][0], key)) { - return length; - } - } - return -1; -} - -/** - * The base implementation of `_.isNative` without bad shim checks. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a native function, - * else `false`. - */ -function baseIsNative(value) { - if (!isObject(value) || isMasked(value)) { - return false; - } - var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor; - return pattern.test(toSource(value)); -} - -/** - * The base implementation of `_.uniqBy` without support for iteratee shorthands. - * - * @private - * @param {Array} array The array to inspect. - * @param {Function} [iteratee] The iteratee invoked per element. - * @param {Function} [comparator] The comparator invoked per element. - * @returns {Array} Returns the new duplicate free array. - */ -function baseUniq(array, iteratee, comparator) { - var index = -1, - includes = arrayIncludes, - length = array.length, - isCommon = true, - result = [], - seen = result; - - if (comparator) { - isCommon = false; - includes = arrayIncludesWith; - } - else if (length >= LARGE_ARRAY_SIZE) { - var set = iteratee ? null : createSet(array); - if (set) { - return setToArray(set); - } - isCommon = false; - includes = cacheHas; - seen = new SetCache; - } - else { - seen = iteratee ? [] : result; - } - outer: - while (++index < length) { - var value = array[index], - computed = iteratee ? iteratee(value) : value; - - value = (comparator || value !== 0) ? value : 0; - if (isCommon && computed === computed) { - var seenIndex = seen.length; - while (seenIndex--) { - if (seen[seenIndex] === computed) { - continue outer; - } - } - if (iteratee) { - seen.push(computed); - } - result.push(value); - } - else if (!includes(seen, computed, comparator)) { - if (seen !== result) { - seen.push(computed); - } - result.push(value); - } - } - return result; -} - -/** - * Creates a set object of `values`. - * - * @private - * @param {Array} values The values to add to the set. - * @returns {Object} Returns the new set. - */ -var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) { - return new Set(values); -}; - -/** - * Gets the data for `map`. - * - * @private - * @param {Object} map The map to query. - * @param {string} key The reference key. - * @returns {*} Returns the map data. - */ -function getMapData(map, key) { - var data = map.__data__; - return isKeyable(key) - ? data[typeof key == 'string' ? 'string' : 'hash'] - : data.map; -} - -/** - * Gets the native function at `key` of `object`. - * - * @private - * @param {Object} object The object to query. - * @param {string} key The key of the method to get. - * @returns {*} Returns the function if it's native, else `undefined`. - */ -function getNative(object, key) { - var value = getValue(object, key); - return baseIsNative(value) ? value : undefined; -} - -/** - * Checks if `value` is suitable for use as unique object key. - * - * @private - * @param {*} value The value to check. 
- * @returns {boolean} Returns `true` if `value` is suitable, else `false`. - */ -function isKeyable(value) { - var type = typeof value; - return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') - ? (value !== '__proto__') - : (value === null); -} - -/** - * Checks if `func` has its source masked. - * - * @private - * @param {Function} func The function to check. - * @returns {boolean} Returns `true` if `func` is masked, else `false`. - */ -function isMasked(func) { - return !!maskSrcKey && (maskSrcKey in func); -} - -/** - * Converts `func` to its source code. - * - * @private - * @param {Function} func The function to process. - * @returns {string} Returns the source code. - */ -function toSource(func) { - if (func != null) { - try { - return funcToString.call(func); - } catch (e) {} - try { - return (func + ''); - } catch (e) {} - } - return ''; -} - -/** - * Creates a duplicate-free version of an array, using - * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) - * for equality comparisons, in which only the first occurrence of each - * element is kept. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Array - * @param {Array} array The array to inspect. - * @returns {Array} Returns the new duplicate free array. - * @example - * - * _.uniq([2, 1, 2]); - * // => [2, 1] - */ -function uniq(array) { - return (array && array.length) - ? baseUniq(array) - : []; -} - -/** - * Performs a - * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) - * comparison between two values to determine if they are equivalent. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to compare. - * @param {*} other The other value to compare. - * @returns {boolean} Returns `true` if the values are equivalent, else `false`. - * @example - * - * var object = { 'a': 1 }; - * var other = { 'a': 1 }; - * - * _.eq(object, object); - * // => true - * - * _.eq(object, other); - * // => false - * - * _.eq('a', 'a'); - * // => true - * - * _.eq('a', Object('a')); - * // => false - * - * _.eq(NaN, NaN); - * // => true - */ -function eq(value, other) { - return value === other || (value !== value && other !== other); -} - -/** - * Checks if `value` is classified as a `Function` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a function, else `false`. - * @example - * - * _.isFunction(_); - * // => true - * - * _.isFunction(/abc/); - * // => false - */ -function isFunction(value) { - // The use of `Object#toString` avoids issues with the `typeof` operator - // in Safari 8-9 which returns 'object' for typed array and other constructors. - var tag = isObject(value) ? objectToString.call(value) : ''; - return tag == funcTag || tag == genTag; -} - -/** - * Checks if `value` is the - * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) - * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an object, else `false`. 
- * @example - * - * _.isObject({}); - * // => true - * - * _.isObject([1, 2, 3]); - * // => true - * - * _.isObject(_.noop); - * // => true - * - * _.isObject(null); - * // => false - */ -function isObject(value) { - var type = typeof value; - return !!value && (type == 'object' || type == 'function'); -} - -/** - * This method returns `undefined`. - * - * @static - * @memberOf _ - * @since 2.3.0 - * @category Util - * @example - * - * _.times(2, _.noop); - * // => [undefined, undefined] - */ -function noop() { - // No operation performed. -} - -module.exports = uniq; - - -/***/ }), - -/***/ 129: -/***/ (function(module) { - -module.exports = require("child_process"); - -/***/ }), - -/***/ 141: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -var net = __webpack_require__(631); -var tls = __webpack_require__(16); -var http = __webpack_require__(605); -var https = __webpack_require__(211); -var events = __webpack_require__(614); -var assert = __webpack_require__(357); -var util = __webpack_require__(669); - - -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; - - -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; -} - -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} - -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; -} - -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} - - -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; - - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. - self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; - } - } - socket.destroy(); - self.removeSocket(socket); - }); -} -util.inherits(TunnelingAgent, events.EventEmitter); - -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); - - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; - } - - // If we are under maxSockets create a new one. 
- self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); - - function onFree() { - self.emit('free', socket, options); - } - - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); - } - }); -}; - -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); - - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port - } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; - } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); - } - - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); - - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; - } - - function onUpgrade(res, socket, head) { - // Hacky. - process.nextTick(function() { - onConnect(res, socket, head); - }); - } - - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); - - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); - } - - function onError(cause) { - connectReq.removeAllListeners(); - - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - } -}; - -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; - } - this.sockets.splice(pos, 1); - - var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. 
- this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); - } -}; - -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host - }); - - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); - }); -} - - -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress - }; - } - return host; // for v0.11 or later -} - -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; - } - } - } - } - return target; -} - - -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); - } - console.error.apply(console, args); - } -} else { - debug = function() {}; -} -exports.debug = debug; // for test - - -/***/ }), - -/***/ 143: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = withAuthorizationPrefix; - -const atob = __webpack_require__(368); - -const REGEX_IS_BASIC_AUTH = /^[\w-]+:/; - -function withAuthorizationPrefix(authorization) { - if (/^(basic|bearer|token) /i.test(authorization)) { - return authorization; - } - - try { - if (REGEX_IS_BASIC_AUTH.test(atob(authorization))) { - return `basic ${authorization}`; - } - } catch (error) {} - - if (authorization.split(/\./).length === 3) { - return `bearer ${authorization}`; - } - - return `token ${authorization}`; -} - - -/***/ }), - -/***/ 145: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const pump = __webpack_require__(453); -const bufferStream = __webpack_require__(966); - -class MaxBufferError extends Error { - constructor() { - super('maxBuffer exceeded'); - this.name = 'MaxBufferError'; - } -} - -function getStream(inputStream, options) { - if (!inputStream) { - return Promise.reject(new Error('Expected a stream')); - } - - options = Object.assign({maxBuffer: Infinity}, options); - - const {maxBuffer} = options; - - let stream; - return new Promise((resolve, reject) => { - const rejectPromise = error => { - if (error) { // A null check - error.bufferedData = stream.getBufferedValue(); - } - reject(error); - }; - - stream = pump(inputStream, bufferStream(options), error => { - if (error) { - rejectPromise(error); - return; - } - - resolve(); - }); - - stream.on('data', () => { - if (stream.getBufferedLength() > maxBuffer) { - rejectPromise(new MaxBufferError()); - } - }); - }).then(() => stream.getBufferedValue()); -} - -module.exports = getStream; -module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'})); -module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true})); 
-module.exports.MaxBufferError = MaxBufferError; - - -/***/ }), - -/***/ 148: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = paginatePlugin; - -const { paginateRest } = __webpack_require__(299); - -function paginatePlugin(octokit) { - Object.assign(octokit, paginateRest(octokit)); -} - - -/***/ }), - -/***/ 168: -/***/ (function(module) { - -"use strict"; - -const alias = ['stdin', 'stdout', 'stderr']; - -const hasAlias = opts => alias.some(x => Boolean(opts[x])); - -module.exports = opts => { - if (!opts) { - return null; - } - - if (opts.stdio && hasAlias(opts)) { - throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`); - } - - if (typeof opts.stdio === 'string') { - return opts.stdio; - } - - const stdio = opts.stdio || []; - - if (!Array.isArray(stdio)) { - throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); - } - - const result = []; - const len = Math.max(stdio.length, alias.length); - - for (let i = 0; i < len; i++) { - let value = null; - - if (stdio[i] !== undefined) { - value = stdio[i]; - } else if (opts[alias[i]] !== undefined) { - value = opts[alias[i]]; - } - - result[i] = value; - } - - return result; -}; - - -/***/ }), - -/***/ 190: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = authenticationPlugin; - -const { createTokenAuth } = __webpack_require__(813); -const { Deprecation } = __webpack_require__(692); -const once = __webpack_require__(969); - -const beforeRequest = __webpack_require__(863); -const requestError = __webpack_require__(293); -const validate = __webpack_require__(954); -const withAuthorizationPrefix = __webpack_require__(143); - -const deprecateAuthBasic = once((log, deprecation) => log.warn(deprecation)); -const deprecateAuthObject = once((log, deprecation) => log.warn(deprecation)); - -function authenticationPlugin(octokit, options) { - // If `options.authStrategy` is set then use it and pass in `options.auth` - if (options.authStrategy) { - const auth = options.authStrategy(options.auth); - octokit.hook.wrap("request", auth.hook); - octokit.auth = auth; - return; - } - - // If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `octokit.auth()` method is a no-op and no request hook is registred. - if (!options.auth) { - octokit.auth = () => - Promise.resolve({ - type: "unauthenticated" - }); - return; - } - - const isBasicAuthString = - typeof options.auth === "string" && - /^basic/.test(withAuthorizationPrefix(options.auth)); - - // If only `options.auth` is set to a string, use the default token authentication strategy. - if (typeof options.auth === "string" && !isBasicAuthString) { - const auth = createTokenAuth(options.auth); - octokit.hook.wrap("request", auth.hook); - octokit.auth = auth; - return; - } - - // Otherwise log a deprecation message - const [deprecationMethod, deprecationMessapge] = isBasicAuthString - ? [ - deprecateAuthBasic, - 'Setting the "new Octokit({ auth })" option to a Basic Auth string is deprecated. Use https://github.com/octokit/auth-basic.js instead. See (https://octokit.github.io/rest.js/#authentication)' - ] - : [ - deprecateAuthObject, - 'Setting the "new Octokit({ auth })" option to an object without also setting the "authStrategy" option is deprecated and will be removed in v17. 
See (https://octokit.github.io/rest.js/#authentication)' - ]; - deprecationMethod( - octokit.log, - new Deprecation("[@octokit/rest] " + deprecationMessapge) - ); - - octokit.auth = () => - Promise.resolve({ - type: "deprecated", - message: deprecationMessapge - }); - - validate(options.auth); - - const state = { - octokit, - auth: options.auth - }; - - octokit.hook.before("request", beforeRequest.bind(null, state)); - octokit.hook.error("request", requestError.bind(null, state)); -} - - -/***/ }), - -/***/ 197: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = isexe -isexe.sync = sync - -var fs = __webpack_require__(747) - -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? false : checkStat(stat, options)) - }) -} - -function sync (path, options) { - return checkStat(fs.statSync(path), options) -} - -function checkStat (stat, options) { - return stat.isFile() && checkMode(stat, options) -} - -function checkMode (stat, options) { - var mod = stat.mode - var uid = stat.uid - var gid = stat.gid - - var myUid = options.uid !== undefined ? - options.uid : process.getuid && process.getuid() - var myGid = options.gid !== undefined ? - options.gid : process.getgid && process.getgid() - - var u = parseInt('100', 8) - var g = parseInt('010', 8) - var o = parseInt('001', 8) - var ug = u | g - - var ret = (mod & o) || - (mod & g) && gid === myGid || - (mod & u) && uid === myUid || - (mod & ug) && myUid === 0 - - return ret -} - - -/***/ }), - -/***/ 211: -/***/ (function(module) { - -module.exports = require("https"); - -/***/ }), - -/***/ 215: -/***/ (function(module) { - -module.exports = {"_from":"@octokit/rest@^16.43.1","_id":"@octokit/rest@16.43.1","_inBundle":false,"_integrity":"sha512-gfFKwRT/wFxq5qlNjnW2dh+qh74XgTQ2B179UX5K1HYCluioWj8Ndbgqw2PVqa1NnVJkGHp2ovMpVn/DImlmkw==","_location":"/@octokit/rest","_phantomChildren":{"@types/node":"14.0.5","deprecation":"2.3.1","once":"1.4.0","os-name":"3.1.0"},"_requested":{"type":"range","registry":true,"raw":"@octokit/rest@^16.43.1","name":"@octokit/rest","escapedName":"@octokit%2frest","scope":"@octokit","rawSpec":"^16.43.1","saveSpec":null,"fetchSpec":"^16.43.1"},"_requiredBy":["/@actions/github"],"_resolved":"https://registry.npmjs.org/@octokit/rest/-/rest-16.43.1.tgz","_shasum":"3b11e7d1b1ac2bbeeb23b08a17df0b20947eda6b","_spec":"@octokit/rest@^16.43.1","_where":"/Users/wookiee/Documents/sources/generator/.github/templates-list-validator/node_modules/@actions/github","author":{"name":"Gregor Martynus","url":"https://github.com/gr2m"},"bugs":{"url":"https://github.com/octokit/rest.js/issues"},"bundleDependencies":false,"bundlesize":[{"path":"./dist/octokit-rest.min.js.gz","maxSize":"33 kB"}],"contributors":[{"name":"Mike de Boer","email":"info@mikedeboer.nl"},{"name":"Fabian Jakobs","email":"fabian@c9.io"},{"name":"Joe Gallo","email":"joe@brassafrax.com"},{"name":"Gregor Martynus","url":"https://github.com/gr2m"}],"dependencies":{"@octokit/auth-token":"^2.4.0","@octokit/plugin-paginate-rest":"^1.1.1","@octokit/plugin-request-log":"^1.0.0","@octokit/plugin-rest-endpoint-methods":"2.4.0","@octokit/request":"^5.2.0","@octokit/request-error":"^1.0.2","atob-lite":"^2.0.0","before-after-hook":"^2.0.0","btoa-lite":"^1.0.0","deprecation":"^2.0.0","lodash.get":"^4.4.2","lodash.set":"^4.3.2","lodash.uniq":"^4.5.0","octokit-pagination-methods":"^1.1.0","once":"^1.4.0","universal-user-agent":"^4.0.0"},"deprecated":false,"description":"GitHub REST API client for 
Node.js","devDependencies":{"@gimenete/type-writer":"^0.1.3","@octokit/auth":"^1.1.1","@octokit/fixtures-server":"^5.0.6","@octokit/graphql":"^4.2.0","@types/node":"^13.1.0","bundlesize":"^0.18.0","chai":"^4.1.2","compression-webpack-plugin":"^3.1.0","cypress":"^3.0.0","glob":"^7.1.2","http-proxy-agent":"^4.0.0","lodash.camelcase":"^4.3.0","lodash.merge":"^4.6.1","lodash.upperfirst":"^4.3.1","lolex":"^5.1.2","mkdirp":"^1.0.0","mocha":"^7.0.1","mustache":"^4.0.0","nock":"^11.3.3","npm-run-all":"^4.1.2","nyc":"^15.0.0","prettier":"^1.14.2","proxy":"^1.0.0","semantic-release":"^17.0.0","sinon":"^8.0.0","sinon-chai":"^3.0.0","sort-keys":"^4.0.0","string-to-arraybuffer":"^1.0.0","string-to-jsdoc-comment":"^1.0.0","typescript":"^3.3.1","webpack":"^4.0.0","webpack-bundle-analyzer":"^3.0.0","webpack-cli":"^3.0.0"},"files":["index.js","index.d.ts","lib","plugins"],"homepage":"https://github.com/octokit/rest.js#readme","keywords":["octokit","github","rest","api-client"],"license":"MIT","name":"@octokit/rest","nyc":{"ignore":["test"]},"publishConfig":{"access":"public"},"release":{"publish":["@semantic-release/npm",{"path":"@semantic-release/github","assets":["dist/*","!dist/*.map.gz"]}]},"repository":{"type":"git","url":"git+https://github.com/octokit/rest.js.git"},"scripts":{"build":"npm-run-all build:*","build:browser":"npm-run-all build:browser:*","build:browser:development":"webpack --mode development --entry . --output-library=Octokit --output=./dist/octokit-rest.js --profile --json > dist/bundle-stats.json","build:browser:production":"webpack --mode production --entry . --plugin=compression-webpack-plugin --output-library=Octokit --output-path=./dist --output-filename=octokit-rest.min.js --devtool source-map","build:ts":"npm run -s update-endpoints:typescript","coverage":"nyc report --reporter=html && open coverage/index.html","generate-bundle-report":"webpack-bundle-analyzer dist/bundle-stats.json --mode=static --no-open --report dist/bundle-report.html","lint":"prettier --check '{lib,plugins,scripts,test}/**/*.{js,json,ts}' 'docs/*.{js,json}' 'docs/src/**/*' index.js README.md package.json","lint:fix":"prettier --write '{lib,plugins,scripts,test}/**/*.{js,json,ts}' 'docs/*.{js,json}' 'docs/src/**/*' index.js README.md package.json","postvalidate:ts":"tsc --noEmit --target es6 test/typescript-validate.ts","prebuild:browser":"mkdirp dist/","pretest":"npm run -s lint","prevalidate:ts":"npm run -s build:ts","start-fixtures-server":"octokit-fixtures-server","test":"nyc mocha test/mocha-node-setup.js \"test/*/**/*-test.js\"","test:browser":"cypress run --browser chrome","update-endpoints":"npm-run-all update-endpoints:*","update-endpoints:fetch-json":"node scripts/update-endpoints/fetch-json","update-endpoints:typescript":"node scripts/update-endpoints/typescript","validate:ts":"tsc --target es6 --noImplicitAny index.d.ts"},"types":"index.d.ts","version":"16.43.1"}; - -/***/ }), - -/***/ 260: -/***/ (function(module, __unusedexports, __webpack_require__) { - -// Note: since nyc uses this module to output coverage, any lines -// that are in the direct sync flow of nyc's outputCoverage are -// ignored, since we can never get coverage for them. 
-var assert = __webpack_require__(357) -var signals = __webpack_require__(654) -var isWin = /^win/i.test(process.platform) - -var EE = __webpack_require__(614) -/* istanbul ignore if */ -if (typeof EE !== 'function') { - EE = EE.EventEmitter -} - -var emitter -if (process.__signal_exit_emitter__) { - emitter = process.__signal_exit_emitter__ -} else { - emitter = process.__signal_exit_emitter__ = new EE() - emitter.count = 0 - emitter.emitted = {} -} - -// Because this emitter is a global, we have to check to see if a -// previous version of this library failed to enable infinite listeners. -// I know what you're about to say. But literally everything about -// signal-exit is a compromise with evil. Get used to it. -if (!emitter.infinite) { - emitter.setMaxListeners(Infinity) - emitter.infinite = true -} - -module.exports = function (cb, opts) { - assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') - - if (loaded === false) { - load() - } - - var ev = 'exit' - if (opts && opts.alwaysLast) { - ev = 'afterexit' - } - - var remove = function () { - emitter.removeListener(ev, cb) - if (emitter.listeners('exit').length === 0 && - emitter.listeners('afterexit').length === 0) { - unload() - } - } - emitter.on(ev, cb) - - return remove -} - -module.exports.unload = unload -function unload () { - if (!loaded) { - return - } - loaded = false - - signals.forEach(function (sig) { - try { - process.removeListener(sig, sigListeners[sig]) - } catch (er) {} - }) - process.emit = originalProcessEmit - process.reallyExit = originalProcessReallyExit - emitter.count -= 1 -} - -function emit (event, code, signal) { - if (emitter.emitted[event]) { - return - } - emitter.emitted[event] = true - emitter.emit(event, code, signal) -} - -// { : , ... } -var sigListeners = {} -signals.forEach(function (sig) { - sigListeners[sig] = function listener () { - // If there are no other listeners, an exit is coming! - // Simplest way: remove us and then re-send the signal. - // We know that this will kill the process, so we can - // safely emit now. - var listeners = process.listeners(sig) - if (listeners.length === emitter.count) { - unload() - emit('exit', null, sig) - /* istanbul ignore next */ - emit('afterexit', null, sig) - /* istanbul ignore next */ - if (isWin && sig === 'SIGHUP') { - // "SIGHUP" throws an `ENOSYS` error on Windows, - // so use a supported signal instead - sig = 'SIGINT' - } - process.kill(process.pid, sig) - } - } -}) - -module.exports.signals = function () { - return signals -} - -module.exports.load = load - -var loaded = false - -function load () { - if (loaded) { - return - } - loaded = true - - // This is the number of onSignalExit's that are in play. - // It's important so that we can count the correct number of - // listeners on signals, and don't wait for the other one to - // handle it instead of us. 
- emitter.count += 1 - - signals = signals.filter(function (sig) { - try { - process.on(sig, sigListeners[sig]) - return true - } catch (er) { - return false - } - }) - - process.emit = processEmit - process.reallyExit = processReallyExit -} - -var originalProcessReallyExit = process.reallyExit -function processReallyExit (code) { - process.exitCode = code || 0 - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - /* istanbul ignore next */ - originalProcessReallyExit.call(process, process.exitCode) -} - -var originalProcessEmit = process.emit -function processEmit (ev, arg) { - if (ev === 'exit') { - if (arg !== undefined) { - process.exitCode = arg - } - var ret = originalProcessEmit.apply(this, arguments) - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - return ret - } else { - return originalProcessEmit.apply(this, arguments) - } -} - - -/***/ }), - -/***/ 262: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __webpack_require__(747); -const os_1 = __webpack_require__(87); -class Context { - /** - * Hydrate the context from the environment - */ - constructor() { - this.payload = {}; - if (process.env.GITHUB_EVENT_PATH) { - if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); - } - else { - const path = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); - } - } - this.eventName = process.env.GITHUB_EVENT_NAME; - this.sha = process.env.GITHUB_SHA; - this.ref = process.env.GITHUB_REF; - this.workflow = process.env.GITHUB_WORKFLOW; - this.action = process.env.GITHUB_ACTION; - this.actor = process.env.GITHUB_ACTOR; - } - get issue() { - const payload = this.payload; - return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); - } - get repo() { - if (process.env.GITHUB_REPOSITORY) { - const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); - return { owner, repo }; - } - if (this.payload.repository) { - return { - owner: this.payload.repository.owner.login, - repo: this.payload.repository.name - }; - } - throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); - } -} -exports.Context = Context; -//# sourceMappingURL=context.js.map - -/***/ }), - -/***/ 265: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = getPage - -const deprecate = __webpack_require__(370) -const getPageLinks = __webpack_require__(577) -const HttpError = __webpack_require__(297) - -function getPage (octokit, link, which, headers) { - deprecate(`octokit.get${which.charAt(0).toUpperCase() + which.slice(1)}Page() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`) - const url = getPageLinks(link)[which] - - if (!url) { - const urlError = new HttpError(`No ${which} page found`, 404) - return Promise.reject(urlError) - } - - const requestOptions = { - url, - headers: applyAcceptHeader(link, headers) - } - - const promise = octokit.request(requestOptions) - - return promise -} - -function applyAcceptHeader (res, headers) { - const previous = res.headers && res.headers['x-github-media-type'] - - if (!previous || (headers && 
headers.accept)) { - return headers - } - headers = headers || {} - headers.accept = 'application/vnd.' + previous - .replace('; param=', '.') - .replace('; format=', '+') - - return headers -} - - -/***/ }), - -/***/ 280: -/***/ (function(module, exports) { - -exports = module.exports = SemVer - -var debug -/* istanbul ignore next */ -if (typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug = function () { - var args = Array.prototype.slice.call(arguments, 0) - args.unshift('SEMVER') - console.log.apply(console, args) - } -} else { - debug = function () {} -} - -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -exports.SEMVER_SPEC_VERSION = '2.0.0' - -var MAX_LENGTH = 256 -var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || - /* istanbul ignore next */ 9007199254740991 - -// Max safe segment length for coercion. -var MAX_SAFE_COMPONENT_LENGTH = 16 - -// The actual regexps go on exports.re -var re = exports.re = [] -var src = exports.src = [] -var R = 0 - -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. - -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. - -var NUMERICIDENTIFIER = R++ -src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' -var NUMERICIDENTIFIERLOOSE = R++ -src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' - -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. - -var NONNUMERICIDENTIFIER = R++ -src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' - -// ## Main Version -// Three dot-separated numeric identifiers. - -var MAINVERSION = R++ -src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')' - -var MAINVERSIONLOOSE = R++ -src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')' - -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. - -var PRERELEASEIDENTIFIER = R++ -src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + - '|' + src[NONNUMERICIDENTIFIER] + ')' - -var PRERELEASEIDENTIFIERLOOSE = R++ -src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + - '|' + src[NONNUMERICIDENTIFIER] + ')' - -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. - -var PRERELEASE = R++ -src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + - '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' - -var PRERELEASELOOSE = R++ -src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' - -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. - -var BUILDIDENTIFIER = R++ -src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' - -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. - -var BUILD = R++ -src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + - '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' - -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. - -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. 
The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. - -var FULL = R++ -var FULLPLAIN = 'v?' + src[MAINVERSION] + - src[PRERELEASE] + '?' + - src[BUILD] + '?' - -src[FULL] = '^' + FULLPLAIN + '$' - -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + - src[PRERELEASELOOSE] + '?' + - src[BUILD] + '?' - -var LOOSE = R++ -src[LOOSE] = '^' + LOOSEPLAIN + '$' - -var GTLT = R++ -src[GTLT] = '((?:<|>)?=?)' - -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. -var XRANGEIDENTIFIERLOOSE = R++ -src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' -var XRANGEIDENTIFIER = R++ -src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' - -var XRANGEPLAIN = R++ -src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:' + src[PRERELEASE] + ')?' + - src[BUILD] + '?' + - ')?)?' - -var XRANGEPLAINLOOSE = R++ -src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[PRERELEASELOOSE] + ')?' + - src[BUILD] + '?' + - ')?)?' - -var XRANGE = R++ -src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' -var XRANGELOOSE = R++ -src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' - -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -var COERCE = R++ -src[COERCE] = '(?:^|[^\\d])' + - '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:$|[^\\d])' - -// Tilde ranges. -// Meaning is "reasonably at or greater than" -var LONETILDE = R++ -src[LONETILDE] = '(?:~>?)' - -var TILDETRIM = R++ -src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' -re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') -var tildeTrimReplace = '$1~' - -var TILDE = R++ -src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' -var TILDELOOSE = R++ -src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' - -// Caret ranges. 
-// Meaning is "at least and backwards compatible with" -var LONECARET = R++ -src[LONECARET] = '(?:\\^)' - -var CARETTRIM = R++ -src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' -re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') -var caretTrimReplace = '$1^' - -var CARET = R++ -src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' -var CARETLOOSE = R++ -src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' - -// A simple gt/lt/eq thing, or just "" to indicate "any version" -var COMPARATORLOOSE = R++ -src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' -var COMPARATOR = R++ -src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' - -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -var COMPARATORTRIM = R++ -src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + - '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' - -// this one has to use the /g flag -re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') -var comparatorTrimReplace = '$1$2$3' - -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -var HYPHENRANGE = R++ -src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAIN] + ')' + - '\\s*$' - -var HYPHENRANGELOOSE = R++ -src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s*$' - -// Star ranges basically just allow anything at all. -var STAR = R++ -src[STAR] = '(<|>)?=?\\s*\\*' - -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. -for (var i = 0; i < R; i++) { - debug(i, src[i]) - if (!re[i]) { - re[i] = new RegExp(src[i]) - } -} - -exports.parse = parse -function parse (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - - if (version instanceof SemVer) { - return version - } - - if (typeof version !== 'string') { - return null - } - - if (version.length > MAX_LENGTH) { - return null - } - - var r = options.loose ? re[LOOSE] : re[FULL] - if (!r.test(version)) { - return null - } - - try { - return new SemVer(version, options) - } catch (er) { - return null - } -} - -exports.valid = valid -function valid (version, options) { - var v = parse(version, options) - return v ? v.version : null -} - -exports.clean = clean -function clean (version, options) { - var s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? s.version : null -} - -exports.SemVer = SemVer - -function SemVer (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - if (version instanceof SemVer) { - if (version.loose === options.loose) { - return version - } else { - version = version.version - } - } else if (typeof version !== 'string') { - throw new TypeError('Invalid Version: ' + version) - } - - if (version.length > MAX_LENGTH) { - throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') - } - - if (!(this instanceof SemVer)) { - return new SemVer(version, options) - } - - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose - - var m = version.trim().match(options.loose ? 
re[LOOSE] : re[FULL]) - - if (!m) { - throw new TypeError('Invalid Version: ' + version) - } - - this.raw = version - - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] - - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') - } - - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } - - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } - - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map(function (id) { - if (/^[0-9]+$/.test(id)) { - var num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num - } - } - return id - }) - } - - this.build = m[5] ? m[5].split('.') : [] - this.format() -} - -SemVer.prototype.format = function () { - this.version = this.major + '.' + this.minor + '.' + this.patch - if (this.prerelease.length) { - this.version += '-' + this.prerelease.join('.') - } - return this.version -} - -SemVer.prototype.toString = function () { - return this.version -} - -SemVer.prototype.compare = function (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - return this.compareMain(other) || this.comparePre(other) -} - -SemVer.prototype.compareMain = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) -} - -SemVer.prototype.comparePre = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 - } - - var i = 0 - do { - var a = this.prerelease[i] - var b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) -} - -// preminor will bump the version up to the next minor release, and immediately -// down to pre-release. premajor and prepatch work the same way. -SemVer.prototype.inc = function (release, identifier) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier) - this.inc('pre', identifier) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. 
- case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier) - } - this.inc('pre', identifier) - break - - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if (this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ - } - this.prerelease = [] - break - // This probably shouldn't be used publicly. - // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. - case 'pre': - if (this.prerelease.length === 0) { - this.prerelease = [0] - } else { - var i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } - } - if (i === -1) { - // didn't increment anything - this.prerelease.push(0) - } - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0] - } - } else { - this.prerelease = [identifier, 0] - } - } - break - - default: - throw new Error('invalid increment argument: ' + release) - } - this.format() - this.raw = this.version - return this -} - -exports.inc = inc -function inc (version, release, loose, identifier) { - if (typeof (loose) === 'string') { - identifier = loose - loose = undefined - } - - try { - return new SemVer(version, loose).inc(release, identifier).version - } catch (er) { - return null - } -} - -exports.diff = diff -function diff (version1, version2) { - if (eq(version1, version2)) { - return null - } else { - var v1 = parse(version1) - var v2 = parse(version2) - var prefix = '' - if (v1.prerelease.length || v2.prerelease.length) { - prefix = 'pre' - var defaultResult = 'prerelease' - } - for (var key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return prefix + key - } - } - } - return defaultResult // may be undefined - } -} - -exports.compareIdentifiers = compareIdentifiers - -var numeric = /^[0-9]+$/ -function compareIdentifiers (a, b) { - var anum = numeric.test(a) - var bnum = numeric.test(b) - - if (anum && bnum) { - a = +a - b = +b - } - - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? 
-1 - : 1 -} - -exports.rcompareIdentifiers = rcompareIdentifiers -function rcompareIdentifiers (a, b) { - return compareIdentifiers(b, a) -} - -exports.major = major -function major (a, loose) { - return new SemVer(a, loose).major -} - -exports.minor = minor -function minor (a, loose) { - return new SemVer(a, loose).minor -} - -exports.patch = patch -function patch (a, loose) { - return new SemVer(a, loose).patch -} - -exports.compare = compare -function compare (a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)) -} - -exports.compareLoose = compareLoose -function compareLoose (a, b) { - return compare(a, b, true) -} - -exports.rcompare = rcompare -function rcompare (a, b, loose) { - return compare(b, a, loose) -} - -exports.sort = sort -function sort (list, loose) { - return list.sort(function (a, b) { - return exports.compare(a, b, loose) - }) -} - -exports.rsort = rsort -function rsort (list, loose) { - return list.sort(function (a, b) { - return exports.rcompare(a, b, loose) - }) -} - -exports.gt = gt -function gt (a, b, loose) { - return compare(a, b, loose) > 0 -} - -exports.lt = lt -function lt (a, b, loose) { - return compare(a, b, loose) < 0 -} - -exports.eq = eq -function eq (a, b, loose) { - return compare(a, b, loose) === 0 -} - -exports.neq = neq -function neq (a, b, loose) { - return compare(a, b, loose) !== 0 -} - -exports.gte = gte -function gte (a, b, loose) { - return compare(a, b, loose) >= 0 -} - -exports.lte = lte -function lte (a, b, loose) { - return compare(a, b, loose) <= 0 -} - -exports.cmp = cmp -function cmp (a, op, b, loose) { - switch (op) { - case '===': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a === b - - case '!==': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a !== b - - case '': - case '=': - case '==': - return eq(a, b, loose) - - case '!=': - return neq(a, b, loose) - - case '>': - return gt(a, b, loose) - - case '>=': - return gte(a, b, loose) - - case '<': - return lt(a, b, loose) - - case '<=': - return lte(a, b, loose) - - default: - throw new TypeError('Invalid operator: ' + op) - } -} - -exports.Comparator = Comparator -function Comparator (comp, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp - } else { - comp = comp.value - } - } - - if (!(this instanceof Comparator)) { - return new Comparator(comp, options) - } - - debug('comparator', comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) - - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version - } - - debug('comp', this) -} - -var ANY = {} -Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] - var m = comp.match(r) - - if (!m) { - throw new TypeError('Invalid comparator: ' + comp) - } - - this.operator = m[1] - if (this.operator === '=') { - this.operator = '' - } - - // if it literally is just '>' or '' then allow anything. 
- if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) - } -} - -Comparator.prototype.toString = function () { - return this.value -} - -Comparator.prototype.test = function (version) { - debug('Comparator.test', version, this.options.loose) - - if (this.semver === ANY) { - return true - } - - if (typeof version === 'string') { - version = new SemVer(version, this.options) - } - - return cmp(version, this.operator, this.semver, this.options) -} - -Comparator.prototype.intersects = function (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') - } - - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - - var rangeTmp - - if (this.operator === '') { - rangeTmp = new Range(comp.value, options) - return satisfies(this.value, rangeTmp, options) - } else if (comp.operator === '') { - rangeTmp = new Range(this.value, options) - return satisfies(comp.semver, rangeTmp, options) - } - - var sameDirectionIncreasing = - (this.operator === '>=' || this.operator === '>') && - (comp.operator === '>=' || comp.operator === '>') - var sameDirectionDecreasing = - (this.operator === '<=' || this.operator === '<') && - (comp.operator === '<=' || comp.operator === '<') - var sameSemVer = this.semver.version === comp.semver.version - var differentDirectionsInclusive = - (this.operator === '>=' || this.operator === '<=') && - (comp.operator === '>=' || comp.operator === '<=') - var oppositeDirectionsLessThan = - cmp(this.semver, '<', comp.semver, options) && - ((this.operator === '>=' || this.operator === '>') && - (comp.operator === '<=' || comp.operator === '<')) - var oppositeDirectionsGreaterThan = - cmp(this.semver, '>', comp.semver, options) && - ((this.operator === '<=' || this.operator === '<') && - (comp.operator === '>=' || comp.operator === '>')) - - return sameDirectionIncreasing || sameDirectionDecreasing || - (sameSemVer && differentDirectionsInclusive) || - oppositeDirectionsLessThan || oppositeDirectionsGreaterThan -} - -exports.Range = Range -function Range (range, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - - if (range instanceof Range) { - if (range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease) { - return range - } else { - return new Range(range.raw, options) - } - } - - if (range instanceof Comparator) { - return new Range(range.value, options) - } - - if (!(this instanceof Range)) { - return new Range(range, options) - } - - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease - - // First, split based on boolean or || - this.raw = range - this.set = range.split(/\s*\|\|\s*/).map(function (range) { - return this.parseRange(range.trim()) - }, this).filter(function (c) { - // throw out any that are not relevant for whatever reason - return c.length - }) - - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + range) - } - - this.format() -} - -Range.prototype.format = function () { - this.range = this.set.map(function (comps) { - return comps.join(' ').trim() - }).join('||').trim() - return this.range -} - -Range.prototype.toString = function () { - return this.range -} - -Range.prototype.parseRange = function (range) { - var loose = this.options.loose - range = range.trim() - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose 
? re[HYPHENRANGELOOSE] : re[HYPHENRANGE] - range = range.replace(hr, hyphenReplace) - debug('hyphen replace', range) - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, re[COMPARATORTRIM]) - - // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[TILDETRIM], tildeTrimReplace) - - // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[CARETTRIM], caretTrimReplace) - - // normalize spaces - range = range.split(/\s+/).join(' ') - - // At this point, the range is completely trimmed and - // ready to be split into comparators. - - var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR] - var set = range.split(' ').map(function (comp) { - return parseComparator(comp, this.options) - }, this).join(' ').split(/\s+/) - if (this.options.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function (comp) { - return !!comp.match(compRe) - }) - } - set = set.map(function (comp) { - return new Comparator(comp, this.options) - }, this) - - return set -} - -Range.prototype.intersects = function (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') - } - - return this.set.some(function (thisComparators) { - return thisComparators.every(function (thisComparator) { - return range.set.some(function (rangeComparators) { - return rangeComparators.every(function (rangeComparator) { - return thisComparator.intersects(rangeComparator, options) - }) - }) - }) - }) -} - -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators -function toComparators (range, options) { - return new Range(range, options).set.map(function (comp) { - return comp.map(function (c) { - return c.value - }).join(' ').trim().split(' ') - }) -} - -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. -function parseComparator (comp, options) { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp -} - -function isX (id) { - return !id || id.toLowerCase() === 'x' || id === '*' -} - -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceTilde(comp, options) - }).join(' ') -} - -function replaceTilde (comp, options) { - var r = options.loose ? re[TILDELOOSE] : re[TILDE] - return comp.replace(r, function (_, M, m, p, pr) { - debug('tilde', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0 - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else if (pr) { - debug('replaceTilde pr', pr) - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } else { - // ~1.2.3 == >=1.2.3 <1.3.0 - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' 
+ (+m + 1) + '.0' - } - - debug('tilde return', ret) - return ret - }) -} - -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceCaret(comp, options) - }).join(' ') -} - -function replaceCaret (comp, options) { - debug('caret', comp, options) - var r = options.loose ? re[CARETLOOSE] : re[CARET] - return comp.replace(r, function (_, M, m, p, pr) { - debug('caret', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - if (M === '0') { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else { - ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + (+M + 1) + '.0.0' - } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + (+M + 1) + '.0.0' - } - } - - debug('caret return', ret) - return ret - }) -} - -function replaceXRanges (comp, options) { - debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map(function (comp) { - return replaceXRange(comp, options) - }).join(' ') -} - -function replaceXRange (comp, options) { - comp = comp.trim() - var r = options.loose ? re[XRANGELOOSE] : re[XRANGE] - return comp.replace(r, function (ret, gtlt, M, m, p, pr) { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - var xM = isX(M) - var xm = xM || isX(m) - var xp = xm || isX(p) - var anyX = xp - - if (gtlt === '=' && anyX) { - gtlt = '' - } - - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0' - } else { - // nothing is forbidden - ret = '*' - } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 - } - p = 0 - - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - // >1.2.3 => >= 1.2.4 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 - } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 - } - } - - ret = gtlt + M + '.' + m + '.' + p - } else if (xm) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (xp) { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } - - debug('xRange return', ret) - - return ret - }) -} - -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -function replaceStars (comp, options) { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! 
- return comp.trim().replace(re[STAR], '') -} - -// This function is passed to string.replace(re[HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0 -function hyphenReplace ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = '>=' + fM + '.0.0' - } else if (isX(fp)) { - from = '>=' + fM + '.' + fm + '.0' - } else { - from = '>=' + from - } - - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = '<' + (+tM + 1) + '.0.0' - } else if (isX(tp)) { - to = '<' + tM + '.' + (+tm + 1) + '.0' - } else if (tpr) { - to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr - } else { - to = '<=' + to - } - - return (from + ' ' + to).trim() -} - -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function (version) { - if (!version) { - return false - } - - if (typeof version === 'string') { - version = new SemVer(version, this.options) - } - - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true - } - } - return false -} - -function testSet (set, version, options) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false - } - } - - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. - for (i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === ANY) { - continue - } - - if (set[i].semver.prerelease.length > 0) { - var allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true - } - } - } - - // Version has a -pre, but it's not one of the ones we like. 
- return false - } - - return true -} - -exports.satisfies = satisfies -function satisfies (version, range, options) { - try { - range = new Range(range, options) - } catch (er) { - return false - } - return range.test(version) -} - -exports.maxSatisfying = maxSatisfying -function maxSatisfying (versions, range, options) { - var max = null - var maxSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) - } - } - }) - return max -} - -exports.minSatisfying = minSatisfying -function minSatisfying (versions, range, options) { - var min = null - var minSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) - } - } - }) - return min -} - -exports.minVersion = minVersion -function minVersion (range, loose) { - range = new Range(range, loose) - - var minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver - } - - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver - } - - minver = null - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - comparators.forEach(function (comparator) { - // Clone to avoid manipulating the comparator's semver object. - var compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!minver || gt(minver, compver)) { - minver = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error('Unexpected operation: ' + comparator.operator) - } - }) - } - - if (minver && range.test(minver)) { - return minver - } - - return null -} - -exports.validRange = validRange -function validRange (range, options) { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null - } -} - -// Determine if version is less than all the versions possible in the range -exports.ltr = ltr -function ltr (version, range, options) { - return outside(version, range, '<', options) -} - -// Determine if version is greater than all the versions possible in the range. -exports.gtr = gtr -function gtr (version, range, options) { - return outside(version, range, '>', options) -} - -exports.outside = outside -function outside (version, range, hilo, options) { - version = new SemVer(version, options) - range = new Range(range, options) - - var gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') - } - - // If it satisifes the range it is not outside - if (satisfies(version, range, options)) { - return false - } - - // From now on, variable terms are as if we're in "gtr" mode. 
- // but note that everything is flipped for the "ltr" function. - - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - var high = null - var low = null - - comparators.forEach(function (comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') - } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator - } - }) - - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false - } - - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false - } - } - return true -} - -exports.prerelease = prerelease -function prerelease (version, options) { - var parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? parsed.prerelease : null -} - -exports.intersects = intersects -function intersects (r1, r2, options) { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2) -} - -exports.coerce = coerce -function coerce (version) { - if (version instanceof SemVer) { - return version - } - - if (typeof version !== 'string') { - return null - } - - var match = version.match(re[COERCE]) - - if (match == null) { - return null - } - - return parse(match[1] + - '.' + (match[2] || '0') + - '.' + (match[3] || '0')) -} - - -/***/ }), - -/***/ 293: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = authenticationRequestError; - -const { RequestError } = __webpack_require__(497); - -function authenticationRequestError(state, error, options) { - if (!error.headers) throw error; - - const otpRequired = /required/.test(error.headers["x-github-otp"] || ""); - // handle "2FA required" error only - if (error.status !== 401 || !otpRequired) { - throw error; - } - - if ( - error.status === 401 && - otpRequired && - error.request && - error.request.headers["x-github-otp"] - ) { - if (state.otp) { - delete state.otp; // no longer valid, request again - } else { - throw new RequestError( - "Invalid one-time password for two-factor authentication", - 401, - { - headers: error.headers, - request: options - } - ); - } - } - - if (typeof state.auth.on2fa !== "function") { - throw new RequestError( - "2FA required, but options.on2fa is not a function. 
See https://github.com/octokit/rest.js#authentication", - 401, - { - headers: error.headers, - request: options - } - ); - } - - return Promise.resolve() - .then(() => { - return state.auth.on2fa(); - }) - .then(oneTimePassword => { - const newOptions = Object.assign(options, { - headers: Object.assign(options.headers, { - "x-github-otp": oneTimePassword - }) - }); - return state.octokit.request(newOptions).then(response => { - // If OTP still valid, then persist it for following requests - state.otp = oneTimePassword; - return response; - }); - }); -} - - -/***/ }), - -/***/ 294: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = parseOptions; - -const { Deprecation } = __webpack_require__(692); -const { getUserAgent } = __webpack_require__(619); -const once = __webpack_require__(969); - -const pkg = __webpack_require__(215); - -const deprecateOptionsTimeout = once((log, deprecation) => - log.warn(deprecation) -); -const deprecateOptionsAgent = once((log, deprecation) => log.warn(deprecation)); -const deprecateOptionsHeaders = once((log, deprecation) => - log.warn(deprecation) -); - -function parseOptions(options, log, hook) { - if (options.headers) { - options.headers = Object.keys(options.headers).reduce((newObj, key) => { - newObj[key.toLowerCase()] = options.headers[key]; - return newObj; - }, {}); - } - - const clientDefaults = { - headers: options.headers || {}, - request: options.request || {}, - mediaType: { - previews: [], - format: "" - } - }; - - if (options.baseUrl) { - clientDefaults.baseUrl = options.baseUrl; - } - - if (options.userAgent) { - clientDefaults.headers["user-agent"] = options.userAgent; - } - - if (options.previews) { - clientDefaults.mediaType.previews = options.previews; - } - - if (options.timeZone) { - clientDefaults.headers["time-zone"] = options.timeZone; - } - - if (options.timeout) { - deprecateOptionsTimeout( - log, - new Deprecation( - "[@octokit/rest] new Octokit({timeout}) is deprecated. Use {request: {timeout}} instead. See https://github.com/octokit/request.js#request" - ) - ); - clientDefaults.request.timeout = options.timeout; - } - - if (options.agent) { - deprecateOptionsAgent( - log, - new Deprecation( - "[@octokit/rest] new Octokit({agent}) is deprecated. Use {request: {agent}} instead. See https://github.com/octokit/request.js#request" - ) - ); - clientDefaults.request.agent = options.agent; - } - - if (options.headers) { - deprecateOptionsHeaders( - log, - new Deprecation( - "[@octokit/rest] new Octokit({headers}) is deprecated. Use {userAgent, previews} instead. 
See https://github.com/octokit/request.js#request" - ) - ); - } - - const userAgentOption = clientDefaults.headers["user-agent"]; - const defaultUserAgent = `octokit.js/${pkg.version} ${getUserAgent()}`; - - clientDefaults.headers["user-agent"] = [userAgentOption, defaultUserAgent] - .filter(Boolean) - .join(" "); - - clientDefaults.request.hook = hook.bind(null, "request"); - - return clientDefaults; -} - - -/***/ }), - -/***/ 297: -/***/ (function(module) { - -module.exports = class HttpError extends Error { - constructor (message, code, headers) { - super(message) - - // Maintains proper stack trace (only available on V8) - /* istanbul ignore next */ - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor) - } - - this.name = 'HttpError' - this.code = code - this.headers = headers - } -} - - -/***/ }), - -/***/ 299: -/***/ (function(__unusedmodule, exports) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -const VERSION = "1.1.2"; - -/** - * Some “list” response that can be paginated have a different response structure - * - * They have a `total_count` key in the response (search also has `incomplete_results`, - * /installation/repositories also has `repository_selection`), as well as a key with - * the list of the items which name varies from endpoint to endpoint: - * - * - https://developer.github.com/v3/search/#example (key `items`) - * - https://developer.github.com/v3/checks/runs/#response-3 (key: `check_runs`) - * - https://developer.github.com/v3/checks/suites/#response-1 (key: `check_suites`) - * - https://developer.github.com/v3/apps/installations/#list-repositories (key: `repositories`) - * - https://developer.github.com/v3/apps/installations/#list-installations-for-a-user (key `installations`) - * - * Octokit normalizes these responses so that paginated results are always returned following - * the same structure. One challenge is that if the list response has only one page, no Link - * header is provided, so this header alone is not sufficient to check wether a response is - * paginated or not. For the exceptions with the namespace, a fallback check for the route - * paths has to be added in order to normalize the response. We cannot check for the total_count - * property because it also exists in the response of Get the combined status for a specific ref. - */ -const REGEX = [/^\/search\//, /^\/repos\/[^/]+\/[^/]+\/commits\/[^/]+\/(check-runs|check-suites)([^/]|$)/, /^\/installation\/repositories([^/]|$)/, /^\/user\/installations([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/secrets([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/workflows(\/[^/]+\/runs)?([^/]|$)/, /^\/repos\/[^/]+\/[^/]+\/actions\/runs(\/[^/]+\/(artifacts|jobs))?([^/]|$)/]; -function normalizePaginatedListResponse(octokit, url, response) { - const path = url.replace(octokit.request.endpoint.DEFAULTS.baseUrl, ""); - const responseNeedsNormalization = REGEX.find(regex => regex.test(path)); - if (!responseNeedsNormalization) return; // keep the additional properties intact as there is currently no other way - // to retrieve the same information. 
- - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - - response.data.total_count = totalCount; - Object.defineProperty(response.data, namespaceKey, { - get() { - octokit.log.warn(`[@octokit/paginate-rest] "response.data.${namespaceKey}" is deprecated for "GET ${path}". Get the results directly from "response.data"`); - return Array.from(data); - } - - }); -} - -function iterator(octokit, route, parameters) { - const options = octokit.request.endpoint(route, parameters); - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - next() { - if (!url) { - return Promise.resolve({ - done: true - }); - } - - return octokit.request({ - method, - url, - headers - }).then(response => { - normalizePaginatedListResponse(octokit, url, response); // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - - url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: response - }; - }); - } - - }) - }; -} - -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = undefined; - } - - return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); -} - -function gather(octokit, results, iterator, mapFn) { - return iterator.next().then(result => { - if (result.done) { - return results; - } - - let earlyExit = false; - - function done() { - earlyExit = true; - } - - results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data); - - if (earlyExit) { - return results; - } - - return gather(octokit, results, iterator, mapFn); - }); -} - -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - -function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; -} -paginateRest.VERSION = VERSION; - -exports.paginateRest = paginateRest; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 323: -/***/ (function(module) { - -"use strict"; - - -var isStream = module.exports = function (stream) { - return stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function'; -}; - -isStream.writable = function (stream) { - return isStream(stream) && stream.writable !== false && typeof stream._write === 'function' && typeof stream._writableState === 'object'; -}; - -isStream.readable = function (stream) { - return isStream(stream) && stream.readable !== false && typeof stream._read === 'function' && typeof stream._readableState === 'object'; -}; - -isStream.duplex = function (stream) { - return isStream.writable(stream) && isStream.readable(stream); -}; - -isStream.transform = function (stream) { - return isStream.duplex(stream) && typeof stream._transform === 'function' && typeof stream._transformState === 'object'; -}; - - -/***/ }), - -/***/ 336: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = hasLastPage - -const deprecate = __webpack_require__(370) -const getPageLinks = __webpack_require__(577) - -function hasLastPage (link) { - deprecate(`octokit.hasLastPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`) - return getPageLinks(link).last -} - - -/***/ }), - -/***/ 348: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -module.exports = validate; - -const { RequestError } = __webpack_require__(497); -const get = __webpack_require__(854); -const set = __webpack_require__(883); - -function validate(octokit, options) { - if (!options.request.validate) { - return; - } - const { validate: params } = options.request; - - Object.keys(params).forEach(parameterName => { - const parameter = get(params, parameterName); - - const expectedType = parameter.type; - let parentParameterName; - let parentValue; - let parentParamIsPresent = true; - let parentParameterIsArray = false; - - if (/\./.test(parameterName)) { - parentParameterName = parameterName.replace(/\.[^.]+$/, ""); - parentParameterIsArray = parentParameterName.slice(-2) === "[]"; - if (parentParameterIsArray) { - parentParameterName = parentParameterName.slice(0, -2); - } - parentValue = get(options, parentParameterName); - parentParamIsPresent = - parentParameterName === "headers" || - (typeof parentValue === "object" && parentValue !== null); - } - - const values = parentParameterIsArray - ? (get(options, parentParameterName) || []).map( - value => value[parameterName.split(/\./).pop()] - ) - : [get(options, parameterName)]; - - values.forEach((value, i) => { - const valueIsPresent = typeof value !== "undefined"; - const valueIsNull = value === null; - const currentParameterName = parentParameterIsArray - ? 
parameterName.replace(/\[\]/, `[${i}]`) - : parameterName; - - if (!parameter.required && !valueIsPresent) { - return; - } - - // if the parent parameter is of type object but allows null - // then the child parameters can be ignored - if (!parentParamIsPresent) { - return; - } - - if (parameter.allowNull && valueIsNull) { - return; - } - - if (!parameter.allowNull && valueIsNull) { - throw new RequestError( - `'${currentParameterName}' cannot be null`, - 400, - { - request: options - } - ); - } - - if (parameter.required && !valueIsPresent) { - throw new RequestError( - `Empty value for parameter '${currentParameterName}': ${JSON.stringify( - value - )}`, - 400, - { - request: options - } - ); - } - - // parse to integer before checking for enum - // so that string "1" will match enum with number 1 - if (expectedType === "integer") { - const unparsedValue = value; - value = parseInt(value, 10); - if (isNaN(value)) { - throw new RequestError( - `Invalid value for parameter '${currentParameterName}': ${JSON.stringify( - unparsedValue - )} is NaN`, - 400, - { - request: options - } - ); - } - } - - if (parameter.enum && parameter.enum.indexOf(String(value)) === -1) { - throw new RequestError( - `Invalid value for parameter '${currentParameterName}': ${JSON.stringify( - value - )}`, - 400, - { - request: options - } - ); - } - - if (parameter.validation) { - const regex = new RegExp(parameter.validation); - if (!regex.test(value)) { - throw new RequestError( - `Invalid value for parameter '${currentParameterName}': ${JSON.stringify( - value - )}`, - 400, - { - request: options - } - ); - } - } - - if (expectedType === "object" && typeof value === "string") { - try { - value = JSON.parse(value); - } catch (exception) { - throw new RequestError( - `JSON parse error of value for parameter '${currentParameterName}': ${JSON.stringify( - value - )}`, - 400, - { - request: options - } - ); - } - } - - set(options, parameter.mapTo || currentParameterName, value); - }); - }); - - return options; -} - - -/***/ }), - -/***/ 349: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = authenticationRequestError; - -const { RequestError } = __webpack_require__(497); - -function authenticationRequestError(state, error, options) { - /* istanbul ignore next */ - if (!error.headers) throw error; - - const otpRequired = /required/.test(error.headers["x-github-otp"] || ""); - // handle "2FA required" error only - if (error.status !== 401 || !otpRequired) { - throw error; - } - - if ( - error.status === 401 && - otpRequired && - error.request && - error.request.headers["x-github-otp"] - ) { - throw new RequestError( - "Invalid one-time password for two-factor authentication", - 401, - { - headers: error.headers, - request: options - } - ); - } - - if (typeof state.auth.on2fa !== "function") { - throw new RequestError( - "2FA required, but options.on2fa is not a function. 
See https://github.com/octokit/rest.js#authentication", - 401, - { - headers: error.headers, - request: options - } - ); - } - - return Promise.resolve() - .then(() => { - return state.auth.on2fa(); - }) - .then(oneTimePassword => { - const newOptions = Object.assign(options, { - headers: Object.assign( - { "x-github-otp": oneTimePassword }, - options.headers - ) - }); - return state.octokit.request(newOptions); - }); -} - - -/***/ }), - -/***/ 357: -/***/ (function(module) { - -module.exports = require("assert"); - -/***/ }), - -/***/ 363: -/***/ (function(module) { - -module.exports = register - -function register (state, name, method, options) { - if (typeof method !== 'function') { - throw new Error('method for before hook must be a function') - } - - if (!options) { - options = {} - } - - if (Array.isArray(name)) { - return name.reverse().reduce(function (callback, name) { - return register.bind(null, state, name, callback, options) - }, method)() - } - - return Promise.resolve() - .then(function () { - if (!state.registry[name]) { - return method(options) - } - - return (state.registry[name]).reduce(function (method, registered) { - return registered.hook.bind(null, method, options) - }, method)() - }) -} - - -/***/ }), - -/***/ 368: -/***/ (function(module) { - -module.exports = function atob(str) { - return Buffer.from(str, 'base64').toString('binary') -} - - -/***/ }), - -/***/ 370: -/***/ (function(module) { - -module.exports = deprecate - -const loggedMessages = {} - -function deprecate (message) { - if (loggedMessages[message]) { - return - } - - console.warn(`DEPRECATED (@octokit/rest): ${message}`) - loggedMessages[message] = 1 -} - - -/***/ }), - -/***/ 385: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var isPlainObject = _interopDefault(__webpack_require__(696)); -var universalUserAgent = __webpack_require__(796); - -function lowercaseKeys(object) { - if (!object) { - return {}; - } - - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); -} - -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { - [key]: options[key] - }); - } - }); - return result; -} - -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? 
{ - method, - url - } : { - url: method - }, options); - } else { - options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates - - - options.headers = lowercaseKeys(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); - } - - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); - return mergedOptions; -} - -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? "&" : "?"; - const names = Object.keys(parameters); - - if (names.length === 0) { - return url; - } - - return url + separator + names.map(name => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); - } - - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} - -const urlVariableRegex = /\{[^}]+\}/g; - -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); -} - -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - - if (!matches) { - return []; - } - - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); -} - -function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); -} - -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -/* istanbul ignore file */ -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); - } - - return part; - }).join(""); -} - -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} - -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); - - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } -} - -function isDefined(value) { - return value !== undefined && value !== null; -} - -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; -} - -function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } - - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; - - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } - - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); - } - } - } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); - } - } - - return result; -} - -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; -} - -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - - return (values.length !== 0 ? 
operator : "") + values.join(separator); - } else { - return values.join(","); - } - } else { - return encodeReserved(literal); - } - }); -} - -function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequset = /application\/octet-stream/i.test(headers.accept); - - if (!isBinaryRequset) { - if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); - } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } else { - headers["content-length"] = 0; - } - } - } // default content-type for JSON if body is set - - - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string - - - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? 
{ - request: options.request - } : null); -} - -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} - -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), - parse - }); -} - -const VERSION = "6.0.1"; - -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. - -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] - } -}; - -const endpoint = withDefaults(null, DEFAULTS); - -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 389: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const fs = __webpack_require__(747); -const shebangCommand = __webpack_require__(866); - -function readShebang(command) { - // Read the first 150 bytes from the file - const size = 150; - let buffer; - - if (Buffer.alloc) { - // Node.js v4.5+ / v5.10+ - buffer = Buffer.alloc(size); - } else { - // Old Node.js API - buffer = new Buffer(size); - buffer.fill(0); // zero-fill - } - - let fd; - - try { - fd = fs.openSync(command, 'r'); - fs.readSync(fd, buffer, 0, size, 0); - fs.closeSync(fd); - } catch (e) { /* Empty */ } - - // Attempt to extract shebang (null is returned if not a shebang) - return shebangCommand(buffer.toString()); -} - -module.exports = readShebang; - - -/***/ }), - -/***/ 402: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = Octokit; - -const { request } = __webpack_require__(753); -const Hook = __webpack_require__(523); - -const parseClientOptions = __webpack_require__(294); - -function Octokit(plugins, options) { - options = options || {}; - const hook = new Hook.Collection(); - const log = Object.assign( - { - debug: () => {}, - info: () => {}, - warn: console.warn, - error: console.error - }, - options && options.log - ); - const api = { - hook, - log, - request: request.defaults(parseClientOptions(options, log, hook)) - }; - - plugins.forEach(pluginFunction => pluginFunction(api, options)); - - return api; -} - - -/***/ }), - -/***/ 413: -/***/ (function(module) { - -module.exports = require("stream"); - -/***/ }), - -/***/ 427: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -// Older verions of Node.js might not have `util.getSystemErrorName()`. -// In that case, fall back to a deprecated internal. 
-const util = __webpack_require__(669); - -let uv; - -if (typeof util.getSystemErrorName === 'function') { - module.exports = util.getSystemErrorName; -} else { - try { - uv = process.binding('uv'); - - if (typeof uv.errname !== 'function') { - throw new TypeError('uv.errname is not a function'); - } - } catch (err) { - console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err); - uv = null; - } - - module.exports = code => errname(uv, code); -} - -// Used for testing the fallback behavior -module.exports.__test__ = errname; - -function errname(uv, code) { - if (uv) { - return uv.errname(code); - } - - if (!(code < 0)) { - throw new Error('err >= 0'); - } - - return `Unknown system error ${code}`; -} - - - -/***/ }), - -/***/ 430: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = octokitValidate; - -const validate = __webpack_require__(348); - -function octokitValidate(octokit) { - octokit.hook.before("request", validate.bind(null, octokit)); -} - - -/***/ }), - -/***/ 431: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const os = __importStar(__webpack_require__(87)); -/** - * Commands - * - * Command Format: - * ::name key=value,key=value::message - * - * Examples: - * ::warning::This is the message - * ::set-env name=MY_VAR::some value - */ -function issueCommand(command, properties, message) { - const cmd = new Command(command, properties, message); - process.stdout.write(cmd.toString() + os.EOL); -} -exports.issueCommand = issueCommand; -function issue(name, message = '') { - issueCommand(name, {}, message); -} -exports.issue = issue; -const CMD_STRING = '::'; -class Command { - constructor(command, properties, message) { - if (!command) { - command = 'missing.command'; - } - this.command = command; - this.properties = properties; - this.message = message; - } - toString() { - let cmdStr = CMD_STRING + this.command; - if (this.properties && Object.keys(this.properties).length > 0) { - cmdStr += ' '; - let first = true; - for (const key in this.properties) { - if (this.properties.hasOwnProperty(key)) { - const val = this.properties[key]; - if (val) { - if (first) { - first = false; - } - else { - cmdStr += ','; - } - cmdStr += `${key}=${escapeProperty(val)}`; - } - } - } - } - cmdStr += `${CMD_STRING}${escapeData(this.message)}`; - return cmdStr; - } -} -/** - * Sanitizes an input into a string so it can be passed into issueCommand safely - * @param input input to sanitize into a string - */ -function toCommandValue(input) { - if (input === null || input === undefined) { - return ''; - } - else if (typeof input === 'string' || input instanceof String) { - return input; - } - return JSON.stringify(input); -} -exports.toCommandValue = toCommandValue; -function escapeData(s) { - return toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A'); -} -function escapeProperty(s) { - return toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A') - .replace(/:/g, '%3A') - .replace(/,/g, '%2C'); -} -//# sourceMappingURL=command.js.map - -/***/ }), - -/***/ 453: -/***/ (function(module, 
__unusedexports, __webpack_require__) { - -var once = __webpack_require__(969) -var eos = __webpack_require__(9) -var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes - -var noop = function () {} -var ancient = /^v?\.0/.test(process.version) - -var isFn = function (fn) { - return typeof fn === 'function' -} - -var isFS = function (stream) { - if (!ancient) return false // newer node version do not need to care about fs is a special way - if (!fs) return false // browser - return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) -} - -var isRequest = function (stream) { - return stream.setHeader && isFn(stream.abort) -} - -var destroyer = function (stream, reading, writing, callback) { - callback = once(callback) - - var closed = false - stream.on('close', function () { - closed = true - }) - - eos(stream, {readable: reading, writable: writing}, function (err) { - if (err) return callback(err) - closed = true - callback() - }) - - var destroyed = false - return function (err) { - if (closed) return - if (destroyed) return - destroyed = true - - if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks - if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want - - if (isFn(stream.destroy)) return stream.destroy() - - callback(err || new Error('stream was destroyed')) - } -} - -var call = function (fn) { - fn() -} - -var pipe = function (from, to) { - return from.pipe(to) -} - -var pump = function () { - var streams = Array.prototype.slice.call(arguments) - var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop - - if (Array.isArray(streams[0])) streams = streams[0] - if (streams.length < 2) throw new Error('pump requires two streams per minimum') - - var error - var destroys = streams.map(function (stream, i) { - var reading = i < streams.length - 1 - var writing = i > 0 - return destroyer(stream, reading, writing, function (err) { - if (!error) error = err - if (err) destroys.forEach(call) - if (reading) return - destroys.forEach(call) - callback(error) - }) - }) - - return streams.reduce(pipe) -} - -module.exports = pump - - -/***/ }), - -/***/ 454: -/***/ (function(module, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - -var Stream = _interopDefault(__webpack_require__(413)); -var http = _interopDefault(__webpack_require__(605)); -var Url = _interopDefault(__webpack_require__(835)); -var https = _interopDefault(__webpack_require__(211)); -var zlib = _interopDefault(__webpack_require__(761)); - -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; - -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); - -class Blob { - constructor() { - this[TYPE] = ''; - - const blobParts = arguments[0]; - const options = arguments[1]; - - const buffers = []; - let size = 0; - - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } - - this[BUFFER] = Buffer.concat(buffers); - - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; - - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); - - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } -} - -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); - -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ - -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - Error.call(this, message); - - this.message = message; - this.type 
= type; - - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; - -let convert; -try { - convert = __webpack_require__(18).convert; -} catch (e) {} - -const INTERNALS = Symbol('Body internals'); - -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; - -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; - - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; - - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; - - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; - - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} - -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, - - get bodyUsed() { - return this[INTERNALS].disturbed; - }, - - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, - - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, - - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; - - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, - - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, - - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, - - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; - - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; - -// In browsers, all properties are enumerable. -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); - -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; - -/** - * Consume and convert an entire Body to a Buffer. 
- * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -function consumeBody() { - var _this4 = this; - - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } - - this[INTERNALS].disturbed = true; - - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } - - let body = this.body; - - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is blob - if (isBlob(body)) { - body = body.stream(); - } - - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } - - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; - - return new Body.Promise(function (resolve, reject) { - let resTimeout; - - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } - - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } - - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } - - accumBytes += chunk.length; - accum.push(chunk); - }); - - body.on('end', function () { - if (abort) { - return; - } - - clearTimeout(resTimeout); - - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} - -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } - - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; - - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } - - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); - - // html5 - if (!res && str) { - res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; - - this[MAP] = Object.create(null); - - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); - - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } - - return; - } - - // We don't worry about converting prop to ByteString here as append() - // will handle it. - if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } - - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } - - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } - - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } - - return this[MAP][key].join(', '); - } - - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; - - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; - - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } - - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? 
key : name] = [value]; - } - - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } - - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } - - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } - - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } - - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } - - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } - - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; - -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); - -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; - - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? 
function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} - -const INTERNAL = Symbol('internal'); - -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; -} - -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } - - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; - - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } - - this[INTERNAL].index = index + 1; - - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); - -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); - - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } - - return obj; -} - -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} - -const INTERNALS$1 = Symbol('Response internals'); - -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; - -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; - - Body.call(this, body, opts); - - const status = opts.status || 200; - const headers = new Headers(opts.headers); - - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } - - get url() { - return this[INTERNALS$1].url || ''; - } - - get status() { - return this[INTERNALS$1].status; - } - - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } - - get redirected() { - return this[INTERNALS$1].counter > 0; - } - - get statusText() { - return this[INTERNALS$1].statusText; - } - - get headers() { - return this[INTERNALS$1].headers; - } - - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } -} - -Body.mixIn(Response.prototype); - -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); - -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); - -const INTERNALS$2 = Symbol('Request internals'); - -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; - -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; - -/** - * Check if a value is an instance of Request. - * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} - -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); -} - -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; - - let parsedURL; - - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parse_url(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parse_url(`${input}`); - } - input = {}; - } else { - parsedURL = parse_url(input.url); - } - - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); - - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } - - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? 
clone(input) : null; - - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); - - const headers = new Headers(init.headers || input.headers || {}); - - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; - - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } - - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; - - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } - - get method() { - return this[INTERNALS$2].method; - } - - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } - - get headers() { - return this[INTERNALS$2].headers; - } - - get redirect() { - return this[INTERNALS$2].redirect; - } - - get signal() { - return this[INTERNALS$2].signal; - } - - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} - -Body.mixIn(Request.prototype); - -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); - -/** - * Convert a Request to Node.js http request options. 
- * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); - - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } - - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } - - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } - - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } - - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } - - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} - -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ - -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); - - this.type = 'aborted'; - this.message = message; - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; - -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; -const resolve_url = Url.resolve; - -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { - - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } - - Body.Promise = fetch.Promise; - - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); - - const send = (options.protocol === 'https:' ? 
https : http).request; - const signal = request.signal; - - let response = null; - - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; - - if (signal && signal.aborted) { - abort(); - return; - } - - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; - - // send request - const req = send(options); - let reqTimeout; - - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } - - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } - - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } - - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); - finalize(); - }); - - req.on('response', function (res) { - clearTimeout(reqTimeout); - - const headers = createHeadersLenient(res.headers); - - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); - - // HTTP fetch step 5.3 - const locationURL = location === null ? null : resolve_url(request.url, location); - - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. - if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } - - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. 
- const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout - }; - - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } - - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } - - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); - - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } - - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. 
- const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; - - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } - - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - return; - } - - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } - - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); - - writeToStream(req, request); - }); -} -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; - -// expose Promise -fetch.Promise = global.Promise; - -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = FetchError; - - -/***/ }), - -/***/ 462: -/***/ (function(module) { - -"use strict"; - - -// See http://www.robvanderwoude.com/escapechars.php -const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; - -function escapeCommand(arg) { - // Escape meta chars - arg = arg.replace(metaCharsRegExp, '^$1'); - - return arg; -} - -function escapeArgument(arg, doubleEscapeMetaChars) { - // Convert to string - arg = `${arg}`; - - // Algorithm below is based on https://qntm.org/cmd - - // Sequence of backslashes followed by a double quote: - // double up all the backslashes and escape the double quote - arg = arg.replace(/(\\*)"/g, '$1$1\\"'); - - // Sequence of backslashes followed by the end of the string - // (which will become a double quote later): - // double up all the backslashes - arg = arg.replace(/(\\*)$/, '$1$1'); - - // All other backslashes occur literally - - // Quote the whole thing: - arg = `"${arg}"`; - - // Escape meta chars - arg = arg.replace(metaCharsRegExp, '^$1'); - - // Double escape meta chars if necessary - if (doubleEscapeMetaChars) { - arg = arg.replace(metaCharsRegExp, '^$1'); - } - - return arg; -} - -module.exports.command = escapeCommand; -module.exports.argument = escapeArgument; - - -/***/ }), - -/***/ 463: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - -var deprecation = __webpack_require__(692); -var once = _interopDefault(__webpack_require__(969)); - -const logOnce = once(deprecation => console.warn(deprecation)); -/** - * Error with extra properties to help with debugging - */ - -class RequestError extends Error { - constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = "HttpError"; - this.status = statusCode; - Object.defineProperty(this, "code", { - get() { - logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - } - - }); - this.headers = options.headers || {}; // redact request credentials without mutating original request options - - const requestCopy = Object.assign({}, options.request); - - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") - }); - } - - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; - } - -} - -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 469: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -// Originally pulled from https://github.com/JasonEtco/actions-toolkit/blob/master/src/github.ts -const graphql_1 = __webpack_require__(898); -const rest_1 = __webpack_require__(0); -const Context = __importStar(__webpack_require__(262)); -const httpClient = __importStar(__webpack_require__(539)); -// We need this in order to extend Octokit -rest_1.Octokit.prototype = new rest_1.Octokit(); -exports.context = new Context.Context(); -class GitHub extends rest_1.Octokit { - constructor(token, opts) { - super(GitHub.getOctokitOptions(GitHub.disambiguate(token, opts))); - this.graphql = GitHub.getGraphQL(GitHub.disambiguate(token, opts)); - } - /** - * Disambiguates the constructor overload parameters - */ - static disambiguate(token, opts) { - return [ - typeof token === 'string' ? token : '', - typeof token === 'object' ? 
token : opts || {} - ]; - } - static getOctokitOptions(args) { - const token = args[0]; - const options = Object.assign({}, args[1]); // Shallow clone - don't mutate the object provided by the caller - // Base URL - GHES or Dotcom - options.baseUrl = options.baseUrl || this.getApiBaseUrl(); - // Auth - const auth = GitHub.getAuthString(token, options); - if (auth) { - options.auth = auth; - } - // Proxy - const agent = GitHub.getProxyAgent(options.baseUrl, options); - if (agent) { - // Shallow clone - don't mutate the object provided by the caller - options.request = options.request ? Object.assign({}, options.request) : {}; - // Set the agent - options.request.agent = agent; - } - return options; - } - static getGraphQL(args) { - const defaults = {}; - defaults.baseUrl = this.getGraphQLBaseUrl(); - const token = args[0]; - const options = args[1]; - // Authorization - const auth = this.getAuthString(token, options); - if (auth) { - defaults.headers = { - authorization: auth - }; - } - // Proxy - const agent = GitHub.getProxyAgent(defaults.baseUrl, options); - if (agent) { - defaults.request = { agent }; - } - return graphql_1.graphql.defaults(defaults); - } - static getAuthString(token, options) { - // Validate args - if (!token && !options.auth) { - throw new Error('Parameter token or opts.auth is required'); - } - else if (token && options.auth) { - throw new Error('Parameters token and opts.auth may not both be specified'); - } - return typeof options.auth === 'string' ? options.auth : `token ${token}`; - } - static getProxyAgent(destinationUrl, options) { - var _a; - if (!((_a = options.request) === null || _a === void 0 ? void 0 : _a.agent)) { - if (httpClient.getProxyUrl(destinationUrl)) { - const hc = new httpClient.HttpClient(); - return hc.getAgent(destinationUrl); - } - } - return undefined; - } - static getApiBaseUrl() { - return process.env['GITHUB_API_URL'] || 'https://api.github.com'; - } - static getGraphQLBaseUrl() { - let url = process.env['GITHUB_GRAPHQL_URL'] || 'https://api.github.com/graphql'; - // Shouldn't be a trailing slash, but remove if so - if (url.endsWith('/')) { - url = url.substr(0, url.length - 1); - } - // Remove trailing "/graphql" - if (url.toUpperCase().endsWith('/GRAPHQL')) { - url = url.substr(0, url.length - '/graphql'.length); - } - return url; - } -} -exports.GitHub = GitHub; -//# sourceMappingURL=github.js.map - -/***/ }), - -/***/ 470: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const command_1 = __webpack_require__(431); -const os = __importStar(__webpack_require__(87)); -const path = __importStar(__webpack_require__(622)); -/** - * The code to exit an action - */ -var ExitCode; -(function (ExitCode) { - /** - * A code indicating that the action was successful - */ - ExitCode[ExitCode["Success"] = 0] = "Success"; - /** - * A code indicating that the action was a failure - */ - ExitCode[ExitCode["Failure"] = 1] = "Failure"; -})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); -//----------------------------------------------------------------------- -// Variables -//----------------------------------------------------------------------- -/** - * Sets env variable for this action and future actions in the job - * @param name the name of the variable to set - * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function exportVariable(name, val) { - const convertedVal = command_1.toCommandValue(val); - process.env[name] = convertedVal; - command_1.issueCommand('set-env', { name }, convertedVal); -} -exports.exportVariable = exportVariable; -/** - * Registers a secret which will get masked from logs - * @param secret value of the secret - */ -function setSecret(secret) { - command_1.issueCommand('add-mask', {}, secret); -} -exports.setSecret = setSecret; -/** - * Prepends inputPath to the PATH (for this action and future actions) - * @param inputPath - */ -function addPath(inputPath) { - command_1.issueCommand('add-path', {}, inputPath); - process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; -} -exports.addPath = addPath; -/** - * Gets the value of an input. The value is also trimmed. - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns string - */ -function getInput(name, options) { - const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; - if (options && options.required && !val) { - throw new Error(`Input required and not supplied: ${name}`); - } - return val.trim(); -} -exports.getInput = getInput; -/** - * Sets the value of an output. - * - * @param name name of the output to set - * @param value value to store. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function setOutput(name, value) { - command_1.issueCommand('set-output', { name }, value); -} -exports.setOutput = setOutput; -/** - * Enables or disables the echoing of commands into stdout for the rest of the step. - * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. - * - */ -function setCommandEcho(enabled) { - command_1.issue('echo', enabled ? 
'on' : 'off'); -} -exports.setCommandEcho = setCommandEcho; -//----------------------------------------------------------------------- -// Results -//----------------------------------------------------------------------- -/** - * Sets the action status to failed. - * When the action exits it will be with an exit code of 1 - * @param message add error issue message - */ -function setFailed(message) { - process.exitCode = ExitCode.Failure; - error(message); -} -exports.setFailed = setFailed; -//----------------------------------------------------------------------- -// Logging Commands -//----------------------------------------------------------------------- -/** - * Gets whether Actions Step Debug is on or not - */ -function isDebug() { - return process.env['RUNNER_DEBUG'] === '1'; -} -exports.isDebug = isDebug; -/** - * Writes debug message to user log - * @param message debug message - */ -function debug(message) { - command_1.issueCommand('debug', {}, message); -} -exports.debug = debug; -/** - * Adds an error issue - * @param message error issue message. Errors will be converted to string via toString() - */ -function error(message) { - command_1.issue('error', message instanceof Error ? message.toString() : message); -} -exports.error = error; -/** - * Adds an warning issue - * @param message warning issue message. Errors will be converted to string via toString() - */ -function warning(message) { - command_1.issue('warning', message instanceof Error ? message.toString() : message); -} -exports.warning = warning; -/** - * Writes info to log with console.log. - * @param message info message - */ -function info(message) { - process.stdout.write(message + os.EOL); -} -exports.info = info; -/** - * Begin an output group. - * - * Output until the next `groupEnd` will be foldable in this group - * - * @param name The name of the output group - */ -function startGroup(name) { - command_1.issue('group', name); -} -exports.startGroup = startGroup; -/** - * End an output group. - */ -function endGroup() { - command_1.issue('endgroup'); -} -exports.endGroup = endGroup; -/** - * Wrap an asynchronous function call in a group. - * - * Returns the same type as the function itself. - * - * @param name The name of the group - * @param fn The function to wrap in the group - */ -function group(name, fn) { - return __awaiter(this, void 0, void 0, function* () { - startGroup(name); - let result; - try { - result = yield fn(); - } - finally { - endGroup(); - } - return result; - }); -} -exports.group = group; -//----------------------------------------------------------------------- -// Wrapper action state -//----------------------------------------------------------------------- -/** - * Saves state for current action, the state can only be retrieved by this action's post job execution. - * - * @param name name of the state to store - * @param value value to store. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function saveState(name, value) { - command_1.issueCommand('save-state', { name }, value); -} -exports.saveState = saveState; -/** - * Gets the value of an state set by this action's main execution. 
- * - * @param name name of the state to get - * @returns string - */ -function getState(name) { - return process.env[`STATE_${name}`] || ''; -} -exports.getState = getState; -//# sourceMappingURL=core.js.map - -/***/ }), - -/***/ 471: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = authenticationBeforeRequest; - -const btoa = __webpack_require__(675); -const uniq = __webpack_require__(126); - -function authenticationBeforeRequest(state, options) { - if (!state.auth.type) { - return; - } - - if (state.auth.type === "basic") { - const hash = btoa(`${state.auth.username}:${state.auth.password}`); - options.headers.authorization = `Basic ${hash}`; - return; - } - - if (state.auth.type === "token") { - options.headers.authorization = `token ${state.auth.token}`; - return; - } - - if (state.auth.type === "app") { - options.headers.authorization = `Bearer ${state.auth.token}`; - const acceptHeaders = options.headers.accept - .split(",") - .concat("application/vnd.github.machine-man-preview+json"); - options.headers.accept = uniq(acceptHeaders) - .filter(Boolean) - .join(","); - return; - } - - options.url += options.url.indexOf("?") === -1 ? "?" : "&"; - - if (state.auth.token) { - options.url += `access_token=${encodeURIComponent(state.auth.token)}`; - return; - } - - const key = encodeURIComponent(state.auth.key); - const secret = encodeURIComponent(state.auth.secret); - options.url += `client_id=${key}&client_secret=${secret}`; -} - - -/***/ }), - -/***/ 489: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const path = __webpack_require__(622); -const which = __webpack_require__(814); -const pathKey = __webpack_require__(39)(); - -function resolveCommandAttempt(parsed, withoutPathExt) { - const cwd = process.cwd(); - const hasCustomCwd = parsed.options.cwd != null; - - // If a custom `cwd` was specified, we need to change the process cwd - // because `which` will do stat calls but does not support a custom cwd - if (hasCustomCwd) { - try { - process.chdir(parsed.options.cwd); - } catch (err) { - /* Empty */ - } - } - - let resolved; - - try { - resolved = which.sync(parsed.command, { - path: (parsed.options.env || process.env)[pathKey], - pathExt: withoutPathExt ? path.delimiter : undefined, - }); - } catch (e) { - /* Empty */ - } finally { - process.chdir(cwd); - } - - // If we successfully resolved, ensure that an absolute path is returned - // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it - if (resolved) { - resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved); - } - - return resolved; -} - -function resolveCommand(parsed) { - return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true); -} - -module.exports = resolveCommand; - - -/***/ }), - -/***/ 497: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - -var deprecation = __webpack_require__(692); -var once = _interopDefault(__webpack_require__(969)); - -const logOnce = once(deprecation => console.warn(deprecation)); -/** - * Error with extra properties to help with debugging - */ - -class RequestError extends Error { - constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = "HttpError"; - this.status = statusCode; - Object.defineProperty(this, "code", { - get() { - logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - } - - }); - this.headers = options.headers || {}; // redact request credentials without mutating original request options - - const requestCopy = Object.assign({}, options.request); - - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") - }); - } - - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; - } - -} - -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 510: -/***/ (function(module) { - -module.exports = addHook - -function addHook (state, kind, name, hook) { - var orig = hook - if (!state.registry[name]) { - state.registry[name] = [] - } - - if (kind === 'before') { - hook = function (method, options) { - return Promise.resolve() - .then(orig.bind(null, options)) - .then(method.bind(null, options)) - } - } - - if (kind === 'after') { - hook = function (method, options) { - var result - return Promise.resolve() - .then(method.bind(null, options)) - .then(function (result_) { - result = result_ - return orig(result, options) - }) - .then(function () { - return result - }) - } - } - - if (kind === 'error') { - hook = function (method, options) { - return Promise.resolve() - .then(method.bind(null, options)) - .catch(function (error) { - return orig(error, options) - }) - } - } - - state.registry[name].push({ - hook: hook, - orig: orig - }) -} - - -/***/ }), - -/***/ 523: -/***/ (function(module, __unusedexports, __webpack_require__) { - -var register = __webpack_require__(363) -var addHook = __webpack_require__(510) -var removeHook = __webpack_require__(763) - -// bind with array of arguments: https://stackoverflow.com/a/21792913 -var bind = Function.bind -var bindable = bind.bind(bind) - -function bindApi (hook, state, name) { - var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) - hook.api = { remove: removeHookRef } - hook.remove = removeHookRef - - ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { - var args = name ? 
[state, kind, name] : [state, kind] - hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) - }) -} - -function HookSingular () { - var singularHookName = 'h' - var singularHookState = { - registry: {} - } - var singularHook = register.bind(null, singularHookState, singularHookName) - bindApi(singularHook, singularHookState, singularHookName) - return singularHook -} - -function HookCollection () { - var state = { - registry: {} - } - - var hook = register.bind(null, state) - bindApi(hook, state) - - return hook -} - -var collectionHookDeprecationMessageDisplayed = false -function Hook () { - if (!collectionHookDeprecationMessageDisplayed) { - console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4') - collectionHookDeprecationMessageDisplayed = true - } - return HookCollection() -} - -Hook.Singular = HookSingular.bind() -Hook.Collection = HookCollection.bind() - -module.exports = Hook -// expose constructors as a named property for TypeScript -module.exports.Hook = Hook -module.exports.Singular = Hook.Singular -module.exports.Collection = Hook.Collection - - -/***/ }), - -/***/ 529: -/***/ (function(module, __unusedexports, __webpack_require__) { - -const factory = __webpack_require__(47); - -module.exports = factory(); - - -/***/ }), - -/***/ 536: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = hasFirstPage - -const deprecate = __webpack_require__(370) -const getPageLinks = __webpack_require__(577) - -function hasFirstPage (link) { - deprecate(`octokit.hasFirstPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`) - return getPageLinks(link).first -} - - -/***/ }), - -/***/ 539: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -const url = __webpack_require__(835); -const http = __webpack_require__(605); -const https = __webpack_require__(211); -const pm = __webpack_require__(950); -let tunnel; -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = 
"TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); -var Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); -/** - * Returns the proxy URL, depending upon the supplied url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ -function getProxyUrl(serverUrl) { - let proxyUrl = pm.getProxyUrl(url.parse(serverUrl)); - return proxyUrl ? proxyUrl.href : ''; -} -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientResponse { - constructor(message) { - this.message = message; - } - readBody() { - return new Promise(async (resolve, reject) => { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - }); - } -} -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - let parsedUrl = url.parse(requestUrl); - return parsedUrl.protocol === 'https:'; -} -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; - } - if (requestOptions.allowRedirectDowngrade != null) { - this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; - } - if (requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); - } - if (requestOptions.keepAlive != null) { - this._keepAlive = requestOptions.keepAlive; - } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; - } - if (requestOptions.maxRetries != null) { - this._maxRetries = requestOptions.maxRetries; - } - } - } - options(requestUrl, additionalHeaders) { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); - } - get(requestUrl, additionalHeaders) { - return 
this.request('GET', requestUrl, null, additionalHeaders || {}); - } - del(requestUrl, additionalHeaders) { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); - } - post(requestUrl, data, additionalHeaders) { - return this.request('POST', requestUrl, data, additionalHeaders || {}); - } - patch(requestUrl, data, additionalHeaders) { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); - } - put(requestUrl, data, additionalHeaders) { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); - } - head(requestUrl, additionalHeaders) { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); - } - sendStream(verb, requestUrl, stream, additionalHeaders) { - return this.request(verb, requestUrl, stream, additionalHeaders); - } - /** - * Gets a typed object from an endpoint - * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise - */ - async getJson(requestUrl, additionalHeaders = {}) { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - let res = await this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async postJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async putJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async patchJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - /** - * Makes a raw http request. - * All other methods such as get, post, patch, and request ultimately call this. - * Prefer get, del, post and patch - */ - async request(verb, requestUrl, data, headers) { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - let parsedUrl = url.parse(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 - ? 
this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - while (numTries < maxTries) { - response = await this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (let i = 0; i < this.handlers.length; i++) { - if (this.handlers[i].canHandleAuthentication(response)) { - authenticationHandler = this.handlers[i]; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. - return response; - } - } - let redirectsRemaining = this._maxRedirects; - while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - let parsedRedirectUrl = url.parse(redirectUrl); - if (parsedUrl.protocol == 'https:' && - parsedUrl.protocol != parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. - await response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (let header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = await this.requestRaw(info, data); - redirectsRemaining--; - } - if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - await response.readBody(); - await this._performExponentialBackoff(numTries); - } - } - return response; - } - /** - * Needs to be called if keepAlive is set to true in request options. - */ - dispose() { - if (this._agent) { - this._agent.destroy(); - } - this._disposed = true; - } - /** - * Raw request. - * @param info - * @param data - */ - requestRaw(info, data) { - return new Promise((resolve, reject) => { - let callbackForResult = function (err, res) { - if (err) { - reject(err); - } - resolve(res); - }; - this.requestRawWithCallback(info, data, callbackForResult); - }); - } - /** - * Raw request with callback. 
- * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - let socket; - if (typeof data === 'string') { - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); - } - let callbackCalled = false; - let handleResult = (err, res) => { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); - } - }; - let req = info.httpModule.request(info.options, (msg) => { - let res = new HttpClientResponse(msg); - handleResult(null, res); - }); - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); - } - handleResult(new Error('Request timeout: ' + info.options.path), null); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err, null); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); - } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); - }); - data.pipe(req); - } - else { - req.end(); - } - } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ - getAgent(serverUrl) { - let parsedUrl = url.parse(serverUrl); - return this._getAgent(parsedUrl); - } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; - } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - this.handlers.forEach(handler => { - handler.prepareRequest(info.options); - }); - } - return info; - } - _mergeHeaders(headers) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); - } - return lowercaseKeys(headers || {}); - } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; - } - return additionalHeaders[header] || clientHeader || _default; - } - _getAgent(parsedUrl) { - let agent; - let proxyUrl = pm.getProxyUrl(parsedUrl); - let useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive && useProxy) { - agent = this._proxyAgent; - } - if (this._keepAlive && !useProxy) { - agent = this._agent; - } - // if agent is already assigned use that agent. 
- if (!!agent) { - return agent; - } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (!!this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; - } - if (useProxy) { - // If using proxy, need tunnel - if (!tunnel) { - tunnel = __webpack_require__(856); - } - const agentOptions = { - maxSockets: maxSockets, - keepAlive: this._keepAlive, - proxy: { - proxyAuth: proxyUrl.auth, - host: proxyUrl.hostname, - port: proxyUrl.port - } - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; - } - else { - tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; - } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; - } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; - agent = usingSsl ? new https.Agent(options) : new http.Agent(options); - this._agent = agent; - } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? https.globalAgent : http.globalAgent; - } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); - } - return agent; - } - _performExponentialBackoff(retryNumber) { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - } - static dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - let a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - async _processResponse(res, options) { - return new Promise(async (resolve, reject) => { - const statusCode = res.message.statusCode; - const response = { - statusCode: statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode == HttpCodes.NotFound) { - resolve(response); - } - let obj; - let contents; - // get the result from the body - try { - contents = await res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. 
- if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = 'Failed request: (' + statusCode + ')'; - } - let err = new Error(msg); - // attach statusCode and body obj (if available) to the error object - err['statusCode'] = statusCode; - if (response.result) { - err['result'] = response.result; - } - reject(err); - } - else { - resolve(response); - } - }); - } -} -exports.HttpClient = HttpClient; - - -/***/ }), - -/***/ 550: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = getNextPage - -const getPage = __webpack_require__(265) - -function getNextPage (octokit, link, headers) { - return getPage(octokit, link, 'next', headers) -} - - -/***/ }), - -/***/ 558: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = hasPreviousPage - -const deprecate = __webpack_require__(370) -const getPageLinks = __webpack_require__(577) - -function hasPreviousPage (link) { - deprecate(`octokit.hasPreviousPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`) - return getPageLinks(link).prev -} - - -/***/ }), - -/***/ 563: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = getPreviousPage - -const getPage = __webpack_require__(265) - -function getPreviousPage (octokit, link, headers) { - return getPage(octokit, link, 'prev', headers) -} - - -/***/ }), - -/***/ 568: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const path = __webpack_require__(622); -const niceTry = __webpack_require__(948); -const resolveCommand = __webpack_require__(489); -const escape = __webpack_require__(462); -const readShebang = __webpack_require__(389); -const semver = __webpack_require__(280); - -const isWin = process.platform === 'win32'; -const isExecutableRegExp = /\.(?:com|exe)$/i; -const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; - -// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0 -const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false; - -function detectShebang(parsed) { - parsed.file = resolveCommand(parsed); - - const shebang = parsed.file && readShebang(parsed.file); - - if (shebang) { - parsed.args.unshift(parsed.file); - parsed.command = shebang; - - return resolveCommand(parsed); - } - - return parsed.file; -} - -function parseNonShell(parsed) { - if (!isWin) { - return parsed; - } - - // Detect & add support for shebangs - const commandFile = detectShebang(parsed); - - // We don't need a shell if the command filename is an executable - const needsShell = !isExecutableRegExp.test(commandFile); - - // If a shell is required, use cmd.exe and take care of escaping everything correctly - // Note that `forceShell` is an hidden option used only in tests - if (parsed.options.forceShell || needsShell) { - // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` - // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument - // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, - // we need to double escape them - const needsDoubleEscapeMetaChars = 
isCmdShimRegExp.test(commandFile); - - // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) - // This is necessary otherwise it will always fail with ENOENT in those cases - parsed.command = path.normalize(parsed.command); - - // Escape command & arguments - parsed.command = escape.command(parsed.command); - parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); - - const shellCommand = [parsed.command].concat(parsed.args).join(' '); - - parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; - parsed.command = process.env.comspec || 'cmd.exe'; - parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped - } - - return parsed; -} - -function parseShell(parsed) { - // If node supports the shell option, there's no need to mimic its behavior - if (supportsShellOption) { - return parsed; - } - - // Mimic node shell option - // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335 - const shellCommand = [parsed.command].concat(parsed.args).join(' '); - - if (isWin) { - parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe'; - parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; - parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped - } else { - if (typeof parsed.options.shell === 'string') { - parsed.command = parsed.options.shell; - } else if (process.platform === 'android') { - parsed.command = '/system/bin/sh'; - } else { - parsed.command = '/bin/sh'; - } - - parsed.args = ['-c', shellCommand]; - } - - return parsed; -} - -function parse(command, args, options) { - // Normalize arguments, similar to nodejs - if (args && !Array.isArray(args)) { - options = args; - args = null; - } - - args = args ? args.slice(0) : []; // Clone array to avoid changing the original - options = Object.assign({}, options); // Clone object to avoid changing the original - - // Build our parsed object - const parsed = { - command, - args, - options, - file: undefined, - original: { - command, - args, - }, - }; - - // Delegate further parsing to shell or non-shell - return options.shell ? parseShell(parsed) : parseNonShell(parsed); -} - -module.exports = parse; - - -/***/ }), - -/***/ 577: -/***/ (function(module) { - -module.exports = getPageLinks - -function getPageLinks (link) { - link = link.link || link.headers.link || '' - - const links = {} - - // link format: - // '; rel="next", ; rel="last"' - link.replace(/<([^>]*)>;\s*rel="([\w]*)"/g, (m, uri, type) => { - links[type] = uri - }) - - return links -} - - -/***/ }), - -/***/ 605: -/***/ (function(module) { - -module.exports = require("http"); - -/***/ }), - -/***/ 614: -/***/ (function(module) { - -module.exports = require("events"); - -/***/ }), - -/***/ 619: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - -var osName = _interopDefault(__webpack_require__(2)); - -function getUserAgent() { - try { - return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`; - } catch (error) { - if (/wmic os get Caption/.test(error.message)) { - return "Windows "; - } - - throw error; - } -} - -exports.getUserAgent = getUserAgent; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 621: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const path = __webpack_require__(622); -const pathKey = __webpack_require__(39); - -module.exports = opts => { - opts = Object.assign({ - cwd: process.cwd(), - path: process.env[pathKey()] - }, opts); - - let prev; - let pth = path.resolve(opts.cwd); - const ret = []; - - while (prev !== pth) { - ret.push(path.join(pth, 'node_modules/.bin')); - prev = pth; - pth = path.resolve(pth, '..'); - } - - // ensure the running `node` binary is used - ret.push(path.dirname(process.execPath)); - - return ret.concat(opts.path).join(path.delimiter); -}; - -module.exports.env = opts => { - opts = Object.assign({ - env: process.env - }, opts); - - const env = Object.assign({}, opts.env); - const path = pathKey({env}); - - opts.path = env[path]; - env[path] = module.exports(opts); - - return env; -}; - - -/***/ }), - -/***/ 622: -/***/ (function(module) { - -module.exports = require("path"); - -/***/ }), - -/***/ 631: -/***/ (function(module) { - -module.exports = require("net"); - -/***/ }), - -/***/ 649: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = getLastPage - -const getPage = __webpack_require__(265) - -function getLastPage (octokit, link, headers) { - return getPage(octokit, link, 'last', headers) -} - - -/***/ }), - -/***/ 654: -/***/ (function(module) { - -// This is not the set of all possible signals. -// -// It IS, however, the set of all signals that trigger -// an exit on either Linux or BSD systems. Linux is a -// superset of the signal names supported on BSD, and -// the unknown signals just fail to register, so we can -// catch that easily enough. -// -// Don't bother with SIGKILL. It's uncatchable, which -// means that we can't fire any callbacks anyway. -// -// If a user does happen to register a handler on a non- -// fatal signal like SIGWINCH or something, and then -// exit, it'll end up firing `process.emit('exit')`, so -// the handler will be fired anyway. -// -// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised -// artificially, inherently leave the process in a -// state from which it is not safe to try and enter JS -// listeners. -module.exports = [ - 'SIGABRT', - 'SIGALRM', - 'SIGHUP', - 'SIGINT', - 'SIGTERM' -] - -if (process.platform !== 'win32') { - module.exports.push( - 'SIGVTALRM', - 'SIGXCPU', - 'SIGXFSZ', - 'SIGUSR2', - 'SIGTRAP', - 'SIGSYS', - 'SIGQUIT', - 'SIGIOT' - // should detect profiler and enable/disable accordingly. 
- // see #21 - // 'SIGPROF' - ) -} - -if (process.platform === 'linux') { - module.exports.push( - 'SIGIO', - 'SIGPOLL', - 'SIGPWR', - 'SIGSTKFLT', - 'SIGUNUSED' - ) -} - - -/***/ }), - -/***/ 669: -/***/ (function(module) { - -module.exports = require("util"); - -/***/ }), - -/***/ 674: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = authenticate; - -const { Deprecation } = __webpack_require__(692); -const once = __webpack_require__(969); - -const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation)); - -function authenticate(state, options) { - deprecateAuthenticate( - state.octokit.log, - new Deprecation( - '[@octokit/rest] octokit.authenticate() is deprecated. Use "auth" constructor option instead.' - ) - ); - - if (!options) { - state.auth = false; - return; - } - - switch (options.type) { - case "basic": - if (!options.username || !options.password) { - throw new Error( - "Basic authentication requires both a username and password to be set" - ); - } - break; - - case "oauth": - if (!options.token && !(options.key && options.secret)) { - throw new Error( - "OAuth2 authentication requires a token or key & secret to be set" - ); - } - break; - - case "token": - case "app": - if (!options.token) { - throw new Error("Token authentication requires a token to be set"); - } - break; - - default: - throw new Error( - "Invalid authentication type, must be 'basic', 'oauth', 'token' or 'app'" - ); - } - - state.auth = options; -} - - -/***/ }), - -/***/ 675: -/***/ (function(module) { - -module.exports = function btoa(str) { - return new Buffer(str).toString('base64') -} - - -/***/ }), - -/***/ 692: -/***/ (function(__unusedmodule, exports) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -class Deprecation extends Error { - constructor(message) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = 'Deprecation'; - } - -} - -exports.Deprecation = Deprecation; - - -/***/ }), - -/***/ 696: -/***/ (function(module) { - -"use strict"; - - -/*! - * isobject - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. - */ - -function isObject(val) { - return val != null && typeof val === 'object' && Array.isArray(val) === false; -} - -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. 
- */ - -function isObjectObject(o) { - return isObject(o) === true - && Object.prototype.toString.call(o) === '[object Object]'; -} - -function isPlainObject(o) { - var ctor,prot; - - if (isObjectObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (typeof ctor !== 'function') return false; - - // If has modified prototype - prot = ctor.prototype; - if (isObjectObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } - - // Most likely a plain Object - return true; -} - -module.exports = isPlainObject; - - -/***/ }), - -/***/ 697: -/***/ (function(module) { - -"use strict"; - -module.exports = (promise, onFinally) => { - onFinally = onFinally || (() => {}); - - return promise.then( - val => new Promise(resolve => { - resolve(onFinally()); - }).then(() => val), - err => new Promise(resolve => { - resolve(onFinally()); - }).then(() => { - throw err; - }) - ); -}; - - -/***/ }), - -/***/ 742: -/***/ (function(module, __unusedexports, __webpack_require__) { - -var fs = __webpack_require__(747) -var core -if (process.platform === 'win32' || global.TESTING_WINDOWS) { - core = __webpack_require__(818) -} else { - core = __webpack_require__(197) -} - -module.exports = isexe -isexe.sync = sync - -function isexe (path, options, cb) { - if (typeof options === 'function') { - cb = options - options = {} - } - - if (!cb) { - if (typeof Promise !== 'function') { - throw new TypeError('callback not provided') - } - - return new Promise(function (resolve, reject) { - isexe(path, options || {}, function (er, is) { - if (er) { - reject(er) - } else { - resolve(is) - } - }) - }) - } - - core(path, options || {}, function (er, is) { - // ignore EACCES because that just means we aren't allowed to run it - if (er) { - if (er.code === 'EACCES' || options && options.ignoreErrors) { - er = null - is = false - } - } - cb(er, is) - }) -} - -function sync (path, options) { - // my kingdom for a filtered catch - try { - return core.sync(path, options || {}) - } catch (er) { - if (options && options.ignoreErrors || er.code === 'EACCES') { - return false - } else { - throw er - } - } -} - - -/***/ }), - -/***/ 747: -/***/ (function(module) { - -module.exports = require("fs"); - -/***/ }), - -/***/ 753: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - -var endpoint = __webpack_require__(385); -var universalUserAgent = __webpack_require__(796); -var isPlainObject = _interopDefault(__webpack_require__(696)); -var nodeFetch = _interopDefault(__webpack_require__(454)); -var requestError = __webpack_require__(463); - -const VERSION = "5.4.3"; - -function getBufferResponse(response) { - return response.arrayBuffer(); -} - -function fetchWrapper(requestOptions) { - if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { - requestOptions.body = JSON.stringify(requestOptions.body); - } - - let headers = {}; - let status; - let url; - const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; - return fetch(requestOptions.url, Object.assign({ - method: requestOptions.method, - body: requestOptions.body, - headers: requestOptions.headers, - redirect: requestOptions.redirect - }, requestOptions.request)).then(response => { - url = response.url; - status = response.status; - - for (const keyAndValue of response.headers) { - headers[keyAndValue[0]] = keyAndValue[1]; - } - - if (status === 204 || status === 205) { - return; - } // GitHub API returns 200 for HEAD requests - - - if (requestOptions.method === "HEAD") { - if (status < 400) { - return; - } - - throw new requestError.RequestError(response.statusText, status, { - headers, - request: requestOptions - }); - } - - if (status === 304) { - throw new requestError.RequestError("Not modified", status, { - headers, - request: requestOptions - }); - } - - if (status >= 400) { - return response.text().then(message => { - const error = new requestError.RequestError(message, status, { - headers, - request: requestOptions - }); - - try { - let responseBody = JSON.parse(error.message); - Object.assign(error, responseBody); - let errors = responseBody.errors; // Assumption `errors` would always be in Array format - - error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); - } catch (e) {// ignore, see octokit/rest.js#684 - } - - throw error; - }); - } - - const contentType = response.headers.get("content-type"); - - if (/application\/json/.test(contentType)) { - return response.json(); - } - - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - - return getBufferResponse(response); - }).then(data => { - return { - status, - url, - headers, - data - }; - }).catch(error => { - if (error instanceof requestError.RequestError) { - throw error; - } - - throw new requestError.RequestError(error.message, 500, { - headers, - request: requestOptions - }); - }); -} - -function withDefaults(oldEndpoint, newDefaults) { - const endpoint = oldEndpoint.defaults(newDefaults); - - const newApi = function (route, parameters) { - const endpointOptions = endpoint.merge(route, parameters); - - if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint.parse(endpointOptions)); - } - - const request = (route, parameters) => { - return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); - }; - - Object.assign(request, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); - return endpointOptions.request.hook(request, endpointOptions); - }; - - return Object.assign(newApi, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); -} - -const request = withDefaults(endpoint.endpoint, { - headers: { - "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` - } -}); - -exports.request = request; -//# 
sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 761: -/***/ (function(module) { - -module.exports = require("zlib"); - -/***/ }), - -/***/ 763: -/***/ (function(module) { - -module.exports = removeHook - -function removeHook (state, name, method) { - if (!state.registry[name]) { - return - } - - var index = state.registry[name] - .map(function (registered) { return registered.orig }) - .indexOf(method) - - if (index === -1) { - return - } - - state.registry[name].splice(index, 1) -} - - -/***/ }), - -/***/ 768: -/***/ (function(module) { - -"use strict"; - -module.exports = function (x) { - var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt(); - var cr = typeof x === 'string' ? '\r' : '\r'.charCodeAt(); - - if (x[x.length - 1] === lf) { - x = x.slice(0, x.length - 1); - } - - if (x[x.length - 1] === cr) { - x = x.slice(0, x.length - 1); - } - - return x; -}; - - -/***/ }), - -/***/ 777: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = getFirstPage - -const getPage = __webpack_require__(265) - -function getFirstPage (octokit, link, headers) { - return getPage(octokit, link, 'first', headers) -} - - -/***/ }), - -/***/ 796: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var osName = _interopDefault(__webpack_require__(2)); - -function getUserAgent() { - try { - return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`; - } catch (error) { - if (/wmic os get Caption/.test(error.message)) { - return "Windows "; - } - - return ""; - } -} - -exports.getUserAgent = getUserAgent; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 813: -/***/ (function(__unusedmodule, exports) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -async function auth(token) { - const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth"; - return { - type: "token", - token: token, - tokenType - }; -} - -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; - } - - return `token ${token}`; -} - -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); -} - -const createTokenAuth = function createTokenAuth(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); - } - - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); - } - - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; - -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 814: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = which -which.sync = whichSync - -var isWindows = process.platform === 'win32' || - process.env.OSTYPE === 'cygwin' || - process.env.OSTYPE === 'msys' - -var path = __webpack_require__(622) -var COLON = isWindows ? 
';' : ':' -var isexe = __webpack_require__(742) - -function getNotFoundError (cmd) { - var er = new Error('not found: ' + cmd) - er.code = 'ENOENT' - - return er -} - -function getPathInfo (cmd, opt) { - var colon = opt.colon || COLON - var pathEnv = opt.path || process.env.PATH || '' - var pathExt = [''] - - pathEnv = pathEnv.split(colon) - - var pathExtExe = '' - if (isWindows) { - pathEnv.unshift(process.cwd()) - pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM') - pathExt = pathExtExe.split(colon) - - - // Always test the cmd itself first. isexe will check to make sure - // it's found in the pathExt set. - if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') - pathExt.unshift('') - } - - // If it has a slash, then we don't bother searching the pathenv. - // just check the file itself, and that's it. - if (cmd.match(/\//) || isWindows && cmd.match(/\\/)) - pathEnv = [''] - - return { - env: pathEnv, - ext: pathExt, - extExe: pathExtExe - } -} - -function which (cmd, opt, cb) { - if (typeof opt === 'function') { - cb = opt - opt = {} - } - - var info = getPathInfo(cmd, opt) - var pathEnv = info.env - var pathExt = info.ext - var pathExtExe = info.extExe - var found = [] - - ;(function F (i, l) { - if (i === l) { - if (opt.all && found.length) - return cb(null, found) - else - return cb(getNotFoundError(cmd)) - } - - var pathPart = pathEnv[i] - if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') - pathPart = pathPart.slice(1, -1) - - var p = path.join(pathPart, cmd) - if (!pathPart && (/^\.[\\\/]/).test(cmd)) { - p = cmd.slice(0, 2) + p - } - ;(function E (ii, ll) { - if (ii === ll) return F(i + 1, l) - var ext = pathExt[ii] - isexe(p + ext, { pathExt: pathExtExe }, function (er, is) { - if (!er && is) { - if (opt.all) - found.push(p + ext) - else - return cb(null, p + ext) - } - return E(ii + 1, ll) - }) - })(0, pathExt.length) - })(0, pathEnv.length) -} - -function whichSync (cmd, opt) { - opt = opt || {} - - var info = getPathInfo(cmd, opt) - var pathEnv = info.env - var pathExt = info.ext - var pathExtExe = info.extExe - var found = [] - - for (var i = 0, l = pathEnv.length; i < l; i ++) { - var pathPart = pathEnv[i] - if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') - pathPart = pathPart.slice(1, -1) - - var p = path.join(pathPart, cmd) - if (!pathPart && /^\.[\\\/]/.test(cmd)) { - p = cmd.slice(0, 2) + p - } - for (var j = 0, ll = pathExt.length; j < ll; j ++) { - var cur = p + pathExt[j] - var is - try { - is = isexe.sync(cur, { pathExt: pathExtExe }) - if (is) { - if (opt.all) - found.push(cur) - else - return cur - } - } catch (ex) {} - } - } - - if (opt.all && found.length) - return found - - if (opt.nothrow) - return null - - throw getNotFoundError(cmd) -} - - -/***/ }), - -/***/ 816: -/***/ (function(module) { - -"use strict"; - -module.exports = /^#!.*/; - - -/***/ }), - -/***/ 818: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = isexe -isexe.sync = sync - -var fs = __webpack_require__(747) - -function checkPathExt (path, options) { - var pathext = options.pathExt !== undefined ? 
- options.pathExt : process.env.PATHEXT - - if (!pathext) { - return true - } - - pathext = pathext.split(';') - if (pathext.indexOf('') !== -1) { - return true - } - for (var i = 0; i < pathext.length; i++) { - var p = pathext[i].toLowerCase() - if (p && path.substr(-p.length).toLowerCase() === p) { - return true - } - } - return false -} - -function checkStat (stat, path, options) { - if (!stat.isSymbolicLink() && !stat.isFile()) { - return false - } - return checkPathExt(path, options) -} - -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? false : checkStat(stat, path, options)) - }) -} - -function sync (path, options) { - return checkStat(fs.statSync(path), path, options) -} - - -/***/ }), - -/***/ 835: -/***/ (function(module) { - -module.exports = require("url"); - -/***/ }), - -/***/ 842: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -var deprecation = __webpack_require__(692); - -var endpointsByScope = { - actions: { - cancelWorkflowRun: { - method: "POST", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - run_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/runs/:run_id/cancel" - }, - createOrUpdateSecretForRepo: { - method: "PUT", - params: { - encrypted_value: { - type: "string" - }, - key_id: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/secrets/:name" - }, - createRegistrationToken: { - method: "POST", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/runners/registration-token" - }, - createRemoveToken: { - method: "POST", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/runners/remove-token" - }, - deleteArtifact: { - method: "DELETE", - params: { - artifact_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/artifacts/:artifact_id" - }, - deleteSecretFromRepo: { - method: "DELETE", - params: { - name: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/secrets/:name" - }, - downloadArtifact: { - method: "GET", - params: { - archive_format: { - required: true, - type: "string" - }, - artifact_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/artifacts/:artifact_id/:archive_format" - }, - getArtifact: { - method: "GET", - params: { - artifact_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/artifacts/:artifact_id" - }, - getPublicKey: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/secrets/public-key" - }, - 
getSecret: { - method: "GET", - params: { - name: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/secrets/:name" - }, - getSelfHostedRunner: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - runner_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/runners/:runner_id" - }, - getWorkflow: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - workflow_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/workflows/:workflow_id" - }, - getWorkflowJob: { - method: "GET", - params: { - job_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/jobs/:job_id" - }, - getWorkflowRun: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - run_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/runs/:run_id" - }, - listDownloadsForSelfHostedRunnerApplication: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/runners/downloads" - }, - listJobsForWorkflowRun: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - run_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/runs/:run_id/jobs" - }, - listRepoWorkflowRuns: { - method: "GET", - params: { - actor: { - type: "string" - }, - branch: { - type: "string" - }, - event: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - status: { - enum: ["completed", "status", "conclusion"], - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/runs" - }, - listRepoWorkflows: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/workflows" - }, - listSecretsForRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/secrets" - }, - listSelfHostedRunnersForRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/actions/runners" - }, - listWorkflowJobLogs: { - method: "GET", - params: { - job_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: 
"string" - } - }, - url: "/repos/:owner/:repo/actions/jobs/:job_id/logs" - }, - listWorkflowRunArtifacts: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - run_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/runs/:run_id/artifacts" - }, - listWorkflowRunLogs: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - run_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/runs/:run_id/logs" - }, - listWorkflowRuns: { - method: "GET", - params: { - actor: { - type: "string" - }, - branch: { - type: "string" - }, - event: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - status: { - enum: ["completed", "status", "conclusion"], - type: "string" - }, - workflow_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/workflows/:workflow_id/runs" - }, - reRunWorkflow: { - method: "POST", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - run_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/runs/:run_id/rerun" - }, - removeSelfHostedRunner: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - runner_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/actions/runners/:runner_id" - } - }, - activity: { - checkStarringRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/user/starred/:owner/:repo" - }, - deleteRepoSubscription: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/subscription" - }, - deleteThreadSubscription: { - method: "DELETE", - params: { - thread_id: { - required: true, - type: "integer" - } - }, - url: "/notifications/threads/:thread_id/subscription" - }, - getRepoSubscription: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/subscription" - }, - getThread: { - method: "GET", - params: { - thread_id: { - required: true, - type: "integer" - } - }, - url: "/notifications/threads/:thread_id" - }, - getThreadSubscription: { - method: "GET", - params: { - thread_id: { - required: true, - type: "integer" - } - }, - url: "/notifications/threads/:thread_id/subscription" - }, - listEventsForOrg: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/events/orgs/:org" - }, - listEventsForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/events" - }, - listFeeds: { - method: "GET", - params: {}, - url: 
"/feeds" - }, - listNotifications: { - method: "GET", - params: { - all: { - type: "boolean" - }, - before: { - type: "string" - }, - page: { - type: "integer" - }, - participating: { - type: "boolean" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - } - }, - url: "/notifications" - }, - listNotificationsForRepo: { - method: "GET", - params: { - all: { - type: "boolean" - }, - before: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - participating: { - type: "boolean" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - since: { - type: "string" - } - }, - url: "/repos/:owner/:repo/notifications" - }, - listPublicEvents: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/events" - }, - listPublicEventsForOrg: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/events" - }, - listPublicEventsForRepoNetwork: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/networks/:owner/:repo/events" - }, - listPublicEventsForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/events/public" - }, - listReceivedEventsForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/received_events" - }, - listReceivedPublicEventsForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/received_events/public" - }, - listRepoEvents: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/events" - }, - listReposStarredByAuthenticatedUser: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - sort: { - enum: ["created", "updated"], - type: "string" - } - }, - url: "/user/starred" - }, - listReposStarredByUser: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - sort: { - enum: ["created", "updated"], - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/starred" - }, - listReposWatchedByUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/subscriptions" - }, - listStargazersForRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/stargazers" - }, - listWatchedReposForAuthenticatedUser: { - 
method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/subscriptions" - }, - listWatchersForRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/subscribers" - }, - markAsRead: { - method: "PUT", - params: { - last_read_at: { - type: "string" - } - }, - url: "/notifications" - }, - markNotificationsAsReadForRepo: { - method: "PUT", - params: { - last_read_at: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/notifications" - }, - markThreadAsRead: { - method: "PATCH", - params: { - thread_id: { - required: true, - type: "integer" - } - }, - url: "/notifications/threads/:thread_id" - }, - setRepoSubscription: { - method: "PUT", - params: { - ignored: { - type: "boolean" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - subscribed: { - type: "boolean" - } - }, - url: "/repos/:owner/:repo/subscription" - }, - setThreadSubscription: { - method: "PUT", - params: { - ignored: { - type: "boolean" - }, - thread_id: { - required: true, - type: "integer" - } - }, - url: "/notifications/threads/:thread_id/subscription" - }, - starRepo: { - method: "PUT", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/user/starred/:owner/:repo" - }, - unstarRepo: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/user/starred/:owner/:repo" - } - }, - apps: { - addRepoToInstallation: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "PUT", - params: { - installation_id: { - required: true, - type: "integer" - }, - repository_id: { - required: true, - type: "integer" - } - }, - url: "/user/installations/:installation_id/repositories/:repository_id" - }, - checkAccountIsAssociatedWithAny: { - method: "GET", - params: { - account_id: { - required: true, - type: "integer" - } - }, - url: "/marketplace_listing/accounts/:account_id" - }, - checkAccountIsAssociatedWithAnyStubbed: { - method: "GET", - params: { - account_id: { - required: true, - type: "integer" - } - }, - url: "/marketplace_listing/stubbed/accounts/:account_id" - }, - checkAuthorization: { - deprecated: "octokit.apps.checkAuthorization() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#check-an-authorization", - method: "GET", - params: { - access_token: { - required: true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/tokens/:access_token" - }, - checkToken: { - headers: { - accept: "application/vnd.github.doctor-strange-preview+json" - }, - method: "POST", - params: { - access_token: { - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/token" - }, - createContentAttachment: { - headers: { - accept: "application/vnd.github.corsair-preview+json" - }, - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - content_reference_id: { - required: true, - type: "integer" - }, - title: { - required: true, - type: "string" - } - }, - url: 
"/content_references/:content_reference_id/attachments" - }, - createFromManifest: { - headers: { - accept: "application/vnd.github.fury-preview+json" - }, - method: "POST", - params: { - code: { - required: true, - type: "string" - } - }, - url: "/app-manifests/:code/conversions" - }, - createInstallationToken: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "POST", - params: { - installation_id: { - required: true, - type: "integer" - }, - permissions: { - type: "object" - }, - repository_ids: { - type: "integer[]" - } - }, - url: "/app/installations/:installation_id/access_tokens" - }, - deleteAuthorization: { - headers: { - accept: "application/vnd.github.doctor-strange-preview+json" - }, - method: "DELETE", - params: { - access_token: { - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/grant" - }, - deleteInstallation: { - headers: { - accept: "application/vnd.github.gambit-preview+json,application/vnd.github.machine-man-preview+json" - }, - method: "DELETE", - params: { - installation_id: { - required: true, - type: "integer" - } - }, - url: "/app/installations/:installation_id" - }, - deleteToken: { - headers: { - accept: "application/vnd.github.doctor-strange-preview+json" - }, - method: "DELETE", - params: { - access_token: { - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/token" - }, - findOrgInstallation: { - deprecated: "octokit.apps.findOrgInstallation() has been renamed to octokit.apps.getOrgInstallation() (2019-04-10)", - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/installation" - }, - findRepoInstallation: { - deprecated: "octokit.apps.findRepoInstallation() has been renamed to octokit.apps.getRepoInstallation() (2019-04-10)", - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/installation" - }, - findUserInstallation: { - deprecated: "octokit.apps.findUserInstallation() has been renamed to octokit.apps.getUserInstallation() (2019-04-10)", - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/installation" - }, - getAuthenticated: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: {}, - url: "/app" - }, - getBySlug: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - app_slug: { - required: true, - type: "string" - } - }, - url: "/apps/:app_slug" - }, - getInstallation: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - installation_id: { - required: true, - type: "integer" - } - }, - url: "/app/installations/:installation_id" - }, - getOrgInstallation: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/installation" - }, - getRepoInstallation: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - 
owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/installation" - }, - getUserInstallation: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/installation" - }, - listAccountsUserOrOrgOnPlan: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - plan_id: { - required: true, - type: "integer" - }, - sort: { - enum: ["created", "updated"], - type: "string" - } - }, - url: "/marketplace_listing/plans/:plan_id/accounts" - }, - listAccountsUserOrOrgOnPlanStubbed: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - plan_id: { - required: true, - type: "integer" - }, - sort: { - enum: ["created", "updated"], - type: "string" - } - }, - url: "/marketplace_listing/stubbed/plans/:plan_id/accounts" - }, - listInstallationReposForAuthenticatedUser: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - installation_id: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/installations/:installation_id/repositories" - }, - listInstallations: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/app/installations" - }, - listInstallationsForAuthenticatedUser: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/installations" - }, - listMarketplacePurchasesForAuthenticatedUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/marketplace_purchases" - }, - listMarketplacePurchasesForAuthenticatedUserStubbed: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/marketplace_purchases/stubbed" - }, - listPlans: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/marketplace_listing/plans" - }, - listPlansStubbed: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/marketplace_listing/stubbed/plans" - }, - listRepos: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/installation/repositories" - }, - removeRepoFromInstallation: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "DELETE", - params: { - installation_id: { - required: true, - type: "integer" - }, - repository_id: { - required: true, - type: "integer" - } - }, - url: "/user/installations/:installation_id/repositories/:repository_id" - }, - resetAuthorization: { - deprecated: "octokit.apps.resetAuthorization() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#reset-an-authorization", - method: "POST", - params: { - access_token: { - required: 
true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/tokens/:access_token" - }, - resetToken: { - headers: { - accept: "application/vnd.github.doctor-strange-preview+json" - }, - method: "PATCH", - params: { - access_token: { - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/token" - }, - revokeAuthorizationForApplication: { - deprecated: "octokit.apps.revokeAuthorizationForApplication() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#revoke-an-authorization-for-an-application", - method: "DELETE", - params: { - access_token: { - required: true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/tokens/:access_token" - }, - revokeGrantForApplication: { - deprecated: "octokit.apps.revokeGrantForApplication() is deprecated, see https://developer.github.com/v3/apps/oauth_applications/#revoke-a-grant-for-an-application", - method: "DELETE", - params: { - access_token: { - required: true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/grants/:access_token" - }, - revokeInstallationToken: { - headers: { - accept: "application/vnd.github.gambit-preview+json" - }, - method: "DELETE", - params: {}, - url: "/installation/token" - } - }, - checks: { - create: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "POST", - params: { - actions: { - type: "object[]" - }, - "actions[].description": { - required: true, - type: "string" - }, - "actions[].identifier": { - required: true, - type: "string" - }, - "actions[].label": { - required: true, - type: "string" - }, - completed_at: { - type: "string" - }, - conclusion: { - enum: ["success", "failure", "neutral", "cancelled", "timed_out", "action_required"], - type: "string" - }, - details_url: { - type: "string" - }, - external_id: { - type: "string" - }, - head_sha: { - required: true, - type: "string" - }, - name: { - required: true, - type: "string" - }, - output: { - type: "object" - }, - "output.annotations": { - type: "object[]" - }, - "output.annotations[].annotation_level": { - enum: ["notice", "warning", "failure"], - required: true, - type: "string" - }, - "output.annotations[].end_column": { - type: "integer" - }, - "output.annotations[].end_line": { - required: true, - type: "integer" - }, - "output.annotations[].message": { - required: true, - type: "string" - }, - "output.annotations[].path": { - required: true, - type: "string" - }, - "output.annotations[].raw_details": { - type: "string" - }, - "output.annotations[].start_column": { - type: "integer" - }, - "output.annotations[].start_line": { - required: true, - type: "integer" - }, - "output.annotations[].title": { - type: "string" - }, - "output.images": { - type: "object[]" - }, - "output.images[].alt": { - required: true, - type: "string" - }, - "output.images[].caption": { - type: "string" - }, - "output.images[].image_url": { - required: true, - type: "string" - }, - "output.summary": { - required: true, - type: "string" - }, - "output.text": { - type: "string" - }, - "output.title": { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - started_at: { - type: "string" - }, - status: { - enum: ["queued", "in_progress", "completed"], - type: "string" - } - }, - url: 
"/repos/:owner/:repo/check-runs" - }, - createSuite: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "POST", - params: { - head_sha: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/check-suites" - }, - get: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "GET", - params: { - check_run_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/check-runs/:check_run_id" - }, - getSuite: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "GET", - params: { - check_suite_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/check-suites/:check_suite_id" - }, - listAnnotations: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "GET", - params: { - check_run_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/check-runs/:check_run_id/annotations" - }, - listForRef: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "GET", - params: { - check_name: { - type: "string" - }, - filter: { - enum: ["latest", "all"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - status: { - enum: ["queued", "in_progress", "completed"], - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:ref/check-runs" - }, - listForSuite: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "GET", - params: { - check_name: { - type: "string" - }, - check_suite_id: { - required: true, - type: "integer" - }, - filter: { - enum: ["latest", "all"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - status: { - enum: ["queued", "in_progress", "completed"], - type: "string" - } - }, - url: "/repos/:owner/:repo/check-suites/:check_suite_id/check-runs" - }, - listSuitesForRef: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "GET", - params: { - app_id: { - type: "integer" - }, - check_name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:ref/check-suites" - }, - rerequestSuite: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "POST", - params: { - check_suite_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/check-suites/:check_suite_id/rerequest" - }, - setSuitesPreferences: { - headers: { - 
accept: "application/vnd.github.antiope-preview+json" - }, - method: "PATCH", - params: { - auto_trigger_checks: { - type: "object[]" - }, - "auto_trigger_checks[].app_id": { - required: true, - type: "integer" - }, - "auto_trigger_checks[].setting": { - required: true, - type: "boolean" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/check-suites/preferences" - }, - update: { - headers: { - accept: "application/vnd.github.antiope-preview+json" - }, - method: "PATCH", - params: { - actions: { - type: "object[]" - }, - "actions[].description": { - required: true, - type: "string" - }, - "actions[].identifier": { - required: true, - type: "string" - }, - "actions[].label": { - required: true, - type: "string" - }, - check_run_id: { - required: true, - type: "integer" - }, - completed_at: { - type: "string" - }, - conclusion: { - enum: ["success", "failure", "neutral", "cancelled", "timed_out", "action_required"], - type: "string" - }, - details_url: { - type: "string" - }, - external_id: { - type: "string" - }, - name: { - type: "string" - }, - output: { - type: "object" - }, - "output.annotations": { - type: "object[]" - }, - "output.annotations[].annotation_level": { - enum: ["notice", "warning", "failure"], - required: true, - type: "string" - }, - "output.annotations[].end_column": { - type: "integer" - }, - "output.annotations[].end_line": { - required: true, - type: "integer" - }, - "output.annotations[].message": { - required: true, - type: "string" - }, - "output.annotations[].path": { - required: true, - type: "string" - }, - "output.annotations[].raw_details": { - type: "string" - }, - "output.annotations[].start_column": { - type: "integer" - }, - "output.annotations[].start_line": { - required: true, - type: "integer" - }, - "output.annotations[].title": { - type: "string" - }, - "output.images": { - type: "object[]" - }, - "output.images[].alt": { - required: true, - type: "string" - }, - "output.images[].caption": { - type: "string" - }, - "output.images[].image_url": { - required: true, - type: "string" - }, - "output.summary": { - required: true, - type: "string" - }, - "output.text": { - type: "string" - }, - "output.title": { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - started_at: { - type: "string" - }, - status: { - enum: ["queued", "in_progress", "completed"], - type: "string" - } - }, - url: "/repos/:owner/:repo/check-runs/:check_run_id" - } - }, - codesOfConduct: { - getConductCode: { - headers: { - accept: "application/vnd.github.scarlet-witch-preview+json" - }, - method: "GET", - params: { - key: { - required: true, - type: "string" - } - }, - url: "/codes_of_conduct/:key" - }, - getForRepo: { - headers: { - accept: "application/vnd.github.scarlet-witch-preview+json" - }, - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/community/code_of_conduct" - }, - listConductCodes: { - headers: { - accept: "application/vnd.github.scarlet-witch-preview+json" - }, - method: "GET", - params: {}, - url: "/codes_of_conduct" - } - }, - emojis: { - get: { - method: "GET", - params: {}, - url: "/emojis" - } - }, - gists: { - checkIsStarred: { - method: "GET", - params: { - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/star" - }, - create: { - method: "POST", - params: { - 
description: { - type: "string" - }, - files: { - required: true, - type: "object" - }, - "files.content": { - type: "string" - }, - public: { - type: "boolean" - } - }, - url: "/gists" - }, - createComment: { - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/comments" - }, - delete: { - method: "DELETE", - params: { - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id" - }, - deleteComment: { - method: "DELETE", - params: { - comment_id: { - required: true, - type: "integer" - }, - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/comments/:comment_id" - }, - fork: { - method: "POST", - params: { - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/forks" - }, - get: { - method: "GET", - params: { - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id" - }, - getComment: { - method: "GET", - params: { - comment_id: { - required: true, - type: "integer" - }, - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/comments/:comment_id" - }, - getRevision: { - method: "GET", - params: { - gist_id: { - required: true, - type: "string" - }, - sha: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/:sha" - }, - list: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - } - }, - url: "/gists" - }, - listComments: { - method: "GET", - params: { - gist_id: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/gists/:gist_id/comments" - }, - listCommits: { - method: "GET", - params: { - gist_id: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/gists/:gist_id/commits" - }, - listForks: { - method: "GET", - params: { - gist_id: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/gists/:gist_id/forks" - }, - listPublic: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - } - }, - url: "/gists/public" - }, - listPublicForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/gists" - }, - listStarred: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - } - }, - url: "/gists/starred" - }, - star: { - method: "PUT", - params: { - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/star" - }, - unstar: { - method: "DELETE", - params: { - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id/star" - }, - update: { - method: "PATCH", - params: { - description: { - type: "string" - }, - files: { - type: "object" - }, - "files.content": { - type: "string" - }, - "files.filename": { - type: "string" - }, - gist_id: { - required: true, - type: "string" - } - }, - url: "/gists/:gist_id" - }, - updateComment: { - method: "PATCH", - params: { - body: { - required: true, - type: "string" - }, - comment_id: { - required: true, - type: "integer" - }, - gist_id: { - required: true, - type: 
"string" - } - }, - url: "/gists/:gist_id/comments/:comment_id" - } - }, - git: { - createBlob: { - method: "POST", - params: { - content: { - required: true, - type: "string" - }, - encoding: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/blobs" - }, - createCommit: { - method: "POST", - params: { - author: { - type: "object" - }, - "author.date": { - type: "string" - }, - "author.email": { - type: "string" - }, - "author.name": { - type: "string" - }, - committer: { - type: "object" - }, - "committer.date": { - type: "string" - }, - "committer.email": { - type: "string" - }, - "committer.name": { - type: "string" - }, - message: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - parents: { - required: true, - type: "string[]" - }, - repo: { - required: true, - type: "string" - }, - signature: { - type: "string" - }, - tree: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/commits" - }, - createRef: { - method: "POST", - params: { - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/refs" - }, - createTag: { - method: "POST", - params: { - message: { - required: true, - type: "string" - }, - object: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - tag: { - required: true, - type: "string" - }, - tagger: { - type: "object" - }, - "tagger.date": { - type: "string" - }, - "tagger.email": { - type: "string" - }, - "tagger.name": { - type: "string" - }, - type: { - enum: ["commit", "tree", "blob"], - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/tags" - }, - createTree: { - method: "POST", - params: { - base_tree: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - tree: { - required: true, - type: "object[]" - }, - "tree[].content": { - type: "string" - }, - "tree[].mode": { - enum: ["100644", "100755", "040000", "160000", "120000"], - type: "string" - }, - "tree[].path": { - type: "string" - }, - "tree[].sha": { - allowNull: true, - type: "string" - }, - "tree[].type": { - enum: ["blob", "tree", "commit"], - type: "string" - } - }, - url: "/repos/:owner/:repo/git/trees" - }, - deleteRef: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/refs/:ref" - }, - getBlob: { - method: "GET", - params: { - file_sha: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/blobs/:file_sha" - }, - getCommit: { - method: "GET", - params: { - commit_sha: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/commits/:commit_sha" - }, - getRef: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: 
"/repos/:owner/:repo/git/ref/:ref" - }, - getTag: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - tag_sha: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/tags/:tag_sha" - }, - getTree: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - recursive: { - enum: ["1"], - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - tree_sha: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/trees/:tree_sha" - }, - listMatchingRefs: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/matching-refs/:ref" - }, - listRefs: { - method: "GET", - params: { - namespace: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/refs/:namespace" - }, - updateRef: { - method: "PATCH", - params: { - force: { - type: "boolean" - }, - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/git/refs/:ref" - } - }, - gitignore: { - getTemplate: { - method: "GET", - params: { - name: { - required: true, - type: "string" - } - }, - url: "/gitignore/templates/:name" - }, - listTemplates: { - method: "GET", - params: {}, - url: "/gitignore/templates" - } - }, - interactions: { - addOrUpdateRestrictionsForOrg: { - headers: { - accept: "application/vnd.github.sombra-preview+json" - }, - method: "PUT", - params: { - limit: { - enum: ["existing_users", "contributors_only", "collaborators_only"], - required: true, - type: "string" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/interaction-limits" - }, - addOrUpdateRestrictionsForRepo: { - headers: { - accept: "application/vnd.github.sombra-preview+json" - }, - method: "PUT", - params: { - limit: { - enum: ["existing_users", "contributors_only", "collaborators_only"], - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/interaction-limits" - }, - getRestrictionsForOrg: { - headers: { - accept: "application/vnd.github.sombra-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/interaction-limits" - }, - getRestrictionsForRepo: { - headers: { - accept: "application/vnd.github.sombra-preview+json" - }, - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/interaction-limits" - }, - removeRestrictionsForOrg: { - headers: { - accept: "application/vnd.github.sombra-preview+json" - }, - method: "DELETE", - params: { - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/interaction-limits" - }, - removeRestrictionsForRepo: { - headers: { - accept: "application/vnd.github.sombra-preview+json" - }, - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { 
- required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/interaction-limits" - } - }, - issues: { - addAssignees: { - method: "POST", - params: { - assignees: { - type: "string[]" - }, - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/assignees" - }, - addLabels: { - method: "POST", - params: { - issue_number: { - required: true, - type: "integer" - }, - labels: { - required: true, - type: "string[]" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/labels" - }, - checkAssignee: { - method: "GET", - params: { - assignee: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/assignees/:assignee" - }, - create: { - method: "POST", - params: { - assignee: { - type: "string" - }, - assignees: { - type: "string[]" - }, - body: { - type: "string" - }, - labels: { - type: "string[]" - }, - milestone: { - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - title: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues" - }, - createComment: { - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/comments" - }, - createLabel: { - method: "POST", - params: { - color: { - required: true, - type: "string" - }, - description: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/labels" - }, - createMilestone: { - method: "POST", - params: { - description: { - type: "string" - }, - due_on: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - state: { - enum: ["open", "closed"], - type: "string" - }, - title: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/milestones" - }, - deleteComment: { - method: "DELETE", - params: { - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/comments/:comment_id" - }, - deleteLabel: { - method: "DELETE", - params: { - name: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/labels/:name" - }, - deleteMilestone: { - method: "DELETE", - params: { - milestone_number: { - required: true, - type: "integer" - }, - number: { - alias: "milestone_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - 
url: "/repos/:owner/:repo/milestones/:milestone_number" - }, - get: { - method: "GET", - params: { - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number" - }, - getComment: { - method: "GET", - params: { - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/comments/:comment_id" - }, - getEvent: { - method: "GET", - params: { - event_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/events/:event_id" - }, - getLabel: { - method: "GET", - params: { - name: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/labels/:name" - }, - getMilestone: { - method: "GET", - params: { - milestone_number: { - required: true, - type: "integer" - }, - number: { - alias: "milestone_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/milestones/:milestone_number" - }, - list: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - filter: { - enum: ["assigned", "created", "mentioned", "subscribed", "all"], - type: "string" - }, - labels: { - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - }, - sort: { - enum: ["created", "updated", "comments"], - type: "string" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/issues" - }, - listAssignees: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/assignees" - }, - listComments: { - method: "GET", - params: { - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - since: { - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/comments" - }, - listCommentsForRepo: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - since: { - type: "string" - }, - sort: { - enum: ["created", "updated"], - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/comments" - }, - listEvents: { - method: "GET", - params: { - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/events" - 
}, - listEventsForRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/events" - }, - listEventsForTimeline: { - headers: { - accept: "application/vnd.github.mockingbird-preview+json" - }, - method: "GET", - params: { - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/timeline" - }, - listForAuthenticatedUser: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - filter: { - enum: ["assigned", "created", "mentioned", "subscribed", "all"], - type: "string" - }, - labels: { - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - }, - sort: { - enum: ["created", "updated", "comments"], - type: "string" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/user/issues" - }, - listForOrg: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - filter: { - enum: ["assigned", "created", "mentioned", "subscribed", "all"], - type: "string" - }, - labels: { - type: "string" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - }, - sort: { - enum: ["created", "updated", "comments"], - type: "string" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/orgs/:org/issues" - }, - listForRepo: { - method: "GET", - params: { - assignee: { - type: "string" - }, - creator: { - type: "string" - }, - direction: { - enum: ["asc", "desc"], - type: "string" - }, - labels: { - type: "string" - }, - mentioned: { - type: "string" - }, - milestone: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - since: { - type: "string" - }, - sort: { - enum: ["created", "updated", "comments"], - type: "string" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/repos/:owner/:repo/issues" - }, - listLabelsForMilestone: { - method: "GET", - params: { - milestone_number: { - required: true, - type: "integer" - }, - number: { - alias: "milestone_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/milestones/:milestone_number/labels" - }, - listLabelsForRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/labels" - }, - listLabelsOnIssue: { - method: "GET", - params: { - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: 
"integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/labels" - }, - listMilestonesForRepo: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - sort: { - enum: ["due_on", "completeness"], - type: "string" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/repos/:owner/:repo/milestones" - }, - lock: { - method: "PUT", - params: { - issue_number: { - required: true, - type: "integer" - }, - lock_reason: { - enum: ["off-topic", "too heated", "resolved", "spam"], - type: "string" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/lock" - }, - removeAssignees: { - method: "DELETE", - params: { - assignees: { - type: "string[]" - }, - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/assignees" - }, - removeLabel: { - method: "DELETE", - params: { - issue_number: { - required: true, - type: "integer" - }, - name: { - required: true, - type: "string" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/labels/:name" - }, - removeLabels: { - method: "DELETE", - params: { - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/labels" - }, - replaceLabels: { - method: "PUT", - params: { - issue_number: { - required: true, - type: "integer" - }, - labels: { - type: "string[]" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/labels" - }, - unlock: { - method: "DELETE", - params: { - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/lock" - }, - update: { - method: "PATCH", - params: { - assignee: { - type: "string" - }, - assignees: { - type: "string[]" - }, - body: { - type: "string" - }, - issue_number: { - required: true, - type: "integer" - }, - labels: { - type: "string[]" - }, - milestone: { - allowNull: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - state: { - enum: ["open", "closed"], - type: "string" - }, - title: { - type: 
"string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number" - }, - updateComment: { - method: "PATCH", - params: { - body: { - required: true, - type: "string" - }, - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/comments/:comment_id" - }, - updateLabel: { - method: "PATCH", - params: { - color: { - type: "string" - }, - current_name: { - required: true, - type: "string" - }, - description: { - type: "string" - }, - name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/labels/:current_name" - }, - updateMilestone: { - method: "PATCH", - params: { - description: { - type: "string" - }, - due_on: { - type: "string" - }, - milestone_number: { - required: true, - type: "integer" - }, - number: { - alias: "milestone_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - state: { - enum: ["open", "closed"], - type: "string" - }, - title: { - type: "string" - } - }, - url: "/repos/:owner/:repo/milestones/:milestone_number" - } - }, - licenses: { - get: { - method: "GET", - params: { - license: { - required: true, - type: "string" - } - }, - url: "/licenses/:license" - }, - getForRepo: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/license" - }, - list: { - deprecated: "octokit.licenses.list() has been renamed to octokit.licenses.listCommonlyUsed() (2019-03-05)", - method: "GET", - params: {}, - url: "/licenses" - }, - listCommonlyUsed: { - method: "GET", - params: {}, - url: "/licenses" - } - }, - markdown: { - render: { - method: "POST", - params: { - context: { - type: "string" - }, - mode: { - enum: ["markdown", "gfm"], - type: "string" - }, - text: { - required: true, - type: "string" - } - }, - url: "/markdown" - }, - renderRaw: { - headers: { - "content-type": "text/plain; charset=utf-8" - }, - method: "POST", - params: { - data: { - mapTo: "data", - required: true, - type: "string" - } - }, - url: "/markdown/raw" - } - }, - meta: { - get: { - method: "GET", - params: {}, - url: "/meta" - } - }, - migrations: { - cancelImport: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/import" - }, - deleteArchiveForAuthenticatedUser: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "DELETE", - params: { - migration_id: { - required: true, - type: "integer" - } - }, - url: "/user/migrations/:migration_id/archive" - }, - deleteArchiveForOrg: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "DELETE", - params: { - migration_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/migrations/:migration_id/archive" - }, - downloadArchiveForOrg: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - migration_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/migrations/:migration_id/archive" - }, - getArchiveForAuthenticatedUser: { - headers: { - accept: 
"application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - migration_id: { - required: true, - type: "integer" - } - }, - url: "/user/migrations/:migration_id/archive" - }, - getArchiveForOrg: { - deprecated: "octokit.migrations.getArchiveForOrg() has been renamed to octokit.migrations.downloadArchiveForOrg() (2020-01-27)", - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - migration_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/migrations/:migration_id/archive" - }, - getCommitAuthors: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - since: { - type: "string" - } - }, - url: "/repos/:owner/:repo/import/authors" - }, - getImportProgress: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/import" - }, - getLargeFiles: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/import/large_files" - }, - getStatusForAuthenticatedUser: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - migration_id: { - required: true, - type: "integer" - } - }, - url: "/user/migrations/:migration_id" - }, - getStatusForOrg: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - migration_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/migrations/:migration_id" - }, - listForAuthenticatedUser: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/migrations" - }, - listForOrg: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/migrations" - }, - listReposForOrg: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - migration_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/migrations/:migration_id/repositories" - }, - listReposForUser: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "GET", - params: { - migration_id: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/:migration_id/repositories" - }, - mapCommitAuthor: { - method: "PATCH", - params: { - author_id: { - required: true, - type: "integer" - }, - email: { - type: "string" - }, - name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/import/authors/:author_id" - }, - setLfsPreference: { - method: "PATCH", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - use_lfs: { - enum: ["opt_in", "opt_out"], - required: 
true, - type: "string" - } - }, - url: "/repos/:owner/:repo/import/lfs" - }, - startForAuthenticatedUser: { - method: "POST", - params: { - exclude_attachments: { - type: "boolean" - }, - lock_repositories: { - type: "boolean" - }, - repositories: { - required: true, - type: "string[]" - } - }, - url: "/user/migrations" - }, - startForOrg: { - method: "POST", - params: { - exclude_attachments: { - type: "boolean" - }, - lock_repositories: { - type: "boolean" - }, - org: { - required: true, - type: "string" - }, - repositories: { - required: true, - type: "string[]" - } - }, - url: "/orgs/:org/migrations" - }, - startImport: { - method: "PUT", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - tfvc_project: { - type: "string" - }, - vcs: { - enum: ["subversion", "git", "mercurial", "tfvc"], - type: "string" - }, - vcs_password: { - type: "string" - }, - vcs_url: { - required: true, - type: "string" - }, - vcs_username: { - type: "string" - } - }, - url: "/repos/:owner/:repo/import" - }, - unlockRepoForAuthenticatedUser: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "DELETE", - params: { - migration_id: { - required: true, - type: "integer" - }, - repo_name: { - required: true, - type: "string" - } - }, - url: "/user/migrations/:migration_id/repos/:repo_name/lock" - }, - unlockRepoForOrg: { - headers: { - accept: "application/vnd.github.wyandotte-preview+json" - }, - method: "DELETE", - params: { - migration_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - repo_name: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/migrations/:migration_id/repos/:repo_name/lock" - }, - updateImport: { - method: "PATCH", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - vcs_password: { - type: "string" - }, - vcs_username: { - type: "string" - } - }, - url: "/repos/:owner/:repo/import" - } - }, - oauthAuthorizations: { - checkAuthorization: { - deprecated: "octokit.oauthAuthorizations.checkAuthorization() has been renamed to octokit.apps.checkAuthorization() (2019-11-05)", - method: "GET", - params: { - access_token: { - required: true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/tokens/:access_token" - }, - createAuthorization: { - deprecated: "octokit.oauthAuthorizations.createAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#create-a-new-authorization", - method: "POST", - params: { - client_id: { - type: "string" - }, - client_secret: { - type: "string" - }, - fingerprint: { - type: "string" - }, - note: { - required: true, - type: "string" - }, - note_url: { - type: "string" - }, - scopes: { - type: "string[]" - } - }, - url: "/authorizations" - }, - deleteAuthorization: { - deprecated: "octokit.oauthAuthorizations.deleteAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#delete-an-authorization", - method: "DELETE", - params: { - authorization_id: { - required: true, - type: "integer" - } - }, - url: "/authorizations/:authorization_id" - }, - deleteGrant: { - deprecated: "octokit.oauthAuthorizations.deleteGrant() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#delete-a-grant", - method: "DELETE", - params: { - grant_id: { - required: true, - type: "integer" - } - }, - url: 
"/applications/grants/:grant_id" - }, - getAuthorization: { - deprecated: "octokit.oauthAuthorizations.getAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-a-single-authorization", - method: "GET", - params: { - authorization_id: { - required: true, - type: "integer" - } - }, - url: "/authorizations/:authorization_id" - }, - getGrant: { - deprecated: "octokit.oauthAuthorizations.getGrant() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-a-single-grant", - method: "GET", - params: { - grant_id: { - required: true, - type: "integer" - } - }, - url: "/applications/grants/:grant_id" - }, - getOrCreateAuthorizationForApp: { - deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForApp() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-or-create-an-authorization-for-a-specific-app", - method: "PUT", - params: { - client_id: { - required: true, - type: "string" - }, - client_secret: { - required: true, - type: "string" - }, - fingerprint: { - type: "string" - }, - note: { - type: "string" - }, - note_url: { - type: "string" - }, - scopes: { - type: "string[]" - } - }, - url: "/authorizations/clients/:client_id" - }, - getOrCreateAuthorizationForAppAndFingerprint: { - deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForAppAndFingerprint() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#get-or-create-an-authorization-for-a-specific-app-and-fingerprint", - method: "PUT", - params: { - client_id: { - required: true, - type: "string" - }, - client_secret: { - required: true, - type: "string" - }, - fingerprint: { - required: true, - type: "string" - }, - note: { - type: "string" - }, - note_url: { - type: "string" - }, - scopes: { - type: "string[]" - } - }, - url: "/authorizations/clients/:client_id/:fingerprint" - }, - getOrCreateAuthorizationForAppFingerprint: { - deprecated: "octokit.oauthAuthorizations.getOrCreateAuthorizationForAppFingerprint() has been renamed to octokit.oauthAuthorizations.getOrCreateAuthorizationForAppAndFingerprint() (2018-12-27)", - method: "PUT", - params: { - client_id: { - required: true, - type: "string" - }, - client_secret: { - required: true, - type: "string" - }, - fingerprint: { - required: true, - type: "string" - }, - note: { - type: "string" - }, - note_url: { - type: "string" - }, - scopes: { - type: "string[]" - } - }, - url: "/authorizations/clients/:client_id/:fingerprint" - }, - listAuthorizations: { - deprecated: "octokit.oauthAuthorizations.listAuthorizations() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#list-your-authorizations", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/authorizations" - }, - listGrants: { - deprecated: "octokit.oauthAuthorizations.listGrants() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#list-your-grants", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/applications/grants" - }, - resetAuthorization: { - deprecated: "octokit.oauthAuthorizations.resetAuthorization() has been renamed to octokit.apps.resetAuthorization() (2019-11-05)", - method: "POST", - params: { - access_token: { - required: true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/tokens/:access_token" - }, - revokeAuthorizationForApplication: { - deprecated: 
"octokit.oauthAuthorizations.revokeAuthorizationForApplication() has been renamed to octokit.apps.revokeAuthorizationForApplication() (2019-11-05)", - method: "DELETE", - params: { - access_token: { - required: true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/tokens/:access_token" - }, - revokeGrantForApplication: { - deprecated: "octokit.oauthAuthorizations.revokeGrantForApplication() has been renamed to octokit.apps.revokeGrantForApplication() (2019-11-05)", - method: "DELETE", - params: { - access_token: { - required: true, - type: "string" - }, - client_id: { - required: true, - type: "string" - } - }, - url: "/applications/:client_id/grants/:access_token" - }, - updateAuthorization: { - deprecated: "octokit.oauthAuthorizations.updateAuthorization() is deprecated, see https://developer.github.com/v3/oauth_authorizations/#update-an-existing-authorization", - method: "PATCH", - params: { - add_scopes: { - type: "string[]" - }, - authorization_id: { - required: true, - type: "integer" - }, - fingerprint: { - type: "string" - }, - note: { - type: "string" - }, - note_url: { - type: "string" - }, - remove_scopes: { - type: "string[]" - }, - scopes: { - type: "string[]" - } - }, - url: "/authorizations/:authorization_id" - } - }, - orgs: { - addOrUpdateMembership: { - method: "PUT", - params: { - org: { - required: true, - type: "string" - }, - role: { - enum: ["admin", "member"], - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/memberships/:username" - }, - blockUser: { - method: "PUT", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/blocks/:username" - }, - checkBlockedUser: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/blocks/:username" - }, - checkMembership: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/members/:username" - }, - checkPublicMembership: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/public_members/:username" - }, - concealMembership: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/public_members/:username" - }, - convertMemberToOutsideCollaborator: { - method: "PUT", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/outside_collaborators/:username" - }, - createHook: { - method: "POST", - params: { - active: { - type: "boolean" - }, - config: { - required: true, - type: "object" - }, - "config.content_type": { - type: "string" - }, - "config.insecure_ssl": { - type: "string" - }, - "config.secret": { - type: "string" - }, - "config.url": { - required: true, - type: "string" - }, - events: { - type: "string[]" - }, - name: { - required: true, - type: "string" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/hooks" - }, - createInvitation: { - method: "POST", - params: { - email: { - type: "string" - }, - invitee_id: { - type: "integer" - }, - org: { - required: true, - type: "string" - }, - role: { - enum: ["admin", 
"direct_member", "billing_manager"], - type: "string" - }, - team_ids: { - type: "integer[]" - } - }, - url: "/orgs/:org/invitations" - }, - deleteHook: { - method: "DELETE", - params: { - hook_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/hooks/:hook_id" - }, - get: { - method: "GET", - params: { - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org" - }, - getHook: { - method: "GET", - params: { - hook_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/hooks/:hook_id" - }, - getMembership: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/memberships/:username" - }, - getMembershipForAuthenticatedUser: { - method: "GET", - params: { - org: { - required: true, - type: "string" - } - }, - url: "/user/memberships/orgs/:org" - }, - list: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "integer" - } - }, - url: "/organizations" - }, - listBlockedUsers: { - method: "GET", - params: { - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/blocks" - }, - listForAuthenticatedUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/orgs" - }, - listForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/orgs" - }, - listHooks: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/hooks" - }, - listInstallations: { - headers: { - accept: "application/vnd.github.machine-man-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/installations" - }, - listInvitationTeams: { - method: "GET", - params: { - invitation_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/invitations/:invitation_id/teams" - }, - listMembers: { - method: "GET", - params: { - filter: { - enum: ["2fa_disabled", "all"], - type: "string" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - role: { - enum: ["all", "admin", "member"], - type: "string" - } - }, - url: "/orgs/:org/members" - }, - listMemberships: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - state: { - enum: ["active", "pending"], - type: "string" - } - }, - url: "/user/memberships/orgs" - }, - listOutsideCollaborators: { - method: "GET", - params: { - filter: { - enum: ["2fa_disabled", "all"], - type: "string" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/outside_collaborators" - }, - listPendingInvitations: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/invitations" - }, - 
listPublicMembers: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/public_members" - }, - pingHook: { - method: "POST", - params: { - hook_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/hooks/:hook_id/pings" - }, - publicizeMembership: { - method: "PUT", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/public_members/:username" - }, - removeMember: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/members/:username" - }, - removeMembership: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/memberships/:username" - }, - removeOutsideCollaborator: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/outside_collaborators/:username" - }, - unblockUser: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/blocks/:username" - }, - update: { - method: "PATCH", - params: { - billing_email: { - type: "string" - }, - company: { - type: "string" - }, - default_repository_permission: { - enum: ["read", "write", "admin", "none"], - type: "string" - }, - description: { - type: "string" - }, - email: { - type: "string" - }, - has_organization_projects: { - type: "boolean" - }, - has_repository_projects: { - type: "boolean" - }, - location: { - type: "string" - }, - members_allowed_repository_creation_type: { - enum: ["all", "private", "none"], - type: "string" - }, - members_can_create_internal_repositories: { - type: "boolean" - }, - members_can_create_private_repositories: { - type: "boolean" - }, - members_can_create_public_repositories: { - type: "boolean" - }, - members_can_create_repositories: { - type: "boolean" - }, - name: { - type: "string" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org" - }, - updateHook: { - method: "PATCH", - params: { - active: { - type: "boolean" - }, - config: { - type: "object" - }, - "config.content_type": { - type: "string" - }, - "config.insecure_ssl": { - type: "string" - }, - "config.secret": { - type: "string" - }, - "config.url": { - required: true, - type: "string" - }, - events: { - type: "string[]" - }, - hook_id: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/hooks/:hook_id" - }, - updateMembership: { - method: "PATCH", - params: { - org: { - required: true, - type: "string" - }, - state: { - enum: ["active"], - required: true, - type: "string" - } - }, - url: "/user/memberships/orgs/:org" - } - }, - projects: { - addCollaborator: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "PUT", - params: { - permission: { - enum: ["read", "write", "admin"], - type: "string" - }, - project_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/projects/:project_id/collaborators/:username" - }, - createCard: { - headers: { - accept: 
"application/vnd.github.inertia-preview+json" - }, - method: "POST", - params: { - column_id: { - required: true, - type: "integer" - }, - content_id: { - type: "integer" - }, - content_type: { - type: "string" - }, - note: { - type: "string" - } - }, - url: "/projects/columns/:column_id/cards" - }, - createColumn: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "POST", - params: { - name: { - required: true, - type: "string" - }, - project_id: { - required: true, - type: "integer" - } - }, - url: "/projects/:project_id/columns" - }, - createForAuthenticatedUser: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "POST", - params: { - body: { - type: "string" - }, - name: { - required: true, - type: "string" - } - }, - url: "/user/projects" - }, - createForOrg: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "POST", - params: { - body: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - org: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/projects" - }, - createForRepo: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "POST", - params: { - body: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/projects" - }, - delete: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "DELETE", - params: { - project_id: { - required: true, - type: "integer" - } - }, - url: "/projects/:project_id" - }, - deleteCard: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "DELETE", - params: { - card_id: { - required: true, - type: "integer" - } - }, - url: "/projects/columns/cards/:card_id" - }, - deleteColumn: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "DELETE", - params: { - column_id: { - required: true, - type: "integer" - } - }, - url: "/projects/columns/:column_id" - }, - get: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - project_id: { - required: true, - type: "integer" - } - }, - url: "/projects/:project_id" - }, - getCard: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - card_id: { - required: true, - type: "integer" - } - }, - url: "/projects/columns/cards/:card_id" - }, - getColumn: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - column_id: { - required: true, - type: "integer" - } - }, - url: "/projects/columns/:column_id" - }, - listCards: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - archived_state: { - enum: ["all", "archived", "not_archived"], - type: "string" - }, - column_id: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/projects/columns/:column_id/cards" - }, - listCollaborators: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - affiliation: { - enum: ["outside", "direct", "all"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - project_id: { - required: true, - type: "integer" - } - }, - url: 
"/projects/:project_id/collaborators" - }, - listColumns: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - project_id: { - required: true, - type: "integer" - } - }, - url: "/projects/:project_id/columns" - }, - listForOrg: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/orgs/:org/projects" - }, - listForRepo: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/repos/:owner/:repo/projects" - }, - listForUser: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/projects" - }, - moveCard: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "POST", - params: { - card_id: { - required: true, - type: "integer" - }, - column_id: { - type: "integer" - }, - position: { - required: true, - type: "string", - validation: "^(top|bottom|after:\\d+)$" - } - }, - url: "/projects/columns/cards/:card_id/moves" - }, - moveColumn: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "POST", - params: { - column_id: { - required: true, - type: "integer" - }, - position: { - required: true, - type: "string", - validation: "^(first|last|after:\\d+)$" - } - }, - url: "/projects/columns/:column_id/moves" - }, - removeCollaborator: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "DELETE", - params: { - project_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/projects/:project_id/collaborators/:username" - }, - reviewUserPermissionLevel: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - project_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/projects/:project_id/collaborators/:username/permission" - }, - update: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "PATCH", - params: { - body: { - type: "string" - }, - name: { - type: "string" - }, - organization_permission: { - type: "string" - }, - private: { - type: "boolean" - }, - project_id: { - required: true, - type: "integer" - }, - state: { - enum: ["open", "closed"], - type: "string" - } - }, - url: "/projects/:project_id" - }, - updateCard: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "PATCH", - params: { - archived: { - type: "boolean" - }, - card_id: { - required: true, - type: "integer" - }, - note: { - type: "string" - } - }, - url: "/projects/columns/cards/:card_id" - }, - updateColumn: { - headers: { - accept: 
"application/vnd.github.inertia-preview+json" - }, - method: "PATCH", - params: { - column_id: { - required: true, - type: "integer" - }, - name: { - required: true, - type: "string" - } - }, - url: "/projects/columns/:column_id" - } - }, - pulls: { - checkIfMerged: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/merge" - }, - create: { - method: "POST", - params: { - base: { - required: true, - type: "string" - }, - body: { - type: "string" - }, - draft: { - type: "boolean" - }, - head: { - required: true, - type: "string" - }, - maintainer_can_modify: { - type: "boolean" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - title: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls" - }, - createComment: { - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - commit_id: { - required: true, - type: "string" - }, - in_reply_to: { - deprecated: true, - description: "The comment ID to reply to. **Note**: This must be the ID of a top-level comment, not a reply to that comment. Replies to replies are not supported.", - type: "integer" - }, - line: { - type: "integer" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - path: { - required: true, - type: "string" - }, - position: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - side: { - enum: ["LEFT", "RIGHT"], - type: "string" - }, - start_line: { - type: "integer" - }, - start_side: { - enum: ["LEFT", "RIGHT", "side"], - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/comments" - }, - createCommentReply: { - deprecated: "octokit.pulls.createCommentReply() has been renamed to octokit.pulls.createComment() (2019-09-09)", - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - commit_id: { - required: true, - type: "string" - }, - in_reply_to: { - deprecated: true, - description: "The comment ID to reply to. **Note**: This must be the ID of a top-level comment, not a reply to that comment. 
Replies to replies are not supported.", - type: "integer" - }, - line: { - type: "integer" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - path: { - required: true, - type: "string" - }, - position: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - side: { - enum: ["LEFT", "RIGHT"], - type: "string" - }, - start_line: { - type: "integer" - }, - start_side: { - enum: ["LEFT", "RIGHT", "side"], - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/comments" - }, - createFromIssue: { - deprecated: "octokit.pulls.createFromIssue() is deprecated, see https://developer.github.com/v3/pulls/#create-a-pull-request", - method: "POST", - params: { - base: { - required: true, - type: "string" - }, - draft: { - type: "boolean" - }, - head: { - required: true, - type: "string" - }, - issue: { - required: true, - type: "integer" - }, - maintainer_can_modify: { - type: "boolean" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls" - }, - createReview: { - method: "POST", - params: { - body: { - type: "string" - }, - comments: { - type: "object[]" - }, - "comments[].body": { - required: true, - type: "string" - }, - "comments[].path": { - required: true, - type: "string" - }, - "comments[].position": { - required: true, - type: "integer" - }, - commit_id: { - type: "string" - }, - event: { - enum: ["APPROVE", "REQUEST_CHANGES", "COMMENT"], - type: "string" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews" - }, - createReviewCommentReply: { - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/comments/:comment_id/replies" - }, - createReviewRequest: { - method: "POST", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - reviewers: { - type: "string[]" - }, - team_reviewers: { - type: "string[]" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers" - }, - deleteComment: { - method: "DELETE", - params: { - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/comments/:comment_id" - }, - deletePendingReview: { - method: "DELETE", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - review_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id" - }, - deleteReviewRequest: { - 
method: "DELETE", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - reviewers: { - type: "string[]" - }, - team_reviewers: { - type: "string[]" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers" - }, - dismissReview: { - method: "PUT", - params: { - message: { - required: true, - type: "string" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - review_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/dismissals" - }, - get: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number" - }, - getComment: { - method: "GET", - params: { - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/comments/:comment_id" - }, - getCommentsForReview: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - review_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/comments" - }, - getReview: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - review_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id" - }, - list: { - method: "GET", - params: { - base: { - type: "string" - }, - direction: { - enum: ["asc", "desc"], - type: "string" - }, - head: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - sort: { - enum: ["created", "updated", "popularity", "long-running"], - type: "string" - }, - state: { - enum: ["open", "closed", "all"], - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls" - }, - listComments: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - since: { - type: "string" - }, - sort: { - enum: ["created", "updated"], - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/comments" - }, - 
listCommentsForRepo: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - since: { - type: "string" - }, - sort: { - enum: ["created", "updated"], - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/comments" - }, - listCommits: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/commits" - }, - listFiles: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/files" - }, - listReviewRequests: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/requested_reviewers" - }, - listReviews: { - method: "GET", - params: { - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews" - }, - merge: { - method: "PUT", - params: { - commit_message: { - type: "string" - }, - commit_title: { - type: "string" - }, - merge_method: { - enum: ["merge", "squash", "rebase"], - type: "string" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - sha: { - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/merge" - }, - submitReview: { - method: "POST", - params: { - body: { - type: "string" - }, - event: { - enum: ["APPROVE", "REQUEST_CHANGES", "COMMENT"], - required: true, - type: "string" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - review_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id/events" - }, - update: { - method: "PATCH", - params: { - base: { - type: "string" - }, - body: { - type: "string" - }, - maintainer_can_modify: { - type: "boolean" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: 
"integer" - }, - repo: { - required: true, - type: "string" - }, - state: { - enum: ["open", "closed"], - type: "string" - }, - title: { - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number" - }, - updateBranch: { - headers: { - accept: "application/vnd.github.lydian-preview+json" - }, - method: "PUT", - params: { - expected_head_sha: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/update-branch" - }, - updateComment: { - method: "PATCH", - params: { - body: { - required: true, - type: "string" - }, - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/comments/:comment_id" - }, - updateReview: { - method: "PUT", - params: { - body: { - required: true, - type: "string" - }, - number: { - alias: "pull_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - pull_number: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - review_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/pulls/:pull_number/reviews/:review_id" - } - }, - rateLimit: { - get: { - method: "GET", - params: {}, - url: "/rate_limit" - } - }, - reactions: { - createForCommitComment: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - comment_id: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/comments/:comment_id/reactions" - }, - createForIssue: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/reactions" - }, - createForIssueComment: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - comment_id: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/comments/:comment_id/reactions" - }, - createForPullRequestReviewComment: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - comment_id: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: 
"/repos/:owner/:repo/pulls/comments/:comment_id/reactions" - }, - createForTeamDiscussion: { - deprecated: "octokit.reactions.createForTeamDiscussion() has been renamed to octokit.reactions.createForTeamDiscussionLegacy() (2020-01-16)", - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/reactions" - }, - createForTeamDiscussionComment: { - deprecated: "octokit.reactions.createForTeamDiscussionComment() has been renamed to octokit.reactions.createForTeamDiscussionCommentLegacy() (2020-01-16)", - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - comment_number: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" - }, - createForTeamDiscussionCommentInOrg: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - comment_number: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number/reactions" - }, - createForTeamDiscussionCommentLegacy: { - deprecated: "octokit.reactions.createForTeamDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion-comment-legacy", - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - comment_number: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" - }, - createForTeamDiscussionInOrg: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/reactions" - }, - createForTeamDiscussionLegacy: { - deprecated: "octokit.reactions.createForTeamDiscussionLegacy() is deprecated, see https://developer.github.com/v3/reactions/#create-reaction-for-a-team-discussion-legacy", - headers: { - accept: 
"application/vnd.github.squirrel-girl-preview+json" - }, - method: "POST", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/reactions" - }, - delete: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "DELETE", - params: { - reaction_id: { - required: true, - type: "integer" - } - }, - url: "/reactions/:reaction_id" - }, - listForCommitComment: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - comment_id: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/comments/:comment_id/reactions" - }, - listForIssue: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - issue_number: { - required: true, - type: "integer" - }, - number: { - alias: "issue_number", - deprecated: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/:issue_number/reactions" - }, - listForIssueComment: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - comment_id: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/issues/comments/:comment_id/reactions" - }, - listForPullRequestReviewComment: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - comment_id: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pulls/comments/:comment_id/reactions" - }, - listForTeamDiscussion: { - deprecated: "octokit.reactions.listForTeamDiscussion() has been renamed to octokit.reactions.listForTeamDiscussionLegacy() (2020-01-16)", - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: 
"/teams/:team_id/discussions/:discussion_number/reactions" - }, - listForTeamDiscussionComment: { - deprecated: "octokit.reactions.listForTeamDiscussionComment() has been renamed to octokit.reactions.listForTeamDiscussionCommentLegacy() (2020-01-16)", - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - comment_number: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" - }, - listForTeamDiscussionCommentInOrg: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - comment_number: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number/reactions" - }, - listForTeamDiscussionCommentLegacy: { - deprecated: "octokit.reactions.listForTeamDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion-comment-legacy", - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - comment_number: { - required: true, - type: "integer" - }, - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions" - }, - listForTeamDiscussionInOrg: { - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/reactions" - }, - listForTeamDiscussionLegacy: { - deprecated: "octokit.reactions.listForTeamDiscussionLegacy() is deprecated, see https://developer.github.com/v3/reactions/#list-reactions-for-a-team-discussion-legacy", - headers: { - accept: "application/vnd.github.squirrel-girl-preview+json" - }, - method: "GET", - params: { - content: { - enum: ["+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", "eyes"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: 
"/teams/:team_id/discussions/:discussion_number/reactions" - } - }, - repos: { - acceptInvitation: { - method: "PATCH", - params: { - invitation_id: { - required: true, - type: "integer" - } - }, - url: "/user/repository_invitations/:invitation_id" - }, - addCollaborator: { - method: "PUT", - params: { - owner: { - required: true, - type: "string" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - repo: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/collaborators/:username" - }, - addDeployKey: { - method: "POST", - params: { - key: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - read_only: { - type: "boolean" - }, - repo: { - required: true, - type: "string" - }, - title: { - type: "string" - } - }, - url: "/repos/:owner/:repo/keys" - }, - addProtectedBranchAdminEnforcement: { - method: "POST", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins" - }, - addProtectedBranchAppRestrictions: { - method: "POST", - params: { - apps: { - mapTo: "data", - required: true, - type: "string[]" - }, - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" - }, - addProtectedBranchRequiredSignatures: { - headers: { - accept: "application/vnd.github.zzzax-preview+json" - }, - method: "POST", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures" - }, - addProtectedBranchRequiredStatusChecksContexts: { - method: "POST", - params: { - branch: { - required: true, - type: "string" - }, - contexts: { - mapTo: "data", - required: true, - type: "string[]" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" - }, - addProtectedBranchTeamRestrictions: { - method: "POST", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - teams: { - mapTo: "data", - required: true, - type: "string[]" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" - }, - addProtectedBranchUserRestrictions: { - method: "POST", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - users: { - mapTo: "data", - required: true, - type: "string[]" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" - }, - checkCollaborator: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/collaborators/:username" - }, - checkVulnerabilityAlerts: { - headers: { - accept: "application/vnd.github.dorian-preview+json" - }, - method: "GET", - params: { - owner: { - required: 
true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/vulnerability-alerts" - }, - compareCommits: { - method: "GET", - params: { - base: { - required: true, - type: "string" - }, - head: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/compare/:base...:head" - }, - createCommitComment: { - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - commit_sha: { - required: true, - type: "string" - }, - line: { - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - path: { - type: "string" - }, - position: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - sha: { - alias: "commit_sha", - deprecated: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:commit_sha/comments" - }, - createDeployment: { - method: "POST", - params: { - auto_merge: { - type: "boolean" - }, - description: { - type: "string" - }, - environment: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - payload: { - type: "string" - }, - production_environment: { - type: "boolean" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - required_contexts: { - type: "string[]" - }, - task: { - type: "string" - }, - transient_environment: { - type: "boolean" - } - }, - url: "/repos/:owner/:repo/deployments" - }, - createDeploymentStatus: { - method: "POST", - params: { - auto_inactive: { - type: "boolean" - }, - deployment_id: { - required: true, - type: "integer" - }, - description: { - type: "string" - }, - environment: { - enum: ["production", "staging", "qa"], - type: "string" - }, - environment_url: { - type: "string" - }, - log_url: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - state: { - enum: ["error", "failure", "inactive", "in_progress", "queued", "pending", "success"], - required: true, - type: "string" - }, - target_url: { - type: "string" - } - }, - url: "/repos/:owner/:repo/deployments/:deployment_id/statuses" - }, - createDispatchEvent: { - method: "POST", - params: { - client_payload: { - type: "object" - }, - event_type: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/dispatches" - }, - createFile: { - deprecated: "octokit.repos.createFile() has been renamed to octokit.repos.createOrUpdateFile() (2019-06-07)", - method: "PUT", - params: { - author: { - type: "object" - }, - "author.email": { - required: true, - type: "string" - }, - "author.name": { - required: true, - type: "string" - }, - branch: { - type: "string" - }, - committer: { - type: "object" - }, - "committer.email": { - required: true, - type: "string" - }, - "committer.name": { - required: true, - type: "string" - }, - content: { - required: true, - type: "string" - }, - message: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - path: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - type: "string" - } - }, - url: "/repos/:owner/:repo/contents/:path" - }, - createForAuthenticatedUser: { - method: "POST", - params: { - allow_merge_commit: { - type: "boolean" - }, - allow_rebase_merge: { - type: "boolean" - }, - 
allow_squash_merge: { - type: "boolean" - }, - auto_init: { - type: "boolean" - }, - delete_branch_on_merge: { - type: "boolean" - }, - description: { - type: "string" - }, - gitignore_template: { - type: "string" - }, - has_issues: { - type: "boolean" - }, - has_projects: { - type: "boolean" - }, - has_wiki: { - type: "boolean" - }, - homepage: { - type: "string" - }, - is_template: { - type: "boolean" - }, - license_template: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - private: { - type: "boolean" - }, - team_id: { - type: "integer" - }, - visibility: { - enum: ["public", "private", "visibility", "internal"], - type: "string" - } - }, - url: "/user/repos" - }, - createFork: { - method: "POST", - params: { - organization: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/forks" - }, - createHook: { - method: "POST", - params: { - active: { - type: "boolean" - }, - config: { - required: true, - type: "object" - }, - "config.content_type": { - type: "string" - }, - "config.insecure_ssl": { - type: "string" - }, - "config.secret": { - type: "string" - }, - "config.url": { - required: true, - type: "string" - }, - events: { - type: "string[]" - }, - name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/hooks" - }, - createInOrg: { - method: "POST", - params: { - allow_merge_commit: { - type: "boolean" - }, - allow_rebase_merge: { - type: "boolean" - }, - allow_squash_merge: { - type: "boolean" - }, - auto_init: { - type: "boolean" - }, - delete_branch_on_merge: { - type: "boolean" - }, - description: { - type: "string" - }, - gitignore_template: { - type: "string" - }, - has_issues: { - type: "boolean" - }, - has_projects: { - type: "boolean" - }, - has_wiki: { - type: "boolean" - }, - homepage: { - type: "string" - }, - is_template: { - type: "boolean" - }, - license_template: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - org: { - required: true, - type: "string" - }, - private: { - type: "boolean" - }, - team_id: { - type: "integer" - }, - visibility: { - enum: ["public", "private", "visibility", "internal"], - type: "string" - } - }, - url: "/orgs/:org/repos" - }, - createOrUpdateFile: { - method: "PUT", - params: { - author: { - type: "object" - }, - "author.email": { - required: true, - type: "string" - }, - "author.name": { - required: true, - type: "string" - }, - branch: { - type: "string" - }, - committer: { - type: "object" - }, - "committer.email": { - required: true, - type: "string" - }, - "committer.name": { - required: true, - type: "string" - }, - content: { - required: true, - type: "string" - }, - message: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - path: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - type: "string" - } - }, - url: "/repos/:owner/:repo/contents/:path" - }, - createRelease: { - method: "POST", - params: { - body: { - type: "string" - }, - draft: { - type: "boolean" - }, - name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - prerelease: { - type: "boolean" - }, - repo: { - required: true, - type: "string" - }, - tag_name: { - required: true, - type: "string" - }, - target_commitish: { - type: "string" - } - }, - url: "/repos/:owner/:repo/releases" - }, 
- createStatus: { - method: "POST", - params: { - context: { - type: "string" - }, - description: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - required: true, - type: "string" - }, - state: { - enum: ["error", "failure", "pending", "success"], - required: true, - type: "string" - }, - target_url: { - type: "string" - } - }, - url: "/repos/:owner/:repo/statuses/:sha" - }, - createUsingTemplate: { - headers: { - accept: "application/vnd.github.baptiste-preview+json" - }, - method: "POST", - params: { - description: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - owner: { - type: "string" - }, - private: { - type: "boolean" - }, - template_owner: { - required: true, - type: "string" - }, - template_repo: { - required: true, - type: "string" - } - }, - url: "/repos/:template_owner/:template_repo/generate" - }, - declineInvitation: { - method: "DELETE", - params: { - invitation_id: { - required: true, - type: "integer" - } - }, - url: "/user/repository_invitations/:invitation_id" - }, - delete: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo" - }, - deleteCommitComment: { - method: "DELETE", - params: { - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/comments/:comment_id" - }, - deleteDownload: { - method: "DELETE", - params: { - download_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/downloads/:download_id" - }, - deleteFile: { - method: "DELETE", - params: { - author: { - type: "object" - }, - "author.email": { - type: "string" - }, - "author.name": { - type: "string" - }, - branch: { - type: "string" - }, - committer: { - type: "object" - }, - "committer.email": { - type: "string" - }, - "committer.name": { - type: "string" - }, - message: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - path: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/contents/:path" - }, - deleteHook: { - method: "DELETE", - params: { - hook_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/hooks/:hook_id" - }, - deleteInvitation: { - method: "DELETE", - params: { - invitation_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/invitations/:invitation_id" - }, - deleteRelease: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - release_id: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/:release_id" - }, - deleteReleaseAsset: { - method: "DELETE", - params: { - asset_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/assets/:asset_id" - }, 
- disableAutomatedSecurityFixes: { - headers: { - accept: "application/vnd.github.london-preview+json" - }, - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/automated-security-fixes" - }, - disablePagesSite: { - headers: { - accept: "application/vnd.github.switcheroo-preview+json" - }, - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pages" - }, - disableVulnerabilityAlerts: { - headers: { - accept: "application/vnd.github.dorian-preview+json" - }, - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/vulnerability-alerts" - }, - enableAutomatedSecurityFixes: { - headers: { - accept: "application/vnd.github.london-preview+json" - }, - method: "PUT", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/automated-security-fixes" - }, - enablePagesSite: { - headers: { - accept: "application/vnd.github.switcheroo-preview+json" - }, - method: "POST", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - source: { - type: "object" - }, - "source.branch": { - enum: ["master", "gh-pages"], - type: "string" - }, - "source.path": { - type: "string" - } - }, - url: "/repos/:owner/:repo/pages" - }, - enableVulnerabilityAlerts: { - headers: { - accept: "application/vnd.github.dorian-preview+json" - }, - method: "PUT", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/vulnerability-alerts" - }, - get: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo" - }, - getAppsWithAccessToProtectedBranch: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" - }, - getArchiveLink: { - method: "GET", - params: { - archive_format: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/:archive_format/:ref" - }, - getBranch: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch" - }, - getBranchProtection: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection" - }, - getClones: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - per: { - enum: ["day", "week"], - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/traffic/clones" - }, - getCodeFrequencyStats: { - method: "GET", - params: { - owner: { - required: 
true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/stats/code_frequency" - }, - getCollaboratorPermissionLevel: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/collaborators/:username/permission" - }, - getCombinedStatusForRef: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:ref/status" - }, - getCommit: { - method: "GET", - params: { - commit_sha: { - alias: "ref", - deprecated: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - alias: "ref", - deprecated: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:ref" - }, - getCommitActivityStats: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/stats/commit_activity" - }, - getCommitComment: { - method: "GET", - params: { - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/comments/:comment_id" - }, - getCommitRefSha: { - deprecated: "octokit.repos.getCommitRefSha() is deprecated, see https://developer.github.com/v3/repos/commits/#get-a-single-commit", - headers: { - accept: "application/vnd.github.v3.sha" - }, - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:ref" - }, - getContents: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - path: { - required: true, - type: "string" - }, - ref: { - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/contents/:path" - }, - getContributorsStats: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/stats/contributors" - }, - getDeployKey: { - method: "GET", - params: { - key_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/keys/:key_id" - }, - getDeployment: { - method: "GET", - params: { - deployment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/deployments/:deployment_id" - }, - getDeploymentStatus: { - method: "GET", - params: { - deployment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - status_id: { - required: true, - type: "integer" - } - }, - url: "/repos/:owner/:repo/deployments/:deployment_id/statuses/:status_id" - }, - getDownload: { - method: "GET", - params: { - download_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { 
- required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/downloads/:download_id" - }, - getHook: { - method: "GET", - params: { - hook_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/hooks/:hook_id" - }, - getLatestPagesBuild: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pages/builds/latest" - }, - getLatestRelease: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/latest" - }, - getPages: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pages" - }, - getPagesBuild: { - method: "GET", - params: { - build_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pages/builds/:build_id" - }, - getParticipationStats: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/stats/participation" - }, - getProtectedBranchAdminEnforcement: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins" - }, - getProtectedBranchPullRequestReviewEnforcement: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews" - }, - getProtectedBranchRequiredSignatures: { - headers: { - accept: "application/vnd.github.zzzax-preview+json" - }, - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures" - }, - getProtectedBranchRequiredStatusChecks: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks" - }, - getProtectedBranchRestrictions: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions" - }, - getPunchCardStats: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/stats/punch_card" - }, - getReadme: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - ref: { - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/readme" - }, - getRelease: { - method: "GET", - params: { - owner: { - 
required: true, - type: "string" - }, - release_id: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/:release_id" - }, - getReleaseAsset: { - method: "GET", - params: { - asset_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/assets/:asset_id" - }, - getReleaseByTag: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - tag: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/tags/:tag" - }, - getTeamsWithAccessToProtectedBranch: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" - }, - getTopPaths: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/traffic/popular/paths" - }, - getTopReferrers: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/traffic/popular/referrers" - }, - getUsersWithAccessToProtectedBranch: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" - }, - getViews: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - per: { - enum: ["day", "week"], - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/traffic/views" - }, - list: { - method: "GET", - params: { - affiliation: { - type: "string" - }, - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - sort: { - enum: ["created", "updated", "pushed", "full_name"], - type: "string" - }, - type: { - enum: ["all", "owner", "public", "private", "member"], - type: "string" - }, - visibility: { - enum: ["all", "public", "private"], - type: "string" - } - }, - url: "/user/repos" - }, - listAppsWithAccessToProtectedBranch: { - deprecated: "octokit.repos.listAppsWithAccessToProtectedBranch() has been renamed to octokit.repos.getAppsWithAccessToProtectedBranch() (2019-09-13)", - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" - }, - listAssetsForRelease: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - release_id: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/:release_id/assets" - }, - listBranches: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - protected: { - type: "boolean" - }, - repo: { - required: 
true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches" - }, - listBranchesForHeadCommit: { - headers: { - accept: "application/vnd.github.groot-preview+json" - }, - method: "GET", - params: { - commit_sha: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:commit_sha/branches-where-head" - }, - listCollaborators: { - method: "GET", - params: { - affiliation: { - enum: ["outside", "direct", "all"], - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/collaborators" - }, - listCommentsForCommit: { - method: "GET", - params: { - commit_sha: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - ref: { - alias: "commit_sha", - deprecated: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:commit_sha/comments" - }, - listCommitComments: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/comments" - }, - listCommits: { - method: "GET", - params: { - author: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - path: { - type: "string" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - sha: { - type: "string" - }, - since: { - type: "string" - }, - until: { - type: "string" - } - }, - url: "/repos/:owner/:repo/commits" - }, - listContributors: { - method: "GET", - params: { - anon: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/contributors" - }, - listDeployKeys: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/keys" - }, - listDeploymentStatuses: { - method: "GET", - params: { - deployment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/deployments/:deployment_id/statuses" - }, - listDeployments: { - method: "GET", - params: { - environment: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - ref: { - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - type: "string" - }, - task: { - type: "string" - } - }, - url: "/repos/:owner/:repo/deployments" - }, - listDownloads: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/downloads" - }, - listForOrg: { - method: "GET", - 
params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - sort: { - enum: ["created", "updated", "pushed", "full_name"], - type: "string" - }, - type: { - enum: ["all", "public", "private", "forks", "sources", "member", "internal"], - type: "string" - } - }, - url: "/orgs/:org/repos" - }, - listForUser: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - sort: { - enum: ["created", "updated", "pushed", "full_name"], - type: "string" - }, - type: { - enum: ["all", "owner", "member"], - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/repos" - }, - listForks: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - sort: { - enum: ["newest", "oldest", "stargazers"], - type: "string" - } - }, - url: "/repos/:owner/:repo/forks" - }, - listHooks: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/hooks" - }, - listInvitations: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/invitations" - }, - listInvitationsForAuthenticatedUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/repository_invitations" - }, - listLanguages: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/languages" - }, - listPagesBuilds: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pages/builds" - }, - listProtectedBranchRequiredStatusChecksContexts: { - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" - }, - listProtectedBranchTeamRestrictions: { - deprecated: "octokit.repos.listProtectedBranchTeamRestrictions() has been renamed to octokit.repos.getTeamsWithAccessToProtectedBranch() (2019-09-09)", - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" - }, - listProtectedBranchUserRestrictions: { - deprecated: "octokit.repos.listProtectedBranchUserRestrictions() has been renamed to octokit.repos.getUsersWithAccessToProtectedBranch() (2019-09-09)", - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - 
url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" - }, - listPublic: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "integer" - } - }, - url: "/repositories" - }, - listPullRequestsAssociatedWithCommit: { - headers: { - accept: "application/vnd.github.groot-preview+json" - }, - method: "GET", - params: { - commit_sha: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:commit_sha/pulls" - }, - listReleases: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases" - }, - listStatusesForRef: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - ref: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/commits/:ref/statuses" - }, - listTags: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/tags" - }, - listTeams: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/teams" - }, - listTeamsWithAccessToProtectedBranch: { - deprecated: "octokit.repos.listTeamsWithAccessToProtectedBranch() has been renamed to octokit.repos.getTeamsWithAccessToProtectedBranch() (2019-09-13)", - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" - }, - listTopics: { - headers: { - accept: "application/vnd.github.mercy-preview+json" - }, - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/topics" - }, - listUsersWithAccessToProtectedBranch: { - deprecated: "octokit.repos.listUsersWithAccessToProtectedBranch() has been renamed to octokit.repos.getUsersWithAccessToProtectedBranch() (2019-09-13)", - method: "GET", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" - }, - merge: { - method: "POST", - params: { - base: { - required: true, - type: "string" - }, - commit_message: { - type: "string" - }, - head: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/merges" - }, - pingHook: { - method: "POST", - params: { - hook_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: 
"/repos/:owner/:repo/hooks/:hook_id/pings" - }, - removeBranchProtection: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection" - }, - removeCollaborator: { - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/collaborators/:username" - }, - removeDeployKey: { - method: "DELETE", - params: { - key_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/keys/:key_id" - }, - removeProtectedBranchAdminEnforcement: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/enforce_admins" - }, - removeProtectedBranchAppRestrictions: { - method: "DELETE", - params: { - apps: { - mapTo: "data", - required: true, - type: "string[]" - }, - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" - }, - removeProtectedBranchPullRequestReviewEnforcement: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews" - }, - removeProtectedBranchRequiredSignatures: { - headers: { - accept: "application/vnd.github.zzzax-preview+json" - }, - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_signatures" - }, - removeProtectedBranchRequiredStatusChecks: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks" - }, - removeProtectedBranchRequiredStatusChecksContexts: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - contexts: { - mapTo: "data", - required: true, - type: "string[]" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" - }, - removeProtectedBranchRestrictions: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions" - }, - removeProtectedBranchTeamRestrictions: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - teams: { - mapTo: 
"data", - required: true, - type: "string[]" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" - }, - removeProtectedBranchUserRestrictions: { - method: "DELETE", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - users: { - mapTo: "data", - required: true, - type: "string[]" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" - }, - replaceProtectedBranchAppRestrictions: { - method: "PUT", - params: { - apps: { - mapTo: "data", - required: true, - type: "string[]" - }, - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/apps" - }, - replaceProtectedBranchRequiredStatusChecksContexts: { - method: "PUT", - params: { - branch: { - required: true, - type: "string" - }, - contexts: { - mapTo: "data", - required: true, - type: "string[]" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts" - }, - replaceProtectedBranchTeamRestrictions: { - method: "PUT", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - teams: { - mapTo: "data", - required: true, - type: "string[]" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/teams" - }, - replaceProtectedBranchUserRestrictions: { - method: "PUT", - params: { - branch: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - users: { - mapTo: "data", - required: true, - type: "string[]" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/restrictions/users" - }, - replaceTopics: { - headers: { - accept: "application/vnd.github.mercy-preview+json" - }, - method: "PUT", - params: { - names: { - required: true, - type: "string[]" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/topics" - }, - requestPageBuild: { - method: "POST", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/pages/builds" - }, - retrieveCommunityProfileMetrics: { - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/community/profile" - }, - testPushHook: { - method: "POST", - params: { - hook_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/hooks/:hook_id/tests" - }, - transfer: { - method: "POST", - params: { - new_owner: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_ids: { - type: "integer[]" - } - }, - url: "/repos/:owner/:repo/transfer" - }, - update: { - method: "PATCH", - params: { - allow_merge_commit: { - type: "boolean" - }, - allow_rebase_merge: { - type: "boolean" - }, - allow_squash_merge: { - type: "boolean" - }, - archived: { - type: 
"boolean" - }, - default_branch: { - type: "string" - }, - delete_branch_on_merge: { - type: "boolean" - }, - description: { - type: "string" - }, - has_issues: { - type: "boolean" - }, - has_projects: { - type: "boolean" - }, - has_wiki: { - type: "boolean" - }, - homepage: { - type: "string" - }, - is_template: { - type: "boolean" - }, - name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - private: { - type: "boolean" - }, - repo: { - required: true, - type: "string" - }, - visibility: { - enum: ["public", "private", "visibility", "internal"], - type: "string" - } - }, - url: "/repos/:owner/:repo" - }, - updateBranchProtection: { - method: "PUT", - params: { - allow_deletions: { - type: "boolean" - }, - allow_force_pushes: { - allowNull: true, - type: "boolean" - }, - branch: { - required: true, - type: "string" - }, - enforce_admins: { - allowNull: true, - required: true, - type: "boolean" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - required_linear_history: { - type: "boolean" - }, - required_pull_request_reviews: { - allowNull: true, - required: true, - type: "object" - }, - "required_pull_request_reviews.dismiss_stale_reviews": { - type: "boolean" - }, - "required_pull_request_reviews.dismissal_restrictions": { - type: "object" - }, - "required_pull_request_reviews.dismissal_restrictions.teams": { - type: "string[]" - }, - "required_pull_request_reviews.dismissal_restrictions.users": { - type: "string[]" - }, - "required_pull_request_reviews.require_code_owner_reviews": { - type: "boolean" - }, - "required_pull_request_reviews.required_approving_review_count": { - type: "integer" - }, - required_status_checks: { - allowNull: true, - required: true, - type: "object" - }, - "required_status_checks.contexts": { - required: true, - type: "string[]" - }, - "required_status_checks.strict": { - required: true, - type: "boolean" - }, - restrictions: { - allowNull: true, - required: true, - type: "object" - }, - "restrictions.apps": { - type: "string[]" - }, - "restrictions.teams": { - required: true, - type: "string[]" - }, - "restrictions.users": { - required: true, - type: "string[]" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection" - }, - updateCommitComment: { - method: "PATCH", - params: { - body: { - required: true, - type: "string" - }, - comment_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/comments/:comment_id" - }, - updateFile: { - deprecated: "octokit.repos.updateFile() has been renamed to octokit.repos.createOrUpdateFile() (2019-06-07)", - method: "PUT", - params: { - author: { - type: "object" - }, - "author.email": { - required: true, - type: "string" - }, - "author.name": { - required: true, - type: "string" - }, - branch: { - type: "string" - }, - committer: { - type: "object" - }, - "committer.email": { - required: true, - type: "string" - }, - "committer.name": { - required: true, - type: "string" - }, - content: { - required: true, - type: "string" - }, - message: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - path: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - sha: { - type: "string" - } - }, - url: "/repos/:owner/:repo/contents/:path" - }, - updateHook: { - method: "PATCH", - params: { - active: { - type: "boolean" - }, - add_events: { - 
type: "string[]" - }, - config: { - type: "object" - }, - "config.content_type": { - type: "string" - }, - "config.insecure_ssl": { - type: "string" - }, - "config.secret": { - type: "string" - }, - "config.url": { - required: true, - type: "string" - }, - events: { - type: "string[]" - }, - hook_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - remove_events: { - type: "string[]" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/hooks/:hook_id" - }, - updateInformationAboutPagesSite: { - method: "PUT", - params: { - cname: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - source: { - enum: ['"gh-pages"', '"master"', '"master /docs"'], - type: "string" - } - }, - url: "/repos/:owner/:repo/pages" - }, - updateInvitation: { - method: "PATCH", - params: { - invitation_id: { - required: true, - type: "integer" - }, - owner: { - required: true, - type: "string" - }, - permissions: { - enum: ["read", "write", "admin"], - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/invitations/:invitation_id" - }, - updateProtectedBranchPullRequestReviewEnforcement: { - method: "PATCH", - params: { - branch: { - required: true, - type: "string" - }, - dismiss_stale_reviews: { - type: "boolean" - }, - dismissal_restrictions: { - type: "object" - }, - "dismissal_restrictions.teams": { - type: "string[]" - }, - "dismissal_restrictions.users": { - type: "string[]" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - require_code_owner_reviews: { - type: "boolean" - }, - required_approving_review_count: { - type: "integer" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews" - }, - updateProtectedBranchRequiredStatusChecks: { - method: "PATCH", - params: { - branch: { - required: true, - type: "string" - }, - contexts: { - type: "string[]" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - strict: { - type: "boolean" - } - }, - url: "/repos/:owner/:repo/branches/:branch/protection/required_status_checks" - }, - updateRelease: { - method: "PATCH", - params: { - body: { - type: "string" - }, - draft: { - type: "boolean" - }, - name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - prerelease: { - type: "boolean" - }, - release_id: { - required: true, - type: "integer" - }, - repo: { - required: true, - type: "string" - }, - tag_name: { - type: "string" - }, - target_commitish: { - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/:release_id" - }, - updateReleaseAsset: { - method: "PATCH", - params: { - asset_id: { - required: true, - type: "integer" - }, - label: { - type: "string" - }, - name: { - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - } - }, - url: "/repos/:owner/:repo/releases/assets/:asset_id" - }, - uploadReleaseAsset: { - method: "POST", - params: { - data: { - mapTo: "data", - required: true, - type: "string | object" - }, - file: { - alias: "data", - deprecated: true, - type: "string | object" - }, - headers: { - required: true, - type: "object" - }, - "headers.content-length": { - required: true, - type: "integer" - }, - "headers.content-type": { - required: true, - type: "string" - }, - label: { - type: "string" - }, - 
name: { - required: true, - type: "string" - }, - url: { - required: true, - type: "string" - } - }, - url: ":url" - } - }, - search: { - code: { - method: "GET", - params: { - order: { - enum: ["desc", "asc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - q: { - required: true, - type: "string" - }, - sort: { - enum: ["indexed"], - type: "string" - } - }, - url: "/search/code" - }, - commits: { - headers: { - accept: "application/vnd.github.cloak-preview+json" - }, - method: "GET", - params: { - order: { - enum: ["desc", "asc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - q: { - required: true, - type: "string" - }, - sort: { - enum: ["author-date", "committer-date"], - type: "string" - } - }, - url: "/search/commits" - }, - issues: { - deprecated: "octokit.search.issues() has been renamed to octokit.search.issuesAndPullRequests() (2018-12-27)", - method: "GET", - params: { - order: { - enum: ["desc", "asc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - q: { - required: true, - type: "string" - }, - sort: { - enum: ["comments", "reactions", "reactions-+1", "reactions--1", "reactions-smile", "reactions-thinking_face", "reactions-heart", "reactions-tada", "interactions", "created", "updated"], - type: "string" - } - }, - url: "/search/issues" - }, - issuesAndPullRequests: { - method: "GET", - params: { - order: { - enum: ["desc", "asc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - q: { - required: true, - type: "string" - }, - sort: { - enum: ["comments", "reactions", "reactions-+1", "reactions--1", "reactions-smile", "reactions-thinking_face", "reactions-heart", "reactions-tada", "interactions", "created", "updated"], - type: "string" - } - }, - url: "/search/issues" - }, - labels: { - method: "GET", - params: { - order: { - enum: ["desc", "asc"], - type: "string" - }, - q: { - required: true, - type: "string" - }, - repository_id: { - required: true, - type: "integer" - }, - sort: { - enum: ["created", "updated"], - type: "string" - } - }, - url: "/search/labels" - }, - repos: { - method: "GET", - params: { - order: { - enum: ["desc", "asc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - q: { - required: true, - type: "string" - }, - sort: { - enum: ["stars", "forks", "help-wanted-issues", "updated"], - type: "string" - } - }, - url: "/search/repositories" - }, - topics: { - method: "GET", - params: { - q: { - required: true, - type: "string" - } - }, - url: "/search/topics" - }, - users: { - method: "GET", - params: { - order: { - enum: ["desc", "asc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - q: { - required: true, - type: "string" - }, - sort: { - enum: ["followers", "repositories", "joined"], - type: "string" - } - }, - url: "/search/users" - } - }, - teams: { - addMember: { - deprecated: "octokit.teams.addMember() has been renamed to octokit.teams.addMemberLegacy() (2020-01-16)", - method: "PUT", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/members/:username" - }, - addMemberLegacy: { - deprecated: "octokit.teams.addMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#add-team-member-legacy", - method: "PUT", - params: { - team_id: { - required: true, - type: 
"integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/members/:username" - }, - addOrUpdateMembership: { - deprecated: "octokit.teams.addOrUpdateMembership() has been renamed to octokit.teams.addOrUpdateMembershipLegacy() (2020-01-16)", - method: "PUT", - params: { - role: { - enum: ["member", "maintainer"], - type: "string" - }, - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/memberships/:username" - }, - addOrUpdateMembershipInOrg: { - method: "PUT", - params: { - org: { - required: true, - type: "string" - }, - role: { - enum: ["member", "maintainer"], - type: "string" - }, - team_slug: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/memberships/:username" - }, - addOrUpdateMembershipLegacy: { - deprecated: "octokit.teams.addOrUpdateMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#add-or-update-team-membership-legacy", - method: "PUT", - params: { - role: { - enum: ["member", "maintainer"], - type: "string" - }, - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/memberships/:username" - }, - addOrUpdateProject: { - deprecated: "octokit.teams.addOrUpdateProject() has been renamed to octokit.teams.addOrUpdateProjectLegacy() (2020-01-16)", - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "PUT", - params: { - permission: { - enum: ["read", "write", "admin"], - type: "string" - }, - project_id: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects/:project_id" - }, - addOrUpdateProjectInOrg: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "PUT", - params: { - org: { - required: true, - type: "string" - }, - permission: { - enum: ["read", "write", "admin"], - type: "string" - }, - project_id: { - required: true, - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/projects/:project_id" - }, - addOrUpdateProjectLegacy: { - deprecated: "octokit.teams.addOrUpdateProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#add-or-update-team-project-legacy", - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "PUT", - params: { - permission: { - enum: ["read", "write", "admin"], - type: "string" - }, - project_id: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects/:project_id" - }, - addOrUpdateRepo: { - deprecated: "octokit.teams.addOrUpdateRepo() has been renamed to octokit.teams.addOrUpdateRepoLegacy() (2020-01-16)", - method: "PUT", - params: { - owner: { - required: true, - type: "string" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/repos/:owner/:repo" - }, - addOrUpdateRepoInOrg: { - method: "PUT", - params: { - org: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_slug: { - required: 
true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo" - }, - addOrUpdateRepoLegacy: { - deprecated: "octokit.teams.addOrUpdateRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#add-or-update-team-repository-legacy", - method: "PUT", - params: { - owner: { - required: true, - type: "string" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/repos/:owner/:repo" - }, - checkManagesRepo: { - deprecated: "octokit.teams.checkManagesRepo() has been renamed to octokit.teams.checkManagesRepoLegacy() (2020-01-16)", - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/repos/:owner/:repo" - }, - checkManagesRepoInOrg: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo" - }, - checkManagesRepoLegacy: { - deprecated: "octokit.teams.checkManagesRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#check-if-a-team-manages-a-repository-legacy", - method: "GET", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/repos/:owner/:repo" - }, - create: { - method: "POST", - params: { - description: { - type: "string" - }, - maintainers: { - type: "string[]" - }, - name: { - required: true, - type: "string" - }, - org: { - required: true, - type: "string" - }, - parent_team_id: { - type: "integer" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - privacy: { - enum: ["secret", "closed"], - type: "string" - }, - repo_names: { - type: "string[]" - } - }, - url: "/orgs/:org/teams" - }, - createDiscussion: { - deprecated: "octokit.teams.createDiscussion() has been renamed to octokit.teams.createDiscussionLegacy() (2020-01-16)", - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - private: { - type: "boolean" - }, - team_id: { - required: true, - type: "integer" - }, - title: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/discussions" - }, - createDiscussionComment: { - deprecated: "octokit.teams.createDiscussionComment() has been renamed to octokit.teams.createDiscussionCommentLegacy() (2020-01-16)", - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments" - }, - createDiscussionCommentInOrg: { - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments" - }, - createDiscussionCommentLegacy: { - deprecated: "octokit.teams.createDiscussionCommentLegacy() is deprecated, see 
https://developer.github.com/v3/teams/discussion_comments/#create-a-comment-legacy", - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments" - }, - createDiscussionInOrg: { - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - org: { - required: true, - type: "string" - }, - private: { - type: "boolean" - }, - team_slug: { - required: true, - type: "string" - }, - title: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions" - }, - createDiscussionLegacy: { - deprecated: "octokit.teams.createDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#create-a-discussion-legacy", - method: "POST", - params: { - body: { - required: true, - type: "string" - }, - private: { - type: "boolean" - }, - team_id: { - required: true, - type: "integer" - }, - title: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/discussions" - }, - delete: { - deprecated: "octokit.teams.delete() has been renamed to octokit.teams.deleteLegacy() (2020-01-16)", - method: "DELETE", - params: { - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id" - }, - deleteDiscussion: { - deprecated: "octokit.teams.deleteDiscussion() has been renamed to octokit.teams.deleteDiscussionLegacy() (2020-01-16)", - method: "DELETE", - params: { - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number" - }, - deleteDiscussionComment: { - deprecated: "octokit.teams.deleteDiscussionComment() has been renamed to octokit.teams.deleteDiscussionCommentLegacy() (2020-01-16)", - method: "DELETE", - params: { - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" - }, - deleteDiscussionCommentInOrg: { - method: "DELETE", - params: { - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number" - }, - deleteDiscussionCommentLegacy: { - deprecated: "octokit.teams.deleteDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#delete-a-comment-legacy", - method: "DELETE", - params: { - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" - }, - deleteDiscussionInOrg: { - method: "DELETE", - params: { - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number" - }, - deleteDiscussionLegacy: { - deprecated: "octokit.teams.deleteDiscussionLegacy() is deprecated, see 
https://developer.github.com/v3/teams/discussions/#delete-a-discussion-legacy", - method: "DELETE", - params: { - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number" - }, - deleteInOrg: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug" - }, - deleteLegacy: { - deprecated: "octokit.teams.deleteLegacy() is deprecated, see https://developer.github.com/v3/teams/#delete-team-legacy", - method: "DELETE", - params: { - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id" - }, - get: { - deprecated: "octokit.teams.get() has been renamed to octokit.teams.getLegacy() (2020-01-16)", - method: "GET", - params: { - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id" - }, - getByName: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug" - }, - getDiscussion: { - deprecated: "octokit.teams.getDiscussion() has been renamed to octokit.teams.getDiscussionLegacy() (2020-01-16)", - method: "GET", - params: { - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number" - }, - getDiscussionComment: { - deprecated: "octokit.teams.getDiscussionComment() has been renamed to octokit.teams.getDiscussionCommentLegacy() (2020-01-16)", - method: "GET", - params: { - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" - }, - getDiscussionCommentInOrg: { - method: "GET", - params: { - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number" - }, - getDiscussionCommentLegacy: { - deprecated: "octokit.teams.getDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#get-a-single-comment-legacy", - method: "GET", - params: { - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" - }, - getDiscussionInOrg: { - method: "GET", - params: { - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number" - }, - getDiscussionLegacy: { - deprecated: "octokit.teams.getDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#get-a-single-discussion-legacy", - method: "GET", - params: { - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number" - }, - 
getLegacy: { - deprecated: "octokit.teams.getLegacy() is deprecated, see https://developer.github.com/v3/teams/#get-team-legacy", - method: "GET", - params: { - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id" - }, - getMember: { - deprecated: "octokit.teams.getMember() has been renamed to octokit.teams.getMemberLegacy() (2020-01-16)", - method: "GET", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/members/:username" - }, - getMemberLegacy: { - deprecated: "octokit.teams.getMemberLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#get-team-member-legacy", - method: "GET", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/members/:username" - }, - getMembership: { - deprecated: "octokit.teams.getMembership() has been renamed to octokit.teams.getMembershipLegacy() (2020-01-16)", - method: "GET", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/memberships/:username" - }, - getMembershipInOrg: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/memberships/:username" - }, - getMembershipLegacy: { - deprecated: "octokit.teams.getMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#get-team-membership-legacy", - method: "GET", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/memberships/:username" - }, - list: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/orgs/:org/teams" - }, - listChild: { - deprecated: "octokit.teams.listChild() has been renamed to octokit.teams.listChildLegacy() (2020-01-16)", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/teams" - }, - listChildInOrg: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/teams" - }, - listChildLegacy: { - deprecated: "octokit.teams.listChildLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-child-teams-legacy", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/teams" - }, - listDiscussionComments: { - deprecated: "octokit.teams.listDiscussionComments() has been renamed to octokit.teams.listDiscussionCommentsLegacy() (2020-01-16)", - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments" - }, - listDiscussionCommentsInOrg: { 
- method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments" - }, - listDiscussionCommentsLegacy: { - deprecated: "octokit.teams.listDiscussionCommentsLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#list-comments-legacy", - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments" - }, - listDiscussions: { - deprecated: "octokit.teams.listDiscussions() has been renamed to octokit.teams.listDiscussionsLegacy() (2020-01-16)", - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions" - }, - listDiscussionsInOrg: { - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions" - }, - listDiscussionsLegacy: { - deprecated: "octokit.teams.listDiscussionsLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#list-discussions-legacy", - method: "GET", - params: { - direction: { - enum: ["asc", "desc"], - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions" - }, - listForAuthenticatedUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/teams" - }, - listMembers: { - deprecated: "octokit.teams.listMembers() has been renamed to octokit.teams.listMembersLegacy() (2020-01-16)", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - role: { - enum: ["member", "maintainer", "all"], - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/members" - }, - listMembersInOrg: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - role: { - enum: ["member", "maintainer", "all"], - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/members" - }, - listMembersLegacy: { - deprecated: "octokit.teams.listMembersLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#list-team-members-legacy", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - role: { - enum: ["member", "maintainer", "all"], - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/members" - }, - listPendingInvitations: { - deprecated: 
"octokit.teams.listPendingInvitations() has been renamed to octokit.teams.listPendingInvitationsLegacy() (2020-01-16)", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/invitations" - }, - listPendingInvitationsInOrg: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/invitations" - }, - listPendingInvitationsLegacy: { - deprecated: "octokit.teams.listPendingInvitationsLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#list-pending-team-invitations-legacy", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/invitations" - }, - listProjects: { - deprecated: "octokit.teams.listProjects() has been renamed to octokit.teams.listProjectsLegacy() (2020-01-16)", - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects" - }, - listProjectsInOrg: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/projects" - }, - listProjectsLegacy: { - deprecated: "octokit.teams.listProjectsLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-team-projects-legacy", - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects" - }, - listRepos: { - deprecated: "octokit.teams.listRepos() has been renamed to octokit.teams.listReposLegacy() (2020-01-16)", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/repos" - }, - listReposInOrg: { - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/repos" - }, - listReposLegacy: { - deprecated: "octokit.teams.listReposLegacy() is deprecated, see https://developer.github.com/v3/teams/#list-team-repos-legacy", - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/repos" - }, - removeMember: { - deprecated: "octokit.teams.removeMember() has been renamed to octokit.teams.removeMemberLegacy() (2020-01-16)", - method: "DELETE", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/members/:username" - }, - removeMemberLegacy: { - deprecated: "octokit.teams.removeMemberLegacy() is deprecated, see 
https://developer.github.com/v3/teams/members/#remove-team-member-legacy", - method: "DELETE", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/members/:username" - }, - removeMembership: { - deprecated: "octokit.teams.removeMembership() has been renamed to octokit.teams.removeMembershipLegacy() (2020-01-16)", - method: "DELETE", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/memberships/:username" - }, - removeMembershipInOrg: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/memberships/:username" - }, - removeMembershipLegacy: { - deprecated: "octokit.teams.removeMembershipLegacy() is deprecated, see https://developer.github.com/v3/teams/members/#remove-team-membership-legacy", - method: "DELETE", - params: { - team_id: { - required: true, - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/teams/:team_id/memberships/:username" - }, - removeProject: { - deprecated: "octokit.teams.removeProject() has been renamed to octokit.teams.removeProjectLegacy() (2020-01-16)", - method: "DELETE", - params: { - project_id: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects/:project_id" - }, - removeProjectInOrg: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - project_id: { - required: true, - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/projects/:project_id" - }, - removeProjectLegacy: { - deprecated: "octokit.teams.removeProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#remove-team-project-legacy", - method: "DELETE", - params: { - project_id: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects/:project_id" - }, - removeRepo: { - deprecated: "octokit.teams.removeRepo() has been renamed to octokit.teams.removeRepoLegacy() (2020-01-16)", - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/repos/:owner/:repo" - }, - removeRepoInOrg: { - method: "DELETE", - params: { - org: { - required: true, - type: "string" - }, - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/repos/:owner/:repo" - }, - removeRepoLegacy: { - deprecated: "octokit.teams.removeRepoLegacy() is deprecated, see https://developer.github.com/v3/teams/#remove-team-repository-legacy", - method: "DELETE", - params: { - owner: { - required: true, - type: "string" - }, - repo: { - required: true, - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/repos/:owner/:repo" - }, - reviewProject: { - deprecated: "octokit.teams.reviewProject() has been renamed to octokit.teams.reviewProjectLegacy() (2020-01-16)", - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: 
"GET", - params: { - project_id: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects/:project_id" - }, - reviewProjectInOrg: { - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - org: { - required: true, - type: "string" - }, - project_id: { - required: true, - type: "integer" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/projects/:project_id" - }, - reviewProjectLegacy: { - deprecated: "octokit.teams.reviewProjectLegacy() is deprecated, see https://developer.github.com/v3/teams/#review-a-team-project-legacy", - headers: { - accept: "application/vnd.github.inertia-preview+json" - }, - method: "GET", - params: { - project_id: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/projects/:project_id" - }, - update: { - deprecated: "octokit.teams.update() has been renamed to octokit.teams.updateLegacy() (2020-01-16)", - method: "PATCH", - params: { - description: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - parent_team_id: { - type: "integer" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - privacy: { - enum: ["secret", "closed"], - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id" - }, - updateDiscussion: { - deprecated: "octokit.teams.updateDiscussion() has been renamed to octokit.teams.updateDiscussionLegacy() (2020-01-16)", - method: "PATCH", - params: { - body: { - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - }, - title: { - type: "string" - } - }, - url: "/teams/:team_id/discussions/:discussion_number" - }, - updateDiscussionComment: { - deprecated: "octokit.teams.updateDiscussionComment() has been renamed to octokit.teams.updateDiscussionCommentLegacy() (2020-01-16)", - method: "PATCH", - params: { - body: { - required: true, - type: "string" - }, - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" - }, - updateDiscussionCommentInOrg: { - method: "PATCH", - params: { - body: { - required: true, - type: "string" - }, - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number/comments/:comment_number" - }, - updateDiscussionCommentLegacy: { - deprecated: "octokit.teams.updateDiscussionCommentLegacy() is deprecated, see https://developer.github.com/v3/teams/discussion_comments/#edit-a-comment-legacy", - method: "PATCH", - params: { - body: { - required: true, - type: "string" - }, - comment_number: { - required: true, - type: "integer" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id/discussions/:discussion_number/comments/:comment_number" - }, - updateDiscussionInOrg: { - method: "PATCH", - params: { - body: { - type: "string" - }, - discussion_number: { - required: true, - type: 
"integer" - }, - org: { - required: true, - type: "string" - }, - team_slug: { - required: true, - type: "string" - }, - title: { - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug/discussions/:discussion_number" - }, - updateDiscussionLegacy: { - deprecated: "octokit.teams.updateDiscussionLegacy() is deprecated, see https://developer.github.com/v3/teams/discussions/#edit-a-discussion-legacy", - method: "PATCH", - params: { - body: { - type: "string" - }, - discussion_number: { - required: true, - type: "integer" - }, - team_id: { - required: true, - type: "integer" - }, - title: { - type: "string" - } - }, - url: "/teams/:team_id/discussions/:discussion_number" - }, - updateInOrg: { - method: "PATCH", - params: { - description: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - org: { - required: true, - type: "string" - }, - parent_team_id: { - type: "integer" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - privacy: { - enum: ["secret", "closed"], - type: "string" - }, - team_slug: { - required: true, - type: "string" - } - }, - url: "/orgs/:org/teams/:team_slug" - }, - updateLegacy: { - deprecated: "octokit.teams.updateLegacy() is deprecated, see https://developer.github.com/v3/teams/#edit-team-legacy", - method: "PATCH", - params: { - description: { - type: "string" - }, - name: { - required: true, - type: "string" - }, - parent_team_id: { - type: "integer" - }, - permission: { - enum: ["pull", "push", "admin"], - type: "string" - }, - privacy: { - enum: ["secret", "closed"], - type: "string" - }, - team_id: { - required: true, - type: "integer" - } - }, - url: "/teams/:team_id" - } - }, - users: { - addEmails: { - method: "POST", - params: { - emails: { - required: true, - type: "string[]" - } - }, - url: "/user/emails" - }, - block: { - method: "PUT", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/user/blocks/:username" - }, - checkBlocked: { - method: "GET", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/user/blocks/:username" - }, - checkFollowing: { - method: "GET", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/user/following/:username" - }, - checkFollowingForUser: { - method: "GET", - params: { - target_user: { - required: true, - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/following/:target_user" - }, - createGpgKey: { - method: "POST", - params: { - armored_public_key: { - type: "string" - } - }, - url: "/user/gpg_keys" - }, - createPublicKey: { - method: "POST", - params: { - key: { - type: "string" - }, - title: { - type: "string" - } - }, - url: "/user/keys" - }, - deleteEmails: { - method: "DELETE", - params: { - emails: { - required: true, - type: "string[]" - } - }, - url: "/user/emails" - }, - deleteGpgKey: { - method: "DELETE", - params: { - gpg_key_id: { - required: true, - type: "integer" - } - }, - url: "/user/gpg_keys/:gpg_key_id" - }, - deletePublicKey: { - method: "DELETE", - params: { - key_id: { - required: true, - type: "integer" - } - }, - url: "/user/keys/:key_id" - }, - follow: { - method: "PUT", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/user/following/:username" - }, - getAuthenticated: { - method: "GET", - params: {}, - url: "/user" - }, - getByUsername: { - method: "GET", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/users/:username" - }, - getContextForUser: { - 
method: "GET", - params: { - subject_id: { - type: "string" - }, - subject_type: { - enum: ["organization", "repository", "issue", "pull_request"], - type: "string" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/hovercard" - }, - getGpgKey: { - method: "GET", - params: { - gpg_key_id: { - required: true, - type: "integer" - } - }, - url: "/user/gpg_keys/:gpg_key_id" - }, - getPublicKey: { - method: "GET", - params: { - key_id: { - required: true, - type: "integer" - } - }, - url: "/user/keys/:key_id" - }, - list: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - since: { - type: "string" - } - }, - url: "/users" - }, - listBlocked: { - method: "GET", - params: {}, - url: "/user/blocks" - }, - listEmails: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/emails" - }, - listFollowersForAuthenticatedUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/followers" - }, - listFollowersForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/followers" - }, - listFollowingForAuthenticatedUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/following" - }, - listFollowingForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/following" - }, - listGpgKeys: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/gpg_keys" - }, - listGpgKeysForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/gpg_keys" - }, - listPublicEmails: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/public_emails" - }, - listPublicKeys: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - } - }, - url: "/user/keys" - }, - listPublicKeysForUser: { - method: "GET", - params: { - page: { - type: "integer" - }, - per_page: { - type: "integer" - }, - username: { - required: true, - type: "string" - } - }, - url: "/users/:username/keys" - }, - togglePrimaryEmailVisibility: { - method: "PATCH", - params: { - email: { - required: true, - type: "string" - }, - visibility: { - required: true, - type: "string" - } - }, - url: "/user/email/visibility" - }, - unblock: { - method: "DELETE", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/user/blocks/:username" - }, - unfollow: { - method: "DELETE", - params: { - username: { - required: true, - type: "string" - } - }, - url: "/user/following/:username" - }, - updateAuthenticated: { - method: "PATCH", - params: { - bio: { - type: "string" - }, - blog: { - type: "string" - }, - company: { - type: "string" - }, - email: { - type: "string" - }, - hireable: { - type: "boolean" - }, - location: { - type: "string" - }, - name: { - type: "string" - } - }, - url: "/user" - } - } -}; - -const VERSION = "2.4.0"; - -function registerEndpoints(octokit, routes) { - 
Object.keys(routes).forEach(namespaceName => { - if (!octokit[namespaceName]) { - octokit[namespaceName] = {}; - } - - Object.keys(routes[namespaceName]).forEach(apiName => { - const apiOptions = routes[namespaceName][apiName]; - const endpointDefaults = ["method", "url", "headers"].reduce((map, key) => { - if (typeof apiOptions[key] !== "undefined") { - map[key] = apiOptions[key]; - } - - return map; - }, {}); - endpointDefaults.request = { - validate: apiOptions.params - }; - let request = octokit.request.defaults(endpointDefaults); // patch request & endpoint methods to support deprecated parameters. - // Not the most elegant solution, but we don’t want to move deprecation - // logic into octokit/endpoint.js as it’s out of scope - - const hasDeprecatedParam = Object.keys(apiOptions.params || {}).find(key => apiOptions.params[key].deprecated); - - if (hasDeprecatedParam) { - const patch = patchForDeprecation.bind(null, octokit, apiOptions); - request = patch(octokit.request.defaults(endpointDefaults), `.${namespaceName}.${apiName}()`); - request.endpoint = patch(request.endpoint, `.${namespaceName}.${apiName}.endpoint()`); - request.endpoint.merge = patch(request.endpoint.merge, `.${namespaceName}.${apiName}.endpoint.merge()`); - } - - if (apiOptions.deprecated) { - octokit[namespaceName][apiName] = Object.assign(function deprecatedEndpointMethod() { - octokit.log.warn(new deprecation.Deprecation(`[@octokit/rest] ${apiOptions.deprecated}`)); - octokit[namespaceName][apiName] = request; - return request.apply(null, arguments); - }, request); - return; - } - - octokit[namespaceName][apiName] = request; - }); - }); -} - -function patchForDeprecation(octokit, apiOptions, method, methodName) { - const patchedMethod = options => { - options = Object.assign({}, options); - Object.keys(options).forEach(key => { - if (apiOptions.params[key] && apiOptions.params[key].deprecated) { - const aliasKey = apiOptions.params[key].alias; - octokit.log.warn(new deprecation.Deprecation(`[@octokit/rest] "${key}" parameter is deprecated for "${methodName}". Use "${aliasKey}" instead`)); - - if (!(aliasKey in options)) { - options[aliasKey] = options[key]; - } - - delete options[key]; - } - }); - return method(options); - }; - - Object.keys(method).forEach(key => { - patchedMethod[key] = method[key]; - }); - return patchedMethod; -} - -/** - * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary - * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is - * done, we will remove the registerEndpoints methods and return the methods - * directly as with the other plugins. At that point we will also remove the - * legacy workarounds and deprecations. 
- * - * See the plan at - * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 - */ - -function restEndpointMethods(octokit) { - // @ts-ignore - octokit.registerEndpoints = registerEndpoints.bind(null, octokit); - registerEndpoints(octokit, endpointsByScope); // Aliasing scopes for backward compatibility - // See https://github.com/octokit/rest.js/pull/1134 - - [["gitdata", "git"], ["authorization", "oauthAuthorizations"], ["pullRequests", "pulls"]].forEach(([deprecatedScope, scope]) => { - Object.defineProperty(octokit, deprecatedScope, { - get() { - octokit.log.warn( // @ts-ignore - new deprecation.Deprecation(`[@octokit/plugin-rest-endpoint-methods] "octokit.${deprecatedScope}.*" methods are deprecated, use "octokit.${scope}.*" instead`)); // @ts-ignore - - return octokit[scope]; - } - - }); - }); - return {}; -} -restEndpointMethods.VERSION = VERSION; - -exports.restEndpointMethods = restEndpointMethods; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 850: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = paginationMethodsPlugin - -function paginationMethodsPlugin (octokit) { - octokit.getFirstPage = __webpack_require__(777).bind(null, octokit) - octokit.getLastPage = __webpack_require__(649).bind(null, octokit) - octokit.getNextPage = __webpack_require__(550).bind(null, octokit) - octokit.getPreviousPage = __webpack_require__(563).bind(null, octokit) - octokit.hasFirstPage = __webpack_require__(536) - octokit.hasLastPage = __webpack_require__(336) - octokit.hasNextPage = __webpack_require__(929) - octokit.hasPreviousPage = __webpack_require__(558) -} - - -/***/ }), - -/***/ 854: -/***/ (function(module) { - -/** - * lodash (Custom Build) - * Build: `lodash modularize exports="npm" -o ./` - * Copyright jQuery Foundation and other contributors - * Released under MIT license - * Based on Underscore.js 1.8.3 - * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors - */ - -/** Used as the `TypeError` message for "Functions" methods. */ -var FUNC_ERROR_TEXT = 'Expected a function'; - -/** Used to stand-in for `undefined` hash values. */ -var HASH_UNDEFINED = '__lodash_hash_undefined__'; - -/** Used as references for various `Number` constants. */ -var INFINITY = 1 / 0; - -/** `Object#toString` result references. */ -var funcTag = '[object Function]', - genTag = '[object GeneratorFunction]', - symbolTag = '[object Symbol]'; - -/** Used to match property names within property paths. */ -var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, - reIsPlainProp = /^\w*$/, - reLeadingDot = /^\./, - rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g; - -/** - * Used to match `RegExp` - * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). - */ -var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; - -/** Used to match backslashes in property paths. */ -var reEscapeChar = /\\(\\)?/g; - -/** Used to detect host constructors (Safari). */ -var reIsHostCtor = /^\[object .+?Constructor\]$/; - -/** Detect free variable `global` from Node.js. */ -var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; - -/** Detect free variable `self`. */ -var freeSelf = typeof self == 'object' && self && self.Object === Object && self; - -/** Used as a reference to the global object. */ -var root = freeGlobal || freeSelf || Function('return this')(); - -/** - * Gets the value at `key` of `object`. 
- * - * @private - * @param {Object} [object] The object to query. - * @param {string} key The key of the property to get. - * @returns {*} Returns the property value. - */ -function getValue(object, key) { - return object == null ? undefined : object[key]; -} - -/** - * Checks if `value` is a host object in IE < 9. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a host object, else `false`. - */ -function isHostObject(value) { - // Many host objects are `Object` objects that can coerce to strings - // despite having improperly defined `toString` methods. - var result = false; - if (value != null && typeof value.toString != 'function') { - try { - result = !!(value + ''); - } catch (e) {} - } - return result; -} - -/** Used for built-in method references. */ -var arrayProto = Array.prototype, - funcProto = Function.prototype, - objectProto = Object.prototype; - -/** Used to detect overreaching core-js shims. */ -var coreJsData = root['__core-js_shared__']; - -/** Used to detect methods masquerading as native. */ -var maskSrcKey = (function() { - var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); - return uid ? ('Symbol(src)_1.' + uid) : ''; -}()); - -/** Used to resolve the decompiled source of functions. */ -var funcToString = funcProto.toString; - -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; - -/** - * Used to resolve the - * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) - * of values. - */ -var objectToString = objectProto.toString; - -/** Used to detect if a method is native. */ -var reIsNative = RegExp('^' + - funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') - .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' -); - -/** Built-in value references. */ -var Symbol = root.Symbol, - splice = arrayProto.splice; - -/* Built-in method references that are verified to be native. */ -var Map = getNative(root, 'Map'), - nativeCreate = getNative(Object, 'create'); - -/** Used to convert symbols to primitives and strings. */ -var symbolProto = Symbol ? Symbol.prototype : undefined, - symbolToString = symbolProto ? symbolProto.toString : undefined; - -/** - * Creates a hash object. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function Hash(entries) { - var index = -1, - length = entries ? entries.length : 0; - - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } -} - -/** - * Removes all key-value entries from the hash. - * - * @private - * @name clear - * @memberOf Hash - */ -function hashClear() { - this.__data__ = nativeCreate ? nativeCreate(null) : {}; -} - -/** - * Removes `key` and its value from the hash. - * - * @private - * @name delete - * @memberOf Hash - * @param {Object} hash The hash to modify. - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function hashDelete(key) { - return this.has(key) && delete this.__data__[key]; -} - -/** - * Gets the hash value for `key`. - * - * @private - * @name get - * @memberOf Hash - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. 
- */ -function hashGet(key) { - var data = this.__data__; - if (nativeCreate) { - var result = data[key]; - return result === HASH_UNDEFINED ? undefined : result; - } - return hasOwnProperty.call(data, key) ? data[key] : undefined; -} - -/** - * Checks if a hash value for `key` exists. - * - * @private - * @name has - * @memberOf Hash - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function hashHas(key) { - var data = this.__data__; - return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key); -} - -/** - * Sets the hash `key` to `value`. - * - * @private - * @name set - * @memberOf Hash - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the hash instance. - */ -function hashSet(key, value) { - var data = this.__data__; - data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; - return this; -} - -// Add methods to `Hash`. -Hash.prototype.clear = hashClear; -Hash.prototype['delete'] = hashDelete; -Hash.prototype.get = hashGet; -Hash.prototype.has = hashHas; -Hash.prototype.set = hashSet; - -/** - * Creates an list cache object. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function ListCache(entries) { - var index = -1, - length = entries ? entries.length : 0; - - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } -} - -/** - * Removes all key-value entries from the list cache. - * - * @private - * @name clear - * @memberOf ListCache - */ -function listCacheClear() { - this.__data__ = []; -} - -/** - * Removes `key` and its value from the list cache. - * - * @private - * @name delete - * @memberOf ListCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function listCacheDelete(key) { - var data = this.__data__, - index = assocIndexOf(data, key); - - if (index < 0) { - return false; - } - var lastIndex = data.length - 1; - if (index == lastIndex) { - data.pop(); - } else { - splice.call(data, index, 1); - } - return true; -} - -/** - * Gets the list cache value for `key`. - * - * @private - * @name get - * @memberOf ListCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function listCacheGet(key) { - var data = this.__data__, - index = assocIndexOf(data, key); - - return index < 0 ? undefined : data[index][1]; -} - -/** - * Checks if a list cache value for `key` exists. - * - * @private - * @name has - * @memberOf ListCache - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function listCacheHas(key) { - return assocIndexOf(this.__data__, key) > -1; -} - -/** - * Sets the list cache `key` to `value`. - * - * @private - * @name set - * @memberOf ListCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the list cache instance. - */ -function listCacheSet(key, value) { - var data = this.__data__, - index = assocIndexOf(data, key); - - if (index < 0) { - data.push([key, value]); - } else { - data[index][1] = value; - } - return this; -} - -// Add methods to `ListCache`. 
-ListCache.prototype.clear = listCacheClear; -ListCache.prototype['delete'] = listCacheDelete; -ListCache.prototype.get = listCacheGet; -ListCache.prototype.has = listCacheHas; -ListCache.prototype.set = listCacheSet; - -/** - * Creates a map cache object to store key-value pairs. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function MapCache(entries) { - var index = -1, - length = entries ? entries.length : 0; - - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } -} - -/** - * Removes all key-value entries from the map. - * - * @private - * @name clear - * @memberOf MapCache - */ -function mapCacheClear() { - this.__data__ = { - 'hash': new Hash, - 'map': new (Map || ListCache), - 'string': new Hash - }; -} - -/** - * Removes `key` and its value from the map. - * - * @private - * @name delete - * @memberOf MapCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function mapCacheDelete(key) { - return getMapData(this, key)['delete'](key); -} - -/** - * Gets the map value for `key`. - * - * @private - * @name get - * @memberOf MapCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function mapCacheGet(key) { - return getMapData(this, key).get(key); -} - -/** - * Checks if a map value for `key` exists. - * - * @private - * @name has - * @memberOf MapCache - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function mapCacheHas(key) { - return getMapData(this, key).has(key); -} - -/** - * Sets the map `key` to `value`. - * - * @private - * @name set - * @memberOf MapCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the map cache instance. - */ -function mapCacheSet(key, value) { - getMapData(this, key).set(key, value); - return this; -} - -// Add methods to `MapCache`. -MapCache.prototype.clear = mapCacheClear; -MapCache.prototype['delete'] = mapCacheDelete; -MapCache.prototype.get = mapCacheGet; -MapCache.prototype.has = mapCacheHas; -MapCache.prototype.set = mapCacheSet; - -/** - * Gets the index at which the `key` is found in `array` of key-value pairs. - * - * @private - * @param {Array} array The array to inspect. - * @param {*} key The key to search for. - * @returns {number} Returns the index of the matched value, else `-1`. - */ -function assocIndexOf(array, key) { - var length = array.length; - while (length--) { - if (eq(array[length][0], key)) { - return length; - } - } - return -1; -} - -/** - * The base implementation of `_.get` without support for default values. - * - * @private - * @param {Object} object The object to query. - * @param {Array|string} path The path of the property to get. - * @returns {*} Returns the resolved value. - */ -function baseGet(object, path) { - path = isKey(path, object) ? [path] : castPath(path); - - var index = 0, - length = path.length; - - while (object != null && index < length) { - object = object[toKey(path[index++])]; - } - return (index && index == length) ? object : undefined; -} - -/** - * The base implementation of `_.isNative` without bad shim checks. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a native function, - * else `false`. 
- */ -function baseIsNative(value) { - if (!isObject(value) || isMasked(value)) { - return false; - } - var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor; - return pattern.test(toSource(value)); -} - -/** - * The base implementation of `_.toString` which doesn't convert nullish - * values to empty strings. - * - * @private - * @param {*} value The value to process. - * @returns {string} Returns the string. - */ -function baseToString(value) { - // Exit early for strings to avoid a performance hit in some environments. - if (typeof value == 'string') { - return value; - } - if (isSymbol(value)) { - return symbolToString ? symbolToString.call(value) : ''; - } - var result = (value + ''); - return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; -} - -/** - * Casts `value` to a path array if it's not one. - * - * @private - * @param {*} value The value to inspect. - * @returns {Array} Returns the cast property path array. - */ -function castPath(value) { - return isArray(value) ? value : stringToPath(value); -} - -/** - * Gets the data for `map`. - * - * @private - * @param {Object} map The map to query. - * @param {string} key The reference key. - * @returns {*} Returns the map data. - */ -function getMapData(map, key) { - var data = map.__data__; - return isKeyable(key) - ? data[typeof key == 'string' ? 'string' : 'hash'] - : data.map; -} - -/** - * Gets the native function at `key` of `object`. - * - * @private - * @param {Object} object The object to query. - * @param {string} key The key of the method to get. - * @returns {*} Returns the function if it's native, else `undefined`. - */ -function getNative(object, key) { - var value = getValue(object, key); - return baseIsNative(value) ? value : undefined; -} - -/** - * Checks if `value` is a property name and not a property path. - * - * @private - * @param {*} value The value to check. - * @param {Object} [object] The object to query keys on. - * @returns {boolean} Returns `true` if `value` is a property name, else `false`. - */ -function isKey(value, object) { - if (isArray(value)) { - return false; - } - var type = typeof value; - if (type == 'number' || type == 'symbol' || type == 'boolean' || - value == null || isSymbol(value)) { - return true; - } - return reIsPlainProp.test(value) || !reIsDeepProp.test(value) || - (object != null && value in Object(object)); -} - -/** - * Checks if `value` is suitable for use as unique object key. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is suitable, else `false`. - */ -function isKeyable(value) { - var type = typeof value; - return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') - ? (value !== '__proto__') - : (value === null); -} - -/** - * Checks if `func` has its source masked. - * - * @private - * @param {Function} func The function to check. - * @returns {boolean} Returns `true` if `func` is masked, else `false`. - */ -function isMasked(func) { - return !!maskSrcKey && (maskSrcKey in func); -} - -/** - * Converts `string` to a property path array. - * - * @private - * @param {string} string The string to convert. - * @returns {Array} Returns the property path array. - */ -var stringToPath = memoize(function(string) { - string = toString(string); - - var result = []; - if (reLeadingDot.test(string)) { - result.push(''); - } - string.replace(rePropName, function(match, number, quote, string) { - result.push(quote ? 
string.replace(reEscapeChar, '$1') : (number || match)); - }); - return result; -}); - -/** - * Converts `value` to a string key if it's not a string or symbol. - * - * @private - * @param {*} value The value to inspect. - * @returns {string|symbol} Returns the key. - */ -function toKey(value) { - if (typeof value == 'string' || isSymbol(value)) { - return value; - } - var result = (value + ''); - return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; -} - -/** - * Converts `func` to its source code. - * - * @private - * @param {Function} func The function to process. - * @returns {string} Returns the source code. - */ -function toSource(func) { - if (func != null) { - try { - return funcToString.call(func); - } catch (e) {} - try { - return (func + ''); - } catch (e) {} - } - return ''; -} - -/** - * Creates a function that memoizes the result of `func`. If `resolver` is - * provided, it determines the cache key for storing the result based on the - * arguments provided to the memoized function. By default, the first argument - * provided to the memoized function is used as the map cache key. The `func` - * is invoked with the `this` binding of the memoized function. - * - * **Note:** The cache is exposed as the `cache` property on the memoized - * function. Its creation may be customized by replacing the `_.memoize.Cache` - * constructor with one whose instances implement the - * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object) - * method interface of `delete`, `get`, `has`, and `set`. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Function - * @param {Function} func The function to have its output memoized. - * @param {Function} [resolver] The function to resolve the cache key. - * @returns {Function} Returns the new memoized function. - * @example - * - * var object = { 'a': 1, 'b': 2 }; - * var other = { 'c': 3, 'd': 4 }; - * - * var values = _.memoize(_.values); - * values(object); - * // => [1, 2] - * - * values(other); - * // => [3, 4] - * - * object.a = 2; - * values(object); - * // => [1, 2] - * - * // Modify the result cache. - * values.cache.set(object, ['a', 'b']); - * values(object); - * // => ['a', 'b'] - * - * // Replace `_.memoize.Cache`. - * _.memoize.Cache = WeakMap; - */ -function memoize(func, resolver) { - if (typeof func != 'function' || (resolver && typeof resolver != 'function')) { - throw new TypeError(FUNC_ERROR_TEXT); - } - var memoized = function() { - var args = arguments, - key = resolver ? resolver.apply(this, args) : args[0], - cache = memoized.cache; - - if (cache.has(key)) { - return cache.get(key); - } - var result = func.apply(this, args); - memoized.cache = cache.set(key, result); - return result; - }; - memoized.cache = new (memoize.Cache || MapCache); - return memoized; -} - -// Assign cache to `_.memoize`. -memoize.Cache = MapCache; - -/** - * Performs a - * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) - * comparison between two values to determine if they are equivalent. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to compare. - * @param {*} other The other value to compare. - * @returns {boolean} Returns `true` if the values are equivalent, else `false`. 
- * @example - * - * var object = { 'a': 1 }; - * var other = { 'a': 1 }; - * - * _.eq(object, object); - * // => true - * - * _.eq(object, other); - * // => false - * - * _.eq('a', 'a'); - * // => true - * - * _.eq('a', Object('a')); - * // => false - * - * _.eq(NaN, NaN); - * // => true - */ -function eq(value, other) { - return value === other || (value !== value && other !== other); -} - -/** - * Checks if `value` is classified as an `Array` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an array, else `false`. - * @example - * - * _.isArray([1, 2, 3]); - * // => true - * - * _.isArray(document.body.children); - * // => false - * - * _.isArray('abc'); - * // => false - * - * _.isArray(_.noop); - * // => false - */ -var isArray = Array.isArray; - -/** - * Checks if `value` is classified as a `Function` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a function, else `false`. - * @example - * - * _.isFunction(_); - * // => true - * - * _.isFunction(/abc/); - * // => false - */ -function isFunction(value) { - // The use of `Object#toString` avoids issues with the `typeof` operator - // in Safari 8-9 which returns 'object' for typed array and other constructors. - var tag = isObject(value) ? objectToString.call(value) : ''; - return tag == funcTag || tag == genTag; -} - -/** - * Checks if `value` is the - * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) - * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an object, else `false`. - * @example - * - * _.isObject({}); - * // => true - * - * _.isObject([1, 2, 3]); - * // => true - * - * _.isObject(_.noop); - * // => true - * - * _.isObject(null); - * // => false - */ -function isObject(value) { - var type = typeof value; - return !!value && (type == 'object' || type == 'function'); -} - -/** - * Checks if `value` is object-like. A value is object-like if it's not `null` - * and has a `typeof` result of "object". - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is object-like, else `false`. - * @example - * - * _.isObjectLike({}); - * // => true - * - * _.isObjectLike([1, 2, 3]); - * // => true - * - * _.isObjectLike(_.noop); - * // => false - * - * _.isObjectLike(null); - * // => false - */ -function isObjectLike(value) { - return !!value && typeof value == 'object'; -} - -/** - * Checks if `value` is classified as a `Symbol` primitive or object. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. - * @example - * - * _.isSymbol(Symbol.iterator); - * // => true - * - * _.isSymbol('abc'); - * // => false - */ -function isSymbol(value) { - return typeof value == 'symbol' || - (isObjectLike(value) && objectToString.call(value) == symbolTag); -} - -/** - * Converts `value` to a string. An empty string is returned for `null` - * and `undefined` values. The sign of `-0` is preserved. 
- * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to process. - * @returns {string} Returns the string. - * @example - * - * _.toString(null); - * // => '' - * - * _.toString(-0); - * // => '-0' - * - * _.toString([1, 2, 3]); - * // => '1,2,3' - */ -function toString(value) { - return value == null ? '' : baseToString(value); -} - -/** - * Gets the value at `path` of `object`. If the resolved value is - * `undefined`, the `defaultValue` is returned in its place. - * - * @static - * @memberOf _ - * @since 3.7.0 - * @category Object - * @param {Object} object The object to query. - * @param {Array|string} path The path of the property to get. - * @param {*} [defaultValue] The value returned for `undefined` resolved values. - * @returns {*} Returns the resolved value. - * @example - * - * var object = { 'a': [{ 'b': { 'c': 3 } }] }; - * - * _.get(object, 'a[0].b.c'); - * // => 3 - * - * _.get(object, ['a', '0', 'b', 'c']); - * // => 3 - * - * _.get(object, 'a.b.c', 'default'); - * // => 'default' - */ -function get(object, path, defaultValue) { - var result = object == null ? undefined : baseGet(object, path); - return result === undefined ? defaultValue : result; -} - -module.exports = get; - - -/***/ }), - -/***/ 855: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = registerPlugin; - -const factory = __webpack_require__(47); - -function registerPlugin(plugins, pluginFunction) { - return factory( - plugins.includes(pluginFunction) ? plugins : plugins.concat(pluginFunction) - ); -} - - -/***/ }), - -/***/ 856: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = __webpack_require__(141); - - -/***/ }), - -/***/ 863: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = authenticationBeforeRequest; - -const btoa = __webpack_require__(675); - -const withAuthorizationPrefix = __webpack_require__(143); - -function authenticationBeforeRequest(state, options) { - if (typeof state.auth === "string") { - options.headers.authorization = withAuthorizationPrefix(state.auth); - return; - } - - if (state.auth.username) { - const hash = btoa(`${state.auth.username}:${state.auth.password}`); - options.headers.authorization = `Basic ${hash}`; - if (state.otp) { - options.headers["x-github-otp"] = state.otp; - } - return; - } - - if (state.auth.clientId) { - // There is a special case for OAuth applications, when `clientId` and `clientSecret` is passed as - // Basic Authorization instead of query parameters. The only routes where that applies share the same - // URL though: `/applications/:client_id/tokens/:access_token`. - // - // 1. [Check an authorization](https://developer.github.com/v3/oauth_authorizations/#check-an-authorization) - // 2. [Reset an authorization](https://developer.github.com/v3/oauth_authorizations/#reset-an-authorization) - // 3. [Revoke an authorization for an application](https://developer.github.com/v3/oauth_authorizations/#revoke-an-authorization-for-an-application) - // - // We identify by checking the URL. It must merge both "/applications/:client_id/tokens/:access_token" - // as well as "/applications/123/tokens/token456" - if (/\/applications\/:?[\w_]+\/tokens\/:?[\w_]+($|\?)/.test(options.url)) { - const hash = btoa(`${state.auth.clientId}:${state.auth.clientSecret}`); - options.headers.authorization = `Basic ${hash}`; - return; - } - - options.url += options.url.indexOf("?") === -1 ? "?" 
: "&"; - options.url += `client_id=${state.auth.clientId}&client_secret=${state.auth.clientSecret}`; - return; - } - - return Promise.resolve() - - .then(() => { - return state.auth(); - }) - - .then(authorization => { - options.headers.authorization = withAuthorizationPrefix(authorization); - }); -} - - -/***/ }), - -/***/ 866: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -var shebangRegex = __webpack_require__(816); - -module.exports = function (str) { - var match = str.match(shebangRegex); - - if (!match) { - return null; - } - - var arr = match[0].replace(/#! ?/, '').split(' '); - var bin = arr[0].split('/').pop(); - var arg = arr[1]; - - return (bin === 'env' ? - arg : - bin + (arg ? ' ' + arg : '') - ); -}; - - -/***/ }), - -/***/ 881: -/***/ (function(module) { - -"use strict"; - - -const isWin = process.platform === 'win32'; - -function notFoundError(original, syscall) { - return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), { - code: 'ENOENT', - errno: 'ENOENT', - syscall: `${syscall} ${original.command}`, - path: original.command, - spawnargs: original.args, - }); -} - -function hookChildProcess(cp, parsed) { - if (!isWin) { - return; - } - - const originalEmit = cp.emit; - - cp.emit = function (name, arg1) { - // If emitting "exit" event and exit code is 1, we need to check if - // the command exists and emit an "error" instead - // See https://github.com/IndigoUnited/node-cross-spawn/issues/16 - if (name === 'exit') { - const err = verifyENOENT(arg1, parsed, 'spawn'); - - if (err) { - return originalEmit.call(cp, 'error', err); - } - } - - return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params - }; -} - -function verifyENOENT(status, parsed) { - if (isWin && status === 1 && !parsed.file) { - return notFoundError(parsed.original, 'spawn'); - } - - return null; -} - -function verifyENOENTSync(status, parsed) { - if (isWin && status === 1 && !parsed.file) { - return notFoundError(parsed.original, 'spawnSync'); - } - - return null; -} - -module.exports = { - hookChildProcess, - verifyENOENT, - verifyENOENTSync, - notFoundError, -}; - - -/***/ }), - -/***/ 883: -/***/ (function(module) { - -/** - * lodash (Custom Build) - * Build: `lodash modularize exports="npm" -o ./` - * Copyright jQuery Foundation and other contributors - * Released under MIT license - * Based on Underscore.js 1.8.3 - * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors - */ - -/** Used as the `TypeError` message for "Functions" methods. */ -var FUNC_ERROR_TEXT = 'Expected a function'; - -/** Used to stand-in for `undefined` hash values. */ -var HASH_UNDEFINED = '__lodash_hash_undefined__'; - -/** Used as references for various `Number` constants. */ -var INFINITY = 1 / 0, - MAX_SAFE_INTEGER = 9007199254740991; - -/** `Object#toString` result references. */ -var funcTag = '[object Function]', - genTag = '[object GeneratorFunction]', - symbolTag = '[object Symbol]'; - -/** Used to match property names within property paths. */ -var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, - reIsPlainProp = /^\w*$/, - reLeadingDot = /^\./, - rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g; - -/** - * Used to match `RegExp` - * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). - */ -var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; - -/** Used to match backslashes in property paths. 
*/ -var reEscapeChar = /\\(\\)?/g; - -/** Used to detect host constructors (Safari). */ -var reIsHostCtor = /^\[object .+?Constructor\]$/; - -/** Used to detect unsigned integer values. */ -var reIsUint = /^(?:0|[1-9]\d*)$/; - -/** Detect free variable `global` from Node.js. */ -var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; - -/** Detect free variable `self`. */ -var freeSelf = typeof self == 'object' && self && self.Object === Object && self; - -/** Used as a reference to the global object. */ -var root = freeGlobal || freeSelf || Function('return this')(); - -/** - * Gets the value at `key` of `object`. - * - * @private - * @param {Object} [object] The object to query. - * @param {string} key The key of the property to get. - * @returns {*} Returns the property value. - */ -function getValue(object, key) { - return object == null ? undefined : object[key]; -} - -/** - * Checks if `value` is a host object in IE < 9. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a host object, else `false`. - */ -function isHostObject(value) { - // Many host objects are `Object` objects that can coerce to strings - // despite having improperly defined `toString` methods. - var result = false; - if (value != null && typeof value.toString != 'function') { - try { - result = !!(value + ''); - } catch (e) {} - } - return result; -} - -/** Used for built-in method references. */ -var arrayProto = Array.prototype, - funcProto = Function.prototype, - objectProto = Object.prototype; - -/** Used to detect overreaching core-js shims. */ -var coreJsData = root['__core-js_shared__']; - -/** Used to detect methods masquerading as native. */ -var maskSrcKey = (function() { - var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); - return uid ? ('Symbol(src)_1.' + uid) : ''; -}()); - -/** Used to resolve the decompiled source of functions. */ -var funcToString = funcProto.toString; - -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; - -/** - * Used to resolve the - * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) - * of values. - */ -var objectToString = objectProto.toString; - -/** Used to detect if a method is native. */ -var reIsNative = RegExp('^' + - funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') - .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' -); - -/** Built-in value references. */ -var Symbol = root.Symbol, - splice = arrayProto.splice; - -/* Built-in method references that are verified to be native. */ -var Map = getNative(root, 'Map'), - nativeCreate = getNative(Object, 'create'); - -/** Used to convert symbols to primitives and strings. */ -var symbolProto = Symbol ? Symbol.prototype : undefined, - symbolToString = symbolProto ? symbolProto.toString : undefined; - -/** - * Creates a hash object. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function Hash(entries) { - var index = -1, - length = entries ? entries.length : 0; - - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } -} - -/** - * Removes all key-value entries from the hash. - * - * @private - * @name clear - * @memberOf Hash - */ -function hashClear() { - this.__data__ = nativeCreate ? 
nativeCreate(null) : {}; -} - -/** - * Removes `key` and its value from the hash. - * - * @private - * @name delete - * @memberOf Hash - * @param {Object} hash The hash to modify. - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function hashDelete(key) { - return this.has(key) && delete this.__data__[key]; -} - -/** - * Gets the hash value for `key`. - * - * @private - * @name get - * @memberOf Hash - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function hashGet(key) { - var data = this.__data__; - if (nativeCreate) { - var result = data[key]; - return result === HASH_UNDEFINED ? undefined : result; - } - return hasOwnProperty.call(data, key) ? data[key] : undefined; -} - -/** - * Checks if a hash value for `key` exists. - * - * @private - * @name has - * @memberOf Hash - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function hashHas(key) { - var data = this.__data__; - return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key); -} - -/** - * Sets the hash `key` to `value`. - * - * @private - * @name set - * @memberOf Hash - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the hash instance. - */ -function hashSet(key, value) { - var data = this.__data__; - data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; - return this; -} - -// Add methods to `Hash`. -Hash.prototype.clear = hashClear; -Hash.prototype['delete'] = hashDelete; -Hash.prototype.get = hashGet; -Hash.prototype.has = hashHas; -Hash.prototype.set = hashSet; - -/** - * Creates an list cache object. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function ListCache(entries) { - var index = -1, - length = entries ? entries.length : 0; - - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } -} - -/** - * Removes all key-value entries from the list cache. - * - * @private - * @name clear - * @memberOf ListCache - */ -function listCacheClear() { - this.__data__ = []; -} - -/** - * Removes `key` and its value from the list cache. - * - * @private - * @name delete - * @memberOf ListCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function listCacheDelete(key) { - var data = this.__data__, - index = assocIndexOf(data, key); - - if (index < 0) { - return false; - } - var lastIndex = data.length - 1; - if (index == lastIndex) { - data.pop(); - } else { - splice.call(data, index, 1); - } - return true; -} - -/** - * Gets the list cache value for `key`. - * - * @private - * @name get - * @memberOf ListCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function listCacheGet(key) { - var data = this.__data__, - index = assocIndexOf(data, key); - - return index < 0 ? undefined : data[index][1]; -} - -/** - * Checks if a list cache value for `key` exists. - * - * @private - * @name has - * @memberOf ListCache - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. 
- */ -function listCacheHas(key) { - return assocIndexOf(this.__data__, key) > -1; -} - -/** - * Sets the list cache `key` to `value`. - * - * @private - * @name set - * @memberOf ListCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the list cache instance. - */ -function listCacheSet(key, value) { - var data = this.__data__, - index = assocIndexOf(data, key); - - if (index < 0) { - data.push([key, value]); - } else { - data[index][1] = value; - } - return this; -} - -// Add methods to `ListCache`. -ListCache.prototype.clear = listCacheClear; -ListCache.prototype['delete'] = listCacheDelete; -ListCache.prototype.get = listCacheGet; -ListCache.prototype.has = listCacheHas; -ListCache.prototype.set = listCacheSet; - -/** - * Creates a map cache object to store key-value pairs. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function MapCache(entries) { - var index = -1, - length = entries ? entries.length : 0; - - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } -} - -/** - * Removes all key-value entries from the map. - * - * @private - * @name clear - * @memberOf MapCache - */ -function mapCacheClear() { - this.__data__ = { - 'hash': new Hash, - 'map': new (Map || ListCache), - 'string': new Hash - }; -} - -/** - * Removes `key` and its value from the map. - * - * @private - * @name delete - * @memberOf MapCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function mapCacheDelete(key) { - return getMapData(this, key)['delete'](key); -} - -/** - * Gets the map value for `key`. - * - * @private - * @name get - * @memberOf MapCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function mapCacheGet(key) { - return getMapData(this, key).get(key); -} - -/** - * Checks if a map value for `key` exists. - * - * @private - * @name has - * @memberOf MapCache - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function mapCacheHas(key) { - return getMapData(this, key).has(key); -} - -/** - * Sets the map `key` to `value`. - * - * @private - * @name set - * @memberOf MapCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the map cache instance. - */ -function mapCacheSet(key, value) { - getMapData(this, key).set(key, value); - return this; -} - -// Add methods to `MapCache`. -MapCache.prototype.clear = mapCacheClear; -MapCache.prototype['delete'] = mapCacheDelete; -MapCache.prototype.get = mapCacheGet; -MapCache.prototype.has = mapCacheHas; -MapCache.prototype.set = mapCacheSet; - -/** - * Assigns `value` to `key` of `object` if the existing value is not equivalent - * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) - * for equality comparisons. - * - * @private - * @param {Object} object The object to modify. - * @param {string} key The key of the property to assign. - * @param {*} value The value to assign. 
- */ -function assignValue(object, key, value) { - var objValue = object[key]; - if (!(hasOwnProperty.call(object, key) && eq(objValue, value)) || - (value === undefined && !(key in object))) { - object[key] = value; - } -} - -/** - * Gets the index at which the `key` is found in `array` of key-value pairs. - * - * @private - * @param {Array} array The array to inspect. - * @param {*} key The key to search for. - * @returns {number} Returns the index of the matched value, else `-1`. - */ -function assocIndexOf(array, key) { - var length = array.length; - while (length--) { - if (eq(array[length][0], key)) { - return length; - } - } - return -1; -} - -/** - * The base implementation of `_.isNative` without bad shim checks. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a native function, - * else `false`. - */ -function baseIsNative(value) { - if (!isObject(value) || isMasked(value)) { - return false; - } - var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor; - return pattern.test(toSource(value)); -} - -/** - * The base implementation of `_.set`. - * - * @private - * @param {Object} object The object to modify. - * @param {Array|string} path The path of the property to set. - * @param {*} value The value to set. - * @param {Function} [customizer] The function to customize path creation. - * @returns {Object} Returns `object`. - */ -function baseSet(object, path, value, customizer) { - if (!isObject(object)) { - return object; - } - path = isKey(path, object) ? [path] : castPath(path); - - var index = -1, - length = path.length, - lastIndex = length - 1, - nested = object; - - while (nested != null && ++index < length) { - var key = toKey(path[index]), - newValue = value; - - if (index != lastIndex) { - var objValue = nested[key]; - newValue = customizer ? customizer(objValue, key, nested) : undefined; - if (newValue === undefined) { - newValue = isObject(objValue) - ? objValue - : (isIndex(path[index + 1]) ? [] : {}); - } - } - assignValue(nested, key, newValue); - nested = nested[key]; - } - return object; -} - -/** - * The base implementation of `_.toString` which doesn't convert nullish - * values to empty strings. - * - * @private - * @param {*} value The value to process. - * @returns {string} Returns the string. - */ -function baseToString(value) { - // Exit early for strings to avoid a performance hit in some environments. - if (typeof value == 'string') { - return value; - } - if (isSymbol(value)) { - return symbolToString ? symbolToString.call(value) : ''; - } - var result = (value + ''); - return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; -} - -/** - * Casts `value` to a path array if it's not one. - * - * @private - * @param {*} value The value to inspect. - * @returns {Array} Returns the cast property path array. - */ -function castPath(value) { - return isArray(value) ? value : stringToPath(value); -} - -/** - * Gets the data for `map`. - * - * @private - * @param {Object} map The map to query. - * @param {string} key The reference key. - * @returns {*} Returns the map data. - */ -function getMapData(map, key) { - var data = map.__data__; - return isKeyable(key) - ? data[typeof key == 'string' ? 'string' : 'hash'] - : data.map; -} - -/** - * Gets the native function at `key` of `object`. - * - * @private - * @param {Object} object The object to query. - * @param {string} key The key of the method to get. 
- * @returns {*} Returns the function if it's native, else `undefined`. - */ -function getNative(object, key) { - var value = getValue(object, key); - return baseIsNative(value) ? value : undefined; -} - -/** - * Checks if `value` is a valid array-like index. - * - * @private - * @param {*} value The value to check. - * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. - * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. - */ -function isIndex(value, length) { - length = length == null ? MAX_SAFE_INTEGER : length; - return !!length && - (typeof value == 'number' || reIsUint.test(value)) && - (value > -1 && value % 1 == 0 && value < length); -} - -/** - * Checks if `value` is a property name and not a property path. - * - * @private - * @param {*} value The value to check. - * @param {Object} [object] The object to query keys on. - * @returns {boolean} Returns `true` if `value` is a property name, else `false`. - */ -function isKey(value, object) { - if (isArray(value)) { - return false; - } - var type = typeof value; - if (type == 'number' || type == 'symbol' || type == 'boolean' || - value == null || isSymbol(value)) { - return true; - } - return reIsPlainProp.test(value) || !reIsDeepProp.test(value) || - (object != null && value in Object(object)); -} - -/** - * Checks if `value` is suitable for use as unique object key. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is suitable, else `false`. - */ -function isKeyable(value) { - var type = typeof value; - return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') - ? (value !== '__proto__') - : (value === null); -} - -/** - * Checks if `func` has its source masked. - * - * @private - * @param {Function} func The function to check. - * @returns {boolean} Returns `true` if `func` is masked, else `false`. - */ -function isMasked(func) { - return !!maskSrcKey && (maskSrcKey in func); -} - -/** - * Converts `string` to a property path array. - * - * @private - * @param {string} string The string to convert. - * @returns {Array} Returns the property path array. - */ -var stringToPath = memoize(function(string) { - string = toString(string); - - var result = []; - if (reLeadingDot.test(string)) { - result.push(''); - } - string.replace(rePropName, function(match, number, quote, string) { - result.push(quote ? string.replace(reEscapeChar, '$1') : (number || match)); - }); - return result; -}); - -/** - * Converts `value` to a string key if it's not a string or symbol. - * - * @private - * @param {*} value The value to inspect. - * @returns {string|symbol} Returns the key. - */ -function toKey(value) { - if (typeof value == 'string' || isSymbol(value)) { - return value; - } - var result = (value + ''); - return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; -} - -/** - * Converts `func` to its source code. - * - * @private - * @param {Function} func The function to process. - * @returns {string} Returns the source code. - */ -function toSource(func) { - if (func != null) { - try { - return funcToString.call(func); - } catch (e) {} - try { - return (func + ''); - } catch (e) {} - } - return ''; -} - -/** - * Creates a function that memoizes the result of `func`. If `resolver` is - * provided, it determines the cache key for storing the result based on the - * arguments provided to the memoized function. 
By default, the first argument - * provided to the memoized function is used as the map cache key. The `func` - * is invoked with the `this` binding of the memoized function. - * - * **Note:** The cache is exposed as the `cache` property on the memoized - * function. Its creation may be customized by replacing the `_.memoize.Cache` - * constructor with one whose instances implement the - * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object) - * method interface of `delete`, `get`, `has`, and `set`. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Function - * @param {Function} func The function to have its output memoized. - * @param {Function} [resolver] The function to resolve the cache key. - * @returns {Function} Returns the new memoized function. - * @example - * - * var object = { 'a': 1, 'b': 2 }; - * var other = { 'c': 3, 'd': 4 }; - * - * var values = _.memoize(_.values); - * values(object); - * // => [1, 2] - * - * values(other); - * // => [3, 4] - * - * object.a = 2; - * values(object); - * // => [1, 2] - * - * // Modify the result cache. - * values.cache.set(object, ['a', 'b']); - * values(object); - * // => ['a', 'b'] - * - * // Replace `_.memoize.Cache`. - * _.memoize.Cache = WeakMap; - */ -function memoize(func, resolver) { - if (typeof func != 'function' || (resolver && typeof resolver != 'function')) { - throw new TypeError(FUNC_ERROR_TEXT); - } - var memoized = function() { - var args = arguments, - key = resolver ? resolver.apply(this, args) : args[0], - cache = memoized.cache; - - if (cache.has(key)) { - return cache.get(key); - } - var result = func.apply(this, args); - memoized.cache = cache.set(key, result); - return result; - }; - memoized.cache = new (memoize.Cache || MapCache); - return memoized; -} - -// Assign cache to `_.memoize`. -memoize.Cache = MapCache; - -/** - * Performs a - * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) - * comparison between two values to determine if they are equivalent. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to compare. - * @param {*} other The other value to compare. - * @returns {boolean} Returns `true` if the values are equivalent, else `false`. - * @example - * - * var object = { 'a': 1 }; - * var other = { 'a': 1 }; - * - * _.eq(object, object); - * // => true - * - * _.eq(object, other); - * // => false - * - * _.eq('a', 'a'); - * // => true - * - * _.eq('a', Object('a')); - * // => false - * - * _.eq(NaN, NaN); - * // => true - */ -function eq(value, other) { - return value === other || (value !== value && other !== other); -} - -/** - * Checks if `value` is classified as an `Array` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an array, else `false`. - * @example - * - * _.isArray([1, 2, 3]); - * // => true - * - * _.isArray(document.body.children); - * // => false - * - * _.isArray('abc'); - * // => false - * - * _.isArray(_.noop); - * // => false - */ -var isArray = Array.isArray; - -/** - * Checks if `value` is classified as a `Function` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a function, else `false`. 
- * @example - * - * _.isFunction(_); - * // => true - * - * _.isFunction(/abc/); - * // => false - */ -function isFunction(value) { - // The use of `Object#toString` avoids issues with the `typeof` operator - // in Safari 8-9 which returns 'object' for typed array and other constructors. - var tag = isObject(value) ? objectToString.call(value) : ''; - return tag == funcTag || tag == genTag; -} - -/** - * Checks if `value` is the - * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) - * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an object, else `false`. - * @example - * - * _.isObject({}); - * // => true - * - * _.isObject([1, 2, 3]); - * // => true - * - * _.isObject(_.noop); - * // => true - * - * _.isObject(null); - * // => false - */ -function isObject(value) { - var type = typeof value; - return !!value && (type == 'object' || type == 'function'); -} - -/** - * Checks if `value` is object-like. A value is object-like if it's not `null` - * and has a `typeof` result of "object". - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is object-like, else `false`. - * @example - * - * _.isObjectLike({}); - * // => true - * - * _.isObjectLike([1, 2, 3]); - * // => true - * - * _.isObjectLike(_.noop); - * // => false - * - * _.isObjectLike(null); - * // => false - */ -function isObjectLike(value) { - return !!value && typeof value == 'object'; -} - -/** - * Checks if `value` is classified as a `Symbol` primitive or object. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. - * @example - * - * _.isSymbol(Symbol.iterator); - * // => true - * - * _.isSymbol('abc'); - * // => false - */ -function isSymbol(value) { - return typeof value == 'symbol' || - (isObjectLike(value) && objectToString.call(value) == symbolTag); -} - -/** - * Converts `value` to a string. An empty string is returned for `null` - * and `undefined` values. The sign of `-0` is preserved. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to process. - * @returns {string} Returns the string. - * @example - * - * _.toString(null); - * // => '' - * - * _.toString(-0); - * // => '-0' - * - * _.toString([1, 2, 3]); - * // => '1,2,3' - */ -function toString(value) { - return value == null ? '' : baseToString(value); -} - -/** - * Sets the value at `path` of `object`. If a portion of `path` doesn't exist, - * it's created. Arrays are created for missing index properties while objects - * are created for all other missing properties. Use `_.setWith` to customize - * `path` creation. - * - * **Note:** This method mutates `object`. - * - * @static - * @memberOf _ - * @since 3.7.0 - * @category Object - * @param {Object} object The object to modify. - * @param {Array|string} path The path of the property to set. - * @param {*} value The value to set. - * @returns {Object} Returns `object`. 
- * @example - * - * var object = { 'a': [{ 'b': { 'c': 3 } }] }; - * - * _.set(object, 'a[0].b.c', 4); - * console.log(object.a[0].b.c); - * // => 4 - * - * _.set(object, ['x', '0', 'y', 'z'], 5); - * console.log(object.x[0].y.z); - * // => 5 - */ -function set(object, path, value) { - return object == null ? object : baseSet(object, path, value); -} - -module.exports = set; - - -/***/ }), - -/***/ 898: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -var request = __webpack_require__(753); -var universalUserAgent = __webpack_require__(796); - -const VERSION = "4.5.0"; - -class GraphqlError extends Error { - constructor(request, response) { - const message = response.data.errors[0].message; - super(message); - Object.assign(this, response.data); - this.name = "GraphqlError"; - this.request = request; // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - } - -} - -const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; -function graphql(request, query, options) { - options = typeof query === "string" ? options = Object.assign({ - query - }, options) : options = query; - const requestOptions = Object.keys(options).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = options[key]; - return result; - } - - if (!result.variables) { - result.variables = {}; - } - - result.variables[key] = options[key]; - return result; - }, {}); - return request(requestOptions).then(response => { - if (response.data.errors) { - throw new GraphqlError(requestOptions, { - data: response.data - }); - } - - return response.data.data; - }); -} - -function withDefaults(request$1, newDefaults) { - const newRequest = request$1.defaults(newDefaults); - - const newApi = (query, options) => { - return graphql(newRequest, query, options); - }; - - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: request.request.endpoint - }); -} - -const graphql$1 = withDefaults(request.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` - }, - method: "POST", - url: "/graphql" -}); -function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: "/graphql" - }); -} - -exports.graphql = graphql$1; -exports.withCustomRequest = withCustomRequest; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 916: -/***/ (function(__unusedmodule, exports) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -const VERSION = "1.0.0"; - -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - -function requestLog(octokit) { - octokit.hook.wrap("request", (request, options) => { - octokit.log.debug("request", options); - const start = Date.now(); - const requestOptions = octokit.request.endpoint.parse(options); - const path = requestOptions.url.replace(options.baseUrl, ""); - return request(options).then(response => { - octokit.log.info(`${requestOptions.method} ${path} - ${response.status} in ${Date.now() - start}ms`); - return response; - }).catch(error => { - octokit.log.info(`${requestOptions.method} ${path} - ${error.status} in ${Date.now() - start}ms`); - throw error; - }); - }); -} -requestLog.VERSION = VERSION; - 
-exports.requestLog = requestLog; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 929: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = hasNextPage - -const deprecate = __webpack_require__(370) -const getPageLinks = __webpack_require__(577) - -function hasNextPage (link) { - deprecate(`octokit.hasNextPage() – You can use octokit.paginate or async iterators instead: https://github.com/octokit/rest.js#pagination.`) - return getPageLinks(link).next -} - - -/***/ }), - -/***/ 948: -/***/ (function(module) { - -"use strict"; - - -/** - * Tries to execute a function and discards any error that occurs. - * @param {Function} fn - Function that might or might not throw an error. - * @returns {?*} Return-value of the function when no error occurred. - */ -module.exports = function(fn) { - - try { return fn() } catch (e) {} - -} - -/***/ }), - -/***/ 950: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -const url = __webpack_require__(835); -function getProxyUrl(reqUrl) { - let usingSsl = reqUrl.protocol === 'https:'; - let proxyUrl; - if (checkBypass(reqUrl)) { - return proxyUrl; - } - let proxyVar; - if (usingSsl) { - proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; - } - else { - proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; - } - if (proxyVar) { - proxyUrl = url.parse(proxyVar); - } - return proxyUrl; -} -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; - } - let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; - } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); - } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; - } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; - } - // Format the request hostname and hostname with port - let upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); - } - // Compare request host against noproxy - for (let upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { - return true; - } - } - return false; -} -exports.checkBypass = checkBypass; - - -/***/ }), - -/***/ 954: -/***/ (function(module) { - -module.exports = validateAuth; - -function validateAuth(auth) { - if (typeof auth === "string") { - return; - } - - if (typeof auth === "function") { - return; - } - - if (auth.username && auth.password) { - return; - } - - if (auth.clientId && auth.clientSecret) { - return; - } - - throw new Error(`Invalid "auth" option: ${JSON.stringify(auth)}`); -} - - -/***/ }), - -/***/ 955: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const path = __webpack_require__(622); -const childProcess = __webpack_require__(129); -const crossSpawn = __webpack_require__(20); -const stripEof = __webpack_require__(768); -const npmRunPath = __webpack_require__(621); -const isStream = __webpack_require__(323); -const _getStream = __webpack_require__(145); -const pFinally = __webpack_require__(697); -const onExit = __webpack_require__(260); -const errname = __webpack_require__(427); -const stdio = __webpack_require__(168); - -const TEN_MEGABYTES = 1000 * 1000 * 10; - -function handleArgs(cmd, args, opts) { - 
let parsed; - - opts = Object.assign({ - extendEnv: true, - env: {} - }, opts); - - if (opts.extendEnv) { - opts.env = Object.assign({}, process.env, opts.env); - } - - if (opts.__winShell === true) { - delete opts.__winShell; - parsed = { - command: cmd, - args, - options: opts, - file: cmd, - original: { - cmd, - args - } - }; - } else { - parsed = crossSpawn._parse(cmd, args, opts); - } - - opts = Object.assign({ - maxBuffer: TEN_MEGABYTES, - buffer: true, - stripEof: true, - preferLocal: true, - localDir: parsed.options.cwd || process.cwd(), - encoding: 'utf8', - reject: true, - cleanup: true - }, parsed.options); - - opts.stdio = stdio(opts); - - if (opts.preferLocal) { - opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir})); - } - - if (opts.detached) { - // #115 - opts.cleanup = false; - } - - if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') { - // #116 - parsed.args.unshift('/q'); - } - - return { - cmd: parsed.command, - args: parsed.args, - opts, - parsed - }; -} - -function handleInput(spawned, input) { - if (input === null || input === undefined) { - return; - } - - if (isStream(input)) { - input.pipe(spawned.stdin); - } else { - spawned.stdin.end(input); - } -} - -function handleOutput(opts, val) { - if (val && opts.stripEof) { - val = stripEof(val); - } - - return val; -} - -function handleShell(fn, cmd, opts) { - let file = '/bin/sh'; - let args = ['-c', cmd]; - - opts = Object.assign({}, opts); - - if (process.platform === 'win32') { - opts.__winShell = true; - file = process.env.comspec || 'cmd.exe'; - args = ['/s', '/c', `"${cmd}"`]; - opts.windowsVerbatimArguments = true; - } - - if (opts.shell) { - file = opts.shell; - delete opts.shell; - } - - return fn(file, args, opts); -} - -function getStream(process, stream, {encoding, buffer, maxBuffer}) { - if (!process[stream]) { - return null; - } - - let ret; - - if (!buffer) { - // TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10 - ret = new Promise((resolve, reject) => { - process[stream] - .once('end', resolve) - .once('error', reject); - }); - } else if (encoding) { - ret = _getStream(process[stream], { - encoding, - maxBuffer - }); - } else { - ret = _getStream.buffer(process[stream], {maxBuffer}); - } - - return ret.catch(err => { - err.stream = stream; - err.message = `${stream} ${err.message}`; - throw err; - }); -} - -function makeError(result, options) { - const {stdout, stderr} = result; - - let err = result.error; - const {code, signal} = result; - - const {parsed, joinedCmd} = options; - const timedOut = options.timedOut || false; - - if (!err) { - let output = ''; - - if (Array.isArray(parsed.opts.stdio)) { - if (parsed.opts.stdio[2] !== 'inherit') { - output += output.length > 0 ? stderr : `\n${stderr}`; - } - - if (parsed.opts.stdio[1] !== 'inherit') { - output += `\n${stdout}`; - } - } else if (parsed.opts.stdio !== 'inherit') { - output = `\n${stderr}${stdout}`; - } - - err = new Error(`Command failed: ${joinedCmd}${output}`); - err.code = code < 0 ? 
errname(code) : code; - } - - err.stdout = stdout; - err.stderr = stderr; - err.failed = true; - err.signal = signal || null; - err.cmd = joinedCmd; - err.timedOut = timedOut; - - return err; -} - -function joinCmd(cmd, args) { - let joinedCmd = cmd; - - if (Array.isArray(args) && args.length > 0) { - joinedCmd += ' ' + args.join(' '); - } - - return joinedCmd; -} - -module.exports = (cmd, args, opts) => { - const parsed = handleArgs(cmd, args, opts); - const {encoding, buffer, maxBuffer} = parsed.opts; - const joinedCmd = joinCmd(cmd, args); - - let spawned; - try { - spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts); - } catch (err) { - return Promise.reject(err); - } - - let removeExitHandler; - if (parsed.opts.cleanup) { - removeExitHandler = onExit(() => { - spawned.kill(); - }); - } - - let timeoutId = null; - let timedOut = false; - - const cleanup = () => { - if (timeoutId) { - clearTimeout(timeoutId); - timeoutId = null; - } - - if (removeExitHandler) { - removeExitHandler(); - } - }; - - if (parsed.opts.timeout > 0) { - timeoutId = setTimeout(() => { - timeoutId = null; - timedOut = true; - spawned.kill(parsed.opts.killSignal); - }, parsed.opts.timeout); - } - - const processDone = new Promise(resolve => { - spawned.on('exit', (code, signal) => { - cleanup(); - resolve({code, signal}); - }); - - spawned.on('error', err => { - cleanup(); - resolve({error: err}); - }); - - if (spawned.stdin) { - spawned.stdin.on('error', err => { - cleanup(); - resolve({error: err}); - }); - } - }); - - function destroy() { - if (spawned.stdout) { - spawned.stdout.destroy(); - } - - if (spawned.stderr) { - spawned.stderr.destroy(); - } - } - - const handlePromise = () => pFinally(Promise.all([ - processDone, - getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}), - getStream(spawned, 'stderr', {encoding, buffer, maxBuffer}) - ]).then(arr => { - const result = arr[0]; - result.stdout = arr[1]; - result.stderr = arr[2]; - - if (result.error || result.code !== 0 || result.signal !== null) { - const err = makeError(result, { - joinedCmd, - parsed, - timedOut - }); - - // TODO: missing some timeout logic for killed - // https://github.com/nodejs/node/blob/master/lib/child_process.js#L203 - // err.killed = spawned.killed || killed; - err.killed = err.killed || spawned.killed; - - if (!parsed.opts.reject) { - return err; - } - - throw err; - } - - return { - stdout: handleOutput(parsed.opts, result.stdout), - stderr: handleOutput(parsed.opts, result.stderr), - code: 0, - failed: false, - killed: false, - signal: null, - cmd: joinedCmd, - timedOut: false - }; - }), destroy); - - crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed); - - handleInput(spawned, parsed.opts.input); - - spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected); - spawned.catch = onrejected => handlePromise().catch(onrejected); - - return spawned; -}; - -// TODO: set `stderr: 'ignore'` when that option is implemented -module.exports.stdout = (...args) => module.exports(...args).then(x => x.stdout); - -// TODO: set `stdout: 'ignore'` when that option is implemented -module.exports.stderr = (...args) => module.exports(...args).then(x => x.stderr); - -module.exports.shell = (cmd, opts) => handleShell(module.exports, cmd, opts); - -module.exports.sync = (cmd, args, opts) => { - const parsed = handleArgs(cmd, args, opts); - const joinedCmd = joinCmd(cmd, args); - - if (isStream(parsed.opts.input)) { - throw new TypeError('The `input` option cannot be a stream in sync 
mode'); - } - - const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts); - result.code = result.status; - - if (result.error || result.status !== 0 || result.signal !== null) { - const err = makeError(result, { - joinedCmd, - parsed - }); - - if (!parsed.opts.reject) { - return err; - } - - throw err; - } - - return { - stdout: handleOutput(parsed.opts, result.stdout), - stderr: handleOutput(parsed.opts, result.stderr), - code: 0, - failed: false, - signal: null, - cmd: joinedCmd, - timedOut: false - }; -}; - -module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts); - - -/***/ }), - -/***/ 966: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const {PassThrough} = __webpack_require__(413); - -module.exports = options => { - options = Object.assign({}, options); - - const {array} = options; - let {encoding} = options; - const buffer = encoding === 'buffer'; - let objectMode = false; - - if (array) { - objectMode = !(encoding || buffer); - } else { - encoding = encoding || 'utf8'; - } - - if (buffer) { - encoding = null; - } - - let len = 0; - const ret = []; - const stream = new PassThrough({objectMode}); - - if (encoding) { - stream.setEncoding(encoding); - } - - stream.on('data', chunk => { - ret.push(chunk); - - if (objectMode) { - len = ret.length; - } else { - len += chunk.length; - } - }); - - stream.getBufferedValue = () => { - if (array) { - return ret; - } - - return buffer ? Buffer.concat(ret, len) : ret.join(''); - }; - - stream.getBufferedLength = () => len; - - return stream; -}; - - -/***/ }), - -/***/ 969: -/***/ (function(module, __unusedexports, __webpack_require__) { - -var wrappy = __webpack_require__(11) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) - -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) - - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) - -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) - } - f.called = false - return f -} - -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) - } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f -} - - -/***/ }) - -/******/ }); \ No newline at end of file diff --git a/.github/templates-list-validator/index.js b/.github/templates-list-validator/index.js deleted file mode 100644 index 3ee7152de..000000000 --- a/.github/templates-list-validator/index.js +++ /dev/null @@ -1,74 +0,0 @@ -const github = require('@actions/github'); -const core = require('@actions/core'); -const path = require('path'); -const fs = require('fs'); - -async function run() { - try { - const token = process.env.GITHUB_TOKEN || core.getInput('token', { required: true }); - //TODO we should not only check the content of the readme but also the content of the website listing from https://github.com/asyncapi/website/blob/master/content/docs/tooling.md - const templatesListContent = getReadmeContent(); - const officialTemplates = await searchOfficialTemplates(token); - - const missingTemplates = officialTemplates.filter(str => 
!templatesListContent.includes(str)); - - if (missingTemplates.length) core.setFailed( - `The following templates are not in the README.md: ${missingTemplates}. Make sure missing templates are added to the list in the README of this repository and also to the website to the list of offecial tools: https://github.com/asyncapi/website/blob/master/content/docs/tooling.md` - ); - } catch (error) { - core.setFailed(error.message); - } -} - -/** - * Gets the markdown table from the readme of the Generator - * - * @private - * @return {String} - */ -function getReadmeContent() { - //README.md must be read 3levels above ../../../README.md because the action code is run from the bundle of the action, dist folder - const readmeContent = fs.readFileSync(path.resolve(__dirname,'../../../README.md'), 'utf8'); - const startingTag = ''; - const startOfOpeningTagIndex = readmeContent.indexOf(`${startingTag}START`); - const endOfOpeningTagIndex = readmeContent.indexOf(closingTag, startOfOpeningTagIndex); - const endOfClosingTagIndex = readmeContent.indexOf(`${startingTag}END ${closingTag}`); - - if (endOfOpeningTagIndex === -1 || endOfClosingTagIndex === -1) { - core.setFailed(); - throw new Error( - 'Templates list is missing or someone removed markers that indicate the list. Table with list of templates that is in the README should be wrapped in markers like and ' - ); - } - - return readmeContent.slice(endOfOpeningTagIndex + closingTag.length, endOfClosingTagIndex); -} - -/** - * Gets array with list of names of all the official templates - * - * @private - * @param {String} Token to authorize with GitHub - * @return {Array[String]} - */ -async function searchOfficialTemplates(token) { - const client = new github.GitHub(token); - const searchQuery = ` - query { - search(query: "topic:asyncapi topic:generator topic:template user:asyncapi", type: REPOSITORY, first:100) { - repositoryCount - nodes { - ... 
on Repository { - name - } - } - } - } - `; - - const searchResult = await client.graphql(searchQuery); - return searchResult.search.nodes.map(obj => obj.name); -} - -run(); diff --git a/.github/templates-list-validator/package-lock.json b/.github/templates-list-validator/package-lock.json deleted file mode 100644 index 768a0e5df..000000000 --- a/.github/templates-list-validator/package-lock.json +++ /dev/null @@ -1,482 +0,0 @@ -{ - "name": "templates-list-validator", - "requires": true, - "lockfileVersion": 1, - "dependencies": { - "@actions/core": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.1.tgz", - "integrity": "sha512-5ad+U2YGrmmiw6du20AQW5XuWo7UKN2052FjSV7MX+Wfjf8sCqcsZe62NfgHys4QI4/Y+vQvLKYL8jWtA1ZBTA==", - "requires": { - "@actions/http-client": "^2.0.1", - "uuid": "^8.3.2" - }, - "dependencies": { - "@actions/http-client": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", - "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", - "requires": { - "tunnel": "^0.0.6" - } - } - } - }, - "@actions/github": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-2.2.0.tgz", - "integrity": "sha512-9UAZqn8ywdR70n3GwVle4N8ALosQs4z50N7XMXrSTUVOmVpaBC5kE3TRTT7qQdi3OaQV24mjGuJZsHUmhD+ZXw==", - "requires": { - "@actions/http-client": "^1.0.3", - "@octokit/graphql": "^4.3.1", - "@octokit/rest": "^16.43.1" - } - }, - "@actions/http-client": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.8.tgz", - "integrity": "sha512-G4JjJ6f9Hb3Zvejj+ewLLKLf99ZC+9v+yCxoYf9vSyH+WkzPLB2LuUtRMGNkooMqdugGBFStIKXOuvH1W+EctA==", - "requires": { - "tunnel": "0.0.6" - } - }, - "@octokit/auth-token": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.4.1.tgz", - "integrity": "sha512-NB81O5h39KfHYGtgfWr2booRxp2bWOJoqbWwbyUg2hw6h35ArWYlAST5B3XwAkbdcx13yt84hFXyFP5X0QToWA==", - "requires": { - "@octokit/types": "^4.0.1" - } - }, - "@octokit/endpoint": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.1.tgz", - "integrity": "sha512-pOPHaSz57SFT/m3R5P8MUu4wLPszokn5pXcB/pzavLTQf2jbU+6iayTvzaY6/BiotuRS0qyEUkx3QglT4U958A==", - "requires": { - "@octokit/types": "^2.11.1", - "is-plain-object": "^3.0.0", - "universal-user-agent": "^5.0.0" - }, - "dependencies": { - "@octokit/types": { - "version": "2.16.2", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-2.16.2.tgz", - "integrity": "sha512-O75k56TYvJ8WpAakWwYRN8Bgu60KrmX0z1KqFp1kNiFNkgW+JW+9EBKZ+S33PU6SLvbihqd+3drvPxKK68Ee8Q==", - "requires": { - "@types/node": ">= 8" - } - } - } - }, - "@octokit/graphql": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.5.0.tgz", - "integrity": "sha512-StJWfn0M1QfhL3NKBz31e1TdDNZrHLLS57J2hin92SIfzlOVBuUaRkp31AGkGOAFOAVtyEX6ZiZcsjcJDjeb5g==", - "requires": { - "@octokit/request": "^5.3.0", - "@octokit/types": "^4.0.1", - "universal-user-agent": "^5.0.0" - } - }, - "@octokit/plugin-paginate-rest": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-1.1.2.tgz", - "integrity": "sha512-jbsSoi5Q1pj63sC16XIUboklNw+8tL9VOnJsWycWYR78TKss5PVpIPb1TUUcMQ+bBh7cY579cVAWmf5qG+dw+Q==", - "requires": { - "@octokit/types": "^2.0.1" - }, - "dependencies": { - "@octokit/types": { - 
"version": "2.16.2", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-2.16.2.tgz", - "integrity": "sha512-O75k56TYvJ8WpAakWwYRN8Bgu60KrmX0z1KqFp1kNiFNkgW+JW+9EBKZ+S33PU6SLvbihqd+3drvPxKK68Ee8Q==", - "requires": { - "@types/node": ">= 8" - } - } - } - }, - "@octokit/plugin-request-log": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.0.tgz", - "integrity": "sha512-ywoxP68aOT3zHCLgWZgwUJatiENeHE7xJzYjfz8WI0goynp96wETBF+d95b8g/uL4QmS6owPVlaxiz3wyMAzcw==" - }, - "@octokit/plugin-rest-endpoint-methods": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-2.4.0.tgz", - "integrity": "sha512-EZi/AWhtkdfAYi01obpX0DF7U6b1VRr30QNQ5xSFPITMdLSfhcBqjamE3F+sKcxPbD7eZuMHu3Qkk2V+JGxBDQ==", - "requires": { - "@octokit/types": "^2.0.1", - "deprecation": "^2.3.1" - }, - "dependencies": { - "@octokit/types": { - "version": "2.16.2", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-2.16.2.tgz", - "integrity": "sha512-O75k56TYvJ8WpAakWwYRN8Bgu60KrmX0z1KqFp1kNiFNkgW+JW+9EBKZ+S33PU6SLvbihqd+3drvPxKK68Ee8Q==", - "requires": { - "@types/node": ">= 8" - } - } - } - }, - "@octokit/request": { - "version": "5.4.3", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.4.3.tgz", - "integrity": "sha512-RtqMzF3mhqxmWoqVD84x2gdtbqn2inTBU/HPkWf5u0R5r7fBTaLPAcCBgukeI2gjTwD9ChL9Cu0MlTBs7B/tSw==", - "requires": { - "@octokit/endpoint": "^6.0.1", - "@octokit/request-error": "^2.0.0", - "@octokit/types": "^2.11.1", - "deprecation": "^2.0.0", - "is-plain-object": "^3.0.0", - "node-fetch": "^2.3.0", - "once": "^1.4.0", - "universal-user-agent": "^5.0.0" - }, - "dependencies": { - "@octokit/types": { - "version": "2.16.2", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-2.16.2.tgz", - "integrity": "sha512-O75k56TYvJ8WpAakWwYRN8Bgu60KrmX0z1KqFp1kNiFNkgW+JW+9EBKZ+S33PU6SLvbihqd+3drvPxKK68Ee8Q==", - "requires": { - "@types/node": ">= 8" - } - } - } - }, - "@octokit/request-error": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.0.1.tgz", - "integrity": "sha512-5lqBDJ9/TOehK82VvomQ6zFiZjPeSom8fLkFVLuYL3sKiIb5RB8iN/lenLkY7oBmyQcGP7FBMGiIZTO8jufaRQ==", - "requires": { - "@octokit/types": "^4.0.1", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "@octokit/rest": { - "version": "16.43.1", - "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-16.43.1.tgz", - "integrity": "sha512-gfFKwRT/wFxq5qlNjnW2dh+qh74XgTQ2B179UX5K1HYCluioWj8Ndbgqw2PVqa1NnVJkGHp2ovMpVn/DImlmkw==", - "requires": { - "@octokit/auth-token": "^2.4.0", - "@octokit/plugin-paginate-rest": "^1.1.1", - "@octokit/plugin-request-log": "^1.0.0", - "@octokit/plugin-rest-endpoint-methods": "2.4.0", - "@octokit/request": "^5.2.0", - "@octokit/request-error": "^1.0.2", - "atob-lite": "^2.0.0", - "before-after-hook": "^2.0.0", - "btoa-lite": "^1.0.0", - "deprecation": "^2.0.0", - "lodash.get": "^4.4.2", - "lodash.set": "^4.3.2", - "lodash.uniq": "^4.5.0", - "octokit-pagination-methods": "^1.1.0", - "once": "^1.4.0", - "universal-user-agent": "^4.0.0" - }, - "dependencies": { - "@octokit/request-error": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-1.2.1.tgz", - "integrity": "sha512-+6yDyk1EES6WK+l3viRDElw96MvwfJxCt45GvmjDUKWjYIb3PJZQkq3i46TwGwoPD4h8NmTrENmtyA1FwbmhRA==", - "requires": { - "@octokit/types": "^2.0.0", - "deprecation": 
"^2.0.0", - "once": "^1.4.0" - } - }, - "@octokit/types": { - "version": "2.16.2", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-2.16.2.tgz", - "integrity": "sha512-O75k56TYvJ8WpAakWwYRN8Bgu60KrmX0z1KqFp1kNiFNkgW+JW+9EBKZ+S33PU6SLvbihqd+3drvPxKK68Ee8Q==", - "requires": { - "@types/node": ">= 8" - } - }, - "universal-user-agent": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-4.0.1.tgz", - "integrity": "sha512-LnST3ebHwVL2aNe4mejI9IQh2HfZ1RLo8Io2HugSif8ekzD1TlWpHpColOB/eh8JHMLkGH3Akqf040I+4ylNxg==", - "requires": { - "os-name": "^3.1.0" - } - } - } - }, - "@octokit/types": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-4.0.1.tgz", - "integrity": "sha512-Ho6h7w2h9y8RRE8r656hIj1oiSbwbIHJGF5r9G5FOwS2VdDPq8QLGvsG4x6pKHpvyGK7j+43sAc2cJKMiFoIJw==", - "requires": { - "@types/node": ">= 8" - } - }, - "@types/node": { - "version": "14.0.5", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.0.5.tgz", - "integrity": "sha512-90hiq6/VqtQgX8Sp0EzeIsv3r+ellbGj4URKj5j30tLlZvRUpnAe9YbYnjl3pJM93GyXU0tghHhvXHq+5rnCKA==" - }, - "@zeit/ncc": { - "version": "0.22.2", - "resolved": "https://registry.npmjs.org/@zeit/ncc/-/ncc-0.22.2.tgz", - "integrity": "sha512-LGW5RPIwulh+fyKiMTgbIEbdc/hdf/4+U5B1WbT/We0jweHoywDbk9hi+3hNjSzQNShGumP72zgc6PHEUG5syg==", - "dev": true - }, - "atob-lite": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/atob-lite/-/atob-lite-2.0.0.tgz", - "integrity": "sha1-D+9a1G8b16hQLGVyfwNn1e5D1pY=" - }, - "before-after-hook": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.1.0.tgz", - "integrity": "sha512-IWIbu7pMqyw3EAJHzzHbWa85b6oud/yfKYg5rqB5hNE8CeMi3nX+2C2sj0HswfblST86hpVEOAb9x34NZd6P7A==" - }, - "btoa-lite": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/btoa-lite/-/btoa-lite-1.0.0.tgz", - "integrity": "sha1-M3dm2hWAEhD92VbCLpxokaudAzc=" - }, - "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, - "deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "requires": { - "once": "^1.4.0" - } - }, - "execa": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", - "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", - "requires": { - "cross-spawn": "^6.0.0", - "get-stream": "^4.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - } - }, - "get-stream": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", - "integrity": 
"sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", - "requires": { - "pump": "^3.0.0" - } - }, - "is-plain-object": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-3.0.0.tgz", - "integrity": "sha512-tZIpofR+P05k8Aocp7UI/2UTa9lTJSebCXpFFoR9aibpokDj/uXBsJ8luUu0tTVYKkMU6URDUuOfJZ7koewXvg==", - "requires": { - "isobject": "^4.0.0" - } - }, - "is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=" - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" - }, - "isobject": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-4.0.0.tgz", - "integrity": "sha512-S/2fF5wH8SJA/kmwr6HYhK/RI/OkhD84k8ntalo0iJjZikgq1XFvR5M8NPT1x5F7fBwCG3qHfnzeP/Vh/ZxCUA==" - }, - "lodash.get": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", - "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=" - }, - "lodash.set": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/lodash.set/-/lodash.set-4.3.2.tgz", - "integrity": "sha1-2HV7HagH3eJIFrDWqEvqGnYjCyM=" - }, - "lodash.uniq": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", - "integrity": "sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=" - }, - "macos-release": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/macos-release/-/macos-release-2.3.0.tgz", - "integrity": "sha512-OHhSbtcviqMPt7yfw5ef5aghS2jzFVKEFyCJndQt2YpSQ9qRVSEv2axSJI1paVThEu+FFGs584h/1YhxjVqajA==" - }, - "nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==" - }, - "node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", - "requires": { - "whatwg-url": "^5.0.0" - } - }, - "npm-run-path": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", - "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", - "requires": { - "path-key": "^2.0.0" - } - }, - "octokit-pagination-methods": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/octokit-pagination-methods/-/octokit-pagination-methods-1.1.0.tgz", - "integrity": "sha512-fZ4qZdQ2nxJvtcasX7Ghl+WlWS/d9IgnBIwFZXVNNZUmzpno91SX5bc5vuxiuKoCtK78XxGGNuSCrDC7xYB3OQ==" - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "requires": { - "wrappy": "1" - } - }, - "os-name": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/os-name/-/os-name-3.1.0.tgz", - "integrity": "sha512-h8L+8aNjNcMpo/mAIBPn5PXCM16iyPGjHNWo6U1YO8sJTMHtEtyczI6QJnLoplswm6goopQkqc7OAnjhWcugVg==", - "requires": { - "macos-release": "^2.2.0", - "windows-release": "^3.1.0" - } - }, - "p-finally": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", - "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=" - }, - "path-key": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=" - }, - "pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "requires": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - }, - "shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "requires": { - "shebang-regex": "^1.0.0" - } - }, - "shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=" - }, - "signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==" - }, - "strip-eof": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", - "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=" - }, - "tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=" - }, - "tunnel": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" - }, - "universal-user-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-5.0.0.tgz", - "integrity": "sha512-B5TPtzZleXyPrUMKCpEHFmVhMN6EhmJYjG5PQna9s7mXeSqGTLap4OpqLl5FCEFUI3UBmllkETwKf/db66Y54Q==", - "requires": { - "os-name": "^3.1.0" - } - }, - "uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" - }, - "webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=" - }, - "whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", - "requires": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "requires": { - "isexe": "^2.0.0" - } - }, - "windows-release": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/windows-release/-/windows-release-3.3.0.tgz", - "integrity": "sha512-2HetyTg1Y+R+rUgrKeUEhAG/ZuOmTrI1NBb3ZyAGQMYmOJjBBPe4MTodghRkmLJZHwkuPi02anbeGP+Zf401LQ==", - "requires": { - "execa": "^1.0.0" - } - }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" - } - } -} diff --git 
a/.github/templates-list-validator/package.json b/.github/templates-list-validator/package.json deleted file mode 100644 index 87abf7d6d..000000000 --- a/.github/templates-list-validator/package.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "name": "templates-list-validator", - "description": "Simple action to validate if official list of templates has changed", - "main": "dist/index.js", - "scripts": { - "start": "node index.js", - "package": "ncc build index.js -o dist" - }, - "dependencies": { - "@actions/core": "^1.9.1", - "@actions/github": "^2.2.0" - }, - "devDependencies": { - "@zeit/ncc": "^0.22.2" - } -} diff --git a/.github/workflows/add-good-first-issue-labels.yml b/.github/workflows/add-good-first-issue-labels.yml index 202345269..2ae3a056a 100644 --- a/.github/workflows/add-good-first-issue-labels.yml +++ b/.github/workflows/add-good-first-issue-labels.yml @@ -4,36 +4,37 @@ # Purpose of this workflow is to enable anyone to label issue with 'Good First Issue' and 'area/*' with a single command. name: Add 'Good First Issue' and 'area/*' labels # if proper comment added -on: - issue_comment: - types: - - created +on: + issue_comment: + types: + - created jobs: add-labels: - if: ${{!github.event.issue.pull_request && github.event.issue.state != 'closed' && github.actor != 'asyncapi-bot'}} + if: ${{(!github.event.issue.pull_request && github.event.issue.state != 'closed' && github.actor != 'asyncapi-bot') && (contains(github.event.comment.body, '/good-first-issue') || contains(github.event.comment.body, '/gfi' ))}} runs-on: ubuntu-latest steps: - name: Add label - if: contains(github.event.comment.body, '/good-first-issue') || contains(github.event.comment.body, '/gfi' ) - uses: actions/github-script@v5 + uses: actions/github-script@v6 with: github-token: ${{ secrets.GH_TOKEN }} script: | const areas = ['javascript', 'typescript', 'java' , 'go', 'docs', 'ci-cd', 'design']; - const values = context.payload.comment.body.trim().split(" "); - switch(values[1]){ + const words = context.payload.comment.body.trim().split(" "); + const areaIndex = words.findIndex((word)=> word === '/gfi' || word === '/good-first-issue') + 1 + let area = words[areaIndex]; + switch(area){ case 'ts': - values[1] = 'typescript'; + area = 'typescript'; break; case 'js': - values[1] = 'javascript'; + area = 'javascript'; break; case 'markdown': - values[1] = 'docs'; + area = 'docs'; break; } - if(values.length != 2 || !areas.includes(values[1])){ + if(!areas.includes(area)){ const message = `Hey @${context.payload.sender.login}, your message doesn't follow the requirements, you can try \`/help\`.` await github.rest.issues.createComment({ @@ -44,14 +45,14 @@ jobs: }) } else { - //remove complexity and areas if there are any before adding new labels; + // remove area if there is any before adding new labels. const currentLabels = (await github.rest.issues.listLabelsOnIssue({ issue_number: context.issue.number, owner: context.repo.owner, repo: context.repo.repo, })).data.map(label => label.name); - const shouldBeRemoved = currentLabels.filter(label => (label.startsWith('area/') && !label.endsWith(values[1]))); + const shouldBeRemoved = currentLabels.filter(label => (label.startsWith('area/') && !label.endsWith(area))); shouldBeRemoved.forEach(label => { github.rest.issues.deleteLabel({ owner: context.repo.owner, @@ -60,11 +61,11 @@ jobs: }); }); - //add new labels + // Add new labels. 
github.rest.issues.addLabels({ issue_number: context.issue.number, owner: context.repo.owner, repo: context.repo.repo, - labels: ['good first issue', `area/${values[1]}`] + labels: ['good first issue', `area/${area}`] }); } diff --git a/.github/workflows/automerge-for-humans-add-ready-to-merge-or-do-not-merge-label.yml b/.github/workflows/automerge-for-humans-add-ready-to-merge-or-do-not-merge-label.yml index 79c8079ec..02d71a796 100644 --- a/.github/workflows/automerge-for-humans-add-ready-to-merge-or-do-not-merge-label.yml +++ b/.github/workflows/automerge-for-humans-add-ready-to-merge-or-do-not-merge-label.yml @@ -26,7 +26,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Add ready-to-merge label - uses: actions/github-script@v5 + uses: actions/github-script@v6 with: github-token: ${{ secrets.GH_TOKEN }} script: | @@ -59,7 +59,9 @@ jobs: body: `Hello, @${{ github.actor }}! 👋🏼 This PR is not up to date with the base branch and can't be merged. Please update your branch manually with the latest version of the base branch. - PRO-TIP: Add a comment to your PR with the text: \`/au\` or \`/autoupdate\` and our bot will take care of updating the branch in the future. The only requirement for this to work is to enable [Allow edits from maintainers](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork) option in your PR. + PRO-TIP: To request an update from the upstream branch, simply comment \`/u\` or \`/update\` and our bot will handle the update operation promptly. + + The only requirement for this to work is to enable [Allow edits from maintainers](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork) option in your PR. Also the update will not work if your fork is located in an organization, not under your personal profile. 
Thanks 😄` }) } @@ -76,7 +78,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Add do-not-merge label - uses: actions/github-script@v5 + uses: actions/github-script@v6 with: github-token: ${{ secrets.GH_TOKEN }} script: | @@ -98,7 +100,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Add autoupdate label - uses: actions/github-script@v5 + uses: actions/github-script@v6 with: github-token: ${{ secrets.GH_TOKEN }} script: | diff --git a/.github/workflows/automerge-for-humans-merging.yml b/.github/workflows/automerge-for-humans-merging.yml index 4bce61da2..9ba0be90b 100644 --- a/.github/workflows/automerge-for-humans-merging.yml +++ b/.github/workflows/automerge-for-humans-merging.yml @@ -43,7 +43,7 @@ jobs: | join("\n")' multiline: true - name: Automerge PR - uses: pascalgn/automerge-action@v0.14.3 + uses: pascalgn/automerge-action@22948e0bc22f0aa673800da838595a3e7347e584 #v0.15.6 https://github.com/pascalgn/automerge-action/releases/tag/v0.15.6 env: GITHUB_TOKEN: "${{ secrets.GH_TOKEN }}" MERGE_LABELS: "!do-not-merge,ready-to-merge" diff --git a/.github/workflows/automerge-for-humans-remove-ready-to-merge-label-on-edit.yml b/.github/workflows/automerge-for-humans-remove-ready-to-merge-label-on-edit.yml index b8fc9904d..00e7f9935 100644 --- a/.github/workflows/automerge-for-humans-remove-ready-to-merge-label-on-edit.yml +++ b/.github/workflows/automerge-for-humans-remove-ready-to-merge-label-on-edit.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Remove label - uses: actions/github-script@v5 + uses: actions/github-script@v6 with: github-token: ${{ secrets.GH_TOKEN }} script: | diff --git a/.github/workflows/automerge-orphans.yml b/.github/workflows/automerge-orphans.yml index 20322ecb7..a17768535 100644 --- a/.github/workflows/automerge-orphans.yml +++ b/.github/workflows/automerge-orphans.yml @@ -13,8 +13,10 @@ jobs: name: Find orphans and notify runs-on: ubuntu-latest steps: + - name: Checkout repository + uses: actions/checkout@v3 - name: Get list of orphans - uses: actions/github-script@v3 + uses: actions/github-script@v6 id: orphans with: github-token: ${{ secrets.GITHUB_TOKEN }} @@ -50,10 +52,10 @@ jobs: } - if: steps.orphans.outputs.found == 'true' name: Convert markdown to slack markdown - uses: LoveToKnow/slackify-markdown-action@v1.0.0 + uses: asyncapi/.github/.github/actions/slackify-markdown@master id: issuemarkdown with: - text: "-> [${{steps.orphans.outputs.title}}](${{steps.orphans.outputs.url}})" + markdown: "-> [${{steps.orphans.outputs.title}}](${{steps.orphans.outputs.url}})" - if: steps.orphans.outputs.found == 'true' name: Send info about orphan to slack uses: rtCamp/action-slack-notify@v2 diff --git a/.github/workflows/automerge.yml b/.github/workflows/automerge.yml index 052a19c32..116b80652 100644 --- a/.github/workflows/automerge.yml +++ b/.github/workflows/automerge.yml @@ -1,7 +1,7 @@ # This action is centrally managed in https://github.com/asyncapi/.github/ # Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in above mentioned repo. 
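For reference, a minimal sketch of the comment parsing introduced in the add-good-first-issue-labels.yml change above, assuming it were extracted from actions/github-script@v6 into a standalone Node.js helper (the resolveArea name and module form are illustrative, not part of the workflow):

const AREAS = ['javascript', 'typescript', 'java', 'go', 'docs', 'ci-cd', 'design'];

function resolveArea(commentBody) {
  const words = commentBody.trim().split(' ');
  // The requested area is the word right after the trigger command.
  const areaIndex = words.findIndex((word) => word === '/gfi' || word === '/good-first-issue') + 1;
  let area = words[areaIndex];
  // Aliases accepted by the workflow.
  if (area === 'ts') area = 'typescript';
  if (area === 'js') area = 'javascript';
  if (area === 'markdown') area = 'docs';
  // Anything else makes the bot reply with the "try /help" hint instead of labeling.
  return AREAS.includes(area) ? area : null;
}

// e.g. resolveArea('please label this /gfi js') === 'javascript'

Unlike the old values[1] lookup, this resolves the area even when the command is not the first word of the comment.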
-name: Automerge release bump PR +name: Automerge PRs from bots on: pull_request_target: @@ -19,12 +19,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Autoapproving - uses: hmarr/auto-approve-action@v2 + uses: hmarr/auto-approve-action@44888193675f29a83e04faf4002fa8c0b537b1e4 # v3.2.1 is used https://github.com/hmarr/auto-approve-action/releases/tag/v3.2.1 with: github-token: "${{ secrets.GH_TOKEN_BOT_EVE }}" - name: Label autoapproved - uses: actions/github-script@v5 + uses: actions/github-script@v6 with: github-token: ${{ secrets.GH_TOKEN }} script: | @@ -41,11 +41,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Automerging - uses: pascalgn/automerge-action@v0.13.0 + uses: pascalgn/automerge-action@22948e0bc22f0aa673800da838595a3e7347e584 #v0.15.6 https://github.com/pascalgn/automerge-action/releases/tag/v0.15.6 env: GITHUB_TOKEN: "${{ secrets.GH_TOKEN }}" GITHUB_LOGIN: asyncapi-bot - MERGE_LABELS: "" + MERGE_LABELS: "!do-not-merge" MERGE_METHOD: "squash" MERGE_COMMIT_MESSAGE: "{pullRequest.title} (#{pullRequest.number})" MERGE_RETRIES: "20" diff --git a/.github/workflows/autoupdate.yml b/.github/workflows/autoupdate.yml index ad8e0198f..eeb77a47b 100644 --- a/.github/workflows/autoupdate.yml +++ b/.github/workflows/autoupdate.yml @@ -1,34 +1,34 @@ -# This action is centrally managed in https://github.com/asyncapi/.github/ -# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in above mentioned repo - -# This workflow is designed to work with: -# - autoapprove and automerge workflows for dependabot and asyncapibot. -# - special release branches that we from time to time create in upstream repos. If we open up PRs for them from the very beginning of the release, the release branch will constantly update with new things from the destination branch they are opened against - -# It uses GitHub Action that auto-updates pull requests branches, whenever changes are pushed to their destination branch. -# Autoupdating to latest destination branch works only in the context of upstream repo and not forks - -name: autoupdate - -on: - push: - branches-ignore: - - 'version-bump/**' - - 'dependabot/**' - - 'bot/**' - - 'all-contributors/**' - -jobs: - autoupdate-for-bot: - if: startsWith(github.repository, 'asyncapi/') - name: Autoupdate autoapproved PR created in the upstream - runs-on: ubuntu-latest - steps: - - name: Autoupdating - uses: docker://chinthakagodawita/autoupdate-action:v1 - env: - GITHUB_TOKEN: '${{ secrets.GH_TOKEN_BOT_EVE }}' - PR_FILTER: "labelled" - PR_LABELS: "autoupdate" - PR_READY_STATE: "ready_for_review" - MERGE_CONFLICT_ACTION: "ignore" +# This action is centrally managed in https://github.com/asyncapi/.github/ +# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in above mentioned repo + +# This workflow is designed to work with: +# - autoapprove and automerge workflows for dependabot and asyncapibot. +# - special release branches that we from time to time create in upstream repos. If we open up PRs for them from the very beginning of the release, the release branch will constantly update with new things from the destination branch they are opened against + +# It uses GitHub Action that auto-updates pull requests branches, whenever changes are pushed to their destination branch. 
+# Autoupdating to latest destination branch works only in the context of upstream repo and not forks + +name: autoupdate + +on: + push: + branches-ignore: + - 'version-bump/**' + - 'dependabot/**' + - 'bot/**' + - 'all-contributors/**' + +jobs: + autoupdate-for-bot: + if: startsWith(github.repository, 'asyncapi/') + name: Autoupdate autoapproved PR created in the upstream + runs-on: ubuntu-latest + steps: + - name: Autoupdating + uses: docker://chinthakagodawita/autoupdate-action:v1 + env: + GITHUB_TOKEN: '${{ secrets.GH_TOKEN_BOT_EVE }}' + PR_FILTER: "labelled" + PR_LABELS: "autoupdate" + PR_READY_STATE: "ready_for_review" + MERGE_CONFLICT_ACTION: "ignore" diff --git a/.github/workflows/bounty-program-commands.yml b/.github/workflows/bounty-program-commands.yml new file mode 100644 index 000000000..645e0c90d --- /dev/null +++ b/.github/workflows/bounty-program-commands.yml @@ -0,0 +1,126 @@ +# This workflow is centrally managed at https://github.com/asyncapi/.github/ +# Don't make changes to this file in this repository, as they will be overwritten with +# changes made to the same file in the abovementioned repository. + +# The purpose of this workflow is to allow Bounty Team members +# (https://github.com/orgs/asyncapi/teams/bounty_team) to issue commands to the +# organization's global AsyncAPI bot related to the Bounty Program, while at the +# same time preventing unauthorized users from misusing them. + +name: Bounty Program commands + +on: + issue_comment: + types: + - created + +env: + BOUNTY_PROGRAM_LABELS_JSON: | + [ + {"name": "bounty", "color": "0e8a16", "description": "Participation in the Bounty Program"} + ] + +jobs: + guard-against-unauthorized-use: + if: > + github.actor != ('aeworxet' || 'thulieblack') && + ( + startsWith(github.event.comment.body, '/bounty' ) + ) + + runs-on: ubuntu-latest + + steps: + - name: ❌ @${{github.actor}} made an unauthorized attempt to use a Bounty Program's command + uses: actions/github-script@v6 + + with: + github-token: ${{ secrets.GH_TOKEN }} + script: | + const commentText = `❌ @${{github.actor}} is not authorized to use the Bounty Program's commands. 
+ These commands can only be used by members of the [Bounty Team](https://github.com/orgs/asyncapi/teams/bounty_team).`; + + console.log(`❌ @${{github.actor}} made an unauthorized attempt to use a Bounty Program's command.`); + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: commentText + }) + + add-label-bounty: + if: > + github.actor == ('aeworxet' || 'thulieblack') && + ( + startsWith(github.event.comment.body, '/bounty' ) + ) + + runs-on: ubuntu-latest + + steps: + - name: Add label `bounty` + uses: actions/github-script@v6 + + with: + github-token: ${{ secrets.GH_TOKEN }} + script: | + const BOUNTY_PROGRAM_LABELS = JSON.parse(process.env.BOUNTY_PROGRAM_LABELS_JSON); + let LIST_OF_LABELS_FOR_REPO = await github.rest.issues.listLabelsForRepo({ + owner: context.repo.owner, + repo: context.repo.repo, + }); + + LIST_OF_LABELS_FOR_REPO = LIST_OF_LABELS_FOR_REPO.data.map(key => key.name); + + if (!LIST_OF_LABELS_FOR_REPO.includes(BOUNTY_PROGRAM_LABELS[0].name)) { + await github.rest.issues.createLabel({ + owner: context.repo.owner, + repo: context.repo.repo, + name: BOUNTY_PROGRAM_LABELS[0].name, + color: BOUNTY_PROGRAM_LABELS[0].color, + description: BOUNTY_PROGRAM_LABELS[0].description + }); + } + + console.log('Adding label `bounty`...'); + github.rest.issues.addLabels({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + labels: [BOUNTY_PROGRAM_LABELS[0].name] + }) + + remove-label-bounty: + if: > + github.actor == ('aeworxet' || 'thulieblack') && + ( + startsWith(github.event.comment.body, '/unbounty' ) + ) + + runs-on: ubuntu-latest + + steps: + - name: Remove label `bounty` + uses: actions/github-script@v6 + + with: + github-token: ${{ secrets.GH_TOKEN }} + script: | + const BOUNTY_PROGRAM_LABELS = JSON.parse(process.env.BOUNTY_PROGRAM_LABELS_JSON); + let LIST_OF_LABELS_FOR_ISSUE = await github.rest.issues.listLabelsOnIssue({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + }); + + LIST_OF_LABELS_FOR_ISSUE = LIST_OF_LABELS_FOR_ISSUE.data.map(key => key.name); + + if (LIST_OF_LABELS_FOR_ISSUE.includes(BOUNTY_PROGRAM_LABELS[0].name)) { + console.log('Removing label `bounty`...'); + github.rest.issues.removeLabel({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + name: [BOUNTY_PROGRAM_LABELS[0].name] + }) + } diff --git a/.github/workflows/bump.yml b/.github/workflows/bump.yml index 68daa7c0c..77b3a9793 100644 --- a/.github/workflows/bump.yml +++ b/.github/workflows/bump.yml @@ -1,10 +1,4 @@ -# This action is centrally managed in https://github.com/asyncapi/.github/ -# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in above mentioned repo - -# Purpose of this action is to update npm package in libraries that use it. It is like dependabot for asyncapi npm modules only. -# It runs in a repo after merge of release commit and searches for other packages that use released package. 
Every found package gets updated with lates version - -name: Bump package version in dependent repos - if Node project +name: Bump package version in dependent repos on: # It cannot run on release event as when release is created then version is not yet bumped in package.json @@ -20,15 +14,13 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repo - uses: actions/checkout@v2 - - name: Check if Node.js project and has package.json - id: packagejson - run: test -e ./package.json && echo "::set-output name=exists::true" || echo "::set-output name=exists::false" - - if: steps.packagejson.outputs.exists == 'true' - name: Bumping latest version of this package in other repositories - uses: derberg/npm-dependency-manager-for-your-github-org@v4 + uses: actions/checkout@v3 + - name: Bumping latest version of this package in other repositories + uses: derberg/npm-dependency-manager-for-your-github-org@1eafd3bf3974f21d395c1abac855cb04b295d570 # using v6.-.- https://github.com/derberg/npm-dependency-manager-for-your-github-org/releases/tag/v6 with: github_token: ${{ secrets.GH_TOKEN }} committer_username: asyncapi-bot committer_email: info@asyncapi.io - repos_to_ignore: html-template # this is temporary until react component releases 1.0, then it can be removed + repos_to_ignore: spec,bindings,saunter,server-api + packagejson_path: ./apps/generator + custom_id: "dependency update from asyncapi bot" diff --git a/.github/workflows/help-command.yml b/.github/workflows/help-command.yml index 03f891eba..3f4dcbc4c 100644 --- a/.github/workflows/help-command.yml +++ b/.github/workflows/help-command.yml @@ -13,32 +13,50 @@ jobs: if: ${{ github.event.issue.pull_request && contains(github.event.comment.body, '/help') && github.actor != 'asyncapi-bot' }} runs-on: ubuntu-latest steps: - - uses: actions-ecosystem/action-create-comment@v1 + - name: Add comment to PR + uses: actions/github-script@v6 with: - github_token: ${{ secrets.GH_TOKEN }} - body: | - Hello, @${{ github.actor }}! 👋🏼 + github-token: ${{ secrets.GH_TOKEN }} + script: | + //Yes to add comment to PR the same endpoint is use that we use to create a comment in issue + //For more details http://developer.github.com/v3/issues/comments/ + //Also proved by this action https://github.com/actions-ecosystem/action-create-comment/blob/main/src/main.ts + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: `Hello, @${{ github.actor }}! 👋🏼 + + I'm 🧞🧞🧞 Genie 🧞🧞🧞 from the magic lamp. Looks like somebody needs a hand! + + At the moment the following comments are supported in pull requests: + + - \`/please-take-a-look\` or \`/ptal\` - This comment will add a comment to the PR asking for attention from the reviewrs who have not reviewed the PR yet. + - \`/ready-to-merge\` or \`/rtm\` - This comment will trigger automerge of PR in case all required checks are green, approvals in place and do-not-merge label is not added + - \`/do-not-merge\` or \`/dnm\` - This comment will block automerging even if all conditions are met and ready-to-merge label is added + - \`/autoupdate\` or \`/au\` - This comment will add \`autoupdate\` label to the PR and keeps your PR up-to-date to the target branch's future changes. Unless there is a merge conflict or it is a draft PR. (Currently only works for upstream branches.) + - \`/update\` or \`/u\` - This comment will update the PR with the latest changes from the target branch. Unless there is a merge conflict or it is a draft PR. 
NOTE: this only updates the PR once, so if you need to update again, you need to call the command again.` + }) - I'm Genie from the magic lamp. Looks like somebody needs a hand! 🆘 - - At the moment the following comments are supported in pull requests: - - - `/ready-to-merge` or `/rtm` - This comment will trigger automerge of PR in case all required checks are green, approvals in place and do-not-merge label is not added - - `/do-not-merge` or `/dnm` - This comment will block automerging even if all conditions are met and ready-to-merge label is added - - `/autoupdate` or `/au` - This comment will add `autoupdate` label to the PR and keeps your PR up-to-date to the target branch's future changes. Unless there is a merge conflict or it is a draft PR. create_help_comment_issue: if: ${{ !github.event.issue.pull_request && contains(github.event.comment.body, '/help') && github.actor != 'asyncapi-bot' }} runs-on: ubuntu-latest steps: - - uses: actions-ecosystem/action-create-comment@v1 + - name: Add comment to Issue + uses: actions/github-script@v6 with: - github_token: ${{ secrets.GH_TOKEN }} - body: | - Hello, @${{ github.actor }}! 👋🏼 - - I'm Genie from the magic lamp. Looks like somebody needs a hand! 🆘 - - At the moment the following comments are supported in issues: - - - `/good-first-issue {js | ts | java | go | docs | design | ci-cd} ` or `/gfi {js | ts | java | go | docs | design | ci-cd} ` - label an issue as a `good first issue`. - example: `/gfi js` or `/good-first-issue ci-cd` + github-token: ${{ secrets.GH_TOKEN }} + script: | + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: `Hello, @${{ github.actor }}! 👋🏼 + + I'm 🧞🧞🧞 Genie 🧞🧞🧞 from the magic lamp. Looks like somebody needs a hand! + + At the moment the following comments are supported in issues: + + - \`/good-first-issue {js | ts | java | go | docs | design | ci-cd}\` or \`/gfi {js | ts | java | go | docs | design | ci-cd}\` - label an issue as a \`good first issue\`. + example: \`/gfi js\` or \`/good-first-issue ci-cd\`` + }) \ No newline at end of file diff --git a/.github/workflows/if-nodejs-pr-testing.yml b/.github/workflows/if-nodejs-pr-testing.yml index 1dcccd320..462e61316 100644 --- a/.github/workflows/if-nodejs-pr-testing.yml +++ b/.github/workflows/if-nodejs-pr-testing.yml @@ -14,48 +14,68 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-latest, macos-latest, windows-latest] + # Using macos-13 instead of latest (macos-14) due to an issue with Puppeteer and such runner. 
+ # See: https://github.com/puppeteer/puppeteer/issues/12327 and https://github.com/asyncapi/parser-js/issues/1001 + os: [ubuntu-latest, macos-13, windows-latest] steps: - - if: "github.event.pull_request.draft == false &&!((github.actor == 'asyncapi-bot' && startsWith(github.event.pull_request.title, 'ci: update global workflows')) || (github.actor == 'asyncapi-bot' && startsWith(github.event.pull_request.title, 'chore(release):')) || (github.actor == 'allcontributors' && startsWith(github.event.pull_request.title, 'docs: add')))" + - if: > + !github.event.pull_request.draft && !( + (github.actor == 'asyncapi-bot' && ( + startsWith(github.event.pull_request.title, 'ci: update of files from global .github repo') || + startsWith(github.event.pull_request.title, 'chore(release):') + )) || + (github.actor == 'asyncapi-bot-eve' && ( + startsWith(github.event.pull_request.title, 'ci: update of files from global .github repo') || + startsWith(github.event.pull_request.title, 'chore(release):') + )) || + (github.actor == 'allcontributors[bot]' && + startsWith(github.event.pull_request.title, 'docs: add') + ) + ) id: should_run name: Should Run - run: echo "::set-output name=shouldrun::true" + run: echo "shouldrun=true" >> $GITHUB_OUTPUT + shell: bash - if: steps.should_run.outputs.shouldrun == 'true' name: Set git to use LF #to once and for all finish neverending fight between Unix and Windows run: | git config --global core.autocrlf false git config --global core.eol lf + shell: bash - if: steps.should_run.outputs.shouldrun == 'true' name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - if: steps.should_run.outputs.shouldrun == 'true' name: Check if Node.js project and has package.json id: packagejson - run: test -e ./package.json && echo "::set-output name=exists::true" || echo "::set-output name=exists::false" + run: test -e ./package.json && echo "exists=true" >> $GITHUB_OUTPUT || echo "exists=false" >> $GITHUB_OUTPUT shell: bash + - if: steps.packagejson.outputs.exists == 'true' + name: Check package-lock version + uses: asyncapi/.github/.github/actions/get-node-version-from-package-lock@master + id: lockversion - if: steps.packagejson.outputs.exists == 'true' name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v4 with: - node-version: 14 - cache: 'npm' - cache-dependency-path: '**/package-lock.json' + node-version: "${{ steps.lockversion.outputs.version }}" + - if: steps.lockversion.outputs.version == '18' && matrix.os == 'windows-latest' + #npm cli 10 is buggy because of some cache issue + name: Install npm cli 8 + shell: bash + run: npm install -g npm@8.19.4 - if: steps.packagejson.outputs.exists == 'true' name: Install dependencies - id: first-installation - run: npm install --loglevel verbose - continue-on-error: true - - if: steps.first-installation.outputs.status == 'failure' && steps.packagejson.outputs.exists == 'true' - name: Clear NPM cache and install deps again - run: | - npm cache clean --force - npm install --loglevel verbose + shell: bash + run: npm ci - if: steps.packagejson.outputs.exists == 'true' name: Test - run: npm test - - if: steps.packagejson.outputs.exists == 'true' + run: npm test --if-present + - if: steps.packagejson.outputs.exists == 'true' && matrix.os == 'ubuntu-latest' + #linting should run just one and not on all possible operating systems name: Run linter - run: npm run lint + run: npm run lint --if-present - if: steps.packagejson.outputs.exists == 'true' name: Run release assets generation to make 
sure PR does not break it - run: npm run generate:assets + shell: bash + run: npm run generate:assets --if-present diff --git a/.github/workflows/if-nodejs-release.yml b/.github/workflows/if-nodejs-release.yml deleted file mode 100644 index 73bffb04a..000000000 --- a/.github/workflows/if-nodejs-release.yml +++ /dev/null @@ -1,113 +0,0 @@ -# This action is centrally managed in https://github.com/asyncapi/.github/ -# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in above mentioned repo - -# It does magic only if there is package.json file in the root of the project -name: Release - if Node project - -on: - push: - branches: - - master - # below lines are not enough to have release supported for these branches - # make sure configuration of `semantic-release` package mentions these branches - - next-spec - - next-major - - next-major-spec - - beta - - alpha - -jobs: - - test-nodejs: - # We just check the message of first commit as there is always just one commit because we squash into one before merging - # "commits" contains array of objects where one of the properties is commit "message" - # Release workflow will be skipped if release conventional commits are not used - if: | - startsWith( github.repository, 'asyncapi/' ) && - (startsWith( github.event.commits[0].message , 'fix:' ) || - startsWith( github.event.commits[0].message, 'fix!:' ) || - startsWith( github.event.commits[0].message, 'feat:' ) || - startsWith( github.event.commits[0].message, 'feat!:' )) - name: Test NodeJS release on ${{ matrix.os }} - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: [ubuntu-latest, macos-latest, windows-latest] - steps: - - name: Set git to use LF #to once and for all finish neverending fight between Unix and Windows - run: | - git config --global core.autocrlf false - git config --global core.eol lf - - name: Checkout repository - uses: actions/checkout@v2 - - name: Check if Node.js project and has package.json - id: packagejson - run: test -e ./package.json && echo "::set-output name=exists::true" || echo "::set-output name=exists::false" - shell: bash - - if: steps.packagejson.outputs.exists == 'true' - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: 14 - cache: 'npm' - cache-dependency-path: '**/package-lock.json' - - if: steps.packagejson.outputs.exists == 'true' - name: Install dependencies - run: npm install - - if: steps.packagejson.outputs.exists == 'true' - name: Run test - run: npm test - - if: failure() # Only, on failure, send a message on the 94_bot-failing-ci slack channel - name: Report workflow run status to Slack - uses: 8398a7/action-slack@v3 - with: - status: ${{ job.status }} - fields: repo,action,workflow - text: 'Release workflow failed in testing job' - env: - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_CI_FAIL_NOTIFY }} - - release: - needs: [test-nodejs] - name: Publish to any of NPM, Github, and Docker Hub - runs-on: ubuntu-latest - steps: - - name: Set git to use LF #to once and for all finish neverending fight between Unix and Windows - run: | - git config --global core.autocrlf false - git config --global core.eol lf - - name: Checkout repository - uses: actions/checkout@v2 - - name: Check if Node.js project and has package.json - id: packagejson - run: test -e ./package.json && echo "::set-output name=exists::true" || echo "::set-output name=exists::false" - - if: steps.packagejson.outputs.exists == 'true' - name: Setup Node.js - uses: actions/setup-node@v1 - with: - node-version: 14 
- - if: steps.packagejson.outputs.exists == 'true' - name: Install dependencies - run: npm install - - if: steps.packagejson.outputs.exists == 'true' - name: Publish to any of NPM, Github, and Docker Hub - id: release - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - NPM_TOKEN: ${{ secrets.NPM_TOKEN }} - DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} - DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} - GIT_AUTHOR_NAME: asyncapi-bot - GIT_AUTHOR_EMAIL: info@asyncapi.io - GIT_COMMITTER_NAME: asyncapi-bot - GIT_COMMITTER_EMAIL: info@asyncapi.io - run: npm run release - - if: failure() # Only, on failure, send a message on the 94_bot-failing-ci slack channel - name: Report workflow run status to Slack - uses: 8398a7/action-slack@v3 - with: - status: ${{ job.status }} - fields: repo,action,workflow - text: 'Release workflow failed in release job' - env: - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_CI_FAIL_NOTIFY }} \ No newline at end of file diff --git a/.github/workflows/if-nodejs-version-bump.yml b/.github/workflows/if-nodejs-version-bump.yml deleted file mode 100644 index 5e7aa14ac..000000000 --- a/.github/workflows/if-nodejs-version-bump.yml +++ /dev/null @@ -1,65 +0,0 @@ -# This action is centrally managed in https://github.com/asyncapi/.github/ -# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in above mentioned repo - -# It does magic only if there is package.json file in the root of the project -name: Version bump - if Node.js project - -on: - release: - types: - - published - -jobs: - version_bump: - name: Generate assets and bump NodeJS - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@v2 - with: - # target branch of release. More info https://docs.github.com/en/rest/reference/repos#releases - # in case release is created from release branch then we need to checkout from given branch - # if @semantic-release/github is used to publish, the minimum version is 7.2.0 for proper working - ref: ${{ github.event.release.target_commitish }} - - name: Check if Node.js project and has package.json - id: packagejson - run: test -e ./package.json && echo "::set-output name=exists::true" || echo "::set-output name=exists::false" - - if: steps.packagejson.outputs.exists == 'true' - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: 14 - cache: 'npm' - cache-dependency-path: '**/package-lock.json' - - if: steps.packagejson.outputs.exists == 'true' - name: Install dependencies - run: npm install - - if: steps.packagejson.outputs.exists == 'true' - name: Assets generation - run: npm run generate:assets - - if: steps.packagejson.outputs.exists == 'true' - name: Bump version in package.json - # There is no need to substract "v" from the tag as version script handles it - # When adding "bump:version" script in package.json, make sure no tags are added by default (--no-git-tag-version) as they are already added by release workflow - # When adding "bump:version" script in package.json, make sure --allow-same-version is set in case someone forgot and updated package.json manually and we want to avoide this action to fail and raise confusion - run: VERSION=${{github.event.release.tag_name}} npm run bump:version - - if: steps.packagejson.outputs.exists == 'true' - name: Create Pull Request with updated asset files including package.json - uses: peter-evans/create-pull-request@v3 - with: - token: ${{ secrets.GH_TOKEN }} - commit-message: 'chore(release): ${{github.event.release.tag_name}}' - 
committer: asyncapi-bot - author: asyncapi-bot - title: 'chore(release): ${{github.event.release.tag_name}}' - body: 'Version bump in package.json for release [${{github.event.release.tag_name}}](${{github.event.release.html_url}})' - branch: version-bump/${{github.event.release.tag_name}} - - if: failure() # Only, on failure, send a message on the 94_bot-failing-ci slack channel - name: Report workflow run status to Slack - uses: 8398a7/action-slack@v3 - with: - status: ${{ job.status }} - fields: repo,action,workflow - text: 'Unable to bump the version in package.json after the release' - env: - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_CI_FAIL_NOTIFY }} \ No newline at end of file diff --git a/.github/workflows/issues-prs-notifications.yml b/.github/workflows/issues-prs-notifications.yml index ca665404b..b8b20c6ba 100644 --- a/.github/workflows/issues-prs-notifications.yml +++ b/.github/workflows/issues-prs-notifications.yml @@ -21,10 +21,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Convert markdown to slack markdown for issue - uses: LoveToKnow/slackify-markdown-action@v1.0.0 + uses: asyncapi/.github/.github/actions/slackify-markdown@master id: issuemarkdown with: - text: "[${{github.event.issue.title}}](${{github.event.issue.html_url}}) \n ${{github.event.issue.body}}" + markdown: "[${{github.event.issue.title}}](${{github.event.issue.html_url}}) \n ${{github.event.issue.body}}" - name: Send info about issue uses: rtCamp/action-slack-notify@v2 env: @@ -39,10 +39,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Convert markdown to slack markdown for pull request - uses: LoveToKnow/slackify-markdown-action@v1.0.0 + uses: asyncapi/.github/.github/actions/slackify-markdown@master id: prmarkdown with: - text: "[${{github.event.pull_request.title}}](${{github.event.pull_request.html_url}}) \n ${{github.event.pull_request.body}}" + markdown: "[${{github.event.pull_request.title}}](${{github.event.pull_request.html_url}}) \n ${{github.event.pull_request.body}}" - name: Send info about pull request uses: rtCamp/action-slack-notify@v2 env: @@ -57,10 +57,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Convert markdown to slack markdown for pull request - uses: LoveToKnow/slackify-markdown-action@v1.0.0 + uses: asyncapi/.github/.github/actions/slackify-markdown@master id: discussionmarkdown with: - text: "[${{github.event.discussion.title}}](${{github.event.discussion.html_url}}) \n ${{github.event.discussion.body}}" + markdown: "[${{github.event.discussion.title}}](${{github.event.discussion.html_url}}) \n ${{github.event.discussion.body}}" - name: Send info about pull request uses: rtCamp/action-slack-notify@v2 env: diff --git a/.github/workflows/link-check-cron.yml b/.github/workflows/link-check-cron.yml deleted file mode 100644 index 873d4297f..000000000 --- a/.github/workflows/link-check-cron.yml +++ /dev/null @@ -1,37 +0,0 @@ -# This workflow is centrally managed in https://github.com/asyncapi/.github/ -# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in above mentioned repo - -name: Check Markdown links (Weekly) - -on: - workflow_dispatch: - schedule: - # At 00:00 UTC on every Monday - - cron: '0 0 * * 0' - -jobs: - External-link-validation-weekly: - if: startsWith(github.repository, 'asyncapi/') - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - # Checks the status of hyperlinks in .md files - - name: Check links - uses: gaurav-nelson/github-action-markdown-link-check@0a51127e9955b855a9bbfa1ff5577f1d1338c9a5 
#1.0.14 but pointing to commit for security reasons - with: - use-quiet-mode: 'yes' - use-verbose-mode: 'yes' - - # A configuration file can be included, indicating the properties of the link check action - # More information can be found here: https://github.com/tcort/markdown-link-check#config-file-format - # Create mlc_config.json file in the root of the directory - - - name: Report workflow run status to Slack - uses: 8398a7/action-slack@v3 - with: - status: ${{ job.status }} - fields: repo,action,workflow - env: - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_CI_FAIL_NOTIFY }} - if: failure() # Only, on failure, send a message on the 94_bot-failing-ci slack channel diff --git a/.github/workflows/link-check-pr.yml b/.github/workflows/link-check-pr.yml deleted file mode 100644 index 51f6cf780..000000000 --- a/.github/workflows/link-check-pr.yml +++ /dev/null @@ -1,28 +0,0 @@ -# This workflow is centrally managed in https://github.com/asyncapi/.github/ -# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in above mentioned repo - -name: Check Markdown links - -on: - pull_request_target: - types: [synchronize, ready_for_review, opened, reopened] - paths: - - '**.md' - -jobs: - External-link-validation-on-PR: - runs-on: ubuntu-latest - steps: - - name: Checkout repo - uses: actions/checkout@v3 - - name: Check links - uses: gaurav-nelson/github-action-markdown-link-check@0a51127e9955b855a9bbfa1ff5577f1d1338c9a5 #1.0.14 but pointing to commit for security reasons - with: - use-quiet-mode: 'yes' - use-verbose-mode: 'yes' - check-modified-files-only: 'yes' # Only modified files are checked on PRs - - - # A configuration file can be included, indicating the properties of the link check action - # More information can be found here: https://github.com/tcort/markdown-link-check#config-file-format - # Create mlc_config.json file in the root of the directory diff --git a/.github/workflows/lint-pr-title.yml b/.github/workflows/lint-pr-title.yml index c4a942a91..77aa1c6e4 100644 --- a/.github/workflows/lint-pr-title.yml +++ b/.github/workflows/lint-pr-title.yml @@ -13,8 +13,8 @@ jobs: runs-on: ubuntu-latest steps: # Since this workflow is REQUIRED for a PR to be mergable, we have to have this 'if' statement in step level instead of job level. - - if: ${{ !contains(fromJson('["asyncapi-bot", "dependabot[bot]", "dependabot-preview[bot]", "allcontributors"]'), github.actor) }} - uses: amannn/action-semantic-pull-request@505e44b4f33b4c801f063838b3f053990ee46ea7 #version 4.6.0 + - if: ${{ !contains(fromJson('["asyncapi-bot", "dependabot[bot]", "dependabot-preview[bot]", "allcontributors[bot]"]'), github.actor) }} + uses: amannn/action-semantic-pull-request@c3cd5d1ea3580753008872425915e343e351ab54 #version 5.2.0 https://github.com/amannn/action-semantic-pull-request/releases/tag/v5.2.0 id: lint_pr_title env: GITHUB_TOKEN: ${{ secrets.GH_TOKEN}} @@ -24,9 +24,9 @@ jobs: The subject "{subject}" found in the pull request title "{title}" should start with a lowercase character. 
# Comments the error message from the above lint_pr_title action - - if: ${{ always() && steps.lint_pr_title.outputs.error_message != null && !contains(fromJson('["asyncapi-bot", "dependabot[bot]", "dependabot-preview[bot]", "allcontributors"]'), github.actor)}} + - if: ${{ always() && steps.lint_pr_title.outputs.error_message != null && !contains(fromJson('["asyncapi-bot", "dependabot[bot]", "dependabot-preview[bot]", "allcontributors[bot]"]'), github.actor)}} name: Comment on PR - uses: marocchino/sticky-pull-request-comment@39c5b5dc7717447d0cba270cd115037d32d28443 #version 2.2 + uses: marocchino/sticky-pull-request-comment@3d60a5b2dae89d44e0c6ddc69dd7536aec2071cd #use 2.5.0 https://github.com/marocchino/sticky-pull-request-comment/releases/tag/v2.5.0 with: header: pr-title-lint-error GITHUB_TOKEN: ${{ secrets.GH_TOKEN}} @@ -40,7 +40,7 @@ jobs: # deletes the error comment if the title is correct - if: ${{ steps.lint_pr_title.outputs.error_message == null }} name: delete the comment - uses: marocchino/sticky-pull-request-comment@39c5b5dc7717447d0cba270cd115037d32d28443 #version 2.2 + uses: marocchino/sticky-pull-request-comment@3d60a5b2dae89d44e0c6ddc69dd7536aec2071cd #use 2.5.0 https://github.com/marocchino/sticky-pull-request-comment/releases/tag/v2.5.0 with: header: pr-title-lint-error delete: true diff --git a/.github/workflows/local-generate-files.yml b/.github/workflows/local-generate-files.yml new file mode 100644 index 000000000..b18f2aa34 --- /dev/null +++ b/.github/workflows/local-generate-files.yml @@ -0,0 +1,50 @@ +# this workflow runs after releases to generate some files like for example api.md +name: Autogenerate API files + +on: + release: + types: + - published + +jobs: + version_bump: + name: Generate assets and bump NodeJS + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v3 + with: + # target branch of release. 
More info https://docs.github.com/en/rest/reference/repos#releases + # in case release is created from release branch then we need to checkout from given branch + # if @semantic-release/github is used to publish, the minimum version is 7.2.0 for proper working + ref: ${{ github.event.release.target_commitish }} + - name: Check package-lock version + uses: asyncapi/.github/.github/actions/get-node-version-from-package-lock@master + id: lockversion + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: "${{ steps.lockversion.outputs.version }}" + - name: Install dependencies + run: npm ci + - name: Assets generation + run: npm run generate:assets --if-present + - name: Create Pull Request with updated asset files including package.json + uses: peter-evans/create-pull-request@38e0b6e68b4c852a5500a94740f0e535e0d7ba54 # use 4.2.4 https://github.com/peter-evans/create-pull-request/releases/tag/v4.2.4 + with: + token: ${{ secrets.GH_TOKEN }} + commit-message: 'chore(release): ${{github.event.release.tag_name}}' + committer: asyncapi-bot + author: asyncapi-bot + title: 'chore: update assets' + body: 'Updating assets, for example API.md (but not only), that should be generated and not manually updated' + branch: assets-update/${{github.event.release.tag_name}} + - if: failure() # Only, on failure, send a message on the 94_bot-failing-ci slack channel + name: Report workflow run status to Slack + uses: 8398a7/action-slack@v3 + with: + status: ${{ job.status }} + fields: repo,action,workflow + text: 'Unable to bump the version in package.json after the release' + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_CI_FAIL_NOTIFY }} diff --git a/.github/workflows/notify-tsc-members-mention.yml b/.github/workflows/notify-tsc-members-mention.yml index e681c18a0..d72fd85bd 100644 --- a/.github/workflows/notify-tsc-members-mention.yml +++ b/.github/workflows/notify-tsc-members-mention.yml @@ -31,14 +31,22 @@ jobs: name: TSC notification on every new issue runs-on: ubuntu-latest steps: + - name: Checkout repository + uses: actions/checkout@v3 + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: 16 + cache: 'npm' + cache-dependency-path: '**/package-lock.json' ######### # Handling Slack notifications ######### - name: Convert markdown to slack markdown - uses: LoveToKnow/slackify-markdown-action@v1.0.0 + uses: asyncapi/.github/.github/actions/slackify-markdown@master id: issuemarkdown with: - text: "[${{github.event.issue.title}}](${{github.event.issue.html_url}}) \n ${{github.event.issue.body}}" + markdown: "[${{github.event.issue.title}}](${{github.event.issue.html_url}}) \n ${{github.event.issue.body}}" - name: Send info about issue uses: rtCamp/action-slack-notify@v2 env: @@ -49,19 +57,11 @@ jobs: ######### # Handling Mailchimp notifications ######### - - name: Checkout repository - uses: actions/checkout@v2 - - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: 16 - cache: 'npm' - cache-dependency-path: '**/package-lock.json' - name: Install deps run: npm install working-directory: ./.github/workflows/scripts/mailchimp - name: Send email with MailChimp - uses: actions/github-script@v4 + uses: actions/github-script@v6 env: CALENDAR_ID: ${{ secrets.CALENDAR_ID }} CALENDAR_SERVICE_ACCOUNT: ${{ secrets.CALENDAR_SERVICE_ACCOUNT }} @@ -76,14 +76,22 @@ jobs: name: TSC notification on every new pull request runs-on: ubuntu-latest steps: + - name: Checkout repository + uses: actions/checkout@v3 + - name: Setup Node.js + uses:
actions/setup-node@v3 + with: + node-version: 16 + cache: 'npm' + cache-dependency-path: '**/package-lock.json' ######### # Handling Slack notifications ######### - name: Convert markdown to slack markdown - uses: LoveToKnow/slackify-markdown-action@v1.0.0 + uses: asyncapi/.github/.github/actions/slackify-markdown@master id: prmarkdown with: - text: "[${{github.event.pull_request.title}}](${{github.event.pull_request.html_url}}) \n ${{github.event.pull_request.body}}" + markdown: "[${{github.event.pull_request.title}}](${{github.event.pull_request.html_url}}) \n ${{github.event.pull_request.body}}" - name: Send info about pull request uses: rtCamp/action-slack-notify@v2 env: @@ -94,19 +102,11 @@ jobs: ######### # Handling Mailchimp notifications ######### - - name: Checkout repository - uses: actions/checkout@v2 - - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: 16 - cache: 'npm' - cache-dependency-path: '**/package-lock.json' - name: Install deps run: npm install working-directory: ./.github/workflows/scripts/mailchimp - name: Send email with MailChimp - uses: actions/github-script@v4 + uses: actions/github-script@v6 env: CALENDAR_ID: ${{ secrets.CALENDAR_ID }} CALENDAR_SERVICE_ACCOUNT: ${{ secrets.CALENDAR_SERVICE_ACCOUNT }} @@ -121,14 +121,22 @@ jobs: name: TSC notification on every new discussion runs-on: ubuntu-latest steps: + - name: Checkout repository + uses: actions/checkout@v3 + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: 16 + cache: 'npm' + cache-dependency-path: '**/package-lock.json' ######### # Handling Slack notifications ######### - name: Convert markdown to slack markdown - uses: LoveToKnow/slackify-markdown-action@v1.0.0 + uses: asyncapi/.github/.github/actions/slackify-markdown@master id: discussionmarkdown with: - text: "[${{github.event.discussion.title}}](${{github.event.discussion.html_url}}) \n ${{github.event.discussion.body}}" + markdown: "[${{github.event.discussion.title}}](${{github.event.discussion.html_url}}) \n ${{github.event.discussion.body}}" - name: Send info about pull request uses: rtCamp/action-slack-notify@v2 env: @@ -139,19 +147,11 @@ jobs: ######### # Handling Mailchimp notifications ######### - - name: Checkout repository - uses: actions/checkout@v2 - - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: 16 - cache: 'npm' - cache-dependency-path: '**/package-lock.json' - name: Install deps run: npm install working-directory: ./.github/workflows/scripts/mailchimp - name: Send email with MailChimp - uses: actions/github-script@v4 + uses: actions/github-script@v6 env: CALENDAR_ID: ${{ secrets.CALENDAR_ID }} CALENDAR_SERVICE_ACCOUNT: ${{ secrets.CALENDAR_SERVICE_ACCOUNT }} @@ -166,14 +166,22 @@ jobs: name: TSC notification on every new comment in issue runs-on: ubuntu-latest steps: + - name: Checkout repository + uses: actions/checkout@v3 + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: 16 + cache: 'npm' + cache-dependency-path: '**/package-lock.json' ######### # Handling Slack notifications ######### - name: Convert markdown to slack markdown - uses: LoveToKnow/slackify-markdown-action@v1.0.0 + uses: asyncapi/.github/.github/actions/slackify-markdown@master id: issuemarkdown with: - text: "[${{github.event.issue.title}}](${{github.event.comment.html_url}}) \n ${{github.event.comment.body}}" + markdown: "[${{github.event.issue.title}}](${{github.event.comment.html_url}}) \n ${{github.event.comment.body}}" - name: Send info about issue comment 
uses: rtCamp/action-slack-notify@v2 env: @@ -184,19 +192,11 @@ jobs: ######### # Handling Mailchimp notifications ######### - - name: Checkout repository - uses: actions/checkout@v2 - - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: 16 - cache: 'npm' - cache-dependency-path: '**/package-lock.json' - name: Install deps run: npm install working-directory: ./.github/workflows/scripts/mailchimp - name: Send email with MailChimp - uses: actions/github-script@v4 + uses: actions/github-script@v6 env: CALENDAR_ID: ${{ secrets.CALENDAR_ID }} CALENDAR_SERVICE_ACCOUNT: ${{ secrets.CALENDAR_SERVICE_ACCOUNT }} @@ -211,14 +211,22 @@ jobs: name: TSC notification on every new comment in pr runs-on: ubuntu-latest steps: + - name: Checkout repository + uses: actions/checkout@v3 + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: 16 + cache: 'npm' + cache-dependency-path: '**/package-lock.json' ######### # Handling Slack notifications ######### - name: Convert markdown to slack markdown - uses: LoveToKnow/slackify-markdown-action@v1.0.0 + uses: asyncapi/.github/.github/actions/slackify-markdown@master id: prmarkdown with: - text: "[${{github.event.issue.title}}](${{github.event.comment.html_url}}) \n ${{github.event.comment.body}}" + markdown: "[${{github.event.issue.title}}](${{github.event.comment.html_url}}) \n ${{github.event.comment.body}}" - name: Send info about PR comment uses: rtCamp/action-slack-notify@v2 env: @@ -229,19 +237,11 @@ jobs: ######### # Handling Mailchimp notifications ######### - - name: Checkout repository - uses: actions/checkout@v2 - - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: 16 - cache: 'npm' - cache-dependency-path: '**/package-lock.json' - name: Install deps run: npm install working-directory: ./.github/workflows/scripts/mailchimp - name: Send email with MailChimp - uses: actions/github-script@v4 + uses: actions/github-script@v6 env: CALENDAR_ID: ${{ secrets.CALENDAR_ID }} CALENDAR_SERVICE_ACCOUNT: ${{ secrets.CALENDAR_SERVICE_ACCOUNT }} @@ -256,14 +256,22 @@ jobs: name: TSC notification on every new comment in discussion runs-on: ubuntu-latest steps: + - name: Checkout repository + uses: actions/checkout@v3 + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: 16 + cache: 'npm' + cache-dependency-path: '**/package-lock.json' ######### # Handling Slack notifications ######### - name: Convert markdown to slack markdown - uses: LoveToKnow/slackify-markdown-action@v1.0.0 + uses: asyncapi/.github/.github/actions/slackify-markdown@master id: discussionmarkdown with: - text: "[${{github.event.discussion.title}}](${{github.event.comment.html_url}}) \n ${{github.event.comment.body}}" + markdown: "[${{github.event.discussion.title}}](${{github.event.comment.html_url}}) \n ${{github.event.comment.body}}" - name: Send info about discussion comment uses: rtCamp/action-slack-notify@v2 env: @@ -274,19 +282,11 @@ jobs: ######### # Handling Mailchimp notifications ######### - - name: Checkout repository - uses: actions/checkout@v2 - - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: 16 - cache: 'npm' - cache-dependency-path: '**/package-lock.json' - name: Install deps run: npm install working-directory: ./.github/workflows/scripts/mailchimp - name: Send email with MailChimp - uses: actions/github-script@v4 + uses: actions/github-script@v6 env: CALENDAR_ID: ${{ secrets.CALENDAR_ID }} CALENDAR_SERVICE_ACCOUNT: ${{ secrets.CALENDAR_SERVICE_ACCOUNT }} diff --git 
a/.github/workflows/please-take-a-look-command.yml b/.github/workflows/please-take-a-look-command.yml new file mode 100644 index 000000000..b26cbc41a --- /dev/null +++ b/.github/workflows/please-take-a-look-command.yml @@ -0,0 +1,54 @@ +# This action is centrally managed in https://github.com/asyncapi/.github/ +# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in above mentioned repo + +# It uses Github actions to listen for comments on issues and pull requests and +# if the comment contains /please-take-a-look or /ptal it will add a comment pinging +# the code-owners who are reviewers for PR + +name: Please take a Look + +on: + issue_comment: + types: [created] + +jobs: + ping-for-attention: + if: > + github.event.issue.pull_request && + github.event.issue.state != 'closed' && + github.actor != 'asyncapi-bot' && + ( + contains(github.event.comment.body, '/please-take-a-look') || + contains(github.event.comment.body, '/ptal') || + contains(github.event.comment.body, '/PTAL') + ) + runs-on: ubuntu-latest + steps: + - name: Check for Please Take a Look Command + uses: actions/github-script@v6 + with: + github-token: ${{ secrets.GH_TOKEN }} + script: | + const prDetailsUrl = context.payload.issue.pull_request.url; + const { data: pull } = await github.request(prDetailsUrl); + const reviewers = pull.requested_reviewers.map(reviewer => reviewer.login); + + const { data: reviews } = await github.rest.pulls.listReviews({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.issue.number + }); + + const reviewersWhoHaveReviewed = reviews.map(review => review.user.login); + + const reviewersWhoHaveNotReviewed = reviewers.filter(reviewer => !reviewersWhoHaveReviewed.includes(reviewer)); + + if (reviewersWhoHaveNotReviewed.length > 0) { + const comment = reviewersWhoHaveNotReviewed.filter(reviewer => reviewer !== 'asyncapi-bot-eve' ).map(reviewer => `@${reviewer}`).join(' '); + await github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: `${comment} Please take a look at this PR. Thanks! 
:wave:` + }); + } diff --git a/.github/workflows/pr-testing-with-test-project.yml b/.github/workflows/pr-testing-with-test-project.yml index c611c7431..7c543f907 100644 --- a/.github/workflows/pr-testing-with-test-project.yml +++ b/.github/workflows/pr-testing-with-test-project.yml @@ -12,10 +12,10 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [ '12', '14', '16' ] + node: ["18", "20"] steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Run test - run: NODE_IMAGE_TAG=${{ matrix.node }} docker-compose up --abort-on-container-exit --remove-orphans - working-directory: ./test/test-project + run: NODE_IMAGE_TAG=${{ matrix.node }} docker compose up --abort-on-container-exit --remove-orphans --force-recreate + working-directory: ./apps/generator/test/test-project diff --git a/.github/workflows/release-announcements.yml b/.github/workflows/release-announcements.yml index b2f3ba76d..9587cacee 100644 --- a/.github/workflows/release-announcements.yml +++ b/.github/workflows/release-announcements.yml @@ -14,11 +14,13 @@ jobs: name: Slack - notify on every release runs-on: ubuntu-latest steps: + - name: Checkout repository + uses: actions/checkout@v3 - name: Convert markdown to slack markdown for issue - uses: LoveToKnow/slackify-markdown-action@v1.0.0 + uses: asyncapi/.github/.github/actions/slackify-markdown@master id: markdown with: - text: "[${{github.event.release.tag_name}}](${{github.event.release.html_url}}) \n ${{ github.event.release.body }}" + markdown: "[${{github.event.release.tag_name}}](${{github.event.release.html_url}}) \n ${{ github.event.release.body }}" - name: Send info about release to Slack uses: rtCamp/action-slack-notify@v2 env: @@ -32,9 +34,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repo - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Get version of last and previous release - uses: actions/github-script@v3 + uses: actions/github-script@v6 id: versions with: github-token: ${{ secrets.GITHUB_TOKEN }} @@ -60,15 +62,15 @@ jobs: - name: Identify release type id: releasetype # if previousver is not provided then this steps just logs information about missing version, no errors - run: echo "::set-output name=type::$(npx -q -p semver-diff-cli semver-diff ${{steps.versions.outputs.previousver}} ${{steps.versions.outputs.lastver}})" + run: echo "type=$(npx -q -p semver-diff-cli semver-diff ${{steps.versions.outputs.previousver}} ${{steps.versions.outputs.lastver}})" >> $GITHUB_OUTPUT - name: Get name of the person that is behind the newly released version id: author - run: echo "::set-output name=name::$(git log -1 --pretty=format:'%an')" + run: echo "name=$(git log -1 --pretty=format:'%an')" >> $GITHUB_OUTPUT - name: Publish information about the release to Twitter # tweet only if detected version change is not a patch # tweet goes out even if the type is not major or minor but "You need provide version number to compare." 
# it is ok, it just means we did not identify previous version as we are tweeting out information about the release for the first time if: steps.releasetype.outputs.type != 'null' && steps.releasetype.outputs.type != 'patch' # null means that versions are the same - uses: m1ner79/Github-Twittction@v1.0.1 + uses: m1ner79/Github-Twittction@d1e508b6c2170145127138f93c49b7c46c6ff3a7 # using 2.0.0 https://github.com/m1ner79/Github-Twittction/releases/tag/v2.0.0 with: twitter_status: "Release ${{github.event.release.tag_name}} for ${{github.repository}} is out in the wild 😱💪🍾🎂\n\nThank you for the contribution ${{ steps.author.outputs.name }} ${{github.event.release.html_url}}" twitter_consumer_key: ${{ secrets.TWITTER_CONSUMER_KEY }} diff --git a/.github/workflows/release-docker.yml b/.github/workflows/release-docker.yml index 02651069d..82366d8a8 100644 --- a/.github/workflows/release-docker.yml +++ b/.github/workflows/release-docker.yml @@ -1,7 +1,7 @@ name: Release Docker Image -on: +on: release: - types: + types: - published jobs: @@ -10,24 +10,52 @@ jobs: name: Docker build and push runs-on: ubuntu-latest steps: - - name: Checkout repository - uses: actions/checkout@v2 - with: - ref: master - name: Get version without v character id: version run: | VERSION=${{github.event.release.tag_name}} - VERSION_WITHOUT_V=${VERSION:1} - echo "::set-output name=value::$(echo $VERSION_WITHOUT_V)" - - name: Release to Docker - run: | - echo ${{secrets.DOCKER_PASSWORD}} | docker login -u ${{secrets.DOCKER_USERNAME}} --password-stdin - sleep 1m #docker image installs generator from npm, this sleep protects this step from any delays on npm side - npm run docker:build - docker tag asyncapi/generator:latest asyncapi/generator:${{ steps.version.outputs.value }} - docker push asyncapi/generator:${{ steps.version.outputs.value }} - docker push asyncapi/generator:latest + VERSION_WITHOUT_V=${VERSION##*@} + echo "value=${VERSION_WITHOUT_V}" >> $GITHUB_OUTPUT + + - name: Set Up QEMU + uses: docker/setup-qemu-action@v2 + + - name: Set Up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: login to Docker Hub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + # This workflow triggers on GitHub Release, but it may start before the npm package is published. + - name: Sleep for 1s seconds + run: sleep 1s + - name: Build Image + uses: docker/build-push-action@v4 + with: + context: "{{defaultContext}}:apps/generator" + push: true + load: false + build-args: | + ASYNCAPI_GENERATOR_VERSION=${{ steps.version.outputs.value }} + tags: | + asyncapi/generator:${{ steps.version.outputs.value }} + asyncapi/generator:latest + platforms: linux/amd64,linux/arm64 + cache-from: type=gha + cache-to: type=gha + + - name: Check out the repository + uses: actions/checkout@v4 + + - name: Update Docker Hub Readme + uses: meeDamian/sync-readme@v1.0.6 + with: + user: ${{ secrets.DOCKER_USERNAME }} + pass: ${{ secrets.DOCKER_PASSWORD }} + slug: asyncapi/generator + readme: ./apps/generator/README.md + description: Use your AsyncAPI definition to generate literally anything. Markdown documentation, Node.js code, HTML documentation, anything! 
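The new `Get version without v character` step in release-docker.yml above switches from substring slicing to bash parameter expansion, presumably because changesets-style release tags look like `@scope/package@x.y.z` rather than `vx.y.z`. A minimal sketch of how that expansion behaves, assuming that tag format (the tag values below are made up for illustration):

```bash
#!/usr/bin/env bash
# ${VAR##*@} removes the longest prefix ending in '@', leaving only the semver part.
TAG="@asyncapi/generator@2.1.0"   # hypothetical monorepo-style tag
echo "${TAG##*@}"                 # prints: 2.1.0

# A tag without any '@' is left untouched, so old-style tags keep their leading 'v'.
OLD_TAG="v1.17.0"                 # hypothetical pre-monorepo tag
echo "${OLD_TAG##*@}"             # prints: v1.17.0
```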
diff --git a/.github/workflows/release-with-changesets.yml b/.github/workflows/release-with-changesets.yml new file mode 100644 index 000000000..327dfd4eb --- /dev/null +++ b/.github/workflows/release-with-changesets.yml @@ -0,0 +1,137 @@ +# It does magic only if there is a package.json file in the root of the project +name: Release + +on: + push: + branches: + - master + # The below lines are not enough to have release supported for these branches + - next-spec + - next-major + - next-major-spec + - beta + - alpha + - next + +jobs: + test-nodejs: + # We just check the message of the first commit as there is always just one commit because we squash into one before merging + # "commits" contains an array of objects where one of the properties is the commit "message" + # Release workflow will be skipped if release conventional commits are not used + if: | + startsWith( github.repository, 'asyncapi/' ) && + (startsWith( github.event.commits[0].message , 'fix:' ) || + startsWith( github.event.commits[0].message, 'fix!:' ) || + startsWith( github.event.commits[0].message, 'feat:' ) || + startsWith( github.event.commits[0].message, 'chore(release):' ) || + startsWith( github.event.commits[0].message, 'feat!:' )) + name: Test NodeJS release on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + # Using macos-13 instead of latest (macos-14) due to an issue with Puppeteer and such runner. + # See: https://github.com/puppeteer/puppeteer/issues/12327 and https://github.com/asyncapi/parser-js/issues/1001 + os: [ubuntu-latest, macos-13, windows-latest] + steps: + - name: Set git to use LF # To once and for all finish the never-ending fight between Unix and Windows + run: | + git config --global core.autocrlf false + git config --global core.eol lf + shell: bash + - name: Checkout repository + uses: actions/checkout@v4 + - name: Check if Node.js project and has package.json + id: packagejson + run: test -e ./package.json && echo "exists=true" >> $GITHUB_OUTPUT || echo "exists=false" >> $GITHUB_OUTPUT + shell: bash + - if: steps.packagejson.outputs.exists == 'true' + name: Check package-lock version + uses: asyncapi/.github/.github/actions/get-node-version-from-package-lock@master + id: lockversion + - if: steps.packagejson.outputs.exists == 'true' + name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: "${{ steps.lockversion.outputs.version }}" + - if: steps.lockversion.outputs.version == '18' && matrix.os == 'windows-latest' + name: Install npm cli 8 + shell: bash + # npm cli 10 is buggy because of some cache issues + run: npm install -g npm@8.19.4 + - if: steps.packagejson.outputs.exists == 'true' + name: Install dependencies + shell: bash + run: npm ci + - if: steps.packagejson.outputs.exists == 'true' + name: Run test + run: npm test --if-present + - if: failure() # Only, on failure, send a message on the 94_bot-failing-ci slack channel + name: Report workflow run status to Slack + uses: 8398a7/action-slack@v3 + with: + status: ${{ job.status }} + fields: repo,action,workflow + text: "Release workflow failed in testing job" + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_CI_FAIL_NOTIFY }} + + release: + needs: [test-nodejs] + name: Publish to any of NPM, GitHub, or Docker Hub + runs-on: ubuntu-latest + steps: + - name: Set git to use LF # To once and for all finish the never-ending fight between Unix and Windows + run: | + git config --global core.autocrlf false + git config --global core.eol lf + - name: Checkout repository + uses: actions/checkout@v4 + - name: 
Check if Node.js project and has package.json + id: packagejson + run: test -e ./package.json && echo "exists=true" >> $GITHUB_OUTPUT || echo "exists=false" >> $GITHUB_OUTPUT + shell: bash + - if: steps.packagejson.outputs.exists == 'true' + name: Check package-lock version + uses: asyncapi/.github/.github/actions/get-node-version-from-package-lock@master + id: lockversion + - if: steps.packagejson.outputs.exists == 'true' + name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: "${{ steps.lockversion.outputs.version }}" + - if: steps.packagejson.outputs.exists == 'true' + name: Install dependencies + shell: bash + run: npm ci + - if: steps.packagejson.outputs.exists == 'true' + name: Install changelog + shell: bash + # This step can be removed once the issue is fixed in the changeset package. + run: npm install @changesets/changelog-git@0.2.0 + - if: steps.packagejson.outputs.exists == 'true' + name: Publish to any of NPM, GitHub, and Docker Hub + #this step has 2 goals: either it identifies that a changeset file was created, in which case this action creates a PR with a version bump that will trigger the release - or, if it sees there is no changeset and there are version changes in package.json files, it publishes new versions to NPM if they are not there yet + uses: changesets/action@v1 + id: release + with: + version: npx -p @changesets/cli@2.27.7 changeset version + commit: "chore(release): release and bump versions of packages" + title: "chore(release): release and bump versions of packages" + publish: npx -p @changesets/cli@2.27.7 changeset publish + setupGitUser: false + env: + GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + GIT_AUTHOR_NAME: asyncapi-bot + GIT_AUTHOR_EMAIL: info@asyncapi.io + GIT_COMMITTER_NAME: asyncapi-bot + GIT_COMMITTER_EMAIL: info@asyncapi.io + - if: failure() # Only, on failure, send a message on the 94_bot-failing-ci Slack channel + name: Report workflow run status to Slack + uses: 8398a7/action-slack@v3 + with: + status: ${{ job.status }} + fields: repo,action,workflow + text: "Release workflow failed in release job" + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_CI_FAIL_NOTIFY }} diff --git a/.github/workflows/scripts/mailchimp/index.js b/.github/workflows/scripts/mailchimp/index.js index 387176b41..4a200c6c4 100644 --- a/.github/workflows/scripts/mailchimp/index.js +++ b/.github/workflows/scripts/mailchimp/index.js @@ -27,8 +27,14 @@ module.exports = async (link, title) => { type: 'regular', recipients: { list_id: '6e3e437abe', - segments_opts: { - saved_segment_id: 'tsc-voting-email' + segment_opts: { + match: 'any', + conditions: [{ + condition_type: 'Interests', + field: 'interests-2801e38b9f', + op: 'interestcontains', + value: ['f7204f9b90'] + }] } }, settings: { diff --git a/.github/workflows/scripts/mailchimp/package-lock.json b/.github/workflows/scripts/mailchimp/package-lock.json index e7f570381..7ee7d84f8 100644 --- a/.github/workflows/scripts/mailchimp/package-lock.json +++ b/.github/workflows/scripts/mailchimp/package-lock.json @@ -7,25 +7,24 @@ "name": "schedule-email", "license": "Apache 2.0", "dependencies": { - "@actions/core": "1.9.1", + "@actions/core": "1.6.0", "@mailchimp/mailchimp_marketing": "3.0.74" } }, "node_modules/@actions/core": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.1.tgz", - "integrity": "sha512-5ad+U2YGrmmiw6du20AQW5XuWo7UKN2052FjSV7MX+Wfjf8sCqcsZe62NfgHys4QI4/Y+vQvLKYL8jWtA1ZBTA==", + "version": "1.6.0", + "resolved":
"https://registry.npmjs.org/@actions/core/-/core-1.6.0.tgz", + "integrity": "sha512-NB1UAZomZlCV/LmJqkLhNTqtKfFXJZAUPcfl/zqG7EfsQdeUJtaWO98SGbuQ3pydJ3fHl2CvI/51OKYlCYYcaw==", "dependencies": { - "@actions/http-client": "^2.0.1", - "uuid": "^8.3.2" + "@actions/http-client": "^1.0.11" } }, "node_modules/@actions/http-client": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", - "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.11.tgz", + "integrity": "sha512-VRYHGQV1rqnROJqdMvGUbY/Kn8vriQe/F9HR2AlYHzmKuM/p3kjNuXhmdBfcVgsvRWTz5C5XW5xvndZrVBuAYg==", "dependencies": { - "tunnel": "^0.0.6" + "tunnel": "0.0.6" } }, "node_modules/@mailchimp/mailchimp_marketing": { @@ -332,32 +331,23 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" - }, - "node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "bin": { - "uuid": "dist/bin/uuid" - } } }, "dependencies": { "@actions/core": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.1.tgz", - "integrity": "sha512-5ad+U2YGrmmiw6du20AQW5XuWo7UKN2052FjSV7MX+Wfjf8sCqcsZe62NfgHys4QI4/Y+vQvLKYL8jWtA1ZBTA==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.6.0.tgz", + "integrity": "sha512-NB1UAZomZlCV/LmJqkLhNTqtKfFXJZAUPcfl/zqG7EfsQdeUJtaWO98SGbuQ3pydJ3fHl2CvI/51OKYlCYYcaw==", "requires": { - "@actions/http-client": "^2.0.1", - "uuid": "^8.3.2" + "@actions/http-client": "^1.0.11" } }, "@actions/http-client": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", - "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.11.tgz", + "integrity": "sha512-VRYHGQV1rqnROJqdMvGUbY/Kn8vriQe/F9HR2AlYHzmKuM/p3kjNuXhmdBfcVgsvRWTz5C5XW5xvndZrVBuAYg==", "requires": { - "tunnel": "^0.0.6" + "tunnel": "0.0.6" } }, "@mailchimp/mailchimp_marketing": { @@ -602,11 +592,6 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" - }, - "uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" } } -} +} \ No newline at end of file diff --git a/.github/workflows/scripts/mailchimp/package.json b/.github/workflows/scripts/mailchimp/package.json index 28b447ff7..cc50e43e0 100644 --- a/.github/workflows/scripts/mailchimp/package.json +++ b/.github/workflows/scripts/mailchimp/package.json @@ -3,7 +3,7 @@ "description": "This code is responsible for scheduling an email campaign. 
This file is centrally managed in https://github.com/asyncapi/.github/", "license": "Apache 2.0", "dependencies": { - "@actions/core": "1.9.1", + "@actions/core": "1.6.0", "@mailchimp/mailchimp_marketing": "3.0.74" } } \ No newline at end of file diff --git a/.github/workflows/sentiment-analysis.yml b/.github/workflows/sentiment-analysis.yml deleted file mode 100644 index cd8ab05f7..000000000 --- a/.github/workflows/sentiment-analysis.yml +++ /dev/null @@ -1,45 +0,0 @@ -# This action is centrally managed in https://github.com/asyncapi/.github/ -# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in above mentioned repo - -name: 'Sentiment Analysis' - -on: - issue_comment: - types: - - created - - edited - issues: - types: - - opened - - edited - pull_request: - types: - - opened - - edited - pull_request_review: - types: - - submitted - - edited - pull_request_review_comment: - types: - - created - - edited -jobs: - sentiments: - if: ${{ !contains(fromJson('["asyncapi-bot", "dependabot[bot]", "dependabot-preview[bot]", "allcontributors"]'), github.actor) }} - name: Checking sentiments - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Check sentiment - uses: derberg/code-of-conduct-sentiment-analysis-github-action@v1 - id: sentiments - with: - gcp_key: ${{ secrets.GCP_KEY_SENTIMENT }} - - uses: someimportantcompany/github-actions-slack-message@v1 - # this step runs only if sentiment is a negative number - if: steps.sentiments.outputs.sentiment < -0.6 - with: - webhook-url: ${{ secrets.SLACK_SENTIMENTS }} - text: Here ${{steps.sentiments.outputs.source}} you can find a potential negative text that requires your attention as the sentiment analysis score is ${{steps.sentiments.outputs.sentiment}} - color: orange \ No newline at end of file diff --git a/.github/workflows/templates-list-validation.yml b/.github/workflows/templates-list-validation.yml deleted file mode 100644 index ad99814bc..000000000 --- a/.github/workflows/templates-list-validation.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: Checking if list of templates is up to date - -on: - schedule: - - cron: "0 0 * * *" - -jobs: - templates: - name: 'Check list of templates' - runs-on: ubuntu-latest - steps: - - name: Checkout repo - uses: actions/checkout@v2 - - uses: ./.github/templates-list-validator - with: - token: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/update-docs-in-website.yml b/.github/workflows/update-docs-in-website.yml new file mode 100644 index 000000000..6394cefae --- /dev/null +++ b/.github/workflows/update-docs-in-website.yml @@ -0,0 +1,53 @@ +name: Update latest generator documentation in the website + +on: + push: + branches: + - 'master' + paths: + - 'docs/*.md' + +jobs: + Make-PR: + name: Make PR on website repository with updated latest generator documentation + runs-on: ubuntu-latest + env: + GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} + steps: + - name: Checkout Current repository + uses: actions/checkout@v3 + with: + path: generator + - name: Checkout Another repository + uses: actions/checkout@v3 + with: + repository: asyncapi/website + path: website + token: ${{ env.GITHUB_TOKEN }} + - name: Config git + run: | + git config --global user.name asyncapi-bot + git config --global user.email info@asyncapi.io + - name: Create branch + working-directory: ./website + run: | + git checkout -b update-generator-docs-${{ github.sha }} + - name: Copy generator folder from Current Repo to Another + 
working-directory: ./website + run: | + rm -r ./markdown/docs/tools/generator + mkdir -p ./markdown/docs/tools/generator + rm ../generator/docs/README.md + rm -r ../generator/docs/jsdoc2md-handlebars + printf "%s\ntitle: Generator\nweight: 3\n%s" "---" "---"> ../generator/docs/_section.md + mv ../generator/docs/*.md ./markdown/docs/tools/generator + - name: Commit and push + working-directory: ./website + run: | + git add . + git commit -m "docs(generator): update latest generator docs" + git push https://${{ env.GITHUB_TOKEN }}@github.com/asyncapi/website + - name: Create PR + working-directory: ./website + run: | + gh pr create --title "docs(generator): update latest generator documentation" --body "Updated generator documentation is available and this PR introduces update to generator folder on the website" --head "update-generator-docs-${{ github.sha }}" diff --git a/.github/workflows/update-docs-on-docs-commits.yml b/.github/workflows/update-docs-on-docs-commits.yml index bb12c7bdd..b511e284d 100644 --- a/.github/workflows/update-docs-on-docs-commits.yml +++ b/.github/workflows/update-docs-on-docs-commits.yml @@ -1,3 +1,10 @@ +# This workflow is centrally managed in https://github.com/asyncapi/.github/ +# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in above mentioned repo + +# The given workflow is responsible for generating docs and creating PR with them when there is a commit with docs: prefix + +# This workflow will be updated in all repos with the topic get-global-docs-autoupdate + name: 'Update generated parts of documentation on docs: commits' on: @@ -9,22 +16,27 @@ jobs: docs-gen: name: 'Generate docs and create PR' runs-on: ubuntu-latest + # PR should be created within this GH action only if it is a docs: commit + # Otherwise it will conflict with release workflow + if: startsWith(github.event.commits[0].message, 'docs:') steps: - name: Checkout repo - uses: actions/checkout@v2 - - name: Setup Node.js - uses: actions/setup-node@v1 + uses: actions/checkout@v4 + - name: Check package-lock version + uses: asyncapi/.github/.github/actions/get-node-version-from-package-lock@master + id: lockversion + - name: Use Node.js + uses: actions/setup-node@v3 with: - node-version: 14 + node-version: "${{ steps.lockversion.outputs.version }}" + cache: 'npm' + cache-dependency-path: '**/package-lock.json' - name: Install dependencies run: npm ci - name: Regenerate docs - run: npm run generate:assets + run: npm run generate:assets --if-present - name: Create Pull Request with updated docs - # PR should be created within this GH action only if it is a docs: commit - # Otherwise it will conflict with release workflow - if: startsWith(github.event.commits[0].message, 'docs:') - uses: peter-evans/create-pull-request@v3 + uses: peter-evans/create-pull-request@153407881ec5c347639a548ade7d8ad1d6740e38 # uses 5.0.2 https://github.com/peter-evans/create-pull-request/releases/tag/v5.0.2 with: token: ${{ secrets.GH_TOKEN }} commit-message: 'chore: update generated docs' @@ -32,4 +44,14 @@ jobs: author: asyncapi-bot title: 'chore: update generated docs' body: 'Update of docs that are generated and were forgotten on PR level.' 
- branch: gen-docs-update + branch: gen-docs-update/${{ github.job }} + - name: Report workflow status to Slack + if: failure() # Only, on failure, send a message on the 94_bot-failing-ci slack channel + uses: 8398a7/action-slack@fbd6aa58ba854a740e11a35d0df80cb5d12101d8 #using https://github.com/8398a7/action-slack/releases/tag/v3.15.1 + with: + status: ${{ job.status }} + fields: repo,action,workflow + text: 'AsyncAPI docs generation workflow failed' + author_name: asyncapi-bot + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_CI_FAIL_NOTIFY }} \ No newline at end of file diff --git a/.github/workflows/update-maintainers.yml b/.github/workflows/update-maintainers.yml new file mode 100644 index 000000000..c4cd1baa4 --- /dev/null +++ b/.github/workflows/update-maintainers.yml @@ -0,0 +1,177 @@ +name: Update MAINTAINERS.yaml in community repo + +on: + push: + branches: + - master + paths: + - 'CODEOWNERS' + +jobs: + update-maintainers: + runs-on: ubuntu-latest + env: + GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} + steps: + - name: Checkout main branch + uses: actions/checkout@v3 + with: + ref: master + path: current_state + + - name: Checkout one commit before last one + uses: actions/checkout@v3 + with: + fetch-depth: 2 + ref: master + path: previous_state + + - run: cd previous_state && git checkout HEAD^ + + - name: Checkout community repo + uses: actions/checkout@v3 + with: + repository: asyncapi/community + token: ${{ env.GITHUB_TOKEN }} + path: community + + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: '16' + + - name: Install js-yaml + run: npm install js-yaml@3.14.1 + + - name: Compare CODEOWNERS + id: compare-codeowners + env: + GH_TOKEN: ${{ env.GITHUB_TOKEN }} + uses: actions/github-script@v6 + with: + script: | + const fs = require('fs'); + const yaml = require('js-yaml'); + + // Get repository name + const repoName = context.repo.repo; + + function extractGitHubUsernames(content) { + // Remove lines starting with # + content = content.replace(/^#.*$/mg, ""); + + const regex = /@([a-zA-Z0-9_-]+)/g; + const matches = content.match(regex); + if (!matches) { + return []; + } + return matches.map(match => match.substr(1)); + } + + const currentCodeowners = fs.readFileSync('./current_state/CODEOWNERS', 'utf8'); + const previousCodeowners = fs.readFileSync('./previous_state/CODEOWNERS', 'utf8'); + + const currentUsernames = extractGitHubUsernames(currentCodeowners); + const previousUsernames = extractGitHubUsernames(previousCodeowners); + + const addedUsernames = currentUsernames.filter(username => !previousUsernames.includes(username)); + const removedUsernames = previousUsernames.filter(username => !currentUsernames.includes(username)); + + core.info('Added Usernames:', addedUsernames); + core.info('Removed Usernames:', removedUsernames); + core.info(`ADDED_USERNAMES=${addedUsernames.join(', ')}`); + core.info(`REMOVED_USERNAMES=${removedUsernames.join(', ')}`); + + // Update MAINTAINERS.yaml + const maintainersFile = './community/MAINTAINERS.yaml'; + const maintainers = yaml.safeLoad(fs.readFileSync(maintainersFile, 'utf8')); + + + // Update for added usernames + for (const username of addedUsernames) { + // Exclude bot accounts + if (username === 'asyncapi-bot' || username === 'asyncapi-bot-eve') { + core.info('Skipping bot account:', username); + continue; // Skip the iteration for bot accounts + } + + const maintainer = maintainers.find(maintainer => maintainer.github === username.toLowerCase()); + if (!maintainer) { + const { data } = await 
github.rest.users.getByUsername({ username }); + const twitterUsername = data.twitter_username; + const newMaintainer = { + github: username, + isTscMember: false, + repos: [repoName] + }; + if (twitterUsername) { + newMaintainer.twitter = twitterUsername; + } + maintainers.push(newMaintainer); + core.info('Added maintainer:', username); + } else { + // If the maintainer already exists, check if the current repo is in their list + if (!maintainer.repos.includes(repoName)) { + maintainer.repos.push(repoName); + core.info('Added repository to existing maintainer:', username); + } else { + core.info(`Maintainer ${username} already exists and already has the repo. Skipping addition.`); + } + } + } + + // Update for removed usernames + for (const username of removedUsernames) { + const index = maintainers.findIndex(maintainer => maintainer.github === username); + if (index !== -1) { + const maintainer = maintainers[index]; + const repoIndex = maintainer.repos.findIndex(repo => repo === repoName); + + if (repoIndex !== -1) { + maintainer.repos.splice(repoIndex, 1); + core.info(`Removed repository ${repoName} from maintainer ${username}`); + if (maintainer.repos.length === 0) { + maintainers.splice(index, 1); + core.info(`Removed maintainer ${username} as they have no other repositories`); + } + } else { + core.info(`Repository ${repoName} not found for maintainer ${username}`); + } + } else { + core.info(`Maintainer ${username} does not exist. Skipping removal.`); + } + } + + // Write updated MAINTAINERS.yaml file + const updatedMaintainers = yaml.safeDump(maintainers); + fs.writeFileSync(maintainersFile, updatedMaintainers); + core.info('Updated MAINTAINERS.yaml:', updatedMaintainers); + + - name: Create new branch + working-directory: ./community + run: | + git checkout -b update-maintainers-${{ github.run_id }} + + - name: Commit and push + working-directory: ./community + run: | + git config --global user.email "info@asyncapi.io" + git config --global user.name "asyncapi-bot" + git add . + git commit -m "Update MAINTAINERS.yaml" + git push https://${{ env.GITHUB_TOKEN }}@github.com/asyncapi/community + + - name: Create PR + working-directory: ./community + run: | + gh pr create --title "docs(community): update latest maintainers list" --body "Updated Maintainers list is available and this PR introduces changes with latest information about Maintainers" --head update-maintainers-${{ github.run_id }} + + + - name: Report workflow run status to Slack + if: failure() # Only, on failure, send a message on the slack channel + uses: rtCamp/action-slack-notify@v2 + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_CI_FAIL_NOTIFY }} + SLACK_TITLE: 🚨 Update maintainers list action failed 🚨 + SLACK_MESSAGE: Failed to update the maintainers list. + MSG_MINIMAL: true \ No newline at end of file diff --git a/.github/workflows/update-pr.yml b/.github/workflows/update-pr.yml new file mode 100644 index 000000000..2fa19b0ad --- /dev/null +++ b/.github/workflows/update-pr.yml @@ -0,0 +1,102 @@ +# This workflow is centrally managed in https://github.com/asyncapi/.github/ +# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in above mentioned repo + +# This workflow will run on every comment with /update or /u. And will create merge-commits for the PR. +# This also works with forks, not only with branches in the same repository/organization. +# Currently, does not work with forks in different organizations. 
+ +# This workflow will be distributed to all repositories in the AsyncAPI organization + +name: Update PR branches from fork + +on: + issue_comment: + types: [created] + +jobs: + update-pr: + if: > + startsWith(github.repository, 'asyncapi/') && + github.event.issue.pull_request && + github.event.issue.state != 'closed' && ( + contains(github.event.comment.body, '/update') || + contains(github.event.comment.body, '/u') + ) + runs-on: ubuntu-latest + steps: + - name: Get Pull Request Details + id: pr + uses: actions/github-script@v7 + with: + github-token: ${{ secrets.GH_TOKEN || secrets.GITHUB_TOKEN }} + previews: 'merge-info-preview' # https://docs.github.com/en/graphql/overview/schema-previews#merge-info-preview-more-detailed-information-about-a-pull-requests-merge-state-preview + script: | + const prNumber = context.payload.issue.number; + core.debug(`PR Number: ${prNumber}`); + const { data: pr } = await github.rest.pulls.get({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: prNumber + }); + + // If the PR has conflicts, we don't want to update it + const updateable = ['behind', 'blocked', 'unknown', 'draft', 'clean'].includes(pr.mergeable_state); + console.log(`PR #${prNumber} is ${pr.mergeable_state} and is ${updateable ? 'updateable' : 'not updateable'}`); + core.setOutput('updateable', updateable); + + core.debug(`Updating PR #${prNumber} with head ${pr.head.sha}`); + + return { + id: pr.node_id, + number: prNumber, + head: pr.head.sha, + } + - name: Update the Pull Request + if: steps.pr.outputs.updateable == 'true' + uses: actions/github-script@v7 + with: + github-token: ${{ secrets.GH_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const mutation = `mutation update($input: UpdatePullRequestBranchInput!) { + updatePullRequestBranch(input: $input) { + pullRequest { + mergeable + } + } + }`; + + const pr_details = ${{ steps.pr.outputs.result }}; + + try { + const { data } = await github.graphql(mutation, { + input: { + pullRequestId: pr_details.id, + expectedHeadOid: pr_details.head, + } + }); + } catch (GraphQLError) { + core.debug(GraphQLError); + if ( + GraphQLError.name === 'GraphqlResponseError' && + GraphQLError.errors.some( + error => error.type === 'FORBIDDEN' || error.type === 'UNAUTHORIZED' + ) + ) { + // Add comment to PR if the bot doesn't have permissions to update the PR + const comment = `Hi @${context.actor}. Update of PR has failed. It can be due to one of the following reasons: + - I don't have permissions to update this PR. To update your fork with upstream using bot you need to enable [Allow edits from maintainers](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork) option in the PR. + - The fork is located in an organization, not under your personal profile. No solution for that. You are on your own with manual update. + - There may be a conflict in the PR. 
Please resolve the conflict and try again.`; + + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + body: comment + }); + + core.setFailed('Bot does not have permissions to update the PR'); + } else { + core.setFailed(GraphQLError.message); + } + } diff --git a/.github/workflows/welcome-first-time-contrib.yml b/.github/workflows/welcome-first-time-contrib.yml index e72fecef8..cbc23ce7c 100644 --- a/.github/workflows/welcome-first-time-contrib.yml +++ b/.github/workflows/welcome-first-time-contrib.yml @@ -14,14 +14,14 @@ on: jobs: welcome: name: Post welcome message - if: ${{ !contains(fromJson('["asyncapi-bot", "dependabot[bot]", "dependabot-preview[bot]", "allcontributors"]'), github.actor) }} + if: ${{ !contains(fromJson('["asyncapi-bot", "dependabot[bot]", "dependabot-preview[bot]", "allcontributors[bot]"]'), github.actor) }} runs-on: ubuntu-latest steps: - - uses: actions/github-script@v3 + - uses: actions/github-script@v6 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | - const issueMessage = `Welcome to AsyncAPI. Thanks a lot for reporting your first issue. Please check out our [contributors guide](https://github.com/asyncapi/community/blob/master/CONTRIBUTING.md) and the instructions about a [basic recommended setup](https://github.com/asyncapi/.github/blob/master/git-workflow.md) useful for opening a pull request.
Keep in mind there are also other channels you can use to interact with AsyncAPI community. For more details check out [this issue](https://github.com/asyncapi/asyncapi/issues/115).`; + const issueMessage = `Welcome to AsyncAPI. Thanks a lot for reporting your first issue. Please check out our [contributors guide](https://github.com/asyncapi/community/blob/master/CONTRIBUTING.md) and the instructions about a [basic recommended setup](https://github.com/asyncapi/community/blob/master/git-workflow.md) useful for opening a pull request.
Keep in mind there are also other channels you can use to interact with AsyncAPI community. For more details check out [this issue](https://github.com/asyncapi/asyncapi/issues/115).`; const prMessage = `Welcome to AsyncAPI. Thanks a lot for creating your first pull request. Please check out our [contributors guide](https://github.com/asyncapi/community/blob/master/CONTRIBUTING.md) useful for opening a pull request.
Keep in mind there are also other channels you can use to interact with AsyncAPI community. For more details check out [this issue](https://github.com/asyncapi/asyncapi/issues/115).`; if (!issueMessage && !prMessage) { throw new Error('Action must have at least one of issue-message or pr-message set'); @@ -65,7 +65,7 @@ jobs: if (isIssue) { const issueNumber = context.payload.issue.number; console.log(`Adding message: ${message} to issue #${issueNumber}`); - await github.issues.createComment({ + await github.rest.issues.createComment({ owner: context.payload.repository.owner.login, repo: context.payload.repository.name, issue_number: issueNumber, @@ -75,7 +75,7 @@ jobs: else { const pullNumber = context.payload.pull_request.number; console.log(`Adding message: ${message} to pull request #${pullNumber}`); - await github.pulls.createReview({ + await github.rest.pulls.createReview({ owner: context.payload.repository.owner.login, repo: context.payload.repository.name, pull_number: pullNumber, diff --git a/.gitignore b/.gitignore index 217e8cb0b..a53c05bd1 100644 --- a/.gitignore +++ b/.gitignore @@ -1,25 +1,17 @@ -.DS_Store -node_modules -npm-debug.log -output -.vscode -###################### -# Intellij -###################### -.idea/ -*.iml -*.iws -*.ipr -*.ids -*.orig -out/ + +**/node_modules/ + +# Local env files +.env +.env.local +.env.development.local +.env.test.local +.env.production.local + +# Testing coverage -test/temp/integrationTestResult -#we must commit this single package to the repo as integration tests depend on it and we do not want our auto bump workflows to affect package version -#yes, below looks strange but this is how it is done with git. Read more https://stackoverflow.com/questions/5533050/gitignore-exclude-folder-but-include-specific-subfolder -!test/test-project/node_modules/ -test/test-project/node_modules/* -!test/test-project/node_modules/@asyncapi/ -test/test-project/node_modules/@asyncapi/* -!test/test-project/node_modules/@asyncapi/html-template/ -test/test-project/package-lock.json \ No newline at end of file + +# Turbo +.turbo + +/.idea diff --git a/.releaserc b/.releaserc index 570b45784..354db24c3 100644 --- a/.releaserc +++ b/.releaserc @@ -1,10 +1,25 @@ ---- branches: -- master + - master + # by default release workflow reacts on push not only to master. 
+ # This is why out of the box semantic release is configured for all these branches + - name: next-spec + prerelease: true + - name: next-major + prerelease: true + - name: next-major-spec + prerelease: true + - name: beta + prerelease: true + - name: alpha + prerelease: true + - name: next + prerelease: true + plugins: -- - "@semantic-release/commit-analyzer" - - preset: conventionalcommits -- - "@semantic-release/release-notes-generator" - - preset: conventionalcommits -- "@semantic-release/npm" -- "@semantic-release/github" \ No newline at end of file + - - "@semantic-release/commit-analyzer" + - preset: conventionalcommits + - - "@semantic-release/release-notes-generator" + - preset: conventionalcommits + - "@semantic-release/npm": + pkgRoot: ./apps/generator + - "@semantic-release/github" diff --git a/.sonarcloud.properties b/.sonarcloud.properties index c9486b17e..208c092d7 100644 --- a/.sonarcloud.properties +++ b/.sonarcloud.properties @@ -1,2 +1,2 @@ #we need to explicitly exclude them as some are commit to the repo -sonar.exclusions=test/test-project/node_modules/@asyncapi/html-template/**/* \ No newline at end of file +sonar.exclusions=test/**/* diff --git a/CODEOWNERS b/CODEOWNERS index ac61d1330..ae761c9c3 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -6,4 +6,7 @@ # The default owners are automatically added as reviewers when you open a pull request unless different owners are specified in the file. -* @fmvilas @derberg @magicmatatjahu @jonaslagoni @asyncapi-bot-eve +* @derberg @magicmatatjahu @jonaslagoni @asyncapi-bot-eve + +# All .md files +*.md @Florence-Njeri @derberg @asyncapi-bot-eve diff --git a/README.md b/README.md index 3d70d22f9..74d2c5056 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,15 @@ [![AsyncAPI Generator](./assets/readme-banner.png)](https://www.asyncapi.com/tools/generator) +This is a Monorepo managed using [Turborepo](https://turbo.build/) and contains the following package: + +1. [Generator](apps/generator): This is a tool that you can use to generate whatever you want basing on the AsyncAPI specification file as an input. + +2. [Nunjucks-filters](apps/nunjucks-filters): This library contains generator filters that can be reused across multiple templates, helping to avoid redundant work. These filters are designed specifically for Nunjucks templates and are included by default with the generator, so there's no need to add them to dependencies seprately. + + ![npm](https://img.shields.io/npm/v/@asyncapi/generator?style=for-the-badge) ![npm](https://img.shields.io/npm/dt/@asyncapi/generator?style=for-the-badge) -> :warning: This package doesn't support AsyncAPI 1.x anymore. We recommend to upgrade to the latest AsyncAPI version using the [AsyncAPI converter](https://github.com/asyncapi/converter-js). If you need to convert documents on the fly, you may use the [Node.js](https://github.com/asyncapi/converter-js) or [Go](https://github.com/asyncapi/converter-go) converters. +> warning: This package doesn't support AsyncAPI 1.x anymore. We recommend to upgrade to the latest AsyncAPI version using the [AsyncAPI converter](https://github.com/asyncapi/converter-js) (You can refer to [installation guide](/apps/generator//docs//installation-guide.md)). If you need to convert documents on the fly, you may use the [Node.js](https://github.com/asyncapi/converter-js) or [Go](https://github.com/asyncapi/converter-go) converters. 
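For on-the-fly conversion with the Node.js converter, a rough sketch could look like the one below; the file paths are placeholders and the exact `convert` signature is an assumption to verify against the converter's own documentation:

```js
const fs = require('fs');
const { convert } = require('@asyncapi/converter');

// Read an older AsyncAPI document and convert it before handing it to the generator.
const original = fs.readFileSync('./asyncapi.yaml', 'utf-8'); // placeholder path
const converted = convert(original, '2.6.0');                 // target spec version (assumed)
fs.writeFileSync('./asyncapi-converted.yaml', converted);     // placeholder output path
```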
@@ -10,12 +17,6 @@ - [Overview](#overview) - [List of official generator templates](#list-of-official-generator-templates) -- [Requirements](#requirements) -- [Using from the command-line interface (CLI)](#using-from-the-command-line-interface-cli) - * [CLI installation](#cli-installation) - * [CLI usage](#cli-usage) -- [Generator version vs Template version](#generator-version-vs-template-version) -- [How to create a template](#how-to-create-a-template) - [Contributing](#contributing) - [Contributors ✨](#contributors-%E2%9C%A8) @@ -23,9 +24,7 @@ ## Overview -Generator is a tool that you can use to generate whatever you want basing on the AsyncAPI specification file as an input. - -To specify what exactly must be generated you create so called **template**. To create your own template, go to section that explains [How to create a template](#how-to-create-a-template). +Generator is a tool that you can use to generate whatever you want basing on the AsyncAPI specification file as an input. For more information [read the docs](https://www.asyncapi.com/docs/tools/generator). There is a large number of templates that are ready to use and are officially supported by the AsyncAPI Initiative. @@ -34,51 +33,123 @@ There is a large number of templates that are ready to use and are officially su -Template Name | Description | Source code ----|---|--- -`@asyncapi/nodejs-template` | Generates Nodejs service that uses Hermes package | [click here](https://github.com/asyncapi/nodejs-template) -`@asyncapi/nodejs-ws-template` | Generates Nodejs service that supports WebSockets protocol only | [click here](https://github.com/asyncapi/nodejs-ws-template) -`@asyncapi/java-template` | Generates Java JMS application | [click here](https://github.com/asyncapi/java-template) -`@asyncapi/java-spring-template` | Generates Java Spring service | [click here](https://github.com/asyncapi/java-spring-template) -`@asyncapi/java-spring-cloud-stream-template` | Generates Java Spring Cloud Stream service | [click here](https://github.com/asyncapi/java-spring-cloud-stream-template) -`@asyncapi/python-paho-template` | Generates Python service that uses Paho library | [click here](https://github.com/asyncapi/python-paho-template) -`@asyncapi/html-template` | Generates HTML documentation site | [click here](https://github.com/asyncapi/html-template) -`@asyncapi/markdown-template` | Generates documentation in Markdown file | [click here](https://github.com/asyncapi/markdown-template) -`@asyncapi/ts-nats-template` | Generates TypeScript NATS client | [click here](https://github.com/asyncapi/ts-nats-template/) -`@asyncapi/go-watermill-template` | Generates Go client using Watermill | [click here](https://github.com/asyncapi/go-watermill-template) -`@asyncapi/dotnet-nats-template` | Generates .NET C# client using NATS | [click here](https://github.com/asyncapi/dotnet-nats-template) +| Template Name | Description | Source code | +| --------------------------------------------- | --------------------------------------------------------------- | --------------------------------------------------------------------------- | +| `@asyncapi/nodejs-template` | Generates Nodejs service that uses Hermes package | [click here](https://github.com/asyncapi/nodejs-template) | +| `@asyncapi/nodejs-ws-template` | Generates Nodejs service that supports WebSockets protocol only | [click here](https://github.com/asyncapi/nodejs-ws-template) | +| `@asyncapi/java-template` | Generates Java JMS application | [click 
here](https://github.com/asyncapi/java-template) | +| `@asyncapi/java-spring-template` | Generates Java Spring service | [click here](https://github.com/asyncapi/java-spring-template) | +| `@asyncapi/java-spring-cloud-stream-template` | Generates Java Spring Cloud Stream service | [click here](https://github.com/asyncapi/java-spring-cloud-stream-template) | +| `@asyncapi/python-paho-template` | Generates Python service that uses Paho library | [click here](https://github.com/asyncapi/python-paho-template) | +| `@asyncapi/html-template` | Generates HTML documentation site | [click here](https://github.com/asyncapi/html-template) | +| `@asyncapi/markdown-template` | Generates documentation in Markdown file | [click here](https://github.com/asyncapi/markdown-template) | +| `@asyncapi/ts-nats-template` | Generates TypeScript NATS client | [click here](https://github.com/asyncapi/ts-nats-template/) | +| `@asyncapi/go-watermill-template` | Generates Go client using Watermill | [click here](https://github.com/asyncapi/go-watermill-template) | +| `@asyncapi/dotnet-nats-template` | Generates .NET C# client using NATS | [click here](https://github.com/asyncapi/dotnet-nats-template) | +| `@asyncapi/php-template` | Generates PHP client using RabbitMQ | [click here](https://github.com/asyncapi/php-template) | +| `@asyncapi/dotnet-rabbitmq-template` | Generates .NET C# client using RabbitMQ | [click here](https://github.com/asyncapi/dotnet-rabbitmq-template) | You can find above templates and the ones provided by the community in **[this list](https://github.com/search?q=topic%3Aasyncapi+topic%3Agenerator+topic%3Atemplate)** -## Requirements +# Generator Filters + +This library contains generator filters that can be reused across multiple templates, helping to avoid redundant work. These filters are designed specifically for Nunjucks templates and are included by default with the generator, so there's no need to add them to dependencies seprately. + +This library consists of: + +- Custom filters. Check out [API docs](apps/nunjucks-filters/docs/api.md) for complete list +- Lodash-powered filters. For the list of all available filters check [official docs](https://lodash.com/docs/) + +## Release Process + +To release a major/minor/patch: + +### Conventional Commits: + +To maintain a clear git history of commits and easily identify what each commit changed and whether it triggered a release, we use conventional commits. The feat and fix prefixes are particularly important as they are needed to trigger changesets. Using these prefixes ensures that the changes are correctly categorized and the versioning system functions as expected. + +For Example: +``` +feat: add new feature +``` + +#### Manual + +1. Create a new release markdown file in the `.changeset` directory. The filename should indicate what the change is about. + +2. Add the following content to the file in this particular format: + + ```markdown + --- + "@package-name-1": [type] (major/minor/patch) + "@package-name-2": [type] + --- + + [Provide a brief description of the changes. For example: Added a new Release GitHub Flow to the Turborepo. No new features or bugfixes were introduced.] + ``` + + For Example: + + ```markdown + --- + "@asyncapi/generator": minor + --- + + Adding new Release Github Flow to the Turborepo. No new features or bugfixes were introduced. + + ``` + +3. Include the file in your pull request. + +#### Using CLI + +1. Create a new release markdown file using changeset CLI. 
Below command will trigger an interactive prompt that you can use to specify release type and affected packages. + ```cli + npx -p @changesets/cli@2.27.7 changeset + ``` + +2. Include the file in your pull request. + +> [!TIP] +> For more detailed instructions, you can refer to the official documentation for creating a changeset: +[Adding a changeset](https://github.com/changesets/changesets/blob/main/docs/adding-a-changeset.md) + + -* Node.js v12.16 and higher -* npm v6.13.7 and higher +### Release Flow: -Install both packages using [official installer](https://nodejs.org/en/download/). After installation make sure both packages have proper version by running `node -v` and `npm -v`. To upgrade invalid npm version run `npm install npm@latest -g` +1. **Add a Changeset**: + - When you make changes that need to be released, create a markdown file in the `.changeset` directory stating the package name and level of change (major/minor/patch). -> The generator is tested at the moment against Node 14 and NPM 6. Using newer versions is enabled but we don't guarantee they work well. Please provide feedback via issues. +2. **Open a Pull Request**: + - Push your changes and open a Pull Request (PR). After the PR is merged the changeset file helps communicate the type of changes (major, minor, patch). -## Using from the command-line interface (CLI) +3. **CI Processes Changeset**: + - After PR is merged, a dedicated GitHub Actions release workflow runs using changeset action, -### CLI installation -Learn to install the AsyncAPI CLI from the [installation guide](installation-guide.md). + - This action reads the markdown files in the `.changeset` folder and creates a PR with the updated version of the package and removes the markdown file. For example: -### CLI usage + Before: + ```json + "name": "@asyncapi/generator", + "version": "2.0.1", + ``` -Learn more about different ways of using the CLI from the [usage document](usage.md). + After: + ```json + "name": "@asyncapi/generator", + "version": "3.0.1", + ``` -## Generator version vs Template version + - The new PR will also contain the description from the markdown files, -Learn more about versioning from the [versioning document](versioning.md). + - AsyncAPI bot automatically merge such release PR. -## How to create a template +4. **Release the Package**: -To create your own template, for example code generator for some specific language and technology, learn from the following resources: -- Read the [documentation](docs/README.md) -- Use [Template for Generator Templates](https://github.com/asyncapi/template-for-generator-templates) that showcases Generator features + - After the PR is merged, the CI/CD pipeline triggers again. The `changesets/action` step identifies that the PR was created by itself. It then verifies if the current version of the package is greater than the previously released version. If a difference is detected, it executes the publish command to release the updated package. ## Contributing @@ -92,24 +163,36 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

[Contributors table (HTML) re-rendered in the README diff: the existing rows for Fran Méndez, Jonas Lagoni, Lukasz Gornicki, Travis Reeder, Semen, Waleed Ashraf, Sebastián, Derk Muenchhausen, Ben Timby, and Amanda Shafack keep their contribution emoji, and new rows are added for Florence Njeri (📖 👀 🚇 🚧), Pratik Haldankar (📖 👀 🚧 📢), swastik suvam singh (💻), GavinZhengOI (📖), lmgyuan (📖), and pierrick-boule (💻 ⚠️ 📖). Only the cell text of the table markup is preserved here.]
diff --git a/.dockerignore b/apps/generator/.dockerignore similarity index 100% rename from .dockerignore rename to apps/generator/.dockerignore diff --git a/apps/generator/.gitignore b/apps/generator/.gitignore new file mode 100644 index 000000000..670435e61 --- /dev/null +++ b/apps/generator/.gitignore @@ -0,0 +1,20 @@ +.DS_Store +node_modules +npm-debug.log +output +.vscode +###################### +# Intellij +###################### +.idea/ +*.iml +*.iws +*.ipr +*.ids +*.orig +out/ +coverage +test/temp/integrationTestResult +test/test-project/package-lock.json +test/test-project/verdaccio/storage/ +test/test-project/storage/ diff --git a/apps/generator/CHANGELOG.md b/apps/generator/CHANGELOG.md new file mode 100644 index 000000000..d83655139 --- /dev/null +++ b/apps/generator/CHANGELOG.md @@ -0,0 +1,39 @@ +# @asyncapi/generator + +## 2.3.0 + +### Minor Changes + +- 44fcc33: ts-node is registered only when it's actually needed + +## 2.2.0 + +### Minor Changes + +- 81dfd0c: Enable `noOverwriteGlobs` option for templates based on react rendering engine. + +## 2.1.3 + +### Patch Changes + +- 93fb8e8: Updated the method for importing the Nunjucks filter dependency + +## 2.1.2 + +### Patch Changes + +- a3e93ef: update the git context for the docker versioning. + +## 2.1.1 + +### Patch Changes + +- 36ee8a8: Fix docker image publishing. Removed package name from version tag for Docker tagging. + +## 2.1.0 + +### Minor Changes + +- 99a14a8: - New release pipeline supporting monorepo, + - Package `@asyncapi/generator-filters` is now part of `generator` repo and won't be released separately. New name of the library is `nunjuckis-filters`, + - By default `@asyncapi/generator-filters` package, known as package with a lot of different nunjucks filters, is registered and added to generator and you no longer have to configure it in your `package.json`. Package `@asyncapi/generator-filters` will no longer be published to NPM separately and is deprecated. diff --git a/Dockerfile b/apps/generator/Dockerfile similarity index 83% rename from Dockerfile rename to apps/generator/Dockerfile index e6bd013fa..426ece721 100644 --- a/Dockerfile +++ b/apps/generator/Dockerfile @@ -1,4 +1,6 @@ -FROM node:14-alpine +ARG ASYNCAPI_GENERATOR_VERSION=1.10.9 + +FROM node:18-alpine WORKDIR /app @@ -14,6 +16,6 @@ RUN apk --update add git chromium && \ rm /var/cache/apk/* # Installing latest released npm package -RUN npm install -g @asyncapi/generator +RUN npm install -g @asyncapi/generator@$ASYNCAPI_GENERATOR_VERSION ENTRYPOINT [ "ag" ] diff --git a/apps/generator/README.md b/apps/generator/README.md new file mode 100644 index 000000000..13f9ba7b3 --- /dev/null +++ b/apps/generator/README.md @@ -0,0 +1,28 @@ +Generator is a tool that you can use to generate whatever you want basing on the AsyncAPI specification file as an input. For more information [read the docs](https://www.asyncapi.com/docs/tools/generator). + +There is a large number of templates that are ready to use and are officially supported by the AsyncAPI Initiative. 
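As a quick, illustrative sketch (the template, output directory, and document path are placeholders), the generator can also be driven programmatically through the `Generator` class documented in the library API:

```js
const path = require('path');
const Generator = require('@asyncapi/generator');

// Point the generator at a template and the directory where files should be written.
const generator = new Generator('@asyncapi/html-template', path.resolve(__dirname, 'output'));

async function main() {
  // Generate from a local AsyncAPI document (placeholder path).
  await generator.generateFromFile(path.resolve(__dirname, 'asyncapi.yaml'));
  console.log('Done!');
}

main().catch(console.error);
```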
+ +## List of official generator templates + + + + +| Template Name | Description | Source code | +| --------------------------------------------- | --------------------------------------------------------------- | --------------------------------------------------------------------------- | +| `@asyncapi/nodejs-template` | Generates Nodejs service that uses Hermes package | [click here](https://github.com/asyncapi/nodejs-template) | +| `@asyncapi/nodejs-ws-template` | Generates Nodejs service that supports WebSockets protocol only | [click here](https://github.com/asyncapi/nodejs-ws-template) | +| `@asyncapi/java-template` | Generates Java JMS application | [click here](https://github.com/asyncapi/java-template) | +| `@asyncapi/java-spring-template` | Generates Java Spring service | [click here](https://github.com/asyncapi/java-spring-template) | +| `@asyncapi/java-spring-cloud-stream-template` | Generates Java Spring Cloud Stream service | [click here](https://github.com/asyncapi/java-spring-cloud-stream-template) | +| `@asyncapi/python-paho-template` | Generates Python service that uses Paho library | [click here](https://github.com/asyncapi/python-paho-template) | +| `@asyncapi/html-template` | Generates HTML documentation site | [click here](https://github.com/asyncapi/html-template) | +| `@asyncapi/markdown-template` | Generates documentation in Markdown file | [click here](https://github.com/asyncapi/markdown-template) | +| `@asyncapi/ts-nats-template` | Generates TypeScript NATS client | [click here](https://github.com/asyncapi/ts-nats-template/) | +| `@asyncapi/go-watermill-template` | Generates Go client using Watermill | [click here](https://github.com/asyncapi/go-watermill-template) | +| `@asyncapi/dotnet-nats-template` | Generates .NET C# client using NATS | [click here](https://github.com/asyncapi/dotnet-nats-template) | +| `@asyncapi/php-template` | Generates PHP client using RabbitMQ | [click here](https://github.com/asyncapi/php-template) | +| `@asyncapi/dotnet-rabbitmq-template` | Generates .NET C# client using RabbitMQ | [click here](https://github.com/asyncapi/dotnet-rabbitmq-template) | + + + +You can find above templates and the ones provided by the community in **[this list](https://github.com/search?q=topic%3Aasyncapi+topic%3Agenerator+topic%3Atemplate)** diff --git a/cli.js b/apps/generator/cli.js similarity index 95% rename from cli.js rename to apps/generator/cli.js index 87c473a02..feb12446a 100755 --- a/cli.js +++ b/apps/generator/cli.js @@ -4,6 +4,7 @@ const path = require('path'); const os = require('os'); const program = require('commander'); const xfs = require('fs.extra'); +const { DiagnosticSeverity } = require('@asyncapi/parser/cjs'); const packageInfo = require('./package.json'); const Generator = require('./lib/generator'); const Watcher = require('./lib/watcher'); @@ -64,8 +65,10 @@ const mapBaseUrlParser = v => { const showError = err => { console.error(red('Something went wrong:')); console.error(red(err.stack || err.message)); - if (err.errors) console.error(red(JSON.stringify(err.errors))); - if (err.validationErrors) console.error(red(JSON.stringify(err.validationErrors, null, 4))); + if (err.diagnostics) { + const errorDiagnostics = err.diagnostics.filter(diagnostic => diagnostic.severity === DiagnosticSeverity.Error); + console.error(red(`Errors:\n${JSON.stringify(errorDiagnostics, undefined, 2)}`)); + } }; const showErrorAndExit = err => { showError(err); @@ -127,7 +130,7 @@ xfs.mkdirp(program.output, async err => { console.warn(`WARNING: 
${template} is a remote template. Changes may be lost on subsequent installations.`); } - watcher.watch(watcherHandler, (paths) => { + await watcher.watch(watcherHandler, (paths) => { showErrorAndExit({ message: `[WATCHER] Could not find the file path ${paths}, are you sure it still exists? If it has been deleted or moved please rerun the generator.` }); }); } diff --git a/apps/generator/docs/README.md b/apps/generator/docs/README.md new file mode 100644 index 000000000..ea67a118d --- /dev/null +++ b/apps/generator/docs/README.md @@ -0,0 +1,3 @@ +## Documentation + +Read the docs on https://www.asyncapi.com/docs/tools/generator. You can still read the docs from here, but some of the links may be broken. \ No newline at end of file diff --git a/apps/generator/docs/api.md b/apps/generator/docs/api.md new file mode 100644 index 000000000..b59a87e33 --- /dev/null +++ b/apps/generator/docs/api.md @@ -0,0 +1,487 @@ +--- +title: Library API +weight: 75 +--- + +Reference API documentation for AsyncAPI Generator library. + + + +## Generator +**Kind**: global class + +* [Generator](#Generator) + * [new Generator(templateName, targetDir, options)](#new_Generator_new) + * _instance_ + * [.registry](#Generator+registry) : `Object` + * [.templateName](#Generator+templateName) : `String` + * [.targetDir](#Generator+targetDir) : `String` + * [.entrypoint](#Generator+entrypoint) : `String` + * [.noOverwriteGlobs](#Generator+noOverwriteGlobs) : `Array.` + * [.disabledHooks](#Generator+disabledHooks) : `Object.)>` + * [.output](#Generator+output) : `String` + * [.forceWrite](#Generator+forceWrite) : `Boolean` + * [.debug](#Generator+debug) : `Boolean` + * [.install](#Generator+install) : `Boolean` + * [.templateConfig](#Generator+templateConfig) : `Object` + * [.hooks](#Generator+hooks) : `Object` + * [.mapBaseUrlToFolder](#Generator+mapBaseUrlToFolder) : `Object` + * [.templateParams](#Generator+templateParams) : `Object` + * [.generate(asyncapiDocument, [parseOptions])](#Generator+generate) ⇒ `Promise.` + * [.validateAsyncAPIDocument(asyncapiDocument)](#Generator+validateAsyncAPIDocument) + * [.setupOutput()](#Generator+setupOutput) + * [.setupFSOutput()](#Generator+setupFSOutput) ⇒ `Promise.` + * [.setLogLevel()](#Generator+setLogLevel) ⇒ `void` + * [.installAndSetupTemplate()](#Generator+installAndSetupTemplate) ⇒ `Promise.<{templatePkgName: string, templatePkgPath: string}>` + * [.configureTemplateWorkflow(parseOptions)](#Generator+configureTemplateWorkflow) ⇒ `Promise.` + * [.handleEntrypoint()](#Generator+handleEntrypoint) ⇒ `Promise.` + * [.executeAfterHook()](#Generator+executeAfterHook) ⇒ `Promise.` + * [.parseInput()](#Generator+parseInput) + * [.configureTemplate()](#Generator+configureTemplate) + * ~~[.generateFromString(asyncapiString, [parseOptions])](#Generator+generateFromString) ⇒ `Promise`~~ + * [.generateFromURL(asyncapiURL)](#Generator+generateFromURL) ⇒ `Promise` + * [.generateFromFile(asyncapiFile)](#Generator+generateFromFile) ⇒ `Promise` + * [.installTemplate([force])](#Generator+installTemplate) + * _static_ + * [.getTemplateFile(templateName, filePath, [templatesDir])](#Generator.getTemplateFile) ⇒ `Promise` + + + + +### new Generator +Instantiates a new Generator object. + +**Params** + +- templateName `String` - Name of the template to generate. +- targetDir `String` - Path to the directory where the files will be generated. +- options `Object` + - [.templateParams] `Object.` - Optional parameters to pass to the template. Each template define their own params. 
+ - [.entrypoint] `String` - Name of the file to use as the entry point for the rendering process. Use in case you want to use only a specific template file. Note: this potentially avoids rendering every file in the template. + - [.noOverwriteGlobs] `Array.` - List of globs to skip when regenerating the template. + - [.disabledHooks] `Object.)>` - Object with hooks to disable. The key is a hook type. If key has "true" value, then the generator skips all hooks from the given type. If the value associated with a key is a string with the name of a single hook, then the generator skips only this single hook name. If the value associated with a key is an array of strings, then the generator skips only hooks from the array. + - [.output] `String` ` = 'fs'` - Type of output. Can be either 'fs' (default) or 'string'. Only available when entrypoint is set. + - [.forceWrite] `Boolean` ` = false` - Force writing of the generated files to given directory even if it is a git repo with unstaged files or not empty dir. Default is set to false. + - [.install] `Boolean` ` = false` - Install the template and its dependencies, even when the template has already been installed. + - [.debug] `Boolean` ` = false` - Enable more specific errors in the console. At the moment it only shows specific errors about filters. Keep in mind that as a result errors about template are less descriptive. + - [.mapBaseUrlToFolder] `Object.` - Optional parameter to map schema references from a base url to a local base folder e.g. url=https://schema.example.com/crm/ folder=./test/docs/ . + - [.registry] `Object` - Optional parameter with private registry configuration + - [.url] `String` - Parameter to pass npm registry url + - [.auth] `String` - Optional parameter to pass npm registry username and password encoded with base64, formatted like username:password value should be encoded + - [.token] `String` - Optional parameter to pass npm registry auth token that you can grab from .npmrc file + +**Example** +```js +const path = require('path'); +const generator = new Generator('@asyncapi/html-template', path.resolve(__dirname, 'example')); +``` +**Example** *(Passing custom params to the template)* +```js +const path = require('path'); +const generator = new Generator('@asyncapi/html-template', path.resolve(__dirname, 'example'), { + templateParams: { + sidebarOrganization: 'byTags' + } +}); +``` + + + +* generator.registry : `Object`** : +Npm registry information. + +**Kind**: instance property of [`Generator`](#Generator) + + + +* generator.templateName : `String`** : +Name of the template to generate. + +**Kind**: instance property of [`Generator`](#Generator) + + + +* generator.targetDir : `String`** : +Path to the directory where the files will be generated. + +**Kind**: instance property of [`Generator`](#Generator) + + + +* generator.entrypoint : `String`** : +Name of the file to use as the entry point for the rendering process. Use in case you want to use only a specific template file. Note: this potentially avoids rendering every file in the template. + +**Kind**: instance property of [`Generator`](#Generator) + + + +* generator.noOverwriteGlobs : `Array.`** : +List of globs to skip when regenerating the template. + +**Kind**: instance property of [`Generator`](#Generator) + + + +* generator.disabledHooks : `Object.)>`** : +Object with hooks to disable. The key is a hook type. If key has "true" value, then the generator skips all hooks from the given type. 
If the value associated with a key is a string with the name of a single hook, then the generator skips only this single hook name. If the value associated with a key is an array of strings, then the generator skips only hooks from the array. + +**Kind**: instance property of [`Generator`](#Generator) + + + +* generator.output : `String`** : +Type of output. Can be either 'fs' (default) or 'string'. Only available when entrypoint is set. + +**Kind**: instance property of [`Generator`](#Generator) + + + +* generator.forceWrite : `Boolean`** : +Force writing of the generated files to given directory even if it is a git repo with unstaged files or not empty dir. Default is set to false. + +**Kind**: instance property of [`Generator`](#Generator) + + + +* generator.debug : `Boolean`** : +Enable more specific errors in the console. At the moment it only shows specific errors about filters. Keep in mind that as a result errors about template are less descriptive. + +**Kind**: instance property of [`Generator`](#Generator) + + + +* generator.install : `Boolean`** : +Install the template and its dependencies, even when the template has already been installed. + +**Kind**: instance property of [`Generator`](#Generator) + + + +* generator.templateConfig : `Object`** : +The template configuration. + +**Kind**: instance property of [`Generator`](#Generator) + + + +* generator.hooks : `Object`** : +Hooks object with hooks functions grouped by the hook type. + +**Kind**: instance property of [`Generator`](#Generator) + + + +* generator.mapBaseUrlToFolder : `Object`** : +Maps schema URL to folder. + +**Kind**: instance property of [`Generator`](#Generator) + + + +* generator.templateParams : `Object`** : +The template parameters. The structure for this object is based on each individual template. + +**Kind**: instance property of [`Generator`](#Generator) + + + +### generator.generate +Generates files from a given template and an AsyncAPIDocument object. + +**Kind**: instance method of [`Generator`](#Generator) +**Returns**: `Promise.` - A Promise that resolves when the generation is completed. +**Params** + +- asyncapiDocument `AsyncAPIDocument` | `string` - AsyncAPIDocument object to use as source. +- [parseOptions] `Object` ` = {}` - AsyncAPI Parser parse options. + Check out [@asyncapi/parser](https://www.github.com/asyncapi/parser-js) for more information. + Remember to use the right options for the right parser depending on the template you are using. + +**Example** +```js +await generator.generate(myAsyncAPIdocument); +console.log('Done!'); +``` +**Example** +```js +generator + .generate(myAsyncAPIdocument) + .then(() => { + console.log('Done!'); + }) + .catch(console.error); +``` +**Example** *(Using async/await)* +```js +try { + await generator.generate(myAsyncAPIdocument); + console.log('Done!'); +} catch (e) { + console.error(e); +} +``` + + + +### generator.validateAsyncAPIDocument +Validates the provided AsyncAPI document. + +**Kind**: instance method of [`Generator`](#Generator) +**Throws**: + +- `Error` Throws an error if the document is not valid. + +**Since**: 10/9/2023 - 4:26:33 PM +**Params** + +- asyncapiDocument `*` - The AsyncAPI document to be validated. + + + + +* generator.setupOutput()** : +Sets up the output configuration based on the specified output type. + +**Kind**: instance method of [`Generator`](#Generator) +**Throws**: + +- `Error` If 'output' is set to 'string' without providing 'entrypoint'. 
+ +**Example** +```js +const generator = new Generator(); +await generator.setupOutput(); +``` + + + +* generator.setupFSOutput() ⇒ `Promise.`** : +Sets up the file system (FS) output configuration. + +This function creates the target directory if it does not exist and verifies +the target directory if forceWrite is not enabled. + +**Kind**: instance method of [`Generator`](#Generator) +**Returns**: `Promise.` - A promise that fulfills when the setup is complete. +**Throws**: + +- `Error` If verification of the target directory fails and forceWrite is not enabled. + + + + +* generator.setLogLevel() ⇒ `void`** : +Sets the log level based on the debug option. + +If the debug option is enabled, the log level is set to 'debug'. + +**Kind**: instance method of [`Generator`](#Generator) + + + +* generator.installAndSetupTemplate() ⇒ `Promise.<{templatePkgName: string, templatePkgPath: string}>`** : +Installs and sets up the template for code generation. + +This function installs the specified template using the provided installation option, +sets up the necessary directory paths, loads the template configuration, and returns +information about the installed template. + +**Kind**: instance method of [`Generator`](#Generator) +**Returns**: `Promise.<{templatePkgName: string, templatePkgPath: string}>` - A promise that resolves to an object containing the name and path of the installed template. + + + +### generator.configureTemplateWorkflow +Configures the template workflow based on provided parsing options. + +This function performs the following steps: +1. Parses the input AsyncAPI document using the specified parse options. +2. Validates the template configuration and parameters. +3. Configures the template based on the parsed AsyncAPI document. +4. Registers filters, hooks, and launches the 'generate:before' hook if applicable. + +**Kind**: instance method of [`Generator`](#Generator) +**Returns**: `Promise.` - A promise that resolves when the configuration is completed. +**Params** + +- parseOptions `*` - Options for parsing the AsyncAPI document. + + + + +* generator.handleEntrypoint() ⇒ `Promise.`** : +Handles the logic for the template entrypoint. + +If an entrypoint is specified: +- Resolves the absolute path of the entrypoint file. +- Throws an error if the entrypoint file doesn't exist. +- Generates a file or renders content based on the output type. +- Launches the 'generate:after' hook if the output is 'fs'. + +If no entrypoint is specified, generates the directory structure. + +**Kind**: instance method of [`Generator`](#Generator) +**Returns**: `Promise.` - A promise that resolves when the entrypoint logic is completed. + + + +* generator.executeAfterHook() ⇒ `Promise.`** : +Executes the 'generate:after' hook. + +Launches the after-hook to perform additional actions after code generation. + +**Kind**: instance method of [`Generator`](#Generator) +**Returns**: `Promise.` - A promise that resolves when the after-hook execution is completed. + + + +* generator.parseInput()** : +Parse the generator input based on the template `templateConfig.apiVersion` value. + +**Kind**: instance method of [`Generator`](#Generator) + + + +* generator.configureTemplate()** : +Configure the templates based the desired renderer. + +**Kind**: instance method of [`Generator`](#Generator) + + + +### ~~generator.generateFromString~~ +***Deprecated*** + +Generates files from a given template and AsyncAPI string. 
+ +**Kind**: instance method of [`Generator`](#Generator) +**Params** + +- asyncapiString `String` - AsyncAPI string to use as source. +- [parseOptions] `Object` ` = {}` - AsyncAPI Parser parse options. Check out [@asyncapi/parser](https://www.github.com/asyncapi/parser-js) for more information. + +**Example** +```js +const asyncapiString = ` +asyncapi: '2.0.0' +info: + title: Example + version: 1.0.0 +... +`; +generator + .generateFromString(asyncapiString) + .then(() => { + console.log('Done!'); + }) + .catch(console.error); +``` +**Example** *(Using async/await)* +```js +const asyncapiString = ` +asyncapi: '2.0.0' +info: + title: Example + version: 1.0.0 +... +`; + +try { + await generator.generateFromString(asyncapiString); + console.log('Done!'); +} catch (e) { + console.error(e); +} +``` + + + +### generator.generateFromURL +Generates files from a given template and AsyncAPI file stored on external server. + +**Kind**: instance method of [`Generator`](#Generator) +**Params** + +- asyncapiURL `String` - Link to AsyncAPI file + +**Example** +```js +generator + .generateFromURL('https://example.com/asyncapi.yaml') + .then(() => { + console.log('Done!'); + }) + .catch(console.error); +``` +**Example** *(Using async/await)* +```js +try { + await generator.generateFromURL('https://example.com/asyncapi.yaml'); + console.log('Done!'); +} catch (e) { + console.error(e); +} +``` + + + +### generator.generateFromFile +Generates files from a given template and AsyncAPI file. + +**Kind**: instance method of [`Generator`](#Generator) +**Params** + +- asyncapiFile `String` - AsyncAPI file to use as source. + +**Example** +```js +generator + .generateFromFile('asyncapi.yaml') + .then(() => { + console.log('Done!'); + }) + .catch(console.error); +``` +**Example** *(Using async/await)* +```js +try { + await generator.generateFromFile('asyncapi.yaml'); + console.log('Done!'); +} catch (e) { + console.error(e); +} +``` + + + +### generator.installTemplate +Downloads and installs a template and its dependencies + +**Kind**: instance method of [`Generator`](#Generator) +**Params** + +- [force] `Boolean` ` = false` - Whether to force installation (and skip cache) or not. + + + + +### Generator.getTemplateFile +Returns the content of a given template file. + +**Kind**: static method of [`Generator`](#Generator) +**Params** + +- templateName `String` - Name of the template to generate. +- filePath `String` - Path to the file to render. Relative to the template directory. +- [templatesDir] `String` ` = DEFAULT_TEMPLATES_DIR` - Path to the directory where the templates are installed. + +**Example** +```js +const Generator = require('@asyncapi/generator'); +const content = await Generator.getTemplateFile('@asyncapi/html-template', 'partials/content.html'); +``` +**Example** *(Using a custom `templatesDir`)* +```js +const Generator = require('@asyncapi/generator'); +const content = await Generator.getTemplateFile('@asyncapi/html-template', 'partials/content.html', '~/my-templates'); +``` diff --git a/docs/asyncapi-document.md b/apps/generator/docs/asyncapi-document.md similarity index 89% rename from docs/asyncapi-document.md rename to apps/generator/docs/asyncapi-document.md index 6db97576a..dfd2db6e8 100644 --- a/docs/asyncapi-document.md +++ b/apps/generator/docs/asyncapi-document.md @@ -5,24 +5,25 @@ weight: 40 The **AsyncAPI document** defines a standard, protocol-agnostic interface that describes message-based or event-driven APIs. 
The AsyncAPI document allows people or machines communicating with one another, to understand the capabilities of an event-driven API without requiring access to the source code, documentation, or inspecting the network traffic. -This document allows you to define your API structures and formats, including channels the end user can subscribe to and the message formats they receive. +This document allows you to define your API structures and formats, including channels the end user can subscribe to and the message formats they receive. The documents describing the message-driven API under the AsyncAPI specification are represented as JSON objects and conform to JSON standards. YAML, a superset of JSON, can also be used to represent an API. -> - To learn how to create an AsyncAPI document or refresh your knowledge about the syntax and structure of the AsyncAPI document, check out our [latest specification documentation](https://www.asyncapi.com/docs/reference/specification/latest). +> - To learn how to create an AsyncAPI document or refresh your knowledge about the syntax and structure of the AsyncAPI document, check out our [latest specification documentation](https://www.asyncapi.com/docs/reference/specification/latest). > - You can develop, validate, and convert the AsyncAPI document to the latest version or preview your AsyncAPI document in a more readable way using the [AsyncAPI Studio](https://studio.asyncapi.com/). In the following sections, you'll learn about the inner working of the generator, what happens once the AsyncAPI document is fed to the generator, and how you can access the content of the document in your template. ## AsyncAPI document generation process -1. The **Generator** receives the **AsyncAPI Document** as input. + +1. The **Generator** receives the **AsyncAPI Document** as input. 2. The **Generator** sends to the **[Parser](parser)** the **asyncapiString** is a stringified version of the original **AsyncAPI Document** to validate and parse it. -3. The **Parser** validates the **AsyncAPI Document** using additional schema-related plugins, either the OpenAPI schema, RAML data types, or Avro schema. -4. If the **Parser** determines that the **AsyncAPI Document** is valid, it manipulates the original JSON/YAML document and provides a set of helper functions in return, bundling them together into an **asyncapi** variable that is an instance of [**AsyncAPIDocument**](https://github.com/asyncapi/parser-js/blob/master/API.md#module_@asyncapi/parser+AsyncAPIDocument). -5. At this point, the **Generator** passes the **originalAsyncAPI** and the **asyncapi** which make up part of the **[Template Context](asyncapi-context)** to the **Render Engine**. +3. The **Parser** validates the **AsyncAPI Document** using additional schema-related plugins, either the OpenAPI schema, RAML data types, or Avro schema. +4. If the **Parser** determines that the **AsyncAPI Document** is valid, it manipulates the original JSON/YAML document and provides a set of helper functions in return, bundling them together into an **asyncapi** variable that is an instance of [**AsyncAPIDocument**](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#asyncapidocument). +5. At this point, the **Generator** passes the **originalAsyncAPI** and the **asyncapi** which make up part of the **[Template Context](template-context)** to the **Render Engine**. 6. 
The **Template Context** is accessible to the template files that are passed to either the [react](react-render-engine) or [nunjucks](nunjucks-render-engine) **Render Engines**. - -``` mermaid + +```mermaid graph LR A[Template Context] B{Generator} @@ -35,14 +36,17 @@ graph LR B--> | originalAsyncAPI -> Stringified document | A A --> D end - ``` +``` + The AsyncAPI document's content is accessible to you while writing your template in two distinct ways: -* The `originalAsyncAPI`, which is a stringified version of the AsyncAPI document provided as input, without any modifications. -* The `asyncapi` (`AsyncAPIDocument`) which is an object with a set of helper functions, that comes as a result of the `Parser` manipulating the `originalAyncAPI` .The `asyncapi` functions make it easier to access the contents of AsyncAPI documents in your templates. + +- The `originalAsyncAPI`, which is a stringified version of the AsyncAPI document provided as input, without any modifications. +- The `asyncapi` (`AsyncAPIDocument`) which is an object with a set of helper functions, that comes as a result of the `Parser` manipulating the `originalAyncAPI` .The `asyncapi` functions make it easier to access the contents of AsyncAPI documents in your templates. In the following sections, you will learn how to use either the **originalAsyncAPI** or the **asyncapi** in your template. -### Method 1: `originalAsyncAPI` and template +### Method 1: `originalAsyncAPI` and template + One way of using the contents of the AsyncAPI document inside your template files is by using its stringified version that reflects the same contents as the AsyncAPI document provided as input to the generator. You can access it directly in your templates using the `originalAsyncAPI` variable. You also access it via the [hooks](hooks) `generator.originalAsyncAPI` because `originalAsyncAPI` is also a part of the generator instance that is passed to hooks. ```js @@ -54,7 +58,7 @@ const path = require('path'); function createAsyncapiFile(generator) { const asyncapi = generator.originalAsyncAPI; let extension; - + try { JSON.parse(asyncapi); extension = 'json'; @@ -69,9 +73,9 @@ function createAsyncapiFile(generator) { fs.writeFileSync(asyncapiOutputLocation, asyncapi); ``` - ### Method 2: `asyncapi` and template -A major advantage of using `asyncapi` (which is an instance of `AsyncAPIDocument`) is that it enables the developer to easily access the AsyncAPI documents' content by simply invoking a function. + +A major advantage of using `asyncapi` (which is an instance of `AsyncAPIDocument`) is that it enables the developer to easily access the AsyncAPI documents' content by simply invoking a function. Once the specification YAML or JSON document is passed as input to the generator, it is passed on to the [Parser](parser) library, which then manipulates the asyncAPI document to a more structured document called the `AsyncAPIDocument`. Once the parser returns the document to the generator, the generator passes it to the render engine. The render engine makes the `AsyncAPIDocument` object accessible to your template through the `asyncapi` variable. 
@@ -80,8 +84,8 @@ For example, if you want to extract the version of your API from AsyncAPI docume In the sample code snippet below, notice how you can access the contents of the AsyncAPI document in your template using `asyncapi` variable from the context: ```js - const apiName = asyncapi.info().title(); - const channels = asyncapi.channels(); +const apiName = asyncapi.info().title(); +const channels = asyncapi.channels(); ``` -> To learn about the various instances you can use to access the documents' content, look at the API of the AsyncAPI JavaScript Parser and the structure of [AsyncAPIDocument](https://github.com/asyncapi/parser-js/blob/master/API.md#module_@asyncapi/parser+AsyncAPIDocument) \ No newline at end of file +> To learn about the various instances you can use to access the documents' content, look at the API of the AsyncAPI JavaScript Parser and the structure of [AsyncAPIDocument](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#asyncapidocument) diff --git a/docs/configuration-file.md b/apps/generator/docs/configuration-file.md similarity index 87% rename from docs/configuration-file.md rename to apps/generator/docs/configuration-file.md index 75da9d528..c11cfd00e 100644 --- a/docs/configuration-file.md +++ b/apps/generator/docs/configuration-file.md @@ -1,6 +1,6 @@ --- title: "Configuration file" -weight: 30 +weight: 90 --- The `generator` property from `package.json` file must contain a JSON object that may have the following information: @@ -8,6 +8,7 @@ The `generator` property from `package.json` file must contain a JSON object tha |Name|Type|Description| |---|---|---| |`renderer`| String | Its value can be either `react` or `nunjucks` (default). +|`apiVersion`| String | Determines which **major** version of the [Parser-API](https://github.com/asyncapi/parser-api) the template uses. For example, `v2` for `v2.x.x`. If not specified, the Generator assumes the template is not compatible with the Parser-API so it will use the [Parser-JS v1 API](https://github.com/asyncapi/parser-js/tree/v1.18.1#api-documentation). For templates that need to support AsyncAPI specification v3 make sure to use `v3` [Parser-API](https://github.com/asyncapi/parser-api). If the template uses a version of the Parser-API that is not supported by the Generator, the Generator will throw an error. |`supportedProtocols`| [String] | A list with all the protocols this template supports. |`parameters`| Object[String, Object] | An object with all the parameters that can be passed when generating the template. When using the command line, it's done by indicating `--param name=value` or `-p name=value`. |`parameters[param].description`| String | A user-friendly description about the parameter. @@ -17,7 +18,7 @@ The `generator` property from `package.json` file must contain a JSON object tha |`conditionalFiles[filePath].subject`| String | The `subject` is a [JMESPath](http://jmespath.org/) query to grab the value you want to apply the condition to. It queries an object with the whole AsyncAPI document and, when specified, the given server. The object looks like this: `{ asyncapi: { ... }, server: { ... } }`. If the template supports `server` parameter, you can access server details like for example protocol this way: `server.protocol`. During validation with `conditionalFiles` only the server that template user selected is available in the specification file. For more information about `server` parameter [read about special parameters](#special-parameters). 
|`conditionalFiles[filePath].validation`| Object | The `validation` is a JSON Schema Draft 07 object. This JSON Schema definition will be applied to the JSON value resulting from the `subject` query. If validation doesn't have errors, the condition is met, and therefore the given file will be rendered. Otherwise, the file is ignored. Check [JSON Schema Validation](https://json-schema.org/draft-07/json-schema-validation.html#rfc.section.6) document for a list of all possible validation keywords. |`nonRenderableFiles`| [String] | A list of file paths or [globs](https://en.wikipedia.org/wiki/Glob_(programming)) that must be copied "as-is" to the target directory, i.e., without performing any rendering process. This is useful when you want to copy binary files. -|`generator`| [String] | A string representing the Generator version-range the template is compatible with. This value must follow the [semver](https://nodejs.dev/learn/semantic-versioning-using-npm) syntax. E.g., `>=1.0.0`, `>=1.0.0 <=2.0.0`, `~1.0.0`, `^1.0.0`, `1.0.0`, etc. [Read more about semver](https://docs.npmjs.com/about-semantic-versioning). +|`generator`| [String] | A string representing the generator version-range the template is compatible with. This value must follow the [semver](https://nodejs.dev/learn/semantic-versioning-using-npm) syntax. E.g., `>=1.0.0`, `>=1.0.0 <=2.0.0`, `~1.0.0`, `^1.0.0`, `1.0.0`, etc. [Read more about semver](https://docs.npmjs.com/about-semantic-versioning). |`filters`| [String] | A list of modules containing functions that can be used as Nunjucks filters. In case of external modules, remember they need to be added as a dependency in `package.json` of your template. |`hooks`| Object[String, String] or Object[String, Array[String]] | A list of modules containing hooks, except for the ones you keep locally in your template in default location. For each module you must specify the exact name of the hook that should be used in the template. For a single hook you can specify it as a string, for more you must pass an array of strings. In case of external modules, remember they need to be added as a dependency in `package.json` of your template. @@ -27,6 +28,7 @@ The `generator` property from `package.json` file must contain a JSON object tha "generator": { "renderer": "react", + "apiVersion": "v3", "supportedProtocols": ["amqp", "mqtt"], "parameters": { "server": { @@ -58,8 +60,8 @@ The `generator` property from `package.json` file must contain a JSON object tha "lib/lib/config.js" ], "generator": "<2.0.0", - "filters": [ - "@asyncapi/generator-filters" + "filters":[ + "my-package-with-filters" ], "hooks": { "@asyncapi/generator-hooks": "hookFunctionName" diff --git a/docs/file-templates.md b/apps/generator/docs/file-templates.md similarity index 52% rename from docs/file-templates.md rename to apps/generator/docs/file-templates.md index 52dc457d1..5ba4a9687 100644 --- a/docs/file-templates.md +++ b/apps/generator/docs/file-templates.md @@ -1,17 +1,17 @@ --- title: "File templates" -weight: 110 +weight: 140 --- It is possible to generate files for each specific object in your AsyncAPI documentation. For example, you can specify a filename like `$$channel$$.js` to generate a file for each channel defined in your AsyncAPI. 
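As a minimal sketch, assuming the react render engine and the `File`/`Text` components from `@asyncapi/generator-react-sdk`, such a `$$channel$$.js` file template could render one Markdown file per channel; the list that follows describes every available file-template name and its variables:

```js
// $$channel$$.js: rendered once per channel; `channelName` and `channel` are provided by the generator
import { File, Text } from '@asyncapi/generator-react-sdk';

export default function ({ channelName, channel }) {
  return (
    <File name={`${channelName}.md`}>
      <Text>{`# Channel: ${channelName}`}</Text>
      <Text>{channel.description() || 'No description provided.'}</Text>
    </File>
  );
}
```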
The following file-template names and extra variables in them are available: - - `$$channel$$`, within the template-file you have access to two variables [`channel`](https://github.com/asyncapi/parser-js/blob/master/API.md#Channel) and [`channelName`](https://github.com/asyncapi/parser-js/blob/master/API.md#AsyncAPIDocument+channels). Where the `channel` contains the current channel being rendered. - - `$$message$$`, within the template-file you have access to two variables [`message`](https://github.com/asyncapi/parser-js/blob/master/API.md#Message) and [`messageName`](https://github.com/asyncapi/parser-js/blob/master/API.md#Message+uid). Where `message` contains the current message being rendered. - - `$$schema$$`, within the template-file you have access to two variables [`schema`](https://github.com/asyncapi/parser-js/blob/master/API.md#Schema) and [`schemaName`](https://github.com/asyncapi/parser-js/blob/master/API.md#Schema+uid). Where `schema` contains the current schema being rendered. Only schemas from [Components object](https://www.asyncapi.com/docs/specifications/2.0.0/#a-name-componentsobject-a-components-object) are used. - - `$$everySchema$$`, within the template-file you have access to two variables [`schema`](https://github.com/asyncapi/parser-js/blob/master/API.md#Schema) and [`schemaName`](https://github.com/asyncapi/parser-js/blob/master/API.md#Schema+uid). Where `schema` contains the current schema being rendered. Every [Schema object](https://www.asyncapi.com/docs/specifications/2.0.0/#schemaObject) from the entire AsyncAPI file is used. - - `$$objectSchema$$`, within the template-file you have access to two variables [`schema`](https://github.com/asyncapi/parser-js/blob/master/API.md#Schema) and [`schemaName`](https://github.com/asyncapi/parser-js/blob/master/API.md#Schema+uid). Where `schema` contains the current schema being rendered. All the [Schema objects](https://www.asyncapi.com/docs/specifications/2.0.0/#schemaObject) with type object is used. - - `$$parameter$$`, within the template-file you have access to two variables [`parameter`](https://github.com/asyncapi/parser-js/blob/master/API.md#ChannelParameter) and [`parameterName`](https://github.com/asyncapi/parser-js/blob/master/API.md#Channel+parameters). Where the `parameter` contains the current parameter being rendered. - - `$$securityScheme$$`, within the template-file you have access to two variables [`securityScheme`](https://github.com/asyncapi/parser-js/blob/master/API.md#SecurityScheme) and [`securitySchemeName`](https://github.com/asyncapi/parser-js/blob/master/API.md#Components+securitySchemes). Where `securityScheme` contains the current security scheme being rendered. + - `$$channel$$`, within the template-file you have access to two variables [`channel`](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#channel) and [`channelName`](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#channels). Where the `channel` contains the current channel being rendered. + - `$$message$$`, within the template-file you have access to two variables [`message`](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#message) and [`messageName`](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#message). Where `message` contains the current message being rendered. 
+ - `$$schema$$`, within the template-file you have access to two variables [`schema`](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#schema) and [`schemaName`](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#schema). Where `schema` contains the current schema being rendered. Only schemas from [Components object](https://www.asyncapi.com/docs/reference/specification/latest#componentsObject) are used. + - `$$everySchema$$`, within the template-file you have access to two variables [`schema`](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#schema) and [`schemaName`](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#schemas). Where `schema` contains the current schema being rendered. Every [Schema object](https://www.asyncapi.com/docs/specifications/2.0.0/#schemaObject) from the entire AsyncAPI file is used. + - `$$objectSchema$$`, within the template-file you have access to two variables [`schema`](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#schema) and [`schemaName`](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#schemas). Where `schema` contains the current schema being rendered. All the [Schema objects](https://www.asyncapi.com/docs/reference/specification/latest#multiFormatSchemaObject) with type object is used. + - `$$parameter$$`, within the template-file you have access to two variables [`parameter`](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#channelparameter) and [`parameterName`](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#channelparameters). Where the `parameter` contains the current parameter being rendered. + - `$$securityScheme$$`, within the template-file you have access to two variables [`securityScheme`](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#securityscheme) and [`securitySchemeName`](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#securityschemes). Where `securityScheme` contains the current security scheme being rendered. The file name will be equal to `*Name` variable. diff --git a/apps/generator/docs/generator-template.md b/apps/generator/docs/generator-template.md new file mode 100644 index 000000000..be84e4587 --- /dev/null +++ b/apps/generator/docs/generator-template.md @@ -0,0 +1,619 @@ +--- +title: "Creating a template" +weight: 170 +--- + +This tutorial teaches you how to create a simple generator template using a Python MQTT client. You'll use the AsyncAPI document and the template you develop to generate Python code. Additionally, you'll create template code with a reusable component to reuse the custom functionality you create and test your code using an MQTT client. + +Suppose you can only sleep when the AC in your bedroom is set to 22 °C, and you can't sleep when the temperature drops or rises above that. You can install a smart monitor in your bedroom that keeps track of the temperature and notifies you to adjust it to your optimum temperature when it fluctuates. You will create a template to alert you when the bedroom's temperature fluctuates from 22 °C. + +In this tutorial: + +- You'll use the [Eclipse Mosquito](https://test.mosquitto.org) **MQTT broker**, which you'll connect to subscribe and publish messages using an MQTT client. +- You'll use [Python Paho-MQTT](https://pypi.org/project/paho-mqtt/) as the **MQTT client** in this project. 
+- You'll create a React template that will use the MQTT broker to allow you to monitor your bedroom's temperature and notify you when the temperature drops or rises above 22 °C. +- Lastly, create a reusable component for the output code's `sendTemperatureDrop` and `sendTemperatureRise` functions. + +## Background context + +There is a list of [community maintained templates](https://www.asyncapi.com/docs/tools/generator/template#generator-templates-list), but what if you do not find what you need? In that case, you'll create a user-defined template that generates custom output from the generator. +Before you create the template, you'll need to have an [AsyncAPI document](https://www.asyncapi.com/docs/tools/generator/asyncapi-document) that defines the properties you want to use in your template to test against. In this tutorial, you'll use the following template saved in the **test/fixtures/asyncapi.yml** file in your template project directory. + +``` yml + +asyncapi: 2.6.0 + +info: + title: Temperature Service + version: 1.0.0 + description: This service is in charge of processing all the events related to temperature. + +servers: + dev: + url: test.mosquitto.org + protocol: mqtt + +channels: + temperature/changed: + description: Updates the bedroom temperature in the database when the temperatures drops or goes up. + publish: + operationId: temperatureChange + message: + description: Message that is being sent when the temperature in the bedroom changes. + payload: + type: object + additionalProperties: false + properties: + temperatureId: + type: string +components: + schemas: + temperatureId: + type: object + additionalProperties: false + properties: + temperatureId: + type: string +``` + + + +- To generate code, use the [AsyncAPI CLI](https://www.asyncapi.com/tools/cli). If you don't have the CLI installed, follow [CLI installation guide](/docs/tools/generator/installation-guide#asyncapi-cli). +- If you are new to AsyncAPI Generator, check out the following docs: [template development](/docs/tools/generator/template-development), which explains the minimum requirements for a template and possible features. + + + +## Overview of steps + +1. Create a new directory for your template named **python-mqtt-client-template**. +2. Install the AsyncAPI CLI using the command `npm install -g @asyncapi/cli`. +3. Create a new folder **test/fixtures** with a file named **asyncapi.yml** in your fixtures directory. This file is used to define the **structure** of your template. You can copy the above example and paste it in your **asyncapi.yml** document. +4. Create a new file named **package.json** in your **python-mqtt-client-template** directory. This file is used to define the **dependencies** for your template. +5. Create a new folder **python-mqtt-client-template/template**. Create a new file named **index.js** in your **template** directory. This file is used to define the **logic** for your template. +6. Create a **test.py** file to validate the logic of your application. Don't worry about this file for now. The tutorial will tell you how to create it later. + +Now your directory should look like this: + +``` +python-mqtt-client-template +├── template +│ └── index.js +├── test +│ └── fixtures +│ └── asyncapi.yml +└── package.json +``` + +Lets break it down: + +### package.json file + +The **package.json** file is used to define the dependencies for your template. 
Add the following code snippet to your **package.json** file:
+
+``` json
+{
+  "name": "python-mqtt-client-template",
+  "version": "0.0.1",
+  "description": "A template that generates a Python MQTT client using MQTT.",
+  "generator": {
+    "renderer": "react",
+    "apiVersion": "v1",
+    "generator": ">=1.10.0 <2.0.0",
+    "supportedProtocols": ["mqtt"]
+  },
+  "dependencies": {
+    "@asyncapi/generator-react-sdk": "^0.2.25"
+  },
+  "devDependencies": {
+    "rimraf": "^5.0.0"
+  }
+}
+```
+
+Here's what is contained in the code snippet above:
+
+- **name** - the name of your template.
+- **version** - the current version of your template.
+- **description** - a description of what your template does.
+- **generator** - specifies the generator [specific configuration](https://www.asyncapi.com/docs/tools/generator/configuration-file).
+  - **renderer** - can either be [`react`](https://www.asyncapi.com/docs/tools/generator/react-render-engine) or [`nunjucks`](https://www.asyncapi.com/docs/tools/generator/nunjucks-render-engine). In this case, the generator will pass your template to the React render engine to generate the output.
+  - **apiVersion** - specifies which major version of the [Parser-API](https://github.com/asyncapi/parser-api) your template will use.
+  - **generator** - a string representing the generator version-range your template is compatible with.
+  - **supportedProtocols** - a list that specifies which protocols are supported by your template.
+- **dependencies** - specifies which version of [`@asyncapi/generator-react-sdk`](https://github.com/asyncapi/generator-react-sdk) should be used.
+
+Navigate to the **python-mqtt-client-template** directory. Run the command `npm install` on your terminal to install the dependencies specified in **package.json**.
+
+### index.js file
+
+The **index.js** file is used to define the logic for your template. Inside the template folder, create an **index.js** file and add the code snippet below:
+
+```js
+//1
+import { File } from '@asyncapi/generator-react-sdk'
+//2
+export default function ({ asyncapi }) {
+//3
+  return <File name={'client.py'}>{asyncapi.info().title()}</File>
+}
+```
+
+The code snippet above does the following:
+
+1. Import the `generator-react-sdk` dependency.
+2. The `asyncapi` argument is an instance of the [AsyncAPI Parser](https://www.asyncapi.com/docs/tools/generator/parser). It allows you to access the content of the AsyncAPI document in your template using helper functions.
+3. The `asyncapi.info().title()` call uses the `info()` helper function to return the info object from the AsyncAPI document, illustrated in the code snippet below:
+
+``` yml
+info:
+  title: Temperature Service
+  version: 1.0.0
+  description: This service is in charge of processing all the events related to temperature.
+```
+
+The `asyncapi.info().title()` returns `Temperature Service`.
+
+### Test using AsyncAPI CLI
+
+To see this in action, navigate to the **python-mqtt-client-template** directory. Then run the `asyncapi generate fromTemplate test/fixtures/asyncapi.yml ./ -o test/project` command in your terminal. If successful, you'll see the message below on your terminal:
+
+``` cmd
+Generation in progress. Keep calm and wait a bit... done
+Check out your shiny new generated files at output.
+```
+
+Navigate to the **test/project** directory. You should see a **client.py** file; the only content is `Temperature Service`.
+
+Let's break down the previous command:
+
+- `asyncapi generate fromTemplate` is how you use the AsyncAPI generator via the AsyncAPI CLI.
+- `test/fixtures/asyncapi.yml` points to your AsyncAPI document. +- `./` specifies the location of your template. +- `-o` specifies where to output the result. + +## Creating a template + +You will create an MQTT-supported template that will generate a Python client from the template and the AsyncAPI document above. + +In this section, you'll: + +1. Write the MQTT client code. +2. Write code to test the client works. +3. Update the template to use the client code. +4. Setup a script to help you run this code. +5. Template your code. + +### 1. Create the client + +Here is the sample code to be pasted in the client.py you generated above running the `asyncapi generate fromTemplate test/fixtures/asyncapi.yml ./ -o test/project` command. It uses the `paho-mqtt` package. + +``` python +# 1 +import paho.mqtt.client as mqtt +# 2 +mqttBroker = "test.mosquitto.org" + +class TemperatureServiceClient: + def __init__(self): + # 3 + self.client = mqtt.Client() + # 4 + self.client.connect(mqttBroker) + + + def sendTemperatureChange(self, id): + # 5 + topic = "temperature/changed" + # 6 + self.client.publish(topic, id) +``` + +Make sure you have the Paho-MQTT library installed. You can install it using pip with the `pip install paho-mqtt==1.6.1` command. Please note that this tutorial is based on Paho-MQTT version 1.6.1. The Paho-MQTT library has since been updated to version 2.0.0, which includes changes that are not covered in this tutorial. To ensure compatibility and to follow along without any issues, please install version 1.6.1 of the Paho-MQTT library. + +Let's break down the previous code snippet: + +1. Imports the MQTT module from the Paho package, which provides the MQTT client functionality. +2. Assigns the MQTT broker address `test.mosquitto.org` to the variable MQTT broker. This specifies the location where the MQTT client will connect to. +3. Defines an instance of the MQTT client object. This object will be used to establish a connection with the MQTT broker and perform MQTT operations. +4. Defines that on client instance creation, it connects to the broker. +5. The `sendTemperatureChange` is a function the client user invokes to publish a message to the broker, and its specific topic. + +In summary, this code sets up an MQTT client using the Paho-MQTT library. It connects to the `test.mosquitto.org` MQTT broker, and the `sendTemperatureChange()` method publishes temperature change information to the `temperature/changed` topic whenever called. + +### 2. Test the client + +You'll interact with the Temperature Service using the client module you created above. You'll create an instance of the client using `client = TemperatureServiceClient()` and then use `client.sendTemperatureChange` function to publish messages that Temperature Service is subscribed to. 
+Create a **test/project/test.py** file in your project and add the code snippet below: + +Now your directory should look like this: + +``` +python-mqtt-client-template +├── template +│ └── index.js +└── test + ├── fixtures + │ └── asyncapi.yml + └── project + ├── client.py + └── test.py +``` + +``` python +from client import TemperatureServiceClient +from random import randrange +import time + +client = TemperatureServiceClient() + +id_length = 8 +min_value = 10**(id_length-1) # Minimum value with 8 digits (e.g., 10000000) +max_value = 10**id_length - 1 # Maximum value with 8 digits (e.g., 99999999) + +while True: + randomId = randrange(min_value, max_value + 1) + client.sendTemperatureChange(randomId) + print("New temperature detected " + str(randomId) + " sent to temperature/changed") + time.sleep(1) + +``` + +Run the code above in your terminal using the command `python test.py`. You should see output similar to the snippet below logged on your terminal: + +``` cmd +New temperature detected 64250266 sent to temperature/changed +New temperature detected 36947728 sent to temperature/changed +New temperature detected 72955029 sent to temperature/changed +``` + +To make sure your **test.py** and client code works check if the broker really receives temperature-related messages. You can do it using an [MQTT CLI](https://hivemq.github.io/mqtt-cli/) using docker. Run the command `docker run hivemq/mqtt-cli sub -t temperature/changed -h test.mosquitto.org` in your terminal. It will download the image if you don't have it locally, then the CLI will connect to the broker, subscribe to the `temperature/changed` topic and then output the temperature ids on the terminal. + +### 3. Update the template with client code + +Open [**index.js**](#indexjs-file) and copy the content of [**client.py**](#1-create-the-client) and replace `{asyncapi.info().title()}` with it. It should look like the code snippet below now: + +``` js +import { File } from '@asyncapi/generator-react-sdk'; + +export default function ({ asyncapi }) { + return ( + + {`import paho.mqtt.client as mqtt + +mqttBroker = "test.mosquitto.org" + +class TemperatureServiceClient: + def __init__(self): + self.client = mqtt.Client() + self.client.connect(mqttBroker) + + + def sendTemperatureChange(self, id): + topic = "temperature/changed" + self.client.publish(topic, id)`} + + ) +} +``` + +### 4. Write script to run the test code + +In **package.json** you can have the scripts property that you invoke by calling `npm run `. Add these scripts to **package.json**: + +``` json + "scripts": { + "test:clean": "rimraf test/project/client.py", + "test:generate": "asyncapi generate fromTemplate test/fixtures/asyncapi.yml ./ --output test/project --force-write", + "test:start": "python test/project/test.py", + "test": "npm run test:clean && npm run test:generate && npm run test:start" + } +``` + +The 4 scripts above do the following: + +1. `test:clean`: This script uses the `rimraf` package to remove the old version of the file **test/project/client.py** every time you run your test. +2. `test:generate`: This script uses the AsyncAPI CLI to generate a new version of **client.py**. +3. `test:start`: This script runs the python code using **client.py**. +4. `test`: This script runs all the other scripts in proper order. + +Run `npm test` on your terminal to ensure everything works as expected. + +### 5. Template your code + +#### 5a. 
Add parameters to the configuration file + +You often have different runtime environments in programming, e.g., development and production. You will use different servers to spin both of these instances. You'll have two broker versions, one for production and the other for development. You have defined a dev server in the AsyncAPI document: + +```yml +servers: + dev: + url: test.mosquitto.org + protocol: mqtt +``` + +This will allow you to also define the broker you will use in production in the servers section above. +Therefore, we can template the code `mqttBroker = 'test.mosquitto.org'` in **index.js** so the value is populated dynamically at runtime depending on the specified server environment. + +The generator has a **parameters** object used to define parameters you use to dynamically modify your template code at runtime. It also supports the **server** parameter that defines the server configuration value. Navigate to **package.json** and add the snippet below: + +```json + "generator": { + # ...(redacted for brevity) + "parameters": { + "server": { + "description": "The server you want to use in the code.", + "required": true + } + } + } +``` + +`"required": true`: makes the parameter mandatory and once user forgets to add it to the cli command, a proper error message is yielded. +You'll pass the server to be used to generate your code using `--param server=dev` in the AsyncAPI CLI command. Failure to which you'll get an error: + +```cmd +Generator Error: This template requires the following missing params: server. +``` + +Update your `test:generate` script in **package.json** to include the server param + +```json +"test:generate": "asyncapi generate fromTemplate test/fixtures/asyncapi.yml ./ --output test/project --force-write --param server=dev" +``` + +You can now replace the static broker from `mqttBroker = 'test.mosquitto.org'` to `mqttBroker = "${asyncapi.servers().get(params.server).url()}"` in **index.js**. + +Now the template code looks like this: + +``` js +import { File } from '@asyncapi/generator-react-sdk'; + +// notice that now the template not only gets the instance of parsed AsyncAPI document but also the parameters +export default function ({ asyncapi, params }) { + + return ( + + {`import paho.mqtt.client as mqtt + +mqttBroker = "${asyncapi.servers().get(params.server).url()}" + +class TemperatureServiceClient: + def __init__(self): + self.client = mqtt.Client() + self.client.connect(mqttBroker) + + + def sendTemperatureChange(self, id): + topic = "temperature/changed" + self.client.publish(topic, id)`} + + ) +} +``` + +Run `npm test` to validate that your code still works as expected. + +#### 5b. Templating index.js with React + +Python takes indentation very seriously, and our generated output will be Python code. We, therefore, need to make sure the indentation in **index.js** looks right so the generated code is indented correctly. After templating the code in **index.js**, it will look like the following code snippet: + +```js +// 1 +import { File, Text } from '@asyncapi/generator-react-sdk' +export default function ({ asyncapi, params }) { + return ( + + // 2 + import paho.mqtt.client as mqtt + // 3 + mqttBroker = "{asyncapi.servers().get(params.server).url()}" + // 4 + class {asyncapi.info().title().replaceAll(' ', '')}Client: + // 5 + + {`def __init__(self): + self.client = mqtt.Client() + self.client.connect(mqttBroker)`} + + + ) +} +``` + +1. Import the **Text** component that will wrap strings so they are indented properly in the output. 
Your import statement should now look like this: `import { File, Text } from '@asyncapi/generator-react-sdk'`. +2. When the Paho module import is rendered in **client.py** file, it will add two extra new lines. +3. The broker url is templated in a `Text` component removing the `$` from the string template. +4. Dynamically get the class name **TemperatureServiceClient** from the AsyncAPI document from the **info** object using the Parser API using the code: `asyncapi.info().title()` . It will return `Temperature Service`, then remove the spaces and add `Client` as a suffix. +5. There is no templating needed in the `__init__` function, there is only hardcoded information. + +> If you're on the fence about which templating engine you should use in your template, check out the [React render engine](https://www.asyncapi.com/docs/tools/generator/react-render-engine) and [nunjucks render engine](https://www.asyncapi.com/docs/tools/generator/nunjucks-render-engine) documentation. +In the next section, you'll refactor your template to use React. + +#### 5c. Creating a reusable component + +Suppose you have two [channels](https://www.asyncapi.com/docs/concepts/channel), one to watch if the temperature drop below 22 °C and one to check if the temperature is above 22 °C, the generated output code would look like this: + +```python +import paho.mqtt.client as mqtt + +mqttBroker = "test.mosquitto.org" + +class TemperatureServiceClient: + + def __init__(self): + self.client = mqtt.Client() + self.client.connect(mqttBroker) + + def sendTemperatureDrop(self, id): + topic = "temperature/dropped" + self.client.publish(topic, id) + def sendTemperatureRise(self, id): + topic = "temperature/risen" + self.client.publish(topic, id) + +``` + +You'll then need to template to dynamically generate `sendTemperatureDrop` and `sendTemperatureRise` functions in the generated code based off the AsyncAPI document content. The goal is to write template code that returns functions for channels that the Temperature Service application is subscribed to. The template code to generate these functions will look like this: + +```js + + + +``` + +It's recommended to put reusable components outside the template directory in a new directory called components. You'll create a component that will dynamically generate functions in the output for as many channels as there are in your AsyncAPI document that contains a `publish` operation. Add the following code in the **python-mqtt-client-template/components/TopicFunction.js** file, after creating the **python-mqtt-client-template/components/** directory: + +```js +/* + * This component returns a block of functions that user can use to send messages to specific topic. 
+ * As input it requires a list of Channel models from the parsed AsyncAPI document + */ +export function TopicFunction({ channels }) { + const topicsDetails = getTopics(channels) + let functions = '' + + topicsDetails.forEach((t) => { + functions += `def send${t.name}(self, id): + topic = "${t.topic}" + self.client.publish(topic, id)\n` + }) + + return functions +} + +/* + * This function returns a list of objects, one for each channel with two properties, name and topic + * name - holds information about the operationId provided in the AsyncAPI document + * topic - holds information about the address of the topic + * + * As input it requires a list of Channel models from the parsed AsyncAPI document + */ +function getTopics(channels) { + const channelsCanSendTo = channels + let topicsDetails = [] + + channelsCanSendTo.forEach((ch) => { + const topic = {} + const operationId = ch.operations().filterByReceive()[0].id() + topic.name = operationId.charAt(0).toUpperCase() + operationId.slice(1) + topic.topic = ch.address() + + topicsDetails.push(topic) + }) + + return topicsDetails +} +``` + +`{ channels }`: the `TopicFunction` component accepts a custom prop called channels and in your template code +`getTopics(channels)`: Returns a list of objects, one for each channel with two properties; name and topic. The **name** holds information about the `operationId` provided in the AsyncAPI document while the **topic** holds information about the address of the topic. + +Import the `TopicFunction` component in your template code in **index.js** and add the template code to generate the functions to topics that the `Temperature Service` application is subscribed to. In your case, the final version of your template code should look like this: + +```js +import { File, Text } from '@asyncapi/generator-react-sdk' +import { TopicFunction } from '../components/TopicFunction' + +export default function ({ asyncapi, params }) { + return ( + + import paho.mqtt.client as mqtt + + mqttBroker = "{asyncapi.servers().get(params.server).url()}" + + class {asyncapi.info().title().replaceAll(' ', '')}Client: + + + {`def __init__(self): + self.client = mqtt.Client() + self.client.connect(mqttBroker)`} + + + + + + + ) +} + +``` + +Now your directory should look like this: + +``` +python-mqtt-client-template +├── components +│ └── TopicFunction.js +├── template +│ └── index.js +└── test + ├── fixtures + │ └── asyncapi.yml + └── project + ├── client.py + └── test.py +``` + + +Run `npm test` on your terminal to ensure everything works as expected. + +In the next section, you'll add another channel to **asyncapi.yml** file called `temperature/dropped` and `temperature/risen` then run the template again to make sure it still works as expected. + +#### 5d. Update AsyncAPI document + +Update the AsyncAPI document to use two channels: + +```yml +channels: + temperature/dropped: + description: Notifies the user when the temperature drops past a certain point. + publish: + operationId: temperatureDrop + message: + description: Message that is being sent when the temperature drops past a certain point. + payload: + type: object + additionalProperties: false + properties: + temperatureId: + type: string + + temperature/risen: + description: Notifies the user when the temperature rises past a certain point. + publish: + operationId: temperatureRise + message: + description: Message that is being sent when the temperature rises past a certain point. 
+ payload: + type: object + additionalProperties: false + properties: + temperatureId: + type: string +``` + +And update your test script in test.py to test the two functions as below: + +```py + client.sendTemperatureDrop(randomId) + print("Temperature drop detected " + str(randomId) + " sent to temperature/dropped") + client.sendTemperatureRise(randomId) + print("Temperature rise detected " + str(randomId) + " sent to temperature/risen") +``` + +Run `npm test` to validate that everything works as expected. You should see logs similar to the snippet below in your terminal: + +```cmd +Temperature drop detected 49040460 sent to temperature/dropped +Temperature rise detected 49040460 sent to temperature/risen +Temperature drop detected 66943992 sent to temperature/dropped +Temperature rise detected 66943992 sent to temperature/risen +``` + +## Where to go from here? + +Great job completing this tutorial! You have learnt how to use an AsyncAPI file to create a Python MQTT template and used it with the Paho-MQTT library in Python to connect to an MQTT broker and publish messages.😃 + +If you want to tinker with a completed template and see what it would look like in production, check out the [Paho-MQTT template](https://github.com/derberg/python-mqtt-client-template/tree/v1.0.0). You can also check out the accompanying [article about creating MQTT client code](https://www.brainfart.dev/blog/asyncapi-codegen-python). + +You can also check out the [MQTT beginners guide](https://medium.com/python-point/mqtt-basics-with-python-examples-7c758e605d4) tutorial to learn more about asynchronous messaging using MQTT. diff --git a/docs/hooks.md b/apps/generator/docs/hooks.md similarity index 94% rename from docs/hooks.md rename to apps/generator/docs/hooks.md index c628d3b9d..d2c06ddc0 100644 --- a/docs/hooks.md +++ b/apps/generator/docs/hooks.md @@ -1,6 +1,6 @@ --- title: "Hooks" -weight: 100 +weight: 130 --- Hooks are functions called by the generator on a specific moment in the generation process. Hooks can be anonymous functions but you can also add function names. These hooks can have arguments provided to them or being expected to return a value. @@ -14,11 +14,11 @@ The following types of hooks are currently supported: The generator parses: - All the files in the `.hooks` directory inside the template. -- All modules listed in the template configuration and triggers only hooks that names were added to the config. You can use the official AsyncAPI [hooks library](https://github.com/asyncapi/generator-hooks). To learn how to add hooks to configuration [read more about the configuration file](#configuration-file). +- All modules listed in the template configuration and triggers only hooks that names were added to the config. You can use the official AsyncAPI [hooks library](https://github.com/asyncapi/generator-hooks). To learn how to add hooks to configuration [read more about the configuration file](https://www.asyncapi.com/docs/tools/generator/configuration-file). ### Examples -> Some of the examples have names of hook functions provided and some not. Keep in mind that hook functions kept in template in default location do not require a name. Name is required only if you keep hooks in non default location or in a separate library, because such hooks need to be explicitly configured in the configuration file. For more details on hooks configuration [read more about the configuration file](#configuration-file). +> Some of the examples have names of hook functions provided and some not. 
Keep in mind that hook functions kept in template in default location do not require a name. Name is required only if you keep hooks in non default location or in a separate library, because such hooks need to be explicitly configured in the configuration file. For more details on hooks configuration [read more about the configuration file](https://www.asyncapi.com/docs/tools/generator/configuration-file). Most basic modules with hooks look like this: ```js @@ -78,4 +78,4 @@ module.exports = { return currentFilename.replace('-', '_') }; }; -``` \ No newline at end of file +``` diff --git a/docs/index.md b/apps/generator/docs/index.md similarity index 66% rename from docs/index.md rename to apps/generator/docs/index.md index 2789cc747..a80e02531 100644 --- a/docs/index.md +++ b/apps/generator/docs/index.md @@ -3,7 +3,10 @@ title: "Introduction" weight: 10 --- -The AsyncAPI generator is a tool that generates anything you want using the **[AsyncAPI Document](generator/asyncapi-documents)** and **[Template](generator/template)** that are supplied as inputs to the AsyncAPI CLI. The generator was built with extensibility in mind; you can use the generator to generate anything you want, provided that it can be defined in a template, such as code, diagrams, markdown files, microservices, and applications. A number of [community-maintained templates](https://github.com/search?q=topic%3Aasyncapi+topic%3Agenerator+topic%3Atemplate) are now available for immediate usage. +The AsyncAPI generator is a tool that generates anything you want using the **[AsyncAPI Document](generator/asyncapi-document)** and **[Template](generator/template)** that are supplied as inputs to the AsyncAPI CLI. The generator was built with extensibility in mind; you can use the generator to generate anything you want, provided that it can be defined in a template, such as code, diagrams, markdown files, microservices, and applications. A number of [community-maintained templates](https://github.com/search?q=topic%3Aasyncapi+topic%3Agenerator+topic%3Atemplate) are now available for immediate usage. + +> **Note:** +> If your primary objective is to generate models/classes for your event-driven architecture apps, use [AsyncAPI Modelina](/docs/tools/generator/model-generation), which is supported in the AsyncAPI CLI, instead of using the AsyncAPI Generator. Modelina is specifically designed for model generation and provides utilities for working with the AsyncAPI document. ### Generator use cases - Generation of interactive and understandable API documentation @@ -19,9 +22,9 @@ The AsyncAPI generator is a tool that generates anything you want using the **[A 1. The **Generator** receives the **[Template](generator/template)** and **[AsyncAPI Document](generator/asyncapi-document)** as inputs. 2. The **Generator** sends to the **[Parser](generator/parser)** the **asyncapiString** which is a stringified version of the original **AsyncAPI Document**. 3. The **Parser** uses additional plugins such as the OpenAPI, RAML, or Avro schemas to validate custom schemas of message payloads defined in the **AsyncAPI Document**. -4. If the **Parser** determines that the original **AsyncAPI Document** is valid, it manipulates the document and returns a set of helper functions and properties and bundles them together into an **asyncapi** variable that is an instance of [**AsyncAPIDocument**](https://github.com/asyncapi/parser-js/blob/master/API.md#module_@asyncapi/parser+AsyncAPIDocument). 
The **asyncapi** helper functions make it easier to access the contents of the AsyncAPI Document. -5. At this point, the **Generator** passes the **[asyncapi](generator/asyncapi-document#method-2-asyncapi-and-template)**, the **[originalAsyncAPI](generator/asyncapi-document#method-1-originalasyncapi-and-template)**, and the **params** which collectively make up the **[Template Context](generator/asyncapi-context)** to the **Render Engine**. -6. AsyncAPI has two **Render Engines**([react](generator/react-render-engine) and [nunjucks](generator/nunjucks-render-engine). Depending on which one you've specified in your `package.json`, the **Generator** knows the right **Render Engine** to pass both the **Template Files** and the **Template Context**. +4. If the **Parser** determines that the original **AsyncAPI Document** is valid, it manipulates the document and returns a set of helper functions and properties and bundles them together into an **asyncapi** variable that is an instance of [**AsyncAPIDocument**](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#asyncapidocument). The **asyncapi** helper functions make it easier to access the contents of the AsyncAPI Document. +5. At this point, the **Generator** passes the **[asyncapi](generator/asyncapi-document#method-2-asyncapi-and-template)**, the **[originalAsyncAPI](generator/asyncapi-document#method-1-originalasyncapi-and-template)**, and the **params** which collectively make up the **[Template Context](generator/template-context)** to the **Render Engine**. +6. AsyncAPI has two **Render Engines**([react](generator/react-render-engine) and [nunjucks](generator/nunjucks-render-engine)). Depending on which one you've specified in your `package.json`, the **Generator** knows the right **Render Engine** to pass both the **Template Files** and the **Template Context**. 7. Once the **Render Engine** receives the **Template Files** and the **Template Context**, it injects all the dynamic values in your react or nunjucks based **Template Files** using the **Template Context**. As a result, the **Render Engine** generates **markdown**, **pdf**, **boilerplate code**, and **anything else** you specified to be generated as output. > You can generate anything you want using the generator as long as it can be defined in a **Template**. @@ -50,4 +53,4 @@ graph LR D --> J[Java Spring Boot] D --> K[Anything else] ``` -**`params`** are template-specific options passed to the `asyncapi generate fromTemplate` CLI command to customize the generated output. \ No newline at end of file +**`params`** are template-specific options passed to the `asyncapi generate fromTemplate` CLI command to customize the generated output. diff --git a/docs/installation-guide.md b/apps/generator/docs/installation-guide.md similarity index 92% rename from docs/installation-guide.md rename to apps/generator/docs/installation-guide.md index 69e81b53f..302be0497 100644 --- a/docs/installation-guide.md +++ b/apps/generator/docs/installation-guide.md @@ -9,8 +9,8 @@ You can use the generator library to generate whatever you want in your event-dr ## Prerequisites Before you install and use the AsyncAPI CLI and the generator library, ensure you meet the prerequisites below, then [install the CLI](#installation). -1. Node.js v12.16 and higher -2. Npm v6.13.7 and higher +1. Node.js v18.12.0 and higher +2. 
Npm v8.19.0 and higher To verify the versions of Node and Npm you have, run the following command on your terminal: ``` @@ -53,7 +53,7 @@ You can install in Linux by using `dpkg`, a package manager for debian: For further installation instructions for different operating systems, read the [AsyncAPI CLI documentation](https://github.com/asyncapi/cli#installation). > **Remember:** -> Each [community-developed template](https://github.com/search?q=topic%3Aasyncapi+topic%3Agenerator+topic%3Atemplate) is dependent on a certain version of the generator for it to work correctly. Before you install the generator CLI, check the template's `package.json` for the version of the generator CLI your template is compatible with. Read the [versioning docs](versioning) to learn why it's important to use certain generator versions with your templates. +> Each [community-developed template](https://github.com/search?q=topic%3Aasyncapi+topic%3Agenerator+topic%3Atemplate) is dependent on a certain version of the generator for it to work correctly. Before you install the AsyncAPI CLI, check the template's `package.json` for the version of the AsyncAPI CLI your template is compatible with. Read the [versioning docs](versioning) to learn why it's important to use certain generator versions with your templates. ### Update AsyncAPI CLI There are several reasons why you might want to update your generator version: diff --git a/apps/generator/docs/jsdoc2md-handlebars/custom-sig-name.hbs b/apps/generator/docs/jsdoc2md-handlebars/custom-sig-name.hbs new file mode 100644 index 000000000..f40172708 --- /dev/null +++ b/apps/generator/docs/jsdoc2md-handlebars/custom-sig-name.hbs @@ -0,0 +1,12 @@ +{{#if virtual}}*{{/if}}{{#with (parentObject)}}{{#if virtual}}*{{/if~}}{{/with~}} +{{#if name}}{{#sig~}} +{{{@depOpen}~}} +{{{@codeOpen}~}} +{{#if @prefix}}{{@prefix}} {{/if~}} +{{@parent~}} +{{@accessSymbol}}{{#if (isEvent)}}"{{{name}}}"{{else}}{{{escape name}}}{{/if~}} +{{{@codeClose}~}} +{{#if @suffix}} {{@suffix}}{{/if~}} +{{{@depClose}~}} +{{~/sig}}{{/if}} +{{#if virtual}}*{{/if}}{{#with (parentObject)}}{{#if virtual}}*{{/if~}}{{/with~}} diff --git a/apps/generator/docs/jsdoc2md-handlebars/defaultvalue.hbs b/apps/generator/docs/jsdoc2md-handlebars/defaultvalue.hbs new file mode 100644 index 000000000..a8af1b991 --- /dev/null +++ b/apps/generator/docs/jsdoc2md-handlebars/defaultvalue.hbs @@ -0,0 +1 @@ +{{#unless (equal defaultvalue undefined)}}`{{#if equals}} = {{/if}}{{#if (equal type.names.[0] "string")}}{{{json-stringify defaultvalue}}}{{else}}{{{defaultvalue}}}{{/if}}`{{/unless}} \ No newline at end of file diff --git a/apps/generator/docs/jsdoc2md-handlebars/header.hbs b/apps/generator/docs/jsdoc2md-handlebars/header.hbs new file mode 100644 index 000000000..e65afe298 --- /dev/null +++ b/apps/generator/docs/jsdoc2md-handlebars/header.hbs @@ -0,0 +1,12 @@ + + + +{{#if (equal kind "class")}} +{{>heading-indent}}{{>custom-sig-name}} +{{~else~}} +{{#if params}} +{{>heading-indent}}{{>custom-sig-name}} +{{~else~}} +* {{>sig-name}}** : +{{/if}} +{{/if}} diff --git a/apps/generator/docs/jsdoc2md-handlebars/link.hbs b/apps/generator/docs/jsdoc2md-handlebars/link.hbs new file mode 100644 index 000000000..844c6a1fa --- /dev/null +++ b/apps/generator/docs/jsdoc2md-handlebars/link.hbs @@ -0,0 +1,25 @@ +{{! 
usage: link to="namepath" html=true/false caption="optional caption"~}} + +{{~#if html~}} + + +{{~#link to~}} +{{#if url~}} +{{#if ../../caption}}{{../../../caption}}{{else}}{{name}}{{/if}} +{{~else~}} +{{#if ../../caption}}{{../../../caption}}{{else}}{{name}}{{/if~}} +{{/if~}} +{{/link~}} + + +{{~else~}} + +{{#link to~}} +{{#if url~}} +[`{{#if ../../caption}}{{{../../../caption}}}{{else}}{{{name}}}{{/if}}`]({{{url}}}) +{{~else~}} +`{{#if ../../caption}}{{{../../../caption}}}{{else}}{{{name}}}{{/if~}}` +{{~/if~}} +{{/link~}} + +{{/if~}} \ No newline at end of file diff --git a/apps/generator/docs/jsdoc2md-handlebars/main.hbs b/apps/generator/docs/jsdoc2md-handlebars/main.hbs new file mode 100644 index 000000000..543ba5676 --- /dev/null +++ b/apps/generator/docs/jsdoc2md-handlebars/main.hbs @@ -0,0 +1,8 @@ +--- +title: Library API +weight: 75 +--- + +Reference API documentation for AsyncAPI Generator library. +{{>main-index~}} +{{>all-docs~}} \ No newline at end of file diff --git a/apps/generator/docs/jsdoc2md-handlebars/params-table.hbs b/apps/generator/docs/jsdoc2md-handlebars/params-table.hbs new file mode 100644 index 000000000..928c8fb00 --- /dev/null +++ b/apps/generator/docs/jsdoc2md-handlebars/params-table.hbs @@ -0,0 +1,9 @@ +{{#if params}} +{{#params}}**Params** + +{{#each this~}} +{{indent}}- {{name}}{{#if type}} {{>linked-type-list types=type.names delimiter=" | " }}{{/if}}{{#unless (equal defaultvalue undefined)}} {{>defaultvalue equals=true ~}}{{/unless}}{{#if description}} - {{{inlineLinks description}}}{{/if}} +{{/each}} + +{{/params~}} +{{/if}} \ No newline at end of file diff --git a/apps/generator/docs/model-generation.md b/apps/generator/docs/model-generation.md new file mode 100644 index 000000000..f15ea8d24 --- /dev/null +++ b/apps/generator/docs/model-generation.md @@ -0,0 +1,84 @@ +--- +title: "Adding models generation in template" +weight: 200 +--- + +This guide will walk you through the process of enabling models/types generation in a template by using [Modelina](https://www.asyncapi.com/tools/modelina). + +Modelina is an AsyncAPI library designed for generating data models using inputs such as [AsyncAPI](generator/asyncapi-document), OpenAPI, or JSON schema inputs. Its functionality revolves around creating data models from the provided AsyncAPI document and the model template, which defines message payloads. It is better to use Modelina in your template to handle model generation rather than providing custom templates. + +You can integrate the work shown in this guide into a template by following the [tutorial about creating a template](https://www.asyncapi.com/docs/tools/generator/generator-template). + +In this guide, you'll learn how to use Modelina in a template code to enable support for Python data model generation. + +## Add Modelina dependency + +Install Modelina in your project using npm: `npm install --save @asyncapi/modelina`. + +Ensure your template's `package.json` file now contains Modelina pointing to its latest version: + + ```json + "dependencies": { + // ... + "@asyncapi/modelina": "^2.0.5" + // ... + } +``` + +## Create a models.js file + +Create a new directory in the **template** directory named **src/models** and create a **models.js** file within it. 
In the **models.js** file, add the following code:
+
+```js
+// 1
+import { File } from '@asyncapi/generator-react-sdk';
+// 2
+import { PythonGenerator, FormatHelpers } from '@asyncapi/modelina';
+
+/**
+ * @typedef RenderArgument
+ * @type {object}
+ * @property {AsyncAPIDocument} asyncapi document object received from the generator.
+ */
+
+/**
+ * Render all schema models
+ * @param {RenderArgument} param0
+ * @returns
+ */
+// 3
+export default async function schemaRender({ asyncapi }) {
+  // 4
+  const pythonGenerator = new PythonGenerator();
+  // 5
+  const models = await pythonGenerator.generate(asyncapi);
+  // 6
+  const files = [];
+  // 7
+  for (const model of models) {
+    // 8
+    const modelFileName = `${FormatHelpers.toPascalCase(model.modelName)}.py`;
+    // 9
+    files.push(<File name={modelFileName}>{model.result}</File>);
+  }
+  return files;
+}
+```
+
+Let's break it down. The code snippet above does the following:
+
+1. The `File` component from the [generator react SDK](https://github.com/asyncapi/generator-react-sdk) is needed to handle the further rendering of generated models into files.
+2. The `PythonGenerator` generator is the core needed for model generation. Additionally, you can import [FormatHelpers](https://github.com/asyncapi/modelina/blob/master/src/helpers/FormatHelpers.ts), which provides a set of helpers that make it easier to modify model names to match your required case.
+3. You can change the name `schemaRender` to anything else, like `modelRenderer`. More importantly, this must be an `async` function and a default export. This function is invoked during the generation process and should contain the logic behind model generation.
+4. First, create an instance of the `PythonGenerator` model generator. If you decide to use the presets functionality from Modelina, pass your presets here during instance creation.
+5. The actual model generation is one line of code, and as a result you get an array of models that you later need to turn into files.
+6. You need to define an array that must be returned from the `schemaRender` function. The array must contain React components, in this case the `<File/>` component.
+7. Iterate over the generated models and use their content to create proper definitions of `<File/>` components.
+8. Notice how using Modelina helpers, in this case the `toPascalCase` function, lets you make sure that the filename of your model follows a specific case pattern.
+9. Each component must be added to the `files` array that you later return from the default function. Notice the definition of the `<File/>` component, which lets you provide the name of the resulting file and the content of the model. Notice also `model.result`, which shows that the generated array does not contain raw model content but a set of output objects that hold not only `result` but also other information, such as `modelName`.
+
+With such a model template that uses Modelina, the generation process produces a set of model files in the `$OUTPUT_DIR/src/models` directory.
+
+## Conclusion
+
+Modelina provides a flexible and powerful way to generate data models from AsyncAPI, OpenAPI, or JSON Schema documents. By integrating Modelina, you can enable model generation in your template much faster.
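+
+If you prefer a single module containing all the models instead of one file per model, the same building blocks can be rearranged. The following is a rough sketch under that assumption, reusing only the APIs shown above:
+
+```js
+import { File } from '@asyncapi/generator-react-sdk';
+import { PythonGenerator } from '@asyncapi/modelina';
+
+// Hypothetical variant: render every generated model into a single models.py file.
+export default async function schemaRender({ asyncapi }) {
+  const pythonGenerator = new PythonGenerator();
+  const models = await pythonGenerator.generate(asyncapi);
+  // Join the rendered model snippets with blank lines between them.
+  const content = models.map((model) => model.result).join('\n\n');
+  return <File name={'models.py'}>{content}</File>;
+}
+```
+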
diff --git a/docs/nunjucks-render-engine.md b/apps/generator/docs/nunjucks-render-engine.md similarity index 88% rename from docs/nunjucks-render-engine.md rename to apps/generator/docs/nunjucks-render-engine.md index 214cb9713..34626b49b 100644 --- a/docs/nunjucks-render-engine.md +++ b/apps/generator/docs/nunjucks-render-engine.md @@ -1,6 +1,6 @@ --- title: "Nunjucks render engine" -weight: 70 +weight: 120 --- [Nunjucks](https://mozilla.github.io/nunjucks) is the default render engine, however, we strongly recommend adopting the [React](#react) engine. @@ -11,7 +11,7 @@ weight: 70 1. Templates may contain `partials` (reusable chunks). They must be stored in the `.partials` directory under the template directory. [Read more about partials](#partials). 1. Templates may contain multiple files. Unless stated otherwise, all files will be rendered. 1. The default variables you have access to in any the template file are the following: - - `asyncapi` that is a parsed spec file object. Read the [API](https://github.com/asyncapi/parser-js/blob/master/API.md#AsyncAPIDocument) of the Parser to understand what structure you have access to in this parameter. + - `asyncapi` that is a parsed spec file object. Read the [API](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#asyncapidocument) of the Parser to understand what structure you have access to in this parameter. - `originalAsyncAPI` that is an original spec file before it is parsed. - `params` that contain the parameters provided when generating. @@ -73,6 +73,7 @@ async function asyncCamelCase(str, callback) { } ``` -In case you have more than one template and want to reuse filters, you can put them in a single library. You can configure such a library in the template configuration under `filters` property. You can also use the official AsyncAPI [filters library](https://github.com/asyncapi/generator-filters). To learn how to add such filters to configuration [read more about the configuration file](#configuration-file). +In case you have more than one template and want to reuse filters, you can put them in a single library. You can configure such a library in the template configuration under `filters` property. To learn how to add such filters to configuration [read more about the configuration file](#configuration-file). +You can also use the official AsyncAPI [nunjucks-filters](/apps/nunjucks-filters) that are by default included in the generator library. \ No newline at end of file diff --git a/apps/generator/docs/parser.md b/apps/generator/docs/parser.md new file mode 100644 index 000000000..399308d67 --- /dev/null +++ b/apps/generator/docs/parser.md @@ -0,0 +1,96 @@ +--- +title: "Parser" +weight: 60 +--- + +## Parser + +The AsyncAPI Parser is a package used to parse and validate the [AsyncAPI documents](asyncapi-document) in your Node.js or browser application. These documents can be either in YAML or JSON format. + +The Parser validates these documents using dedicated schema-supported plugins. + +Supported schemas: + +- AsyncAPI schema (no plugin needed) +- OpenAPI schema +- JSON schema +- Avro schema +- RAML data-type schema + +The Parser allows the template developer to easily access schemas provided in the above supported formats. This is because the JavaScript parser converts all of them into JSON schema. + +If the document is valid, the Parser returns an `AsyncAPIDocument instance` with a set of helper functions that enable easier access to the contents of the AsyncAPI document. The parser provides dereferenced output. 
During the dereference process, the AsyncAPI parser substitutes a reference with a full definition. The dereferenced output is always in JSON format. The parser provides a message listing all errors if a document is invalid. The original AsyncAPI document is part of the [Template Context](template-context) as the generator also passes the original AsyncAPI document to the template context. + +The following AsyncAPI document example has two channels: `channelOne` and `channelTwo`. Each channel has one operation and a single message: + +```yaml +asyncapi: "2.5.0" +info: + title: Demo API + version: "1.0.0" +channels: + channelOne: + publish: + summary: This is the first sample channel + operationId: onMessage + message: + name: FirstMessage + payload: + id: + type: integer + minimum: 0 + description: Id of the channel + sentAt: + type: string + format: date-time + description: Date and time when the message was sent. + channelTwo: + publish: + summary: This is the second sample channel + operationId: messageRead + message: + name: SecondMessage + payload: + id: + type: integer + minimum: 0 + description: Id of the channel + sentAt: + type: string + format: date-time + description: Date and time when the message was sent. +``` + +We can use helper functions provided by the Parser to operate on the above JSON file. For example, we can use the helper method `asyncAPIDocument.channelNames()`, which returns an array of all channel names currently present in the AsyncAPI document. Another example where you can use a helper function is to list out messages present in your JSON file. Instead of fetching a single message one at a time, you can use the function `asyncAPIDocument.allMessages()` that returns the map of all messages in your AsyncAPI document. + +```js +const channelNames = asyncAPIDocument.channelNames(); +const messages = asyncAPIDocument.allMessages(); +``` + +> The Parser gives you access to a number of these [helper functions](https://github.com/asyncapi/parser-api/blob/master/docs/api.md) that you can implement to access the contents of your AsyncAPI document. + +## AsyncAPI document validation process + +1. **AsyncAPI document** is fed as an input to the Generator. +1. Generator sends the AsyncAPI document to the Parser as **asyncapiString**; the stringified version of the original AsyncAPI document. +1. The Parser uses additional plugins such as the OpenAPI, RAML, or Avro schemas to validate custom schemas of message payloads defined in the AsyncAPI document. +1. If the AsyncAPI document is invalid, it throws an error based on the encountered failure type. For example, if the AsyncAPI document is not a string nor a JavaScript object, the Parser throws an `invalid-document-type` error. + Similarly, you may encounter errors such as: + - `invalid-json` + - `invalid-yaml` + - `impossible-to-convert-to-json` +1. If the document is valid, the Parser modifies the AsyncAPI document, returns a set of helper functions, and bundles them together into the **asyncapi** variable. These helper functions in the form of an **asyncapi** variable are passed to the **Template Context**. +1. The Template Context passes all of these values to the [**Render Engine**](react-render-engine) of your choice. Finally, the Render Engine generates whatever output you may have specified in your template. (i.e. code, documentation, diagrams, pdfs, applications, etc.) 
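+
+The generator drives this whole flow for you, but it can help to see roughly what the parsing step looks like in isolation. The sketch below is illustrative only; it assumes the current `@asyncapi/parser` JavaScript API, where `Parser#parse` resolves to a parsed document plus a list of diagnostics, so check the Parser repository linked at the end of this page for the exact interface:
+
+```js
+const fs = require('fs');
+const { Parser } = require('@asyncapi/parser');
+
+async function main() {
+  const parser = new Parser();
+  // asyncapiString is the stringified AsyncAPI document, as described in the steps above.
+  const asyncapiString = fs.readFileSync('./asyncapi.yaml', 'utf8');
+  const { document, diagnostics } = await parser.parse(asyncapiString);
+  if (!document) {
+    // Validation failed; the diagnostics explain what went wrong.
+    console.error(diagnostics);
+    return;
+  }
+  // The parsed document exposes the same helper functions templates receive.
+  console.log(document.info().title());
+}
+
+main();
+```
+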
+ +```mermaid +graph TD + A[AsyncAPI document] --> B + B[Generator] -->|asyncapiString| C(Parser) + C --> D{Validation} + D -->|invalid| B + D -->|asyncapi -> helper functions| E[Template Context] + E --> F[Render Engine] +``` + +> To learn more about the Parser and access all of its features, check out the AsyncAPI [Parser’s GitHub repository](https://github.com/asyncapi/parser-js). diff --git a/docs/react-render-engine.md b/apps/generator/docs/react-render-engine.md similarity index 66% rename from docs/react-render-engine.md rename to apps/generator/docs/react-render-engine.md index 788604ec3..1cf74b983 100644 --- a/docs/react-render-engine.md +++ b/apps/generator/docs/react-render-engine.md @@ -1,6 +1,6 @@ --- title: "React render engine" -weight: 60 +weight: 110 --- [React](https://reactjs.org) is the render engine that we strongly suggest you should use for any new templates. The only reason it is not the default render engine is to stay backward compatible. @@ -15,7 +15,7 @@ weight: 60 When writing React templates you decide whether to use CommonJS, ES5, or ES6 modules since everything is bundled together before the rendering process takes over. We use our own React renderer which can be found in the [Generator React SDK](https://github.com/asyncapi/generator-react-sdk). There you can find information about how the renderer works or how we transpile your template files. -Your React template always require `@asyncapi/generator-react-sdk` as a dependency. `@asyncapi/generator-react-sdk` is required to access the `File` component which is required as a root component for a file to be rendered. Furthermore it provides some common components to make your development easier, like `Text` or `Indent`. +Your React template always requires `@asyncapi/generator-react-sdk` as a dependency. `@asyncapi/generator-react-sdk` is required to access the `File` component which is required as a root component for a file to be rendered. Furthermore, it provides some common components to make your development easier, like `Text` or `Indent`. Let's consider a basic React template as the one below called `MyTemplate.js`: @@ -31,9 +31,9 @@ export default function({ asyncapi, params, originalAsyncAPI }) { } ``` -The exported default function returns a `File` component as a root component which the [Generator](https://github.com/asyncapi/generator) uses to figure out what file should be generated. In our case we overwrite the default functionality of saving the file as `MyTemplate.js` but instead use the filename `asyncapi.md`. It is then specified that we should render `Some text that should render as is\n` within that file. Notice the `\n` character at the end, this is something that is automatically added after the `Text` component. +The exported default function returns a `File` component as a root component which the generator uses to determine what file should be generated. In our case, we overwrite the default functionality of saving the file as `MyTemplate.js` but instead use the filename `asyncapi.md`. It is then specified that we should render `Some text that should render as is\n` within that file. Notice the `\n` character at the end, which is automatically added after the `Text` component. -For further information about components, props etc. 
see the [Generator React SDK](https://github.com/asyncapi/generator-react-sdk) +For further information about components, props, etc, see the [Generator React SDK](https://github.com/asyncapi/generator-react-sdk) ### Common assumptions @@ -41,15 +41,15 @@ For further information about components, props etc. see the [Generator React SD - `File` is the root component - The file is not in the list of `nonRenderableFiles` in the template configuration 1. New lines are automatically added after each `Text` component. -1. The props you have access to in rendering function is: - - `asyncapi` that is a parsed spec file object. Read the [API](https://github.com/asyncapi/parser-js/blob/master/API.md#AsyncAPIDocument) of the Parser to understand what structure you have access to in this parameter. - - `originalAsyncAPI` that is an original spec file before it is parsed. +1. The props you have access to in the rendering function are: + - `asyncapi` which is a parsed spec file object. Read the [API](https://github.com/asyncapi/parser-api/blob/master/docs/api.md#asyncapidocument) of the Parser to understand what structure you have access to in this parameter. + - `originalAsyncAPI` which is an original spec file before it is parsed. - `params` that contain the parameters provided when generating. 1. All the file templates are supported where the variables are provided after the default props as listed above. ### Debugging React template in VSCode -With React, it enables you to debug your templates. For Visual Studio Code, we have created a boilerplate [launch configuration](https://code.visualstudio.com/docs/editor/debugging#_launch-configurations) to enable debug in your template. Add the following launch configuration: +With React, it enables you to debug your templates. For Visual Studio Code, we have created a boilerplate [launch configuration](https://code.visualstudio.com/docs/editor/debugging#_launch-configurations) to enable debugging in your template. Add the following launch configuration: ```json { @@ -75,6 +75,6 @@ With React, it enables you to debug your templates. For Visual Studio Code, we h } ``` -Now replace `./asyncapi.yml` with your document of choice. Replace `./template` with the path to your React template. You can now debug your template by adding any breakpoints you want and inspect your code. +Now replace `./asyncapi.yml` with your document of choice. Replace `./template` with the path to your React template. You can now debug your template by adding any breakpoints you want and inspecting your code. diff --git a/docs/special-file-names.md b/apps/generator/docs/special-file-names.md similarity index 97% rename from docs/special-file-names.md rename to apps/generator/docs/special-file-names.md index 8f6af3259..536eee841 100644 --- a/docs/special-file-names.md +++ b/apps/generator/docs/special-file-names.md @@ -1,6 +1,6 @@ --- title: "Special file names" -weight: 130 +weight: 160 --- We use NPM behind the scenes to download and install the templates. Since NPM will not fetch files like `.gitignore`, you should name them differently. Luckily, the Generator will take care of renaming them back for you. 
The following is a table of the special file names:
diff --git a/docs/template-context.md b/apps/generator/docs/template-context.md
similarity index 90%
rename from docs/template-context.md
rename to apps/generator/docs/template-context.md
index 01c662e85..b6b01fd94 100644
--- a/docs/template-context.md
+++ b/apps/generator/docs/template-context.md
@@ -1,10 +1,10 @@
 ---
 title: "Template context"
-weight: 60
+weight: 100
 ---
 
 While using the generator tool, you may want dynamic values populated to your templates and rendered in the output. The generator can achieve that using the **template context**.
-The **template context** allows you to access the contents of the [AsyncAPI document](asyncapi-document.md) and inject dynamic values to the template files passed to the asyncAPI CLI during the generation process. The render engine then displays these dynamically assigned values in the output.
+The **template context** allows you to access the contents of the [AsyncAPI document](asyncapi-document) and inject dynamic values into the template files passed to the AsyncAPI CLI during the generation process. The render engine then displays these dynamically assigned values in the output.
 
 ## Generation process
 1. The **Generator** receives **Template** and **params** as input.
diff --git a/apps/generator/docs/template-development.md b/apps/generator/docs/template-development.md
new file mode 100644
index 000000000..423222505
--- /dev/null
+++ b/apps/generator/docs/template-development.md
@@ -0,0 +1,161 @@
+---
+title: "Template development"
+weight: 80
+---
+> **Note**
+> Avoid manually templating types and models from scratch with the AsyncAPI render engines (Nunjucks and React). Instead, use [AsyncAPI Modelina](/docs/tools/generator/model-generation), a dedicated library for model generation.
+
+## Minimum template requirements
+
+Let's break down the minimum template requirements: the `template` directory and a `package.json` file.
+
+> You can also check the [Template for Generator Templates](https://github.com/asyncapi/template-for-generator-templates) project to see a showcase template based on the AsyncAPI Generator.
+
+### `template` directory
+
+The `template` directory holds all the files that will be used for generating the output. The generator will process all the files stored in this directory.
+
+The following code is an example of an `index.js` file inside the `template` folder.
+```js
+import { File, Text } from "@asyncapi/generator-react-sdk";
+
+export default function ({ asyncapi, params, originalAsyncAPI }) {
+  return (
+    <File name="asyncapi.md">
+      <Text>My application's markdown file.</Text>
+      <Text>App name: **{asyncapi.info().title()}**</Text>
+    </File>
+  );
+}
+```
+
+The above example will produce an `asyncapi.md` file that demonstrates usage of information from the AsyncAPI document (i.e. the `title`).
+
+### `package.json` file
+
+Before the generation process begins, the generator installs the template into its dependencies. A `package.json` file is necessary to identify the template name.
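+
+Conceptually, once the template is installed, the generator only needs to read this `package.json` to identify the template and pick up its `generator` configuration. The sketch below is just an illustration of that idea; it is not the generator's actual source code, and the helper name is made up:
+
+```js
+// Hypothetical helper: read a template's package.json to identify the
+// template and its generator configuration.
+const path = require('path');
+
+function readTemplateConfig(templateDir) {
+  const pkg = require(path.join(templateDir, 'package.json'));
+  return {
+    name: pkg.name,               // identifies the template
+    config: pkg.generator || {},  // renderer, supportedProtocols, parameters, ...
+  };
+}
+
+module.exports = { readTemplateConfig };
+```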
+
+The following block shows an example `package.json` file that points to the [React Render Engine](react-render-engine) and necessary dependencies:
+
+```json
+{
+  "name": "myTemplate",
+  "generator": {
+    "renderer": "react"
+  },
+  "dependencies": {
+    "@asyncapi/generator-react-sdk": "^0.2.25"
+  }
+}
+```
+
+Every template must depend on the [`@asyncapi/generator-react-sdk` package](https://github.com/asyncapi/generator-react-sdk), which contains a template file's basic components.
+
+## Additional configuration options
+
+You must configure the `generator` property in the template's `package.json` file to contain JSON objects with the required parameters for template configuration, such as:
+
+|Name|Type|Description|
+|---|---|---|
+|`renderer`| String | Its value can be either `react` or `nunjucks` (default).
+|`supportedProtocols`| [String] | A list with all the protocols this template supports.
+|`parameters`| Object[String, Object] | An object with all the parameters that can be passed when generating the template. When using the command line, it's done by indicating `--param name=value` or `-p name=value`.
+|`parameters[param].description`| String | A user-friendly description about the parameter.
+|`parameters[param].default`| Any | Default value of the parameter if not specified. Shouldn't be used for mandatory `required=true` parameters.
+|`parameters[param].required`| Boolean | Whether the parameter is required or not.
+
+The above table lists the configuration options that tell the generator how to handle your template during the generation process. The `generator` property from `package.json` contains all the configuration information. To learn more about template configuration and various supported parameters, read the [generator configuration file](configuration-file).
+
+> Whenever you make a change to `package.json`, run `npm install` to keep `package-lock.json` in sync and validate the file.
+
+### `package.json` configuration options
+
+The following examples show some advanced configurations that you can use in your `package.json` file:
+
+```json
+{
+  "name": "myTemplate",
+  "generator": {
+    "renderer": "react",
+    "supportedProtocols": [
+      "mqtt"
+    ]
+  },
+  "dependencies": {
+    "@asyncapi/generator-react-sdk": "^0.2.25"
+  }
+}
+```
+The above `package.json` file has a newly added configuration called `supportedProtocols`, which is set to a list containing only `mqtt`. This configuration lists all the protocols that the template supports. You can declare multiple supported protocols in your template.
+
+For example, to generate output using the above template, your AsyncAPI document needs servers that use `mqtt`. If your AsyncAPI document has server connections with `kafka`, the generation process will be terminated since the only supported protocol is `mqtt`.
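+
+The sketch below only illustrates the idea behind that check; it is not the generator's actual implementation. It walks the plain AsyncAPI JSON and compares each server protocol against the template's declared list:
+
+```js
+// Hypothetical illustration of the `supportedProtocols` idea.
+const supportedProtocols = ['mqtt']; // taken from the template's package.json
+
+function findUnsupportedProtocols(asyncapiJson) {
+  const servers = asyncapiJson.servers || {};
+  return Object.values(servers)
+    .map((server) => server.protocol)
+    .filter((protocol) => !supportedProtocols.includes(protocol));
+}
+
+// findUnsupportedProtocols({ servers: { production: { protocol: 'kafka' } } })
+// returns ['kafka'], so generation would stop for this template.
+```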
+
+### Accessing template parameters
+
+Additionally, you can define a configuration called `parameters`, which is an object with all the parameters that can be passed when generating the template:
+
+```json
+{
+  "name": "myTemplate",
+  "generator": {
+    "renderer": "react",
+    "supportedProtocols": [
+      "mqtt"
+    ],
+    "parameters": {
+      "version": {
+        "description": "Overrides application version under `info.version` in the AsyncAPI document.",
+        "required": false
+      }
+    }
+  },
+  "dependencies": {
+    "@asyncapi/generator-react-sdk": "^0.2.25"
+  }
+}
+```
+
+The default version of your application is always fetched from your AsyncAPI document. The above configuration lets the template user override the existing version with a new one on the command line.
+
+The changes in the template are as follows:
+
+Original:
+
+```js
+App name: **{ asyncapi.info().title() }**
+```
+
+Newer:
+
+```js
+App name: **{ asyncapi.info().title() }**
+Version is: **{params.version || asyncapi.info().version()}**
+```
+
+Now that you have added all the configuration options, you can start the generation process using the AsyncAPI CLI. You can pass these parameters via the CLI: `--param name=value` or `-p name=value`. For example, `-p version=2.0.0` overrides the application version from the command line.
+
+## Hooks
+
+[Hooks](hooks) enable templates to perform tasks beyond rendering files. You add hooks to your template as small pieces of code stored in the `hooks` directory under the template directory. Hooks can also come from other modules and external libraries, or be configured inside the template.
+
+**Templates** can perform multiple actions _before_ or _after_ the generation process with the help of **hooks**.
+
+For example, a hook can overwrite the specification version with the `version` parameter before the generation process even begins:
+
+```js
+module.exports = {
+  'generate:before': ({ asyncapi, templateParams = {} }) => {
+    const version = templateParams.version || asyncapi.info().version();
+    asyncapi._json.info.version = version;
+  }
+};
+```
+This can be an even better alternative to the `version` parameter override discussed in the previous section. A markdown document will be generated, and the AsyncAPI document passed to the generator will be returned with the overwritten version.
+
+The updated template looks like the following:
+
+```js
+App name: **{ asyncapi.info().title() }**
+Version: **{asyncapi.info().version()}**
+```
diff --git a/apps/generator/docs/template.md b/apps/generator/docs/template.md
new file mode 100644
index 000000000..b8f31db01
--- /dev/null
+++ b/apps/generator/docs/template.md
@@ -0,0 +1,73 @@
+---
+title: "Template"
+weight: 50
+---
+
+## Template
+
+A template is a project that specifies the output of the generation process, using the AsyncAPI generator and an [AsyncAPI document](asyncapi-document). Its files describe the generation results depending on the AsyncAPI document's content.
+
+Example outputs:
+
+- Code
+- Documentation
+- Markdown diagrams
+- Python and Java applications
+
+A template is an independent Node.js project unrelated to the `generator` repository. AsyncAPI templates are managed, released, and published separately. You can also create and manage templates on your own.
+
+The generator uses the official [Arborist](https://www.npmjs.com/package/@npmcli/arborist) NPM library.
(This means templates do not have to be published to package managers to use them.) Arborist helps the generator fetch the template's source code and use it for the generation process. By default, this library pulls data from the default NPM registry, which is https://registry.npmjs.org. You can also configure the generator to fetch templates that are private or hosted in a different NPM registry.
+
+You can store template projects on a local drive or as a `git` repository during the development process.
+
+## Template generation process
+
+1. A template is provided as input to the **Generator**.
+2. **asyncapi** is the original AsyncAPI document injected into your template file by default.
+3. **params** are the parameters you pass to the AsyncAPI CLI. Later, you can also pass these **params** further to other components.
+4. The generator passes both the original **asyncapi** document and the **params** to the **Template Context**.
+5. Concurrently, the generator passes **Template files** to the **Render engine** as well. AsyncAPI uses two render engines: _react_ and _nunjucks_.
+6. Once the Render Engine receives both the Template Files and the Template Context, it injects all the dynamic values from the Template Context into the Template Files.
+7. The render engine generates whatever output you may have specified in your template (e.g. code, documentation, diagrams, PDFs, or applications).
+
+```mermaid
+graph LR
+    A[Template Context]
+    B{Generator}
+    D[Render Engine]
+    F[Template] --> B
+  subgraph Generator Library
+    B --> | asyncapi | A
+    B --> | params | A
+    A --> D
+    B --> | Template Files | D
+  end
+```
+
+## Generator `templates` list
+
+AsyncAPI has a list of available templates to enhance your generation process. Templates are stored as repositories on AsyncAPI's official GitHub profile.
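+
+You can use any template from the table below with the AsyncAPI CLI or programmatically with the generator library. As a minimal sketch, assuming the `Generator` class exported by the `@asyncapi/generator` package (see the usage documentation for the exact API), rendering the HTML template could look like this:
+
+```js
+const path = require('path');
+const Generator = require('@asyncapi/generator');
+
+// Render the official HTML template into ./output from a local AsyncAPI file.
+const generator = new Generator('@asyncapi/html-template', path.resolve(__dirname, 'output'));
+
+generator
+  .generateFromFile(path.resolve(__dirname, 'asyncapi.yml'))
+  .then(() => console.log('Done!'))
+  .catch(console.error);
+```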
+ + + + +Template Name | Description | Source code +---|---|--- +`@asyncapi/nodejs-template` | Generates Node.js service that uses Hermes package | [Node.js template](https://github.com/asyncapi/nodejs-template) +`@asyncapi/nodejs-ws-template` | Generates Node.js service that supports WebSocket protocol only | [Node.js WebSocket template](https://github.com/asyncapi/nodejs-ws-template) +`@asyncapi/java-template` | Generates Java JMS application | [Java template](https://github.com/asyncapi/java-template) +`@asyncapi/java-spring-template` | Generates Java Spring service | [Java spring template](https://github.com/asyncapi/java-spring-template) +`@asyncapi/java-spring-cloud-stream-template` | Generates Java Spring Cloud Stream service | [Java spring cloud stream template](https://github.com/asyncapi/java-spring-cloud-stream-template) +`@asyncapi/python-paho-template` | Generates Python service that uses Paho library | [Python paho template](https://github.com/asyncapi/python-paho-template) +`@asyncapi/html-template` | Generates HTML documentation site | [HTML template](https://github.com/asyncapi/html-template) +`@asyncapi/markdown-template` | Generates documentation in Markdown file | [Markdown template](https://github.com/asyncapi/markdown-template) +`@asyncapi/ts-nats-template` | Generates TypeScript NATS client | [TypeScript/Node.js NATS template](https://github.com/asyncapi/ts-nats-template/) +`@asyncapi/go-watermill-template` | Generates Go client using Watermill | [GO watermill template](https://github.com/asyncapi/go-watermill-template) +`@asyncapi/dotnet-nats-template` | Generates .NET C# client using NATS | [.NET C# NATS template](https://github.com/asyncapi/dotnet-nats-template) + + + +> Some of these templates are maintained by various third-party organizations. The README file usually contains this information and more, such as configuration options the user can pass to the template, usage, technical requirements, etc. + +> Check out all our community [generator templates](https://github.com/search?q=topic%3Aasyncapi+topic%3Agenerator+topic%3Atemplate) + diff --git a/docs/typescript-support.md b/apps/generator/docs/typescript-support.md similarity index 98% rename from docs/typescript-support.md rename to apps/generator/docs/typescript-support.md index 5a84ef9b2..5c785cd3c 100644 --- a/docs/typescript-support.md +++ b/apps/generator/docs/typescript-support.md @@ -1,6 +1,6 @@ --- title: "TypeScript support" -weight: 120 +weight: 150 --- The AsyncAPI generator has TypeScript support for [hooks](#hooks) and Nunjucks's [filters](#filters). Assumptions: diff --git a/docs/usage.md b/apps/generator/docs/usage.md similarity index 63% rename from docs/usage.md rename to apps/generator/docs/usage.md index 698d3bb39..344044680 100644 --- a/docs/usage.md +++ b/apps/generator/docs/usage.md @@ -4,28 +4,33 @@ weight: 30 --- There are two ways to use the generator: -- [Generator CLI](#generator-cli) +- [AsyncAPI CLI](#generator-cli) - [Generator library](#using-as-a-modulepackage) -## Generator CLI +## AsyncAPI CLI +Generates whatever you want using templates compatible with AsyncAPI Generator. ```bash -Usage: asyncapi generate fromTemplate