Push before general cleanup to avoid merge errors

This commit is contained in:
2022-11-13 11:09:44 -05:00
parent a5196ef862
commit d5d6f84995
1451 changed files with 2065486 additions and 54 deletions

View File

@@ -0,0 +1,31 @@
import { writeFileSync } from 'fs';
/** Upstream list of published google-cloud-node client libraries. */
export declare const libraryListUrl = "https://raw.githubusercontent.com/googleapis/google-cloud-node/main/libraries.json";
/**
 * Shape of one entry in the upstream libraries.json. Only `api_id` and
 * `distribution_name` are consumed when building disclaimers.json.
 */
export interface LibraryMetadata {
    name: string;
    name_pretty: string;
    product_documentation: string;
    client_documentation: string;
    issue_tracker: string;
    release_level: string;
    language: string;
    repo: string;
    distribution_name: string;
    api_id: string;
    requires_billing: boolean;
}
/** One record written to disclaimers.json: API id prefix → npm package. */
export interface Disclaimer {
    api: string;
    package: string;
}
/** fs wrapper exported as an object so tests can mock the file write. */
export declare const gfs: {
    writeFileSync: typeof writeFileSync;
};
/**
 * Reach out to google-cloud-node, and get a list of available client libraries
 * that are veneer or GAPIC. Use that to populate a JSON file that will be
 * used during generation to call out improved clients in READMEs for a given
 * API.
 *
 * To use this, run `node build/src/generator/disclaimers`.
 */
export declare function main(): Promise<void>;

View File

@@ -0,0 +1,45 @@
"use strict";
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// CommonJS interop boilerplate emitted by tsc.
Object.defineProperty(exports, "__esModule", { value: true });
exports.main = exports.gfs = exports.libraryListUrl = void 0;
const gaxios_1 = require("gaxios");
const fs_1 = require("fs");
// Upstream list of published google-cloud-node client libraries.
exports.libraryListUrl = 'https://raw.githubusercontent.com/googleapis/google-cloud-node/main/libraries.json';
// exported for mocking purposes
exports.gfs = {
    writeFileSync: fs_1.writeFileSync,
};
/**
 * Reach out to google-cloud-node, and get a list of available client libraries
 * that are veneer or GAPIC. Use that to populate a JSON file that will be
 * used during generation to call out improved clients in READMEs for a given
 * API.
 *
 * To use this, run `node build/src/generator/disclaimers`.
 */
async function main() {
    const res = await (0, gaxios_1.request)({ url: exports.libraryListUrl });
    // Keep only the two fields the README templates need.
    const disclaimers = res.data.map(library => ({
        api: library.api_id.split('.')[0],
        package: library.distribution_name,
    }));
    exports.gfs.writeFileSync('./disclaimers.json', JSON.stringify(disclaimers, null, 2));
}
exports.main = main;
// Allow running directly via `node build/src/generator/disclaimers`.
if (require.main === module) {
    // Surface async failures instead of dropping them as an unhandled
    // rejection (matches the generator.js entry point's main().catch).
    main().catch(console.error);
}

34
node_modules/googleapis/build/src/generator/docs.d.ts generated vendored Normal file
View File

@@ -0,0 +1,34 @@
import * as execa from 'execa';
import * as fs from 'fs';
/**
 * Filesystem and process helpers bundled into one exported object so tests
 * can mock them. The `execa` member's overload list below is emitted by the
 * compiler and mirrors the execa package's call signatures verbatim.
 */
export declare const gfs: {
    mkdir: typeof fs.mkdirSync;
    exists: typeof fs.existsSync;
    writeFile: typeof fs.writeFile.__promisify__;
    readdir: typeof fs.readdir.__promisify__;
    execa: {
        (file: string, arguments?: readonly string[] | undefined, options?: execa.Options<string> | undefined): execa.ExecaChildProcess<string>;
        (file: string, arguments?: readonly string[] | undefined, options?: execa.Options<null> | undefined): execa.ExecaChildProcess<Buffer>;
        (file: string, options?: execa.Options<string> | undefined): execa.ExecaChildProcess<string>;
        (file: string, options?: execa.Options<null> | undefined): execa.ExecaChildProcess<Buffer>;
        sync(file: string, arguments?: readonly string[] | undefined, options?: execa.SyncOptions<string> | undefined): execa.ExecaSyncReturnValue<string>;
        sync(file: string, arguments?: readonly string[] | undefined, options?: execa.SyncOptions<null> | undefined): execa.ExecaSyncReturnValue<Buffer>;
        sync(file: string, options?: execa.SyncOptions<string> | undefined): execa.ExecaSyncReturnValue<string>;
        sync(file: string, options?: execa.SyncOptions<null> | undefined): execa.ExecaSyncReturnValue<Buffer>;
        command(command: string, options?: execa.Options<string> | undefined): execa.ExecaChildProcess<string>;
        command(command: string, options?: execa.Options<null> | undefined): execa.ExecaChildProcess<Buffer>;
        commandSync(command: string, options?: execa.SyncOptions<string> | undefined): execa.ExecaSyncReturnValue<string>;
        commandSync(command: string, options?: execa.SyncOptions<null> | undefined): execa.ExecaSyncReturnValue<Buffer>;
        node(scriptPath: string, arguments?: readonly string[] | undefined, options?: execa.NodeOptions<string> | undefined): execa.ExecaChildProcess<string>;
        node(scriptPath: string, arguments?: readonly string[] | undefined, options?: execa.Options<null> | undefined): execa.ExecaChildProcess<Buffer>;
        node(scriptPath: string, options?: execa.Options<string> | undefined): execa.ExecaChildProcess<string>;
        node(scriptPath: string, options?: execa.Options<null> | undefined): execa.ExecaChildProcess<Buffer>;
    };
};
/**
 * Iterate over each API directory, and use the `compodoc` tool to generate
 * reference API documentation in the `docs` folder. This folder is ignored
 * in git, so a publish must be done with `npm run publish-docs`.
 *
 * To use this, run `npm run generate-docs`.
 */
export declare function main(): Promise<void>;

74
node_modules/googleapis/build/src/generator/docs.js generated vendored Normal file
View File

@@ -0,0 +1,74 @@
"use strict";
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
Object.defineProperty(exports, "__esModule", { value: true });
exports.main = exports.gfs = void 0;
const execa = require("execa");
const fs = require("fs");
const nunjucks = require("nunjucks");
const path = require("path");
const util_1 = require("util");
const p_queue_1 = require("p-queue");
// Paths resolved relative to the build output directory (build/src/generator).
const srcPath = path.join(__dirname, '../../../src');
const apiPath = path.join(srcPath, 'apis');
const templatePath = path.join(srcPath, 'generator/templates/index.html.njk');
const docsPath = path.join(__dirname, '../../../docs');
const indexPath = path.join(docsPath, 'index.html');
// fs/process helpers bundled in one exported object so tests can mock them.
exports.gfs = {
    mkdir: fs.mkdirSync,
    exists: fs.existsSync,
    writeFile: (0, util_1.promisify)(fs.writeFile),
    readdir: (0, util_1.promisify)(fs.readdir),
    execa,
};
/**
 * Iterate over each API directory, and use the `compodoc` tool to generate
 * reference API documentation in the `docs` folder. This folder is ignored
 * in git, so a publish must be done with `npm run publish-docs`.
 *
 * To use this, run `npm run generate-docs`.
 */
async function main() {
    if (!exports.gfs.exists(docsPath)) {
        exports.gfs.mkdir(docsPath);
    }
    // Every non-.ts entry under src/apis is an API directory.
    const entries = await exports.gfs.readdir(apiPath);
    const apiDirs = entries.filter(entry => !entry.endsWith('.ts'));
    // Render the landing page that links to every API's docs.
    const indexHtml = nunjucks.render(templatePath, { apis: apiDirs });
    await exports.gfs.writeFile(indexPath, indexHtml);
    const queue = new p_queue_1.default({ concurrency: 50 });
    console.log(`Generating docs for ${apiDirs.length} APIs...`);
    let completed = 0;
    const jobs = apiDirs.map(async (dir) => {
        // compodoc needs a large heap for the biggest generated API files.
        await queue.add(() => exports.gfs.execa(process.execPath, [
            '--max-old-space-size=8192',
            './node_modules/.bin/compodoc',
            `src/apis/${dir}`,
            '-d',
            `./docs/${dir}`,
        ]));
        completed++;
        console.log(`[${completed}/${apiDirs.length}] ${dir}`);
    });
    await Promise.all(jobs);
}
exports.main = main;
// Allow running directly via `npm run generate-docs`.
if (require.main === module) {
    // Surface async failures instead of dropping them as an unhandled
    // rejection (matches the generator.js entry point's main().catch).
    main().catch(console.error);
}

View File

@@ -0,0 +1,69 @@
import * as gapi from 'googleapis-common';
/** Arbitrary JSON object keyed by string. */
export declare type Schema = {
    [index: string]: {};
};
/** Root discovery index listing every public Google API. */
export declare const DISCOVERY_URL = "https://www.googleapis.com/discovery/v1/apis/";
/** One added/deleted/changed key detected between two discovery docs. */
export interface Change {
    action: 'ADDED' | 'DELETED' | 'CHANGED';
    keyName: string;
}
/** All key changes detected for a single API's discovery doc. */
export interface ChangeSet {
    changes: Change[];
    api: gapi.Schema;
}
export interface DownloadOptions {
    /** When true, include private/whitelisted APIs in the listing. */
    includePrivate?: boolean;
    /** Discovery index URL to fetch. */
    discoveryUrl: string;
    /** Directory where the JSON documents are written. */
    downloadPath: string;
}
/** fs wrappers, exported as one object so tests can mock disk access. */
export declare const gfs: {
    mkdir: (dir: string) => Promise<string | undefined>;
    writeFile: (path: string, obj: {}) => void;
    readFile: (path: string) => string;
};
/**
 * Download all discovery documents into the /discovery directory.
 * @param options
 */
export declare function downloadDiscoveryDocs(options: DownloadOptions): Promise<ChangeSet[]>;
/**
 * Determine if any of the changes in the discovery docs were interesting
 * @param newDoc New downloaded schema
 * @param oldDoc The existing schema from disk
 */
export declare function shouldUpdate(newDoc: {}, oldDoc: {}): boolean;
/**
 * Given an arbitrary object, recursively sort the properties on the object
 * by the name of the key. For example:
 * {
 *   b: 1,
 *   a: 2
 * }
 * becomes....
 * {
 *   a: 2,
 *   b: 1
 * }
 * @param obj Object to be sorted
 * @returns object with sorted keys
 */
export declare function sortKeys(obj: Schema): Schema;
/**
 * Get a diff between the two
 */
export declare function getDiffs(oldDoc: Schema, newDoc: Schema): Change[];
/**
 * Given a complex nested object, flatten the key paths so this:
 * {
 *   a: {
 *     b: 2
 *   },
 *   c: 3
 * }
 * becomes ...
 * {
 *   'a.b': 2
 *   c: 3
 * }
 */
export declare function flattenObject(doc: Schema, flat?: Schema, prefix?: string): Schema;

212
node_modules/googleapis/build/src/generator/download.js generated vendored Normal file
View File

@@ -0,0 +1,212 @@
"use strict";
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
Object.defineProperty(exports, "__esModule", { value: true });
exports.flattenObject = exports.getDiffs = exports.sortKeys = exports.shouldUpdate = exports.downloadDiscoveryDocs = exports.gfs = exports.DISCOVERY_URL = void 0;
const minimist = require("yargs-parser");
const path = require("path");
const fs = require("fs");
const p_queue_1 = require("p-queue");
const gaxios_1 = require("gaxios");
const mkdirp = require("mkdirp");
// Root discovery index listing every public Google API.
exports.DISCOVERY_URL = 'https://www.googleapis.com/discovery/v1/apis/';
// exported for mocking purposes
exports.gfs = {
    mkdir: async (dir) => mkdirp(dir),
    // Write pretty-printed JSON synchronously.
    writeFile: (path, obj) => {
        fs.writeFileSync(path, JSON.stringify(obj, null, 2));
    },
    readFile: (path) => {
        return fs.readFileSync(path, 'utf8');
    },
};
/**
 * Download all discovery documents into the /discovery directory.
 * @param options Source URL, target path, and whether private APIs should be
 *   included in the listing.
 * @returns One ChangeSet per API describing what changed relative to disk.
 */
async function downloadDiscoveryDocs(options) {
    await exports.gfs.mkdir(options.downloadPath);
    // Spoofing the caller IP hides private/whitelisted APIs from the listing.
    const headers = options.includePrivate
        ? {}
        : { 'X-User-Ip': '0.0.0.0' };
    console.log(`sending request to ${options.discoveryUrl}`);
    const res = await (0, gaxios_1.request)({ url: options.discoveryUrl, headers });
    const apis = res.data.items;
    const indexPath = path.join(options.downloadPath, 'index.json');
    exports.gfs.writeFile(indexPath, res.data);
    const queue = new p_queue_1.default({ concurrency: 25 });
    console.log(`Downloading ${apis.length} APIs...`);
    const changes = await queue.addAll(apis.map(api => async () => {
        console.log(`Downloading ${api.id}...`);
        const apiPath = path.join(options.downloadPath, api.id.replace(':', '-') + '.json');
        const url = api.discoveryRestUrl;
        const changeSet = { api, changes: [] };
        try {
            const res = await (0, gaxios_1.request)({ url });
            // The keys in the downloaded JSON come back in an arbitrary order from
            // request to request. Sort them before storing.
            const newDoc = sortKeys(res.data);
            let updateFile = true;
            try {
                const oldDoc = JSON.parse(await exports.gfs.readFile(apiPath));
                updateFile = shouldUpdate(newDoc, oldDoc);
                changeSet.changes = getDiffs(oldDoc, newDoc);
            }
            catch (_a) {
                // If the file doesn't exist, that's fine it's just new
            }
            if (updateFile) {
                exports.gfs.writeFile(apiPath, newDoc);
            }
        }
        catch (e) {
            // Log the error itself, not just the URL, so failures are diagnosable.
            console.error(`Error downloading: ${url}`, e);
        }
        return changeSet;
    }));
    return changes;
}
exports.downloadDiscoveryDocs = downloadDiscoveryDocs;
// Lines in the pretty-printed JSON that change on every fetch and should be
// ignored when comparing documents.
const ignoreLines = /^\s+"(?:etag|revision)": ".+"/;
/**
 * Determine if any of the changes in the discovery docs were interesting
 * @param newDoc New downloaded schema
 * @param oldDoc The existing schema from disk
 */
function shouldUpdate(newDoc, oldDoc) {
    // Serialize both docs, drop the volatile etag/revision lines, and compare
    // the remaining text.
    const normalize = (doc) => JSON.stringify(doc, null, 2)
        .split('\n')
        .filter(line => !ignoreLines.test(line))
        .join('\n');
    return normalize(newDoc) !== normalize(oldDoc);
}
exports.shouldUpdate = shouldUpdate;
/**
 * Given an arbitrary object, recursively sort the properties on the object
 * by the name of the key. For example:
 * {
 *   b: 1,
 *   a: 2
 * }
 * becomes....
 * {
 *   a: 2,
 *   b: 1
 * }
 * @param obj Object to be sorted
 * @returns object with sorted keys
 */
function sortKeys(obj) {
    const sorted = {};
    for (const key of Object.keys(obj).sort()) {
        const value = obj[key];
        // typeof [] === 'object' and typeof null === 'object'; only recurse
        // into plain objects. A null value previously crashed Object.keys().
        if (value !== null && !Array.isArray(value) && typeof value === 'object') {
            sorted[key] = sortKeys(value);
        }
        else {
            sorted[key] = value;
        }
    }
    return sorted;
}
exports.sortKeys = sortKeys;
/**
 * Get a diff between the two documents, reported as flattened key paths.
 * @param oldDoc Schema previously stored on disk
 * @param newDoc Freshly downloaded schema
 * @returns List of ADDED / DELETED / CHANGED keys (etag/revision excluded)
 */
function getDiffs(oldDoc, newDoc) {
    const changes = new Array();
    const flatOld = flattenObject(oldDoc);
    const flatNew = flattenObject(newDoc);
    // find deleted nodes
    Object.keys(flatOld).forEach(key => {
        if (!Object.prototype.hasOwnProperty.call(flatNew, key)) {
            changes.push({
                action: 'DELETED',
                keyName: key,
            });
        }
    });
    // find added nodes
    Object.keys(flatNew).forEach(key => {
        if (!Object.prototype.hasOwnProperty.call(flatOld, key)) {
            changes.push({
                action: 'ADDED',
                keyName: key,
            });
        }
    });
    // find updated nodes
    Object.keys(flatOld).forEach(key => {
        // Keys absent from the new doc were already reported as DELETED.
        // Check presence explicitly rather than truthiness: the previous
        // `if (newValue)` guard silently skipped values that changed to a
        // falsy value (false, 0, '').
        if (!Object.prototype.hasOwnProperty.call(flatNew, key)) {
            return;
        }
        let oldValue = flatOld[key];
        if (Array.isArray(oldValue)) {
            oldValue = oldValue.join(', ');
        }
        let newValue = flatNew[key];
        if (Array.isArray(newValue)) {
            newValue = newValue.join(', ');
        }
        if (newValue !== oldValue && key !== 'revision' && key !== 'etag') {
            changes.push({
                action: 'CHANGED',
                keyName: key,
            });
        }
    });
    return changes;
}
exports.getDiffs = getDiffs;
/**
 * Given a complex nested object, flatten the key paths so this:
 * {
 *   a: {
 *     b: 2
 *   },
 *   c: 3
 * }
 * becomes ...
 * {
 *   'a.b': 2
 *   c: 3
 * }
 */
function flattenObject(doc, flat = {}, prefix = '') {
    const newPrefix = prefix ? `${prefix}.` : '';
    for (const key of Object.keys(doc)) {
        const fullKey = newPrefix + key;
        const value = doc[key];
        // typeof null === 'object'; treat null as a leaf value instead of
        // recursing into it (Object.keys(null) throws a TypeError).
        if (value !== null && !Array.isArray(value) && typeof value === 'object') {
            flattenObject(value, flat, fullKey);
        }
        else {
            flat[fullKey] = value;
        }
    }
    return flat;
}
exports.flattenObject = flattenObject;
/**
 * Allow this file to be directly run via `npm run download`, or imported
 * and used by `generator.ts`
 */
if (require.main === module) {
    const argv = minimist(process.argv.slice(2));
    const discoveryUrl = argv['discovery-url'] || exports.DISCOVERY_URL;
    const downloadPath = argv['download-path'] || path.join(__dirname, '../../../discovery');
    // Surface async failures: the bare call previously left an unhandled
    // rejection and the process exited 0 on error.
    downloadDiscoveryDocs({ discoveryUrl, downloadPath }).catch(err => {
        console.error(err);
        process.exitCode = 1;
    });
}

View File

@@ -0,0 +1,28 @@
import { SchemaItem, SchemaMethod, SchemaParameters } from 'googleapis-common';
/** Render the inline TypeScript type for an object schema item. */
export declare function getObjectType(item: SchemaItem): string;
/** True when a rendered type string contains no inline object literal. */
export declare function isSimpleType(type: string): boolean;
/** Quote property names containing `-`, `@`, or `.` so they stay valid TS. */
export declare function cleanPropertyName(prop: string): string;
/** Camel-case a dash-separated name, e.g. `well-known` => `wellKnown`. */
export declare function camelify(name: string): string;
/** Map a discovery schema item to its TypeScript type string. */
export declare function getType(item: SchemaItem): string;
/**
 * Clean a string of comment tags.
 * @param str - String to process
 * @return Single line string processed
 */
export declare function cleanComments(str?: string): string;
/** Names of all parameters whose `location` is 'path'. */
export declare function getPathParams(params: SchemaParameters): string[];
/** Suffix `_` onto parameter names that collide with reserved names. */
export declare function getSafeParamName(param: string): string;
/** True when the method declares a parameter named 'resource'. */
export declare function hasResourceParam(method: SchemaMethod): boolean;
/**
 * Build a string used to create a URL from the discovery doc provided URL.
 * replace double slashes with single slash (except in https://)
 * @private
 * @param input URL to build from
 * @return Resulting built URL
 */
export declare function buildurl(input?: string): string;
/**
 * Attempt to turn a regex into a more human readable form.
 * @param regex pattern for the given parameter
 */
export declare function unRegex(regex: string): string;

158
node_modules/googleapis/build/src/generator/filters.js generated vendored Normal file
View File

@@ -0,0 +1,158 @@
"use strict";
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
Object.defineProperty(exports, "__esModule", { value: true });
exports.unRegex = exports.buildurl = exports.hasResourceParam = exports.getSafeParamName = exports.getPathParams = exports.cleanComments = exports.getType = exports.camelify = exports.cleanPropertyName = exports.isSimpleType = exports.getObjectType = void 0;
/**
 * Render the inline TypeScript type for an object schema: an index-signature
 * map when additionalProperties is set, an inline interface when properties
 * exist, and `any` otherwise.
 */
function getObjectType(item) {
    if (item.additionalProperties) {
        return `{ [key: string]: ${getType(item.additionalProperties)}; }`;
    }
    if (item.properties) {
        const props = item.properties;
        const members = Object.keys(props)
            .map(name => `${cleanPropertyName(name)}?: ${getType(props[name])};`)
            .join(' ');
        return `{ ${members} }`;
    }
    return 'any';
}
exports.getObjectType = getObjectType;
// A rendered type is "simple" when it contains no inline object literal.
function isSimpleType(type) {
    return type.indexOf('{') === -1;
}
exports.isSimpleType = isSimpleType;
// Quote property names containing characters invalid in TS identifiers.
function cleanPropertyName(prop) {
    return /[-@.]/.test(prop) ? `'${prop}'` : prop;
}
exports.cleanPropertyName = cleanPropertyName;
/**
 * Camel-case a dash-separated name.
 * Ex: `well-known` => `wellKnown`. Names without a dash pass through.
 */
function camelify(name) {
    if (!name.includes('-')) {
        return name;
    }
    return name
        .split('-')
        .filter(segment => !!segment)
        .map((segment, index) => index === 0
        ? segment
        : segment.charAt(0).toUpperCase() + segment.slice(1))
        .join('');
}
exports.camelify = camelify;
/**
 * Map a discovery schema item to its TypeScript type string: `$ref` becomes
 * a generated Schema$ type, and integer/object/array map to their TS forms.
 */
function getType(item) {
    if (item.$ref) {
        return `Schema$${item.$ref}`;
    }
    if (item.type === 'integer') {
        return 'number';
    }
    if (item.type === 'object') {
        return getObjectType(item);
    }
    if (item.type === 'array') {
        const innerType = getType(item.items);
        // `T[]` reads better for simple types; inline objects need Array<>.
        return isSimpleType(innerType) ? `${innerType}[]` : `Array<${innerType}>`;
    }
    return item.type;
}
exports.getType = getType;
/**
 * Clean a string of comment tags.
 * @param str - String to process
 * @return Single line string processed
 */
function cleanComments(str) {
    if (!str) {
        return '';
    }
    // Convert /* into /x and */ into x/, and escape } and > so the text can
    // neither terminate a surrounding comment nor break the template markup.
    const substitutions = [
        [/\*\//g, 'x/'],
        [/\/\*/g, '/x'],
        [/}/g, '\\}'],
        [/>/g, '\\>'],
    ];
    return substitutions.reduce((out, [pattern, replacement]) => out.replace(pattern, replacement), str);
}
exports.cleanComments = cleanComments;
/**
 * Collect the names of all parameters whose `location` is 'path'.
 * @param params Parameter map from the discovery doc (may be absent/null)
 * @returns Names of the path parameters, in key order
 */
function getPathParams(params) {
    // typeof null === 'object', so the old typeof-only check let null through
    // and Object.keys(null) threw. Guard both cases.
    if (!params || typeof params !== 'object') {
        params = {};
    }
    const pathParams = new Array();
    Object.keys(params).forEach(key => {
        if (params[key].location === 'path') {
            pathParams.push(key);
        }
    });
    return pathParams;
}
exports.getPathParams = getPathParams;
// Suffix '_' onto parameter names that collide with RESERVED_PARAMS so the
// generated method signatures stay unambiguous.
function getSafeParamName(param) {
    return RESERVED_PARAMS.includes(param) ? param + '_' : param;
}
exports.getSafeParamName = getSafeParamName;
// True when the method declares a parameter literally named 'resource'.
function hasResourceParam(method) {
    return Boolean(method.parameters && method.parameters['resource']);
}
exports.hasResourceParam = hasResourceParam;
// Parameter names reserved by the generated client surface.
const RESERVED_PARAMS = ['resource', 'media', 'auth'];
/**
 * Build a string used to create a URL from the discovery doc provided URL.
 * replace double slashes with single slash (except in https://)
 * @private
 * @param input URL to build from
 * @return Resulting built URL
 */
function buildurl(input) {
    if (!input) {
        return '';
    }
    // Quote the URL, then collapse slash runs not preceded by ':'.
    return `'${input}'`.replace(/([^:]\/)\/+/g, '$1');
}
exports.buildurl = buildurl;
/**
 * Attempt to turn a regex into a more human readable form.
 * @param regex pattern for the given parameter
 */
function unRegex(regex) {
    // example: ^projects/[^/]+$' ==> projects/my-project
    if (typeof regex !== 'string') {
        return '';
    }
    let pattern = regex;
    if (pattern.startsWith('^')) {
        pattern = pattern.slice(1); // drop the leading anchor
    }
    if (pattern.endsWith('$')) {
        pattern = pattern.slice(0, -1); // drop the trailing anchor
    }
    // Replace `<name>s/[^/]+` placeholders with `<name>s/my-<name>`.
    return pattern.replace(/\^?(\w+)s\/\[\^\/\]\+\$?/g, '$1s/my-$1');
}
exports.unRegex = unRegex;

View File

@@ -0,0 +1,49 @@
import { Schema } from 'googleapis-common';
import { ChangeSet } from './download';
export interface GeneratorOptions {
    /** Log verbose generator progress output. */
    debug?: boolean;
    /** Include private/whitelisted APIs when downloading discovery docs. */
    includePrivate?: boolean;
}
/** Data rendered into each generated package.json. */
interface PkgData {
    name: string;
    version: string;
    desc: string;
}
export declare class Generator {
    private env;
    private options;
    private state;
    /**
     * Generator for generating API endpoints
     * @param options Options for generation
     */
    constructor(options?: GeneratorOptions);
    /**
     * Log output of generator. Works just like console.log.
     */
    private log;
    /**
     * Write to the state log, which is used for debugging.
     * @param id DiscoveryRestUrl of the endpoint to log
     * @param message
     */
    private logResult;
    /**
     * Generate all APIs and write to files.
     */
    generateAllAPIs(discoveryUrl: string, useCache: boolean): Promise<ChangeSet[]>;
    /** Regenerate per-API support files and the top-level index files. */
    generateIndex(metadata: Schema[], directory?: string): Promise<void>;
    /** Resolve the package.json path and contents for one API folder. */
    getPkgPathAndData(apisPath: string, file: string, desc: string, defaultVersion?: string): Promise<[string, PkgData]>;
    /**
     * Generate API file given discovery URL
     * @param apiDiscoveryUri URL or filename of discovery doc for API
     */
    generateAPI(apiDiscoveryUrl: string): Promise<string>;
    private generate;
    /**
     * Render a nunjucks template, format it, and write to disk
     */
    private render;
    /** Write release-please config + manifest for all releasable APIs. */
    generateReleasePleaseConfig(): Promise<void>;
}
export {};

View File

@@ -0,0 +1,313 @@
"use strict";
// Copyright 2014 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
Object.defineProperty(exports, "__esModule", { value: true });
exports.Generator = void 0;
const fs = require("fs");
const mkdirp = require("mkdirp");
const nunjucks = require("nunjucks");
const path = require("path");
const util = require("util");
const p_queue_1 = require("p-queue");
const prettier = require("prettier");
const minimist = require("yargs-parser");
const gaxios_1 = require("gaxios");
const download_1 = require("./download");
const download_2 = require("./download");
const filters = require("./filters");
const samplegen_1 = require("./samplegen");
// Promisified fs helpers used throughout generation.
const writeFile = util.promisify(fs.writeFile);
const readDir = util.promisify(fs.readdir);
const readFile = util.promisify(fs.readFile);
const stat = util.promisify(fs.stat);
// Paths resolved relative to the build output directory (build/src/generator).
const srcPath = path.join(__dirname, '../../../src');
const TEMPLATES_DIR = path.join(srcPath, 'generator/templates');
const API_TEMPLATE = path.join(TEMPLATES_DIR, 'api-endpoint.njk');
// eslint-disable-next-line @typescript-eslint/no-var-requires
const disclaimers = require('../../../disclaimers.json');
class Generator {
    /**
     * Generator for generating API endpoints
     * @param options Options for generation
     */
    constructor(options = {}) {
        // Per-API debug trail, keyed by discoveryRestUrl; see logResult().
        this.state = new Map();
        this.options = options;
        this.env = new nunjucks.Environment(new nunjucks.FileSystemLoader(TEMPLATES_DIR), { trimBlocks: true });
        // Register the filters available inside the nunjucks templates.
        this.env.addFilter('buildurl', filters.buildurl);
        this.env.addFilter('getType', filters.getType);
        this.env.addFilter('cleanPropertyName', filters.cleanPropertyName);
        this.env.addFilter('cleanComments', filters.cleanComments);
        this.env.addFilter('camelify', filters.camelify);
        this.env.addFilter('getPathParams', filters.getPathParams);
        this.env.addFilter('getSafeParamName', filters.getSafeParamName);
        this.env.addFilter('hasResourceParam', filters.hasResourceParam);
    }
    /**
     * Log output of generator. Works just like console.log.
     */
    log(...args) {
        if (this.options && this.options.debug) {
            console.log(...args);
        }
    }
    /**
     * Write to the state log, which is used for debugging.
     * @param id DiscoveryRestUrl of the endpoint to log
     * @param message
     */
    logResult(id, message) {
        if (!this.state.has(id)) {
            this.state.set(id, new Array());
        }
        this.state.get(id).push(message);
    }
    /**
     * Generate all APIs and write to files.
     * @param discoveryUrl Discovery listing to download (skipped when cached)
     * @param useCache When true, reuse the discovery/ directory on disk
     * @returns Changes detected while downloading (empty when using cache)
     */
    async generateAllAPIs(discoveryUrl, useCache) {
        const ignore = require('../../../ignore.json').ignore;
        const discoveryPath = path.join(__dirname, '../../../discovery');
        let changes = new Array();
        if (useCache) {
            console.log('Reading from cache...');
        }
        else {
            changes = await (0, download_2.downloadDiscoveryDocs)({
                includePrivate: this.options.includePrivate,
                discoveryUrl,
                downloadPath: discoveryPath,
            });
        }
        const indexPath = path.join(discoveryPath, 'index.json');
        const file = await readFile(indexPath, 'utf8');
        const apis = JSON.parse(file).items;
        const queue = new p_queue_1.default({ concurrency: 50 });
        console.log(`Generating ${apis.length} APIs...`);
        await queue.addAll(apis.map(api => async () => {
            // look at ignore.json to find a list of APIs to ignore
            if (ignore.includes(api.id)) {
                this.log(`Skipping API ${api.id}`);
                return;
            }
            this.log(`Generating API for ${api.id}...`);
            this.logResult(api.discoveryRestUrl, 'Attempting first generateAPI call...');
            try {
                const apiPath = path.join(discoveryPath, api.id.replace(':', '-') + '.json');
                await this.generateAPI(apiPath);
                this.logResult(api.discoveryRestUrl, 'GenerateAPI call success!');
            }
            catch (e) {
                // A failure is logged (with the per-API state trail) but does
                // not abort generation of the remaining APIs.
                this.logResult(api.discoveryRestUrl, `GenerateAPI call failed with error: ${e}, moving on.`);
                console.error(`Failed to generate API: ${api.id}`);
                console.error(e);
                console.log(api.id +
                    '\n-----------\n' +
                    util.inspect(this.state.get(api.discoveryRestUrl), {
                        maxArrayLength: null,
                    }) +
                    '\n');
            }
        }));
        await this.generateIndex(apis);
        return changes;
    }
    /**
     * Regenerate the per-API support files (index.ts, package.json,
     * README.md, tsconfig.json, webpack.config.js) and the top-level index
     * files for every API folder under src/apis.
     * @param metadata Discovery index entries, used for API descriptions
     * @param directory When set, only regenerate that one API folder
     */
    async generateIndex(metadata, directory) {
        var _a;
        const apis = {};
        const apisPath = path.join(srcPath, 'apis');
        const indexPath = path.join(apisPath, 'index.ts');
        const rootIndexPath = path.join(apisPath, '../', 'index.ts');
        // Dynamically discover available APIs
        const files = await readDir(apisPath);
        for (const file of files) {
            const filePath = path.join(apisPath, file);
            if (!(await stat(filePath)).isDirectory()) {
                continue;
            }
            if (directory && file !== directory) {
                continue;
            }
            apis[file] = {};
            const files = await readDir(path.join(apisPath, file));
            for (const version of files) {
                const parts = path.parse(version);
                // Only consider version sources (e.g. v1.ts) — skip index.ts
                // and declaration files.
                if (!version.endsWith('.d.ts') &&
                    parts.ext === '.ts' &&
                    version !== 'index.ts') {
                    apis[file][version] = parts.name;
                    const desc = (_a = metadata.find(x => x.name === file)) === null || _a === void 0 ? void 0 : _a.description;
                    // generate the index.ts
                    const apiIdxPath = path.join(apisPath, file, 'index.ts');
                    const apiIndexData = { name: file, api: apis[file] };
                    await this.render('api-index.njk', apiIndexData, apiIdxPath);
                    // generate the package.json
                    const [pkgPath, pkgData] = await this.getPkgPathAndData(apisPath, file, desc || '');
                    await this.render('package.json', pkgData, pkgPath);
                    // generate the README.md
                    const rdPath = path.join(apisPath, file, 'README.md');
                    const disclaimer = disclaimers.find(disclaimer => {
                        return disclaimer.api === file;
                    });
                    await this.render('README.md.njk', { name: file, desc, disclaimer }, rdPath);
                    // generate the tsconfig.json
                    const tsPath = path.join(apisPath, file, 'tsconfig.json');
                    await this.render('tsconfig.json.njk', {}, tsPath);
                    // generate the webpack.config.js
                    const wpPath = path.join(apisPath, file, 'webpack.config.js');
                    await this.render('webpack.config.js.njk', { name: file }, wpPath);
                }
            }
        }
        if (directory) {
            return;
        }
        await this.render('index.njk', { apis }, indexPath);
        await this.render('root-index.njk', { apis }, rootIndexPath);
    }
    /**
     * Resolve the package.json path and contents for one API folder,
     * preserving the version of an existing package.json when present.
     * @param apisPath Root src/apis directory
     * @param file API folder name
     * @param desc Package description
     * @param defaultVersion Version used when no package.json exists yet
     */
    async getPkgPathAndData(apisPath, file, desc, defaultVersion = '0.1.0') {
        const pkgPath = path.join(apisPath, file, 'package.json');
        const packageData = { name: file, desc, version: defaultVersion };
        // Use the version from the existing package.json, if possible:
        try {
            const pkgRaw = await readFile(pkgPath, 'utf8');
            const pkg = JSON.parse(pkgRaw);
            packageData.version = pkg.version;
        }
        catch (err) {
            // A missing file falls back to defaultVersion; anything else
            // (e.g. malformed JSON) is a real error.
            if (err.code === 'ENOENT') {
                console.info(`${pkgPath} not found`);
            }
            else {
                throw err;
            }
        }
        return [pkgPath, packageData];
    }
    /**
     * Generate API file given discovery URL
     * @param apiDiscoveryUri URL or filename of discovery doc for API
     */
    async generateAPI(apiDiscoveryUrl) {
        const isUrl = apiDiscoveryUrl.startsWith('https://');
        let filePath;
        if (!isUrl) {
            this.log(`Reading from file ${path.relative('.', apiDiscoveryUrl)}`);
            const file = await readFile(apiDiscoveryUrl, 'utf-8');
            filePath = await this.generate(apiDiscoveryUrl, JSON.parse(file));
        }
        else {
            this.log(`Reading from url ${apiDiscoveryUrl}`);
            const res = await (0, gaxios_1.request)({ url: apiDiscoveryUrl });
            filePath = await this.generate(apiDiscoveryUrl, res.data);
        }
        return filePath;
    }
    /**
     * Render one API's schema to src/apis/<name>/<version>.ts.
     * @param apiDiscoveryUrl Identifier used for the debug state trail
     * @param schema Parsed discovery document
     * @returns Path of the generated file
     */
    async generate(apiDiscoveryUrl, schema) {
        this.logResult(apiDiscoveryUrl, 'Generating APIs...');
        const apiPath = path.join(srcPath, 'apis', schema.name);
        const exportFilename = path.join(apiPath, schema.version + '.ts');
        await mkdirp(path.dirname(exportFilename));
        // populate the `method.fragment` property with samples
        (0, samplegen_1.addFragments)(schema);
        // generate the API (ex: src/apis/youtube/v3.ts)
        await this.render(API_TEMPLATE, { api: schema }, exportFilename);
        // generate samples on disk at:
        // src/apis/<service>/samples/<version>/<method>
        // generateSamples(apiPath, schema);
        this.logResult(apiDiscoveryUrl, 'Template generation complete.');
        return exportFilename;
    }
    /**
     * Render a nunjucks template, format it, and write to disk
     */
    async render(templatePath, data, outputPath) {
        let output = this.env.render(templatePath, data);
        const ext = path.extname(outputPath);
        // Only JS/TS output is run through prettier; JSON/MD is written as-is.
        if (ext === '.js' || ext === '.ts') {
            output = prettier.format(output, {
                bracketSpacing: false,
                singleQuote: true,
                trailingComma: 'es5',
                arrowParens: 'avoid',
                parser: 'typescript',
            });
        }
        await writeFile(outputPath, output, { encoding: 'utf8' });
    }
    /**
     * Write release-please-config.json and .release-please-manifest.json for
     * every releasable API (those without a disclaimer) plus the root package.
     */
    async generateReleasePleaseConfig() {
        /*
        1. pull in disclaimers (files we're not going to generate)
        2. list all folders inside apis directory
        3. find the delta from 2 - 1
        4. fill out bootstrap sha
        */
        const disclaimers = require('../../../disclaimers.json');
        const excludedAPIs = disclaimers.map(x => x.api);
        const apis = fs.readdirSync(path.join(srcPath, 'apis'), {
            withFileTypes: true,
        });
        const releasableAPIs = apis
            .filter(e => e.isDirectory() && !excludedAPIs.includes(e.name))
            .map(x => x.name);
        const rootPath = path.join(__dirname, '../../../');
        // Bootstrap sha is used the first time the releaser runs when it grabs the initial commits
        // Afterwards, it uses the most recent release as a starting point
        const releasePleaseConfig = {
            'bootstrap-sha': '6e61af34c0bfdfc3d6f973bffcd6a7e2420590d2',
            packages: {},
        };
        const releasePleaseManifest = {};
        for (const api of releasableAPIs) {
            releasePleaseConfig.packages[`src/apis/${api}`] = {};
            releasePleaseManifest[`src/apis/${api}`] =
                require(`../../../src/apis/${api}/package.json`).version;
        }
        // Include the root library in the config:
        releasePleaseManifest['.'] = require('../../../package.json').version;
        releasePleaseConfig.packages['.'] = {};
        fs.writeFileSync(path.resolve(rootPath, './release-please-config.json'), JSON.stringify(releasePleaseConfig, null, 2), 'utf8');
        fs.writeFileSync(path.resolve(rootPath, './.release-please-manifest.json'), JSON.stringify(releasePleaseManifest, null, 2), 'utf8');
    }
}
exports.Generator = Generator;
/**
 * CLI entry point. With positional URL arguments, generates each named API
 * (and its per-service index); otherwise regenerates all APIs from the
 * discovery URL and refreshes the release-please config files.
 */
async function main() {
    const argv = minimist(process.argv.slice(2));
    const discoveryUrl = argv['discovery-url'];
    const useCache = argv['use-cache'];
    const includePrivate = argv['include-private'];
    console.log(`useCache: ${useCache}`);
    console.log(`includePrivate: ${includePrivate}`);
    const gen = new Generator({ debug: true, includePrivate });
    if (!discoveryUrl && argv._.length > 0) {
        // BUG FIX: previously `argv._.forEach(async url => …)` — forEach never
        // awaits an async callback, so main() resolved before any generation
        // finished and rejections escaped as unhandled. Await each URL in turn.
        for (const url of argv._) {
            console.log(`Generating API for ${url}`);
            const filePath = await gen.generateAPI('' + url);
            const filePathParts = filePath.split('/');
            await gen.generateIndex([], filePathParts[filePathParts.length - 2]);
            console.log('Generated API for ' + url);
        }
    }
    else {
        console.log('Generating APIs...');
        await gen.generateAllAPIs(discoveryUrl || download_1.DISCOVERY_URL, useCache);
        // Re-generates release-please manifest and config files
        console.log('Generating .release-please-manifest.json and release-please-config.json');
        // BUG FIX: generateReleasePleaseConfig is async; await it so main()
        // does not report success before the config files are written.
        await gen.generateReleasePleaseConfig();
        console.log('Finished generating APIs!');
    }
}
// Allow direct invocation as a CLI (`node build/src/generator/generator`);
// failures are logged to stderr rather than rethrown.
if (require.main === module) {
    main().catch(console.error);
}

View File

@@ -0,0 +1,25 @@
import { Schema, SchemaMethod, SchemaMethods, SchemaResources } from 'googleapis-common';
/**
 * Given a top level Schema, collect every method on all resource objects.
 * Generate a sample, format it, and attach to the `method.fragment` field.
 * @param schema Top level schema for the API.
 */
export declare function addFragments(schema: Schema): Promise<void>;
/**
 * Generate all samples, and write them into the samples folder on disk.
 * @param apiPath Location on disk where the API lives.
 * @param schema The top level Schema containing API information.
 */
export declare function generateSamples(apiPath: string, schema: Schema): Promise<void>;
/**
 * Internal shape used during traversal: a schema node that may carry
 * methods and/or nested resources. Not exported (see `export {}` below).
 */
interface MethodBag {
    methods?: SchemaMethods;
    resources?: SchemaResources;
}
/**
 * Iterate over items in the schema recursively, and return a flattened
 * list of all methods.
 * @param bag Node that may contain methods and nested resources.
 * @param methods Accumulator used during recursion; callers normally omit it.
 */
export declare function getAllMethods(bag: MethodBag, methods?: SchemaMethod[]): SchemaMethod[];
export {};

View File

@@ -0,0 +1,142 @@
"use strict";
// Copyright 2020 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
Object.defineProperty(exports, "__esModule", { value: true });
exports.getAllMethods = exports.generateSamples = exports.addFragments = void 0;
const path = require("path");
const mkdirp = require("mkdirp");
const prettier = require("prettier");
const nunjucks = require("nunjucks");
const filters = require("./filters");
const fs = require("fs");
const util = require("util");
// Promise-returning wrapper around fs.writeFile for use with async/await.
const writeFile = util.promisify(fs.writeFile);
// Root of the TypeScript sources, resolved relative to the build output dir.
const srcPath = path.join(__dirname, '../../../src');
const TEMPLATES_DIR = path.join(srcPath, 'generator/templates');
// Reuse the repo's gts prettier settings for formatting samples, but switch
// the parser to babel since samples are emitted as plain JavaScript files.
// eslint-disable-next-line @typescript-eslint/no-var-requires
const prettierConfig = require('../../../node_modules/gts/.prettierrc.json');
prettierConfig.parser = 'babel';
// Shared nunjucks environment used to render sample.njk, with the project's
// custom filters registered from ./filters.
const env = new nunjucks.Environment(new nunjucks.FileSystemLoader(TEMPLATES_DIR), { trimBlocks: true });
env.addFilter('unRegex', filters.unRegex);
env.addFilter('cleanPropertyName', filters.cleanPropertyName);
env.addFilter('cleanComments', filters.cleanComments);
/**
 * Walk every method reachable from the schema, render an inline usage
 * sample for it, and attach the prettier-formatted snippet to
 * `method.fragment`.
 * @param schema Top level schema for the API.
 */
async function addFragments(schema) {
    for (const method of getAllMethods(schema)) {
        const context = getSample(schema, method);
        // Inline fragments are rendered in non-standalone mode.
        context.standalone = false;
        const rendered = env.render('sample.njk', context);
        method.fragment = prettier.format(rendered, prettierConfig);
    }
}
exports.addFragments = addFragments;
/**
 * Render a standalone sample for each API method and write it to
 * `<apiPath>/samples/<version>/<method.id>.js`.
 * @param apiPath Location on disk where the API lives.
 * @param schema The top level Schema containing API information.
 */
async function generateSamples(apiPath, schema) {
    const outDir = path.join(apiPath, 'samples', schema.version);
    await mkdirp(outDir);
    for (const method of getAllMethods(schema)) {
        const context = getSample(schema, method);
        // Standalone mode renders a complete runnable sample file.
        context.standalone = true;
        const rendered = env.render('sample.njk', context);
        const formatted = prettier.format(rendered, prettierConfig);
        await writeFile(path.join(outDir, `${method.id}.js`), formatted, {
            encoding: 'utf8',
        });
    }
}
exports.generateSamples = generateSamples;
/**
 * Build the template context for one method's sample: the API schema, the
 * method, and example request/response payloads flattened from the schema
 * referenced by the method's `$ref` (undefined when no request/response).
 */
function getSample(schema, method) {
    const exampleFor = ref => ref ? flattenSchema(schema.schemas[ref.$ref], schema.schemas) : undefined;
    return {
        api: schema,
        method,
        responseExample: exampleFor(method.response),
        requestExample: exampleFor(method.request),
    };
}
/**
 * Recursively collect every method reachable from the given node,
 * returning them as one flat array.
 * @param bag Node that may carry `methods` and nested `resources`.
 * @param methods Accumulator used during recursion; callers normally omit it.
 */
function getAllMethods(bag, methods) {
    const acc = methods || [];
    if (bag.methods) {
        acc.push(...Object.values(bag.methods));
    }
    if (bag.resources) {
        for (const child of Object.values(bag.resources)) {
            getAllMethods(child, acc);
        }
    }
    return acc;
}
exports.getAllMethods = getAllMethods;
/**
 * Provide a flattened representation of what the structure for a
 * given request or response could look like: one placeholder value
 * per declared property.
 */
function flattenSchema(item, schemas) {
    const example = {};
    for (const [name, details] of Object.entries(item.properties || {})) {
        example[name] = getExamplePropertyValue(name, details, schemas);
    }
    return example;
}
/**
 * Produce a placeholder example value for a single schema property, keyed
 * off the property's declared type. Unrecognized types (including 'object')
 * yield a fresh empty object.
 */
function getExamplePropertyValue(name, details,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
schemas) {
    if (details.type === 'string') {
        return `my_${name}`;
    }
    if (details.type === 'boolean') {
        return false;
    }
    if (details.type === 'integer') {
        return 0;
    }
    if (details.type === 'array') {
        return [];
    }
    // 'object' and any unknown type both fall through to an empty object.
    return {};
}

28
node_modules/googleapis/build/src/generator/synth.d.ts generated vendored Normal file
View File

@@ -0,0 +1,28 @@
import { ChangeSet } from './download';
/**
 * Semver impact of a change; larger numeric values mean a bigger release
 * (PATCH < MINOR < MAJOR).
 */
export declare enum Semverity {
    PATCH = 1,
    MINOR = 2,
    MAJOR = 3
}
/**
 * A rendered changelog entry along with its computed semver impact.
 */
export interface Changelog {
    title: string;
    description: string;
    semverity: Semverity;
}
export interface SynthOptions {
    /** When true, skip regenerating the APIs and reuse what is on disk. */
    useCache?: boolean;
}
/**
 * Regenerate the APIs, commit per-API changes on a working branch, and open
 * a pull request against the upstream repository.
 */
export declare function synth(options?: SynthOptions): Promise<void>;
/**
 * Given a set of changes, generate a changelog.
 */
export declare function createChangelog(changeSets: ChangeSet[]): {
    semverity: Semverity;
    changelog: string;
};
/** Conventional-commit prefix for the given semverity. */
export declare function getPrefix(semverity: Semverity): "fix" | "feat";
/**
 * Given a set of changes, figure out if the total
 * changeset is semver patch, minor, or major.
 */
export declare function getSemverity(changeSets: ChangeSet[]): Semverity;

197
node_modules/googleapis/build/src/generator/synth.js generated vendored Normal file
View File

@@ -0,0 +1,197 @@
"use strict";
// Copyright 2019 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
Object.defineProperty(exports, "__esModule", { value: true });
exports.getSemverity = exports.getPrefix = exports.createChangelog = exports.synth = exports.Semverity = void 0;
const execa = require("execa");
const path = require("path");
const fs = require("fs");
const gaxios = require("gaxios");
const minimist = require("yargs-parser");
const generator_1 = require("./generator");
const download_1 = require("./download");
// Compiled form of the TypeScript `enum Semverity`. Numeric values are
// ordered so that a larger value means a bigger semver impact
// (PATCH=1 < MINOR=2 < MAJOR=3); getSemverity relies on this ordering.
var Semverity;
(function (Semverity) {
    Semverity[Semverity["PATCH"] = 1] = "PATCH";
    Semverity[Semverity["MINOR"] = 2] = "MINOR";
    Semverity[Semverity["MAJOR"] = 3] = "MAJOR";
})(Semverity = exports.Semverity || (exports.Semverity = {}));
/**
 * End-to-end regeneration pipeline: regenerate the APIs (unless useCache),
 * commit one conventional-commit per changed API onto the `autodisco`
 * branch, push it, and open a pull request via the GitHub API.
 * Requires GITHUB_TOKEN and CODE_BOT_TOKEN in the environment.
 * NOTE(review): has heavy side effects (git state, network) and is
 * order-sensitive throughout.
 */
async function synth(options = {}) {
    var _a, _b, _c, _d, _e;
    const gen = new generator_1.Generator();
    let changeSets = [];
    if (!options.useCache) {
        console.log('Removing old APIs...');
        changeSets = await gen.generateAllAPIs(download_1.DISCOVERY_URL, false);
    }
    // Porcelain output lines look like "XY path"; slice(3) keeps just the path.
    const statusResult = await execa('git', ['status', '--porcelain']);
    const status = statusResult.stdout;
    const statusFiles = status.split('\n').map(x => x.slice(3));
    const apiDir = path.resolve('./src/apis');
    const files = fs.readdirSync(apiDir);
    const githubToken = process.env.GITHUB_TOKEN;
    if (!githubToken) {
        throw new Error('please include a GITHUB_TOKEN');
    }
    const codeBotToken = process.env.CODE_BOT_TOKEN;
    if (!codeBotToken) {
        throw new Error('please include a CODE_BOT_TOKEN');
    }
    // only set these while running in the GitHub Actions environment
    if (process.env.GITHUB_ACTIONS) {
        await execa('git', ['config', 'user.email', 'yoshi-automation@google.com']);
        await execa('git', ['config', 'user.name', 'Yoshi Automation']);
    }
    // API directories that have at least one modified file under src/apis/<f>/.
    const dirs = files.filter(f => {
        return (fs.statSync(path.join(apiDir, f)).isDirectory() &&
            statusFiles.filter(x => x.startsWith(`src/apis/${f}/`)).length > 0);
    });
    console.log(`Changes found in ${dirs.length} APIs`);
    const branch = 'autodisco';
    const changelogs = new Array();
    // Tracks the worst semverity across all APIs; drives the PR title prefix.
    let totalSemverity = 0;
    await execa('git', ['checkout', '-B', branch]);
    for (const dir of dirs) {
        const apiChangeSets = changeSets.filter(x => x.api.name === dir);
        const { semverity, changelog } = createChangelog(apiChangeSets);
        changelogs.push(changelog);
        if (semverity > totalSemverity) {
            totalSemverity = semverity;
        }
        const prefix = getPrefix(semverity);
        // Conventional commits mark breaking changes with a trailing "!".
        const postfix = semverity === Semverity.MAJOR ? '!' : '';
        console.log(`Submitting change for ${dir}...`);
        const title = `${prefix}(${dir})${postfix}: update the API`;
        await execa('git', ['add', path.join('src/apis', dir)]);
        // Also stage the cached discovery docs for this API, if any changed.
        if (statusFiles.filter(x => x.startsWith(`discovery/${dir}-`)).length > 0) {
            await execa('git', ['add', `discovery/${dir}-*`]);
        }
        // Write commit message to file, since it might be large enough to
        // cause spawn E2BIG in CI/CD:
        const message = changelog ? `${title}\n\n${changelog}` : title;
        fs.writeFileSync('message.txt', message, 'utf8');
        const commitParams = ['commit', '-F', 'message.txt'];
        await execa('git', commitParams);
        fs.unlinkSync('message.txt');
    }
    // Sweep up everything remaining (index files etc.) into one final commit.
    await execa('git', ['add', '-A']);
    await execa('git', ['commit', '-m', 'feat: regenerate index files']);
    const prefix = getPrefix(totalSemverity);
    await execa('git', ['push', 'origin', branch, '--force']);
    try {
        // Open the pull request with the YOSHI_CODE_BOT_TOKEN
        await gaxios.request({
            method: 'POST',
            headers: {
                Authorization: `token ${codeBotToken}`,
            },
            url: 'https://api.github.com/repos/googleapis/google-api-nodejs-client/pulls',
            data: {
                title: `${prefix}: run the generator`,
                head: branch,
                base: 'main',
                // Truncated to stay under GitHub's PR body size limit.
                body: changelogs.join('\n\n').slice(0, 65000),
            },
        });
    }
    catch (e) {
        // Surface any structured API error details before rethrowing.
        if ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) {
            console.error((_b = e.response) === null || _b === void 0 ? void 0 : _b.data);
            if ((_d = (_c = e.response) === null || _c === void 0 ? void 0 : _c.data) === null || _d === void 0 ? void 0 : _d.errors) {
                for (const err of (_e = e.response) === null || _e === void 0 ? void 0 : _e.data.errors) {
                    console.error(err);
                }
            }
        }
        throw e;
    }
    await execa('git', ['checkout', 'main']);
}
exports.synth = synth;
/**
 * Given a set of changes, generate a markdown changelog grouped per API and
 * per action (DELETED/ADDED/CHANGED), plus the overall semver impact.
 */
function createChangelog(changeSets) {
    const semverity = getSemverity(changeSets);
    const lines = [];
    if (semverity === Semverity.MAJOR) {
        lines.push('BREAKING CHANGE: This release has breaking changes.');
    }
    for (const changeSet of changeSets) {
        if (changeSet.changes.length > 0) {
            lines.push(`\n#### ${changeSet.api.id}\n`);
            for (const action of ['DELETED', 'ADDED', 'CHANGED']) {
                const matching = changeSet.changes.filter(c => c.action === action);
                if (matching.length === 0) {
                    continue;
                }
                lines.push(`The following keys were ${action.toLowerCase()}:`);
                matching.forEach(c => lines.push(`- ${c.keyName}`));
                lines.push('');
            }
        }
        lines.push('');
    }
    return { semverity, changelog: lines.join('\n') };
}
exports.createChangelog = createChangelog;
/**
 * Map a semverity to its conventional-commit prefix:
 * PATCH -> "fix", MINOR/MAJOR -> "feat".
 */
function getPrefix(semverity) {
    if (semverity === Semverity.PATCH) {
        return 'fix';
    }
    if (semverity === Semverity.MINOR || semverity === Semverity.MAJOR) {
        return 'feat';
    }
}
exports.getPrefix = getPrefix;
/**
 * Given a set of changes, figure out if the total
 * changeset is semver patch, minor, or major: additions are MINOR,
 * modifications PATCH, deletions MAJOR; the worst one wins.
 */
function getSemverity(changeSets) {
    // Per-action semver impact; unknown actions map to undefined and are
    // ignored by the comparison below.
    const impact = {
        ADDED: Semverity.MINOR,
        CHANGED: Semverity.PATCH,
        DELETED: Semverity.MAJOR,
    };
    let worst = Semverity.PATCH;
    for (const changeSet of changeSets) {
        for (const change of changeSet.changes) {
            const changeSemverity = impact[change.action];
            if (changeSemverity > worst) {
                worst = changeSemverity;
            }
        }
    }
    return worst;
}
exports.getSemverity = getSemverity;
// Allow direct invocation as a CLI (`node build/src/generator/synth`);
// `--use-cache` skips regeneration. Errors are logged and rethrown so the
// process exits non-zero.
if (require.main === module) {
    const argv = minimist(process.argv.slice(2));
    const useCache = !!argv['use-cache'];
    synth({ useCache }).catch(err => {
        console.error(err);
        throw err;
    });
}