Firebase Update
93 express-server/node_modules/google-gax/CHANGELOG.md generated vendored Normal file
@@ -0,0 +1,93 @@
# Changelog

[npm history][1]

[1]: https://www.npmjs.com/package/gax-nodejs?activeTab=versions

## v0.22.1

11-12-2018 16:56 PST

### Dependencies
- Update grpc dependency ([#353](https://github.com/googleapis/gax-nodejs/pull/353))

### Internal / Testing Changes
- Update eslintignore config ([#352](https://github.com/googleapis/gax-nodejs/pull/352))

## v0.22.0

11-12-2018 15:05 PST

### New Features
- feat: to support GRPC-GCP Extension, include additional options in grpcOptions ([#328](https://github.com/googleapis/gax-nodejs/pull/328))

## v0.21.0

### 11-10-2018 11:27 PST

This is a minor service release that largely contains updates to other modules. The upgrade to `google-proto-files` and `walkdir` in particular should improve load time of the module by at least ~100ms.

### Dependencies
- fix(deps): update dependency google-proto-files to ^0.18.0 ([#348](https://github.com/googleapis/gax-nodejs/pull/348))
- fix: use `walkdir` instead of `globby` ([#346](https://github.com/googleapis/gax-nodejs/pull/346))
- chore(deps): update dependency through2 to v3 ([#343](https://github.com/googleapis/gax-nodejs/pull/343))
- chore: update grpc to ^1.15.1 ([#316](https://github.com/googleapis/gax-nodejs/pull/316))
- fix(deps): update dependency @grpc/grpc-js to ^0.3.0 ([#308](https://github.com/googleapis/gax-nodejs/pull/308))

### Internal / Testing Changes
- fix: improve types, remove dead code ([#340](https://github.com/googleapis/gax-nodejs/pull/340))
- refactor: enable noImplicitThis in the tsconfig ([#347](https://github.com/googleapis/gax-nodejs/pull/347))
- refactor: drop extend and lodash.flatten ([#345](https://github.com/googleapis/gax-nodejs/pull/345))
- chore: remove temporary folder ([#339](https://github.com/googleapis/gax-nodejs/pull/339))
- chore: use latest npm on Windows ([#344](https://github.com/googleapis/gax-nodejs/pull/344))
- refactor: clean up lodash and use strict ([#342](https://github.com/googleapis/gax-nodejs/pull/342))
- chore: include build in eslintignore ([#337](https://github.com/googleapis/gax-nodejs/pull/337))
- chore: system tests for gax ([#334](https://github.com/googleapis/gax-nodejs/pull/334))
- chore: update issue templates ([#333](https://github.com/googleapis/gax-nodejs/pull/333))
- Update issue templates
- chore: remove old issue template ([#329](https://github.com/googleapis/gax-nodejs/pull/329))
- build: run tests on node11 ([#327](https://github.com/googleapis/gax-nodejs/pull/327))
- fix: better types for GAPIC clients ([#326](https://github.com/googleapis/gax-nodejs/pull/326))
- chores(build): do not collect sponge.xml from windows builds ([#325](https://github.com/googleapis/gax-nodejs/pull/325))
- chores(build): run codecov on continuous builds ([#324](https://github.com/googleapis/gax-nodejs/pull/324))
- chore: update new issue template ([#323](https://github.com/googleapis/gax-nodejs/pull/323))
- build: fix codecov uploading on Kokoro ([#320](https://github.com/googleapis/gax-nodejs/pull/320))
- fix(deps): update dependency google-proto-files to ^0.17.0 ([#317](https://github.com/googleapis/gax-nodejs/pull/317))
- chore(deps): update dependency sinon to v7 ([#319](https://github.com/googleapis/gax-nodejs/pull/319))
- Update kokoro config ([#315](https://github.com/googleapis/gax-nodejs/pull/315))
- chore(deps): update dependency typescript to ~3.1.0 ([#313](https://github.com/googleapis/gax-nodejs/pull/313))
- Update CI config ([#312](https://github.com/googleapis/gax-nodejs/pull/312))
- build: prevent system/sample-test from leaking credentials
- Update the kokoro config ([#309](https://github.com/googleapis/gax-nodejs/pull/309))
- test: remove appveyor config ([#307](https://github.com/googleapis/gax-nodejs/pull/307))
- Update CI config ([#306](https://github.com/googleapis/gax-nodejs/pull/306))
- Enable prefer-const in the eslint config ([#304](https://github.com/googleapis/gax-nodejs/pull/304))
- Enable no-var in eslint ([#303](https://github.com/googleapis/gax-nodejs/pull/303))

## v0.18.0

### Implementation Changes
BREAKING CHANGE:
- fix: drop support for node.js 4.x and 9.x (#262)

### New Features

### Dependencies
- refactor: add dependency on @grpc/proto-loader (#229)
- chore(deps): update dependency typescript to v3 (#275)
- fix(deps): update dependency @grpc/proto-loader to ^0.3.0 (#269)
- chore(deps): update dependency gts to ^0.8.0 (#266)
- chore(package): Update gts to the latest version 🚀 (#255)
- chore(package): update @types/globby to version 8.0.0 (#257)

### Documentation
- Add Code of Conduct

### Internal / Testing Changes
- chore: move mocha options to mocha.opts (#274)
- test: fixing timeouts (#264)
- Configure Renovate (#258)
- fix: fix typo in a test (#260)
- fix: update linking for samples (#259)
- refactor: remove prettier, eslint, jshint (#254)
25 express-server/node_modules/google-gax/LICENSE generated vendored Normal file
@@ -0,0 +1,25 @@
Copyright 2016, Google Inc.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

    * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
    * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
40 express-server/node_modules/google-gax/README.md generated vendored Normal file
@@ -0,0 +1,40 @@
<img src="https://avatars0.githubusercontent.com/u/1342004?v=3&s=96" alt="Google Inc. logo" title="Google" align="right" height="96" width="96"/>

# Google API Extensions for Node.js

[![Release Level][releaselevelimg]][releaselevel]
[![npm version][npmimg]][npm]
[![CircleCI][circleimg]][circle]
[![Code Coverage][codecovimg]][codecov]

Google API Extensions for Node.js (gax-nodejs) is a set of modules which aids the development of APIs for clients and servers based on [gRPC][grpc] and Google API conventions.

Application code will rarely need to use most of the classes within this library directly, but code generated automatically from the API definition files in [Google APIs][googleapis] can use services such as page streaming and request bundling to provide a more convenient and idiomatic API surface to callers.

## Installation
```sh
$ npm install google-gax
```

## Contributing
Contributions to this library are always welcome and highly encouraged. See the [CONTRIBUTING][contributing] documentation for more information on how to get started.

## Details
For detailed documentation of the modules in gax-nodejs, please check out the [docs][docs].

## License
BSD - See [LICENSE][license] for more information.

[circle]: https://circleci.com/gh/googleapis/gax-nodejs
[circleimg]: https://circleci.com/gh/googleapis/gax-nodejs.svg?style=shield
[codecovimg]: https://codecov.io/github/googleapis/gax-nodejs/coverage.svg?branch=master
[codecov]: https://codecov.io/github/googleapis/gax-nodejs?branch=master
[contributing]: https://github.com/googleapis/gax-nodejs/blob/master/CONTRIBUTING.md
[docs]: http://googleapis.github.io/gax-nodejs/
[license]: https://github.com/googleapis/gax-nodejs/blob/master/LICENSE
[npmimg]: https://img.shields.io/npm/v/google-gax.svg
[npm]: https://www.npmjs.org/package/google-gax
[googleapis]: https://github.com/googleapis/googleapis/
[grpc]: http://grpc.io
[releaselevel]: https://cloud.google.com/terms/launch-stages
[releaselevelimg]: https://img.shields.io/badge/Release%20Level-Alpha-ff69b4.svg
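The README stops at installation, so a quick smoke test of the vendored copy may be useful. This is only a sketch; it assumes the package's main entry point loads correctly from the express-server directory where it was vendored.

```js
// Minimal check that the vendored module added in this commit can be loaded.
// Prints the names exported by google-gax (createApiCall, constructSettings, ...).
const gax = require('google-gax');
console.log(Object.keys(gax));
```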
4 express-server/node_modules/google-gax/build/src/GoogleError.d.ts generated vendored Normal file
@@ -0,0 +1,4 @@
export declare class GoogleError extends Error {
    code?: number;
    note?: string;
}
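For context, a short sketch of how this error type is consumed elsewhere in the library: the retry code in api_callable.js (further down in this diff) sets `code` and `note` on failures. The numeric status code here is an illustrative assumption, and the deep require path simply points at the built file shown above.

```js
const {GoogleError} = require('google-gax/build/src/GoogleError');

// Sketch: gax attaches a numeric gRPC status code and an explanatory note
// to errors that bubble out of retried calls.
const err = new GoogleError('Retry total timeout exceeded');
err.code = 14;  // hypothetical gRPC UNAVAILABLE code, for illustration only
err.note = 'Exception occurred in retry method that was not classified as transient';
console.error(err.code, err.note, err.message);
```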
36 express-server/node_modules/google-gax/build/src/GoogleError.js generated vendored Normal file
@@ -0,0 +1,36 @@
|
||||
"use strict";
|
||||
/*
|
||||
* Copyright 2018, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
class GoogleError extends Error {
|
||||
}
|
||||
exports.GoogleError = GoogleError;
|
||||
//# sourceMappingURL=GoogleError.js.map
|
||||
1 express-server/node_modules/google-gax/build/src/GoogleError.js.map generated vendored Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"GoogleError.js","sourceRoot":"","sources":["../../src/GoogleError.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;;AAEH,MAAa,WAAY,SAAQ,KAAK;CAGrC;AAHD,kCAGC"}
|
||||
121 express-server/node_modules/google-gax/build/src/api_callable.d.ts generated vendored Normal file
@@ -0,0 +1,121 @@
|
||||
/**
|
||||
* Provides function wrappers that implement page streaming and retrying.
|
||||
*/
|
||||
import { CallSettings } from './gax';
|
||||
import { GoogleError } from './GoogleError';
|
||||
export interface ArgumentFunction {
|
||||
(argument: {}, callback: APICallback): void;
|
||||
}
|
||||
/**
|
||||
* @callback APICallback
|
||||
* @param {?Error} error
|
||||
* @param {?Object} response
|
||||
*/
|
||||
export declare type APICallback = (err: GoogleError | null, response?: any, next?: {} | null, rawResponse?: {} | null) => void;
|
||||
/**
|
||||
* @callback APIFunc
|
||||
* @param {Object} argument
|
||||
* @param {grpc.Metadata} metadata
|
||||
* @param {Object} options
|
||||
* @param {APICallback} callback
|
||||
*/
|
||||
export declare type APIFunc = (argument: {}, metadata: {}, options: {}, callback: APICallback) => Canceller;
|
||||
/**
|
||||
* @callback APICall
|
||||
* @param {Object} argument
|
||||
* @param {CallOptions} callOptions
|
||||
* @param {APICallback} callback
|
||||
* @return {Promise|Stream|undefined}
|
||||
*/
|
||||
export interface APICall {
|
||||
(argument?: {} | null, callOptions?: {} | null, callback?: APICallback): any;
|
||||
}
|
||||
export declare class Canceller {
|
||||
callback?: APICallback;
|
||||
cancelFunc?: () => void;
|
||||
completed: boolean;
|
||||
/**
|
||||
* Canceller manages callback, API calls, and cancellation
|
||||
* of the API calls.
|
||||
* @param {APICallback=} callback
|
||||
* The callback to be called asynchronously when the API call
|
||||
* finishes.
|
||||
* @constructor
|
||||
* @property {APICallback} callback
|
||||
* The callback function to be called.
|
||||
* @private
|
||||
*/
|
||||
constructor(callback?: APICallback);
|
||||
/**
|
||||
* Cancels the ongoing promise.
|
||||
*/
|
||||
cancel(): void;
|
||||
/**
|
||||
* Call calls the specified function. Result will be used to fulfill
|
||||
* the promise.
|
||||
*
|
||||
* @param {function(Object, APICallback=)} aFunc
|
||||
* A function for an API call.
|
||||
* @param {Object} argument
|
||||
* A request object.
|
||||
*/
|
||||
call(aFunc: (obj: {}, callback: APICallback) => PromiseCanceller, argument: {}): void;
|
||||
}
|
||||
export interface CancellablePromise<T = any> extends Promise<T> {
|
||||
cancel(): void;
|
||||
}
|
||||
export declare class PromiseCanceller<T = any> extends Canceller {
|
||||
promise: CancellablePromise<T>;
|
||||
/**
|
||||
* PromiseCanceller is Canceller, but it holds a promise when
|
||||
* the API call finishes.
|
||||
* @param {Function} PromiseCtor - A constructor for a promise that implements
|
||||
* the ES6 specification of promise.
|
||||
* @constructor
|
||||
* @private
|
||||
*/
|
||||
constructor(PromiseCtor: PromiseConstructor);
|
||||
}
|
||||
export interface ApiCallOtherArgs {
|
||||
options?: {
|
||||
deadline?: Date;
|
||||
};
|
||||
headers?: {};
|
||||
metadataBuilder: (abTests?: {}, headers?: {}) => {};
|
||||
}
|
||||
/**
|
||||
* Creates an API caller for normal methods.
|
||||
*
|
||||
* @private
|
||||
* @constructor
|
||||
*/
|
||||
export declare class NormalApiCaller {
|
||||
init(settings: {
|
||||
promise: PromiseConstructor;
|
||||
}, callback: APICallback): PromiseCanceller | Canceller;
|
||||
wrap(func: Function): Function;
|
||||
call(apiCall: APICall, argument: {}, settings: {}, canceller: PromiseCanceller): void;
|
||||
fail(canceller: PromiseCanceller, err: GoogleError): void;
|
||||
result(canceller: PromiseCanceller): CancellablePromise<any> | undefined;
|
||||
}
|
||||
/**
|
||||
* Converts an rpc call into an API call governed by the settings.
|
||||
*
|
||||
 * In typical usage, `func` will be a promise to a callable used to make an rpc
 * request. This will most likely be a bound method from a request stub used
|
||||
* to make an rpc call. It is not a direct function but a Promise instance,
|
||||
* because of its asynchronism (typically, obtaining the auth information).
|
||||
*
|
||||
* The result is a function which manages the API call with the given settings
|
||||
* and the options on the invocation.
|
||||
*
|
||||
* @param {Promise.<APIFunc>} funcWithAuth - is a promise to be used to make
|
||||
* a bare rpc call. This is a Promise instead of a bare function because
|
||||
 * the rpc call will be involved with asynchronous authentications.
|
||||
* @param {CallSettings} settings - provides the settings for this call
|
||||
* @param {Object=} optDescriptor - optionally specify the descriptor for
|
||||
* the method call.
|
||||
* @return {APICall} func - a bound method on a request stub used
|
||||
* to make an rpc call.
|
||||
*/
|
||||
export declare function createApiCall(funcWithAuth: Promise<APIFunc>, settings: CallSettings, optDescriptor?: any): APICall;
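To make the declarations above concrete, here is a minimal, self-contained sketch of wiring a fake rpc function through `createApiCall`. The `fakeSettings` object is not a real `CallSettings`; it only implements the members the implementation below actually touches, and `fakeRpc` is a hypothetical stand-in for an authenticated gRPC stub method.

```js
const {createApiCall} = require('google-gax/build/src/api_callable');

// Hypothetical stand-in for CallSettings: only merge, timeout, retry,
// otherArgs and promise are read by the code shown in this diff.
const fakeSettings = {
  timeout: 3000,
  retry: null,
  otherArgs: {metadataBuilder: null},
  promise: Promise,
  merge(callOptions) {
    return this;  // ignore per-call overrides in this sketch
  },
};

// A fake APIFunc with the (argument, metadata, options, callback) shape.
function fakeRpc(argument, metadata, options, callback) {
  const timer = setTimeout(() => callback(null, {echo: argument}), 10);
  return {cancel: () => clearTimeout(timer)};
}

// funcWithAuth is a Promise because the real rpc stub only becomes
// available after authentication completes.
const apiCall = createApiCall(Promise.resolve(fakeRpc), fakeSettings);

apiCall({name: 'example'}).then(([response]) => console.log(response));
```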
321 express-server/node_modules/google-gax/build/src/api_callable.js generated vendored Normal file
@@ -0,0 +1,321 @@
|
||||
"use strict";
|
||||
/*
|
||||
* Copyright 2016, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
class Canceller {
|
||||
/**
|
||||
* Canceller manages callback, API calls, and cancellation
|
||||
* of the API calls.
|
||||
* @param {APICallback=} callback
|
||||
* The callback to be called asynchronously when the API call
|
||||
* finishes.
|
||||
* @constructor
|
||||
* @property {APICallback} callback
|
||||
* The callback function to be called.
|
||||
* @private
|
||||
*/
|
||||
constructor(callback) {
|
||||
this.callback = callback;
|
||||
this.completed = false;
|
||||
}
|
||||
/**
|
||||
* Cancels the ongoing promise.
|
||||
*/
|
||||
cancel() {
|
||||
if (this.completed) {
|
||||
return;
|
||||
}
|
||||
this.completed = true;
|
||||
if (this.cancelFunc) {
|
||||
this.cancelFunc();
|
||||
}
|
||||
else {
|
||||
this.callback(new Error('cancelled'));
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Call calls the specified function. Result will be used to fulfill
|
||||
* the promise.
|
||||
*
|
||||
* @param {function(Object, APICallback=)} aFunc
|
||||
* A function for an API call.
|
||||
* @param {Object} argument
|
||||
* A request object.
|
||||
*/
|
||||
call(aFunc, argument) {
|
||||
if (this.completed) {
|
||||
return;
|
||||
}
|
||||
// tslint:disable-next-line no-any
|
||||
const canceller = aFunc(argument, (...args) => {
|
||||
this.completed = true;
|
||||
args.unshift(this.callback);
|
||||
setImmediate.apply(null, args);
|
||||
});
|
||||
this.cancelFunc = () => {
|
||||
canceller.cancel();
|
||||
};
|
||||
}
|
||||
}
|
||||
exports.Canceller = Canceller;
|
||||
// tslint:disable-next-line no-any
|
||||
class PromiseCanceller extends Canceller {
|
||||
/**
|
||||
* PromiseCanceller is Canceller, but it holds a promise when
|
||||
* the API call finishes.
|
||||
* @param {Function} PromiseCtor - A constructor for a promise that implements
|
||||
* the ES6 specification of promise.
|
||||
* @constructor
|
||||
* @private
|
||||
*/
|
||||
// tslint:disable-next-line variable-name
|
||||
constructor(PromiseCtor) {
|
||||
super();
|
||||
this.promise = new PromiseCtor((resolve, reject) => {
|
||||
this.callback = (err, response, next, rawResponse) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
else {
|
||||
resolve([response, next, rawResponse]);
|
||||
}
|
||||
};
|
||||
});
|
||||
this.promise.cancel = () => {
|
||||
this.cancel();
|
||||
};
|
||||
}
|
||||
}
|
||||
exports.PromiseCanceller = PromiseCanceller;
|
||||
/**
|
||||
* Updates aFunc so that it gets called with the timeout as its final arg.
|
||||
*
|
||||
* This converts a function, aFunc, into another function with updated deadline.
|
||||
*
|
||||
* @private
|
||||
*
|
||||
* @param {APIFunc} aFunc - a function to be updated.
|
||||
 * @param {number} timeout - to be added to the original function as its final
|
||||
* positional arg.
|
||||
* @param {Object} otherArgs - the additional arguments to be passed to aFunc.
|
||||
* @param {Object=} abTests - the A/B testing key/value pairs.
|
||||
* @return {function(Object, APICallback)}
|
||||
* the function with other arguments and the timeout.
|
||||
*/
|
||||
function addTimeoutArg(aFunc, timeout, otherArgs, abTests) {
|
||||
// TODO: this assumes the other arguments consist of metadata and options,
|
||||
// which is specific to gRPC calls. Remove the hidden dependency on gRPC.
|
||||
return function timeoutFunc(argument, callback) {
|
||||
const now = new Date();
|
||||
const options = otherArgs.options || {};
|
||||
options.deadline = new Date(now.getTime() + timeout);
|
||||
const metadata = otherArgs.metadataBuilder ?
|
||||
otherArgs.metadataBuilder(abTests, otherArgs.headers || {}) :
|
||||
null;
|
||||
return aFunc(argument, metadata, options, callback);
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Creates a function equivalent to aFunc, but that retries on certain
|
||||
* exceptions.
|
||||
*
|
||||
* @private
|
||||
*
|
||||
* @param {APIFunc} aFunc - A function.
|
||||
* @param {RetryOptions} retry - Configures the exceptions upon which the
|
||||
 * function should retry, and the parameters to the exponential backoff retry
|
||||
* algorithm.
|
||||
* @param {Object} otherArgs - the additional arguments to be passed to aFunc.
|
||||
* @return {function(Object, APICallback)} A function that will retry.
|
||||
*/
|
||||
function retryable(aFunc, retry, otherArgs) {
|
||||
const delayMult = retry.backoffSettings.retryDelayMultiplier;
|
||||
const maxDelay = retry.backoffSettings.maxRetryDelayMillis;
|
||||
const timeoutMult = retry.backoffSettings.rpcTimeoutMultiplier;
|
||||
const maxTimeout = retry.backoffSettings.maxRpcTimeoutMillis;
|
||||
let delay = retry.backoffSettings.initialRetryDelayMillis;
|
||||
let timeout = retry.backoffSettings.initialRpcTimeoutMillis;
|
||||
/**
|
||||
* Equivalent to ``aFunc``, but retries upon transient failure.
|
||||
*
|
||||
* Retrying is done through an exponential backoff algorithm configured
|
||||
* by the options in ``retry``.
|
||||
* @param {Object} argument The request object.
|
||||
* @param {APICallback} callback The callback.
|
||||
* @return {function()} cancel function.
|
||||
*/
|
||||
return function retryingFunc(argument, callback) {
|
||||
let canceller;
|
||||
let timeoutId;
|
||||
let now = new Date();
|
||||
let deadline;
|
||||
if (retry.backoffSettings.totalTimeoutMillis) {
|
||||
deadline = now.getTime() + retry.backoffSettings.totalTimeoutMillis;
|
||||
}
|
||||
let retries = 0;
|
||||
const maxRetries = retry.backoffSettings.maxRetries;
|
||||
// TODO: define A/B testing values for retry behaviors.
|
||||
/** Repeat the API call as long as necessary. */
|
||||
function repeat() {
|
||||
timeoutId = null;
|
||||
if (deadline && now.getTime() >= deadline) {
|
||||
callback(new Error('Retry total timeout exceeded before any ' +
|
||||
'response was received'));
|
||||
return;
|
||||
}
|
||||
if (retries && retries >= maxRetries) {
|
||||
callback(new Error('Exceeded maximum number of retries before any ' +
|
||||
'response was received'));
|
||||
return;
|
||||
}
|
||||
retries++;
|
||||
const toCall = addTimeoutArg(aFunc, timeout, otherArgs);
|
||||
canceller = toCall(argument, (err, response, next, rawResponse) => {
|
||||
if (!err) {
|
||||
callback(null, response, next, rawResponse);
|
||||
return;
|
||||
}
|
||||
canceller = null;
|
||||
if (retry.retryCodes.indexOf(err.code) < 0) {
|
||||
err.note = 'Exception occurred in retry method that was ' +
|
||||
'not classified as transient';
|
||||
callback(err);
|
||||
}
|
||||
else {
|
||||
const toSleep = Math.random() * delay;
|
||||
timeoutId = setTimeout(() => {
|
||||
now = new Date();
|
||||
delay = Math.min(delay * delayMult, maxDelay);
|
||||
timeout = Math.min(timeout * timeoutMult, maxTimeout, deadline - now.getTime());
|
||||
repeat();
|
||||
}, toSleep);
|
||||
}
|
||||
});
|
||||
}
|
||||
if (maxRetries && deadline) {
|
||||
callback(new Error('Cannot set both totalTimeoutMillis and maxRetries ' +
|
||||
'in backoffSettings.'));
|
||||
}
|
||||
else {
|
||||
repeat();
|
||||
}
|
||||
return {
|
||||
cancel() {
|
||||
if (timeoutId) {
|
||||
clearTimeout(timeoutId);
|
||||
}
|
||||
if (canceller) {
|
||||
canceller.cancel();
|
||||
}
|
||||
else {
|
||||
callback(new Error('cancelled'));
|
||||
}
|
||||
},
|
||||
};
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Creates an API caller for normal methods.
|
||||
*
|
||||
* @private
|
||||
* @constructor
|
||||
*/
|
||||
class NormalApiCaller {
|
||||
init(settings, callback) {
|
||||
if (callback) {
|
||||
return new Canceller(callback);
|
||||
}
|
||||
return new PromiseCanceller(settings.promise);
|
||||
}
|
||||
wrap(func) {
|
||||
return func;
|
||||
}
|
||||
call(apiCall, argument, settings, canceller) {
|
||||
canceller.call(apiCall, argument);
|
||||
}
|
||||
fail(canceller, err) {
|
||||
canceller.callback(err);
|
||||
}
|
||||
result(canceller) {
|
||||
if (canceller.promise) {
|
||||
return canceller.promise;
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
exports.NormalApiCaller = NormalApiCaller;
|
||||
/**
|
||||
* Converts an rpc call into an API call governed by the settings.
|
||||
*
|
||||
 * In typical usage, `func` will be a promise to a callable used to make an rpc
 * request. This will most likely be a bound method from a request stub used
|
||||
* to make an rpc call. It is not a direct function but a Promise instance,
|
||||
* because of its asynchronism (typically, obtaining the auth information).
|
||||
*
|
||||
* The result is a function which manages the API call with the given settings
|
||||
* and the options on the invocation.
|
||||
*
|
||||
* @param {Promise.<APIFunc>} funcWithAuth - is a promise to be used to make
|
||||
* a bare rpc call. This is a Promise instead of a bare function because
|
||||
 * the rpc call will be involved with asynchronous authentications.
|
||||
* @param {CallSettings} settings - provides the settings for this call
|
||||
* @param {Object=} optDescriptor - optionally specify the descriptor for
|
||||
* the method call.
|
||||
* @return {APICall} func - a bound method on a request stub used
|
||||
* to make an rpc call.
|
||||
*/
|
||||
function createApiCall(funcWithAuth, settings,
|
||||
// tslint:disable-next-line no-any
|
||||
optDescriptor) {
|
||||
const apiCaller = optDescriptor ? optDescriptor.apiCaller(settings) : new NormalApiCaller();
|
||||
return function apiCallInner(request, callOptions, callback) {
|
||||
const thisSettings = settings.merge(callOptions);
|
||||
const status = apiCaller.init(thisSettings, callback);
|
||||
funcWithAuth
|
||||
.then(func => {
|
||||
func = apiCaller.wrap(func);
|
||||
const retry = thisSettings.retry;
|
||||
if (retry && retry.retryCodes && retry.retryCodes.length > 0) {
|
||||
return retryable(func, thisSettings.retry, thisSettings.otherArgs);
|
||||
}
|
||||
return addTimeoutArg(func, thisSettings.timeout, thisSettings.otherArgs);
|
||||
})
|
||||
.then(apiCall => {
|
||||
apiCaller.call(apiCall, request, thisSettings, status);
|
||||
})
|
||||
.catch(err => {
|
||||
apiCaller.fail(status, err);
|
||||
});
|
||||
return apiCaller.result(status);
|
||||
};
|
||||
}
|
||||
exports.createApiCall = createApiCall;
|
||||
//# sourceMappingURL=api_callable.js.map
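The retry path in `retryable()` above grows both the sleep interval and the per-attempt rpc timeout geometrically. A small standalone sketch of that arithmetic follows; the numbers are made up, and the real code sleeps for `Math.random() * delay` rather than the full delay.

```js
// Illustrative backoff settings; field names follow the retryable() code above.
const backoff = {
  initialRetryDelayMillis: 100,
  retryDelayMultiplier: 1.3,
  maxRetryDelayMillis: 60000,
  initialRpcTimeoutMillis: 2000,
  rpcTimeoutMultiplier: 1.5,
  maxRpcTimeoutMillis: 30000,
};

let delay = backoff.initialRetryDelayMillis;
let timeout = backoff.initialRpcTimeoutMillis;
for (let attempt = 1; attempt <= 5; attempt++) {
  console.log(`attempt ${attempt}: sleep up to ${delay}ms, rpc timeout ${timeout}ms`);
  // Mirrors the update step inside retryable(): both values grow geometrically
  // but are clamped to their configured maxima.
  delay = Math.min(delay * backoff.retryDelayMultiplier, backoff.maxRetryDelayMillis);
  timeout = Math.min(timeout * backoff.rpcTimeoutMultiplier, backoff.maxRpcTimeoutMillis);
}
```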
1 express-server/node_modules/google-gax/build/src/api_callable.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
26 express-server/node_modules/google-gax/build/src/auth.d.ts generated vendored Normal file
@@ -0,0 +1,26 @@
|
||||
import { GoogleError } from './GoogleError';
|
||||
/**
|
||||
* @callback GetCredentialsFunc
|
||||
*
|
||||
* To authorize requests through gRPC, we must get the raw google-auth-library
|
||||
* auth client object.
|
||||
*
|
||||
* @param {function()} callback - The callback function.
|
||||
* @param {Object} opts - options values for configuring auth
|
||||
* @param {(String|String[])} opts.scopes - the scope or scopes to use when
|
||||
* obtaining the credentials.
|
||||
* @param {Object} opts.sslCreds - when specified, this is used instead
|
||||
* of default credentials.
|
||||
*/
|
||||
/**
|
||||
 * Creates a promise which resolves to an auth credential.
|
||||
*
|
||||
* @param {GetCredentialsFunc} getCredentials - the callback used to
|
||||
* obtain the credentials.
|
||||
* @param {Object} opts - the optional arguments to be passed to
|
||||
* getCredentials.
|
||||
* @return {Promise} A promise which resolves to the credential.
|
||||
*/
|
||||
export declare function createCredPromise(getCredentials: GetCredentialsFunc, opts?: {}): Promise<{}>;
|
||||
export declare type GetCredentialsCallback = (err: GoogleError | null, credentials: {}) => void;
|
||||
export declare type GetCredentialsFunc = (callback: GetCredentialsCallback, opts?: {}) => void;
|
||||
70 express-server/node_modules/google-gax/build/src/auth.js generated vendored Normal file
@@ -0,0 +1,70 @@
|
||||
"use strict";
|
||||
/*
|
||||
*
|
||||
* Copyright 2016, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
/**
|
||||
* @callback GetCredentialsFunc
|
||||
*
|
||||
* To authorize requests through gRPC, we must get the raw google-auth-library
|
||||
* auth client object.
|
||||
*
|
||||
* @param {function()} callback - The callback function.
|
||||
* @param {Object} opts - options values for configuring auth
|
||||
* @param {(String|String[])} opts.scopes - the scope or scopes to use when
|
||||
* obtaining the credentials.
|
||||
* @param {Object} opts.sslCreds - when specified, this is used instead
|
||||
* of default credentials.
|
||||
*/
|
||||
/**
|
||||
 * Creates a promise which resolves to an auth credential.
|
||||
*
|
||||
* @param {GetCredentialsFunc} getCredentials - the callback used to
|
||||
* obtain the credentials.
|
||||
* @param {Object} opts - the optional arguments to be passed to
|
||||
* getCredentials.
|
||||
* @return {Promise} A promise which resolves to the credential.
|
||||
*/
|
||||
function createCredPromise(getCredentials, opts) {
|
||||
return new Promise((resolve, reject) => {
|
||||
getCredentials((err, credentials) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
else {
|
||||
resolve(credentials);
|
||||
}
|
||||
}, opts);
|
||||
});
|
||||
}
|
||||
exports.createCredPromise = createCredPromise;
|
||||
//# sourceMappingURL=auth.js.map
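A brief sketch of `createCredPromise` in use. The callback-style getter here is a hypothetical stand-in for the google-auth-library call a real client would make, and the deep require path simply points at the built file shown above.

```js
const {createCredPromise} = require('google-gax/build/src/auth');

// Hypothetical credential getter in the GetCredentialsFunc shape:
// (callback, opts) => void, where callback is (err, credentials).
function getCredentials(callback, opts) {
  // Real code would call google-auth-library; here we fabricate a value.
  setImmediate(() => callback(null, {scopes: opts && opts.scopes}));
}

createCredPromise(getCredentials, {scopes: ['https://www.googleapis.com/auth/cloud-platform']})
    .then(credentials => console.log('got credentials', credentials))
    .catch(err => console.error(err));
```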
1 express-server/node_modules/google-gax/build/src/auth.js.map generated vendored Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"auth.js","sourceRoot":"","sources":["../../src/auth.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA+BG;;AAIH;;;;;;;;;;;;GAYG;AAEH;;;;;;;;GAQG;AACH,SAAgB,iBAAiB,CAC7B,cAAkC,EAAE,IAAS;IAC/C,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,cAAc,CAAC,CAAC,GAAG,EAAE,WAAW,EAAE,EAAE;YAClC,IAAI,GAAG,EAAE;gBACP,MAAM,CAAC,GAAG,CAAC,CAAC;aACb;iBAAM;gBACL,OAAO,CAAC,WAAW,CAAC,CAAC;aACtB;QACH,CAAC,EAAE,IAAI,CAAC,CAAC;IACX,CAAC,CAAC,CAAC;AACL,CAAC;AAXD,8CAWC"}
|
||||
216 express-server/node_modules/google-gax/build/src/bundling.d.ts generated vendored Normal file
@@ -0,0 +1,216 @@
|
||||
/// <reference types="node" />
|
||||
import { NormalApiCaller, APICall, PromiseCanceller, APICallback } from './api_callable';
|
||||
import { CallSettings } from './gax';
|
||||
/**
|
||||
* Compute the identifier of the `obj`. The objects of the same ID
|
||||
* will be bundled together.
|
||||
*
|
||||
* @param {Object} obj - The request object.
|
||||
* @param {String[]} discriminatorFields - The array of field names.
|
||||
* A field name may include '.' as a separator, which is used to
|
||||
* indicate object traversal.
|
||||
 * @return {String|undefined} - the identifier string, or undefined if any
 *   discriminator fields do not exist.
|
||||
*/
|
||||
export declare function computeBundleId(obj: {}, discriminatorFields: string[]): string | undefined;
|
||||
export interface SubResponseInfo {
|
||||
field: string;
|
||||
start?: number;
|
||||
end?: number;
|
||||
}
|
||||
export interface TaskElement {
|
||||
}
|
||||
export interface TaskData {
|
||||
elements: TaskElement[];
|
||||
bytes: number;
|
||||
callback: TaskCallback;
|
||||
cancelled?: boolean;
|
||||
}
|
||||
export interface TaskCallback extends APICallback {
|
||||
id?: string;
|
||||
}
|
||||
/**
|
||||
* Creates a deep copy of the object with the consideration of subresponse
|
||||
* fields for bundling.
|
||||
*
|
||||
* @param {Object} obj - The source object.
|
||||
* @param {Object?} subresponseInfo - The information to copy the subset of
|
||||
* the field for the response. Do nothing if it's null.
|
||||
* @param {String} subresponseInfo.field - The field name.
|
||||
* @param {number} subresponseInfo.start - The offset where the copying
|
||||
* element should starts with.
|
||||
* @param {number} subresponseInfo.end - The ending index where the copying
|
||||
* region of the elements ends.
|
||||
* @return {Object} The copied object.
|
||||
* @private
|
||||
*/
|
||||
export declare function deepCopyForResponse(obj: any, subresponseInfo: SubResponseInfo | null): any;
|
||||
export declare class Task {
|
||||
_apiCall: APICall;
|
||||
_request: {
|
||||
[index: string]: TaskElement[];
|
||||
};
|
||||
_bundledField: string;
|
||||
_subresponseField?: string | null;
|
||||
_data: TaskData[];
|
||||
callCanceller?: PromiseCanceller;
|
||||
/**
|
||||
* A task coordinates the execution of a single bundle.
|
||||
*
|
||||
* @param {function} apiCall - The function to conduct calling API.
|
||||
* @param {Object} bundlingRequest - The base request object to be used
|
||||
* for the actual API call.
|
||||
* @param {string} bundledField - The name of the field in bundlingRequest
|
||||
* to be bundled.
|
||||
* @param {string=} subresponseField - The name of the field in the response
|
||||
* to be passed to the callback.
|
||||
* @constructor
|
||||
* @private
|
||||
*/
|
||||
constructor(apiCall: APICall, bundlingRequest: {}, bundledField: string, subresponseField?: string | null);
|
||||
/**
|
||||
* Returns the number of elements in a task.
|
||||
* @return {number} The number of elements.
|
||||
*/
|
||||
getElementCount(): number;
|
||||
/**
|
||||
* Returns the total byte size of the elements in a task.
|
||||
* @return {number} The byte size.
|
||||
*/
|
||||
getRequestByteSize(): number;
|
||||
/**
|
||||
* Invokes the actual API call with current elements.
|
||||
* @return {string[]} - the list of ids for invocations to be run.
|
||||
*/
|
||||
run(): string[];
|
||||
/**
|
||||
* Appends the list of elements into the task.
|
||||
* @param {Object[]} elements - the new list of elements.
|
||||
* @param {number} bytes - the byte size required to encode elements in the API.
|
||||
* @param {APICallback} callback - the callback of the method call.
|
||||
*/
|
||||
extend(elements: TaskElement[], bytes: number, callback: TaskCallback): void;
|
||||
/**
|
||||
* Cancels a part of elements.
|
||||
* @param {string} id - The identifier of the part of elements.
|
||||
* @return {boolean} Whether the entire task will be canceled or not.
|
||||
*/
|
||||
cancel(id: string): boolean;
|
||||
}
|
||||
export interface BundleOptions {
|
||||
elementCountLimit: number;
|
||||
requestByteLimit: number;
|
||||
elementCountThreshold: number;
|
||||
requestByteThreshold: number;
|
||||
delayThreshold: number;
|
||||
}
|
||||
export declare class BundleExecutor {
|
||||
_options: BundleOptions;
|
||||
_descriptor: BundleDescriptor;
|
||||
_tasks: {
|
||||
[index: string]: Task;
|
||||
};
|
||||
_timers: {
|
||||
[index: string]: NodeJS.Timer;
|
||||
};
|
||||
_invocations: {
|
||||
[index: string]: string;
|
||||
};
|
||||
_invocationId: number;
|
||||
/**
|
||||
 * Organizes requests for an API service that requires them to be bundled.
|
||||
*
|
||||
* @param {BundleOptions} bundleOptions - configures strategy this instance
|
||||
* uses when executing bundled functions.
|
||||
* @param {BundleDescriptor} bundleDescriptor - the description of the bundling.
|
||||
* @constructor
|
||||
*/
|
||||
constructor(bundleOptions: BundleOptions, bundleDescriptor: BundleDescriptor);
|
||||
/**
|
||||
* Schedule a method call.
|
||||
*
|
||||
* @param {function} apiCall - the function for an API call.
|
||||
* @param {Object} request - the request object to be bundled with others.
|
||||
* @param {APICallback} callback - the callback to be called when the method finished.
|
||||
* @return {function()} - the function to cancel the scheduled invocation.
|
||||
*/
|
||||
schedule(apiCall: APICall, request: {
|
||||
[index: string]: Array<{}> | string;
|
||||
}, callback?: TaskCallback): any;
|
||||
/**
|
||||
* Clears scheduled timeout if it exists.
|
||||
*
|
||||
* @param {String} bundleId - the id for the task whose timeout needs to be
|
||||
* cleared.
|
||||
* @private
|
||||
*/
|
||||
_maybeClearTimeout(bundleId: string): void;
|
||||
/**
|
||||
* Cancels an event.
|
||||
*
|
||||
* @param {String} id - The id for the event in the task.
|
||||
* @private
|
||||
*/
|
||||
_cancel(id: string): void;
|
||||
/**
|
||||
* Invokes a task.
|
||||
*
|
||||
* @param {String} bundleId - The id for the task.
|
||||
* @private
|
||||
*/
|
||||
_runNow(bundleId: string): void;
|
||||
}
|
||||
export declare class Bundleable extends NormalApiCaller {
|
||||
bundler: BundleExecutor;
|
||||
/**
|
||||
* Creates an API caller that bundles requests.
|
||||
*
|
||||
* @private
|
||||
* @constructor
|
||||
* @param {BundleExecutor} bundler - bundles API calls.
|
||||
*/
|
||||
constructor(bundler: BundleExecutor);
|
||||
call(apiCall: APICall, argument: {}, settings: CallSettings, status: any): void;
|
||||
}
|
||||
export declare class BundleDescriptor {
|
||||
bundledField: string;
|
||||
requestDiscriminatorFields: string[];
|
||||
subresponseField: string | null;
|
||||
byteLengthFunction: Function;
|
||||
/**
|
||||
* Describes the structure of bundled call.
|
||||
*
|
||||
* requestDiscriminatorFields may include '.' as a separator, which is used to
|
||||
* indicate object traversal. This allows fields in nested objects to be used
|
||||
* to determine what request to bundle.
|
||||
*
|
||||
* @property {String} bundledField
|
||||
* @property {String} requestDiscriminatorFields
|
||||
* @property {String} subresponseField
|
||||
* @property {Function} byteLengthFunction
|
||||
*
|
||||
* @param {String} bundledField - the repeated field in the request message
|
||||
* that will have its elements aggregated by bundling.
|
||||
* @param {String} requestDiscriminatorFields - a list of fields in the
|
||||
 * target request message class that are used to determine which request
|
||||
* messages should be bundled together.
|
||||
* @param {String} subresponseField - an optional field, when present it
|
||||
* indicates the field in the response message that should be used to
|
||||
* demultiplex the response into multiple response messages.
|
||||
* @param {Function} byteLengthFunction - a function to obtain the byte
|
||||
* length to be consumed for the bundled field messages. Because Node.JS
|
||||
* protobuf.js/gRPC uses builtin Objects for the user-visible data and
|
||||
* internally they are encoded/decoded in protobuf manner, this function
|
||||
* is actually necessary to calculate the byte length.
|
||||
* @constructor
|
||||
*/
|
||||
constructor(bundledField: string, requestDiscriminatorFields: string[], subresponseField: string | null, byteLengthFunction: Function);
|
||||
/**
|
||||
* Returns a new API caller.
|
||||
* @private
|
||||
* @param {CallSettings} settings - the current settings.
|
||||
* @return {Bundleable} - the new bundling API caller.
|
||||
*/
|
||||
apiCaller(settings: CallSettings): Bundleable;
|
||||
}
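A small sketch of `computeBundleId`, declared above, which decides whether two requests can share a bundle. The `database`/`writes` field names are hypothetical and only illustrate discriminator matching.

```js
const {computeBundleId} = require('google-gax/build/src/bundling');

// Requests with the same discriminator values share a bundle id.
const a = computeBundleId({database: 'db1', writes: [1, 2]}, ['database']);
const b = computeBundleId({database: 'db1', writes: [3]}, ['database']);
const c = computeBundleId({writes: [4]}, ['database']);

console.log(a);        // '["db1"]'
console.log(a === b);  // true  -> these two requests would be bundled together
console.log(c);        // undefined -> no discriminator field present
```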
501 express-server/node_modules/google-gax/build/src/bundling.js generated vendored Normal file
@@ -0,0 +1,501 @@
|
||||
"use strict";
|
||||
/*
|
||||
* Copyright 2016, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
/**
|
||||
* Provides behavior that supports request bundling.
|
||||
*/
|
||||
const at = require("lodash.at");
|
||||
const api_callable_1 = require("./api_callable");
|
||||
/**
|
||||
 * A function which does nothing. Used for an empty cancellation function.
|
||||
* @private
|
||||
*/
|
||||
function noop() { }
|
||||
/**
|
||||
* Compute the identifier of the `obj`. The objects of the same ID
|
||||
* will be bundled together.
|
||||
*
|
||||
* @param {Object} obj - The request object.
|
||||
* @param {String[]} discriminatorFields - The array of field names.
|
||||
* A field name may include '.' as a separator, which is used to
|
||||
* indicate object traversal.
|
||||
 * @return {String|undefined} - the identifier string, or undefined if any
 *   discriminator fields do not exist.
|
||||
*/
|
||||
function computeBundleId(obj, discriminatorFields) {
|
||||
const ids = [];
|
||||
let hasIds = false;
|
||||
for (let i = 0; i < discriminatorFields.length; ++i) {
|
||||
const id = at(obj, discriminatorFields[i])[0];
|
||||
if (id === undefined) {
|
||||
ids.push(null);
|
||||
}
|
||||
else {
|
||||
hasIds = true;
|
||||
ids.push(id);
|
||||
}
|
||||
}
|
||||
if (!hasIds) {
|
||||
return undefined;
|
||||
}
|
||||
return JSON.stringify(ids);
|
||||
}
|
||||
exports.computeBundleId = computeBundleId;
|
||||
/**
|
||||
* Creates a deep copy of the object with the consideration of subresponse
|
||||
* fields for bundling.
|
||||
*
|
||||
* @param {Object} obj - The source object.
|
||||
* @param {Object?} subresponseInfo - The information to copy the subset of
|
||||
* the field for the response. Do nothing if it's null.
|
||||
* @param {String} subresponseInfo.field - The field name.
|
||||
* @param {number} subresponseInfo.start - The offset where the copying
|
||||
* element should starts with.
|
||||
* @param {number} subresponseInfo.end - The ending index where the copying
|
||||
* region of the elements ends.
|
||||
* @return {Object} The copied object.
|
||||
* @private
|
||||
*/
|
||||
function deepCopyForResponse(
|
||||
// tslint:disable-next-line no-any
|
||||
obj, subresponseInfo) {
|
||||
// tslint:disable-next-line no-any
|
||||
let result;
|
||||
if (obj === null) {
|
||||
return null;
|
||||
}
|
||||
if (obj === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
if (Array.isArray(obj)) {
|
||||
result = [];
|
||||
obj.forEach(element => {
|
||||
result.push(deepCopyForResponse(element, null));
|
||||
});
|
||||
return result;
|
||||
}
|
||||
// Some objects (such as ByteBuffer) have copy method.
|
||||
if (obj.copy !== undefined) {
|
||||
return obj.copy();
|
||||
}
|
||||
// ArrayBuffer should be copied through slice().
|
||||
if (obj instanceof ArrayBuffer) {
|
||||
return obj.slice(0);
|
||||
}
|
||||
if (typeof obj === 'object') {
|
||||
result = {};
|
||||
Object.keys(obj).forEach(key => {
|
||||
if (subresponseInfo && key === subresponseInfo.field &&
|
||||
Array.isArray(obj[key])) {
|
||||
// Note that subresponses are not deep-copied. This is safe because
|
||||
// those subresponses are not shared among callbacks.
|
||||
result[key] =
|
||||
obj[key].slice(subresponseInfo.start, subresponseInfo.end);
|
||||
}
|
||||
else {
|
||||
result[key] = deepCopyForResponse(obj[key], null);
|
||||
}
|
||||
});
|
||||
return result;
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
exports.deepCopyForResponse = deepCopyForResponse;
|
||||
class Task {
|
||||
/**
|
||||
* A task coordinates the execution of a single bundle.
|
||||
*
|
||||
* @param {function} apiCall - The function to conduct calling API.
|
||||
* @param {Object} bundlingRequest - The base request object to be used
|
||||
* for the actual API call.
|
||||
* @param {string} bundledField - The name of the field in bundlingRequest
|
||||
* to be bundled.
|
||||
* @param {string=} subresponseField - The name of the field in the response
|
||||
* to be passed to the callback.
|
||||
* @constructor
|
||||
* @private
|
||||
*/
|
||||
constructor(apiCall, bundlingRequest, bundledField, subresponseField) {
|
||||
this._apiCall = apiCall;
|
||||
this._request = bundlingRequest;
|
||||
this._bundledField = bundledField;
|
||||
this._subresponseField = subresponseField;
|
||||
this._data = [];
|
||||
}
|
||||
/**
|
||||
* Returns the number of elements in a task.
|
||||
* @return {number} The number of elements.
|
||||
*/
|
||||
getElementCount() {
|
||||
let count = 0;
|
||||
for (let i = 0; i < this._data.length; ++i) {
|
||||
count += this._data[i].elements.length;
|
||||
}
|
||||
return count;
|
||||
}
|
||||
/**
|
||||
* Returns the total byte size of the elements in a task.
|
||||
* @return {number} The byte size.
|
||||
*/
|
||||
getRequestByteSize() {
|
||||
let size = 0;
|
||||
for (let i = 0; i < this._data.length; ++i) {
|
||||
size += this._data[i].bytes;
|
||||
}
|
||||
return size;
|
||||
}
|
||||
/**
|
||||
* Invokes the actual API call with current elements.
|
||||
* @return {string[]} - the list of ids for invocations to be run.
|
||||
*/
|
||||
run() {
|
||||
if (this._data.length === 0) {
|
||||
return [];
|
||||
}
|
||||
const request = this._request;
|
||||
const elements = [];
|
||||
const ids = [];
|
||||
for (let i = 0; i < this._data.length; ++i) {
|
||||
elements.push.apply(elements, this._data[i].elements);
|
||||
ids.push(this._data[i].callback.id);
|
||||
}
|
||||
request[this._bundledField] = elements;
|
||||
const self = this;
|
||||
this.callCanceller =
|
||||
this._apiCall(request, (err, response) => {
|
||||
const responses = [];
|
||||
if (err) {
|
||||
self._data.forEach(() => {
|
||||
responses.push(null);
|
||||
});
|
||||
}
|
||||
else {
|
||||
let subresponseInfo = null;
|
||||
if (self._subresponseField) {
|
||||
subresponseInfo = {
|
||||
field: self._subresponseField,
|
||||
start: 0,
|
||||
};
|
||||
}
|
||||
self._data.forEach(data => {
|
||||
if (subresponseInfo) {
|
||||
subresponseInfo.end =
|
||||
subresponseInfo.start + data.elements.length;
|
||||
}
|
||||
responses.push(deepCopyForResponse(response, subresponseInfo));
|
||||
if (subresponseInfo) {
|
||||
subresponseInfo.start = subresponseInfo.end;
|
||||
}
|
||||
});
|
||||
}
|
||||
for (let i = 0; i < self._data.length; ++i) {
|
||||
if (self._data[i].cancelled) {
|
||||
self._data[i].callback(new Error('cancelled'));
|
||||
}
|
||||
else {
|
||||
self._data[i].callback(err, responses[i]);
|
||||
}
|
||||
}
|
||||
});
|
||||
return ids;
|
||||
}
|
||||
/**
|
||||
* Appends the list of elements into the task.
|
||||
* @param {Object[]} elements - the new list of elements.
|
||||
* @param {number} bytes - the byte size required to encode elements in the API.
|
||||
* @param {APICallback} callback - the callback of the method call.
|
||||
*/
|
||||
extend(elements, bytes, callback) {
|
||||
this._data.push({
|
||||
elements,
|
||||
bytes,
|
||||
callback,
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Cancels a part of elements.
|
||||
* @param {string} id - The identifier of the part of elements.
|
||||
* @return {boolean} Whether the entire task will be canceled or not.
|
||||
*/
|
||||
cancel(id) {
|
||||
if (this.callCanceller) {
|
||||
let allCancelled = true;
|
||||
this._data.forEach(d => {
|
||||
if (d.callback.id === id) {
|
||||
d.cancelled = true;
|
||||
}
|
||||
if (!d.cancelled) {
|
||||
allCancelled = false;
|
||||
}
|
||||
});
|
||||
if (allCancelled) {
|
||||
this.callCanceller.cancel();
|
||||
}
|
||||
return allCancelled;
|
||||
}
|
||||
for (let i = 0; i < this._data.length; ++i) {
|
||||
if (this._data[i].callback.id === id) {
|
||||
this._data[i].callback(new Error('cancelled'));
|
||||
this._data.splice(i, 1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return this._data.length === 0;
|
||||
}
|
||||
}
|
||||
exports.Task = Task;
|
||||
class BundleExecutor {
|
||||
/**
|
||||
 * Organizes requests for an API service that requires them to be bundled.
|
||||
*
|
||||
* @param {BundleOptions} bundleOptions - configures strategy this instance
|
||||
* uses when executing bundled functions.
|
||||
* @param {BundleDescriptor} bundleDescriptor - the description of the bundling.
|
||||
* @constructor
|
||||
*/
|
||||
constructor(bundleOptions, bundleDescriptor) {
|
||||
this._options = bundleOptions;
|
||||
this._descriptor = bundleDescriptor;
|
||||
this._tasks = {};
|
||||
this._timers = {};
|
||||
this._invocations = {};
|
||||
this._invocationId = 0;
|
||||
}
|
||||
/**
|
||||
* Schedule a method call.
|
||||
*
|
||||
* @param {function} apiCall - the function for an API call.
|
||||
* @param {Object} request - the request object to be bundled with others.
|
||||
* @param {APICallback} callback - the callback to be called when the method finished.
|
||||
* @return {function()} - the function to cancel the scheduled invocation.
|
||||
*/
|
||||
schedule(apiCall, request, callback) {
|
||||
const bundleId = computeBundleId(request, this._descriptor.requestDiscriminatorFields);
|
||||
callback = (callback || noop);
|
||||
if (bundleId === undefined) {
|
||||
console.warn('The request does not have enough information for request bundling. ' +
|
||||
'Invoking immediately. Request: ' + JSON.stringify(request) +
|
||||
' discriminator fields: ' +
|
||||
this._descriptor.requestDiscriminatorFields);
|
||||
return apiCall(request, callback);
|
||||
}
|
||||
if (!(bundleId in this._tasks)) {
|
||||
this._tasks[bundleId] = new Task(apiCall, request, this._descriptor.bundledField, this._descriptor.subresponseField);
|
||||
}
|
||||
let task = this._tasks[bundleId];
|
||||
callback.id = String(this._invocationId++);
|
||||
this._invocations[callback.id] = bundleId;
|
||||
const bundledField = request[this._descriptor.bundledField];
|
||||
const elementCount = bundledField.length;
|
||||
let requestBytes = 0;
|
||||
const self = this;
|
||||
bundledField.forEach(obj => {
|
||||
requestBytes += this._descriptor.byteLengthFunction(obj);
|
||||
});
|
||||
const countLimit = this._options.elementCountLimit || 0;
|
||||
const byteLimit = this._options.requestByteLimit || 0;
|
||||
if ((countLimit > 0 && elementCount >= countLimit) ||
|
||||
(byteLimit > 0 && requestBytes >= byteLimit)) {
|
||||
let message;
|
||||
if (countLimit > 0 && elementCount >= countLimit) {
|
||||
message = 'The number of elements ' + elementCount +
|
||||
' exceeds the limit ' + this._options.elementCountLimit;
|
||||
}
|
||||
else {
|
||||
message = 'The required bytes ' + requestBytes + ' exceeds the limit ' +
|
||||
this._options.requestByteLimit;
|
||||
}
|
||||
callback(new Error(message));
|
||||
return {
|
||||
cancel: noop,
|
||||
};
|
||||
}
|
||||
const existingCount = task.getElementCount();
|
||||
const existingBytes = task.getRequestByteSize();
|
||||
if ((countLimit > 0 && elementCount + existingCount >= countLimit) ||
|
||||
(byteLimit > 0 && requestBytes + existingBytes >= byteLimit)) {
|
||||
this._runNow(bundleId);
|
||||
this._tasks[bundleId] = new Task(apiCall, request, this._descriptor.bundledField, this._descriptor.subresponseField);
|
||||
task = this._tasks[bundleId];
|
||||
}
|
||||
task.extend(bundledField, requestBytes, callback);
|
||||
const ret = {
|
||||
cancel() {
|
||||
self._cancel(callback.id);
|
||||
},
|
||||
};
|
||||
const countThreshold = this._options.elementCountThreshold || 0;
|
||||
const sizeThreshold = this._options.requestByteThreshold || 0;
|
||||
if ((countThreshold > 0 && task.getElementCount() >= countThreshold) ||
|
||||
(sizeThreshold > 0 && task.getRequestByteSize() >= sizeThreshold)) {
|
||||
this._runNow(bundleId);
|
||||
return ret;
|
||||
}
|
||||
if (!(bundleId in this._timers) && this._options.delayThreshold > 0) {
|
||||
this._timers[bundleId] = setTimeout(() => {
|
||||
delete this._timers[bundleId];
|
||||
this._runNow(bundleId);
|
||||
}, this._options.delayThreshold);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
/**
|
||||
* Clears scheduled timeout if it exists.
|
||||
*
|
||||
* @param {String} bundleId - the id for the task whose timeout needs to be
|
||||
* cleared.
|
||||
* @private
|
||||
*/
|
||||
_maybeClearTimeout(bundleId) {
|
||||
if (bundleId in this._timers) {
|
||||
const timerId = this._timers[bundleId];
|
||||
delete this._timers[bundleId];
|
||||
clearTimeout(timerId);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Cancels an event.
|
||||
*
|
||||
* @param {String} id - The id for the event in the task.
|
||||
* @private
|
||||
*/
|
||||
_cancel(id) {
|
||||
if (!(id in this._invocations)) {
|
||||
return;
|
||||
}
|
||||
const bundleId = this._invocations[id];
|
||||
if (!(bundleId in this._tasks)) {
|
||||
return;
|
||||
}
|
||||
const task = this._tasks[bundleId];
|
||||
delete this._invocations[id];
|
||||
if (task.cancel(id)) {
|
||||
this._maybeClearTimeout(bundleId);
|
||||
delete this._tasks[bundleId];
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Invokes a task.
|
||||
*
|
||||
* @param {String} bundleId - The id for the task.
|
||||
* @private
|
||||
*/
|
||||
_runNow(bundleId) {
|
||||
if (!(bundleId in this._tasks)) {
|
||||
console.warn('no such bundleid: ' + bundleId);
|
||||
return;
|
||||
}
|
||||
this._maybeClearTimeout(bundleId);
|
||||
const task = this._tasks[bundleId];
|
||||
delete this._tasks[bundleId];
|
||||
task.run().forEach(id => {
|
||||
delete this._invocations[id];
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.BundleExecutor = BundleExecutor;
|
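A hedged sketch of driving a BundleExecutor directly; in real clients the executor is created via BundleDescriptor.apiCaller (see below). The descriptor-shaped object, field names, thresholds, and echo-style apiCall are placeholders.

const descriptor = {  // same shape as a BundleDescriptor (defined later in this file)
  bundledField: 'entries',
  requestDiscriminatorFields: ['logName'],
  subresponseField: null,
  byteLengthFunction: obj => JSON.stringify(obj).length,
};
const executor = new BundleExecutor(
    {elementCountThreshold: 10, delayThreshold: 50}, descriptor);
const apiCall = (request, callback) => callback(null, request);  // placeholder RPC
const handle = executor.schedule(
    apiCall, {logName: 'app', entries: [{msg: 'a'}]}, (err, res) => {
      // Runs when the 'app' bundle is flushed, or with Error('cancelled')
      // if handle.cancel() is called before the flush.
    });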
||||
class Bundleable extends api_callable_1.NormalApiCaller {
|
||||
/**
|
||||
* Creates an API caller that bundles requests.
|
||||
*
|
||||
* @private
|
||||
* @constructor
|
||||
* @param {BundleExecutor} bundler - bundles API calls.
|
||||
*/
|
||||
constructor(bundler) {
|
||||
super();
|
||||
this.bundler = bundler;
|
||||
}
|
||||
// tslint:disable-next-line no-any
|
||||
call(apiCall, argument, settings, status) {
|
||||
if (settings.isBundling) {
|
||||
status.call((argument, callback) => {
|
||||
this.bundler.schedule(apiCall, argument, callback);
|
||||
}, argument);
|
||||
}
|
||||
else {
|
||||
api_callable_1.NormalApiCaller.prototype.call.call(this, apiCall, argument, settings, status);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.Bundleable = Bundleable;
|
||||
class BundleDescriptor {
|
||||
/**
|
||||
* Describes the structure of bundled call.
|
||||
*
|
||||
* requestDiscriminatorFields may include '.' as a separator, which is used to
|
||||
* indicate object traversal. This allows fields in nested objects to be used
|
||||
* to determine what request to bundle.
|
||||
*
|
||||
* @property {String} bundledField
|
||||
* @property {String} requestDiscriminatorFields
|
||||
* @property {String} subresponseField
|
||||
* @property {Function} byteLengthFunction
|
||||
*
|
||||
* @param {String} bundledField - the repeated field in the request message
|
||||
* that will have its elements aggregated by bundling.
|
||||
* @param {String} requestDiscriminatorFields - a list of fields in the
|
||||
 * target request message class that are used to determine which request
|
||||
* messages should be bundled together.
|
||||
 * @param {String} subresponseField - an optional field; when present, it
|
||||
* indicates the field in the response message that should be used to
|
||||
* demultiplex the response into multiple response messages.
|
||||
* @param {Function} byteLengthFunction - a function to obtain the byte
|
||||
* length to be consumed for the bundled field messages. Because Node.JS
|
||||
* protobuf.js/gRPC uses builtin Objects for the user-visible data and
|
||||
* internally they are encoded/decoded in protobuf manner, this function
|
||||
* is actually necessary to calculate the byte length.
|
||||
* @constructor
|
||||
*/
|
||||
constructor(bundledField, requestDiscriminatorFields, subresponseField, byteLengthFunction) {
|
||||
if (!byteLengthFunction && typeof subresponseField === 'function') {
|
||||
byteLengthFunction = subresponseField;
|
||||
subresponseField = null;
|
||||
}
|
||||
this.bundledField = bundledField;
|
||||
this.requestDiscriminatorFields = requestDiscriminatorFields;
|
||||
this.subresponseField = subresponseField;
|
||||
this.byteLengthFunction = byteLengthFunction;
|
||||
}
|
||||
/**
|
||||
* Returns a new API caller.
|
||||
* @private
|
||||
* @param {CallSettings} settings - the current settings.
|
||||
* @return {Bundleable} - the new bundling API caller.
|
||||
*/
|
||||
apiCaller(settings) {
|
||||
return new Bundleable(new BundleExecutor(settings.bundleOptions, this));
|
||||
}
|
||||
}
|
||||
exports.BundleDescriptor = BundleDescriptor;
|
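The constructor above shifts its arguments when the third one is a function, so the byte-length function may be passed in place of subresponseField. A small sketch, with byteLength as a hypothetical helper:

// These two forms are equivalent; the second relies on the argument shift above.
const a = new BundleDescriptor('entries', ['logName'], null, byteLength);
const b = new BundleDescriptor('entries', ['logName'], byteLength);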
||||
//# sourceMappingURL=bundling.js.map
|
||||
1
express-server/node_modules/google-gax/build/src/bundling.js.map
generated
vendored
Normal file
1
express-server/node_modules/google-gax/build/src/bundling.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
394
express-server/node_modules/google-gax/build/src/gax.d.ts
generated
vendored
Normal file
394
express-server/node_modules/google-gax/build/src/gax.d.ts
generated
vendored
Normal file
@@ -0,0 +1,394 @@
|
||||
/**
|
||||
* Copyright 2016, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
/**
|
||||
* Google API Extensions
|
||||
*/
|
||||
import { BundleOptions } from './bundling';
|
||||
/**
|
||||
* Encapsulates the overridable settings for a particular API call.
|
||||
*
|
||||
* ``CallOptions`` is an optional arg for all GAX API calls. It is used to
|
||||
* configure the settings of a specific API call.
|
||||
*
|
||||
* When provided, its values override the GAX service defaults for that
|
||||
* particular call.
|
||||
*
|
||||
* Typically the API clients will accept this as the second to the last
|
||||
* argument. See the examples below.
|
||||
* @typedef {Object} CallOptions
|
||||
* @property {number=} timeout - The client-side timeout for API calls.
|
||||
* @property {RetryOptions=} retry - determines whether and how to retry
|
||||
* on transient errors. When set to null, the call will not retry.
|
||||
* @property {boolean=} autoPaginate - If set to false and the call is
|
||||
* configured for paged iteration, page unrolling is not performed, instead
|
||||
* the callback will be called with the response object.
|
||||
* @property {Object=} pageToken - If set and the call is configured for
|
||||
* paged iteration, paged iteration is not performed and requested with this
|
||||
* pageToken.
|
||||
* @property {number} maxResults - If set and the call is configured for
|
||||
* paged iteration, the call will stop when the number of response elements
|
||||
 * reaches the specified size. By default, it will unroll the page to
|
||||
* the end of the list.
|
||||
* @property {boolean=} isBundling - If set to false and the call is configured
|
||||
* for bundling, bundling is not performed.
|
||||
* @property {BackoffSettings=} longrunning - BackoffSettings used for polling.
|
||||
* @property {Function=} promise - A constructor for a promise that implements the ES6
|
||||
* specification of promise which will be used to create promises. If not
|
||||
* provided, native promises will be used.
|
||||
* @example
|
||||
* // suppress bundling for bundled method.
|
||||
* api.bundlingMethod(
|
||||
* param, {optParam: aValue, isBundling: false}, function(err, response) {
|
||||
* // handle response.
|
||||
* });
|
||||
* @example
|
||||
* // suppress streaming for page-streaming method.
|
||||
* api.pageStreamingMethod(
|
||||
* param, {optParam: aValue, autoPaginate: false}, function(err, page) {
|
||||
* // not returning a stream, but callback is called with the paged response.
|
||||
* });
|
||||
*/
|
||||
/**
|
||||
* Per-call configurable settings for retrying upon transient failure.
|
||||
* @typedef {Object} RetryOptions
|
||||
* @property {String[]} retryCodes
|
||||
* @property {BackoffSettings} backoffSettings
|
||||
*/
|
||||
export declare class RetryOptions {
|
||||
retryCodes: number[];
|
||||
backoffSettings: BackoffSettings;
|
||||
constructor(retryCodes: number[], backoffSettings: BackoffSettings);
|
||||
}
|
||||
/**
|
||||
* Parameters to the exponential backoff algorithm for retrying.
|
||||
* @typedef {Object} BackoffSettings
|
||||
* @property {number} initialRetryDelayMillis - the initial delay time,
|
||||
* in milliseconds, between the completion of the first failed request and the
|
||||
* initiation of the first retrying request.
|
||||
* @property {number} retryDelayMultiplier - the multiplier by which to
|
||||
* increase the delay time between the completion of failed requests, and the
|
||||
* initiation of the subsequent retrying request.
|
||||
* @property {number} maxRetryDelayMillis - the maximum delay time, in
|
||||
* milliseconds, between requests. When this value is reached,
|
||||
* ``retryDelayMultiplier`` will no longer be used to increase delay time.
|
||||
* @property {number} initialRpcTimeoutMillis - the initial timeout parameter
|
||||
* to the request.
|
||||
 * @property {number} rpcTimeoutMultiplier - the multiplier by which to
|
||||
* increase the timeout parameter between failed requests.
|
||||
* @property {number} maxRpcTimeoutMillis - the maximum timeout parameter, in
|
||||
* milliseconds, for a request. When this value is reached,
|
||||
* ``rpcTimeoutMultiplier`` will no longer be used to increase the timeout.
|
||||
* @property {number} totalTimeoutMillis - the total time, in milliseconds,
|
||||
* starting from when the initial request is sent, after which an error will
|
||||
* be returned, regardless of the retrying attempts made meanwhile.
|
||||
*/
|
||||
export interface BackoffSettings {
|
||||
maxRetries?: number;
|
||||
initialRetryDelayMillis: number;
|
||||
retryDelayMultiplier: number;
|
||||
maxRetryDelayMillis: number;
|
||||
initialRpcTimeoutMillis: number | null;
|
||||
maxRpcTimeoutMillis: number | null;
|
||||
totalTimeoutMillis: number | null;
|
||||
rpcTimeoutMultiplier: number | null;
|
||||
}
|
||||
/**
|
||||
* Parameter to configure bundling behavior.
|
||||
* @typedef {Object} BundleOptions
|
||||
* @property {number} elementCountThreshold -
|
||||
* the bundled request will be sent once the count of outstanding elements
|
||||
* in the repeated field reaches this value.
|
||||
* @property {number} elementCountLimit -
|
||||
* represents a hard limit on the number of elements in the repeated field
|
||||
* of the bundle; if adding a request to a bundle would exceed this value,
|
||||
* the bundle is sent and the new request is added to a fresh bundle. It is
|
||||
* invalid for a single request to exceed this limit.
|
||||
* @property {number} requestByteThreshold -
|
||||
* the bundled request will be sent once the count of bytes in the request
|
||||
* reaches this value. Note that this value is pessimistically approximated
|
||||
* by summing the bytesizes of the elements in the repeated field, and
|
||||
* therefore may be an under-approximation.
|
||||
* @property {number} requestByteLimit -
|
||||
* represents a hard limit on the size of the bundled request; if adding
|
||||
* a request to a bundle would exceed this value, the bundle is sent and
|
||||
* the new request is added to a fresh bundle. It is invalid for a single
|
||||
* request to exceed this limit. Note that this value is pessimistically
|
||||
* approximated by summing the bytesizes of the elements in the repeated
|
||||
* field, with a buffer applied to correspond to the resulting
|
||||
* under-approximation.
|
||||
* @property {number} delayThreshold -
|
||||
* the bundled request will be sent this amount of time after the first
|
||||
* element in the bundle was added to it.
|
||||
*/
|
||||
export interface CallOptions {
|
||||
timeout?: number;
|
||||
retry?: RetryOptions | null;
|
||||
autoPaginate?: boolean;
|
||||
pageToken?: number;
|
||||
maxResults?: number;
|
||||
maxRetries?: number;
|
||||
otherArgs?: {
|
||||
[index: string]: any;
|
||||
};
|
||||
bundleOptions?: BundleOptions | null;
|
||||
isBundling?: boolean;
|
||||
longrunning?: boolean | null;
|
||||
promise?: PromiseConstructor;
|
||||
}
|
||||
export declare class CallSettings {
|
||||
timeout: number;
|
||||
retry?: RetryOptions | null;
|
||||
autoPaginate?: boolean;
|
||||
pageToken?: number;
|
||||
maxResults?: number;
|
||||
otherArgs: {
|
||||
[index: string]: any;
|
||||
};
|
||||
bundleOptions?: BundleOptions | null;
|
||||
isBundling: boolean;
|
||||
longrunning?: boolean | null;
|
||||
promise: PromiseConstructor;
|
||||
/**
|
||||
* @param {Object} settings - An object containing parameters of this settings.
|
||||
* @param {number} settings.timeout - The client-side timeout for API calls.
|
||||
* This parameter is ignored for retrying calls.
|
||||
* @param {RetryOptions} settings.retry - The configuration for retrying upon
|
||||
* transient error. If set to null, this call will not retry.
|
||||
* @param {boolean} settings.autoPaginate - If there is no `pageDescriptor`,
|
||||
 * this attribute has no meaning. Otherwise, determines whether a page
|
||||
* streamed response should make the page structure transparent to the user by
|
||||
* flattening the repeated field in the returned generator.
|
||||
* @param {number} settings.pageToken - If there is no `pageDescriptor`,
|
||||
* this attribute has no meaning. Otherwise, determines the page token used
|
||||
* in the page streaming request.
|
||||
* @param {Object} settings.otherArgs - Additional arguments to be passed to
|
||||
* the API calls.
|
||||
* @param {Function=} settings.promise - A constructor for a promise that
|
||||
* implements the ES6 specification of promise. If not provided, native
|
||||
* promises will be used.
|
||||
*
|
||||
* @constructor
|
||||
*/
|
||||
constructor(settings?: CallOptions);
|
||||
/**
|
||||
* Returns a new CallSettings merged from this and a CallOptions object.
|
||||
*
|
||||
* @param {CallOptions} options - an instance whose values override
|
||||
* those in this object. If null, ``merge`` returns a copy of this
|
||||
 * object.
|
||||
* @return {CallSettings} The merged CallSettings instance.
|
||||
*/
|
||||
merge(options?: CallOptions | null): CallSettings;
|
||||
}
|
||||
/**
|
||||
* Per-call configurable settings for retrying upon transient failure.
|
||||
*
|
||||
* @param {String[]} retryCodes - a list of Google API canonical error codes
|
||||
* upon which a retry should be attempted.
|
||||
* @param {BackoffSettings} backoffSettings - configures the retry
|
||||
* exponential backoff algorithm.
|
||||
* @return {RetryOptions} A new RetryOptions object.
|
||||
*
|
||||
*/
|
||||
export declare function createRetryOptions(retryCodes: number[], backoffSettings: BackoffSettings): RetryOptions;
|
||||
/**
|
||||
* Parameters to the exponential backoff algorithm for retrying.
|
||||
*
|
||||
* @param {number} initialRetryDelayMillis - the initial delay time,
|
||||
* in milliseconds, between the completion of the first failed request and the
|
||||
* initiation of the first retrying request.
|
||||
* @param {number} retryDelayMultiplier - the multiplier by which to
|
||||
* increase the delay time between the completion of failed requests, and the
|
||||
* initiation of the subsequent retrying request.
|
||||
* @param {number} maxRetryDelayMillis - the maximum delay time, in
|
||||
* milliseconds, between requests. When this value is reached,
|
||||
* ``retryDelayMultiplier`` will no longer be used to increase delay time.
|
||||
* @param {number} initialRpcTimeoutMillis - the initial timeout parameter
|
||||
* to the request.
|
||||
* @param {number} rpcTimeoutMultiplier - the multiplier by which to
|
||||
* increase the timeout parameter between failed requests.
|
||||
* @param {number} maxRpcTimeoutMillis - the maximum timeout parameter, in
|
||||
* milliseconds, for a request. When this value is reached,
|
||||
* ``rpcTimeoutMultiplier`` will no longer be used to increase the timeout.
|
||||
* @param {number} totalTimeoutMillis - the total time, in milliseconds,
|
||||
* starting from when the initial request is sent, after which an error will
|
||||
* be returned, regardless of the retrying attempts made meanwhile.
|
||||
 * @return {BackoffSettings} a new settings object.
|
||||
*
|
||||
*/
|
||||
export declare function createBackoffSettings(initialRetryDelayMillis: number, retryDelayMultiplier: number, maxRetryDelayMillis: number, initialRpcTimeoutMillis: number | null, rpcTimeoutMultiplier: number | null, maxRpcTimeoutMillis: number | null, totalTimeoutMillis: number | null): BackoffSettings;
|
||||
/**
|
||||
* Parameters to the exponential backoff algorithm for retrying.
|
||||
* This function is unsupported, and intended for internal use only.
|
||||
*
|
||||
* @param {number} initialRetryDelayMillis - the initial delay time,
|
||||
* in milliseconds, between the completion of the first failed request and the
|
||||
* initiation of the first retrying request.
|
||||
* @param {number} retryDelayMultiplier - the multiplier by which to
|
||||
* increase the delay time between the completion of failed requests, and the
|
||||
* initiation of the subsequent retrying request.
|
||||
* @param {number} maxRetryDelayMillis - the maximum delay time, in
|
||||
* milliseconds, between requests. When this value is reached,
|
||||
* ``retryDelayMultiplier`` will no longer be used to increase delay time.
|
||||
* @param {number} initialRpcTimeoutMillis - the initial timeout parameter
|
||||
* to the request.
|
||||
* @param {number} rpcTimeoutMultiplier - the multiplier by which to
|
||||
* increase the timeout parameter between failed requests.
|
||||
* @param {number} maxRpcTimeoutMillis - the maximum timeout parameter, in
|
||||
* milliseconds, for a request. When this value is reached,
|
||||
* ``rpcTimeoutMultiplier`` will no longer be used to increase the timeout.
|
||||
* @param {number} maxRetries - the maximum number of retrying attempts that
|
||||
* will be made. If reached, an error will be returned.
|
||||
 * @return {BackoffSettings} a new settings object.
|
||||
*
|
||||
*/
|
||||
export declare function createMaxRetriesBackoffSettings(initialRetryDelayMillis: number, retryDelayMultiplier: number, maxRetryDelayMillis: number, initialRpcTimeoutMillis: number, rpcTimeoutMultiplier: number, maxRpcTimeoutMillis: number, maxRetries: number): {
|
||||
initialRetryDelayMillis: number;
|
||||
retryDelayMultiplier: number;
|
||||
maxRetryDelayMillis: number;
|
||||
initialRpcTimeoutMillis: number;
|
||||
rpcTimeoutMultiplier: number;
|
||||
maxRpcTimeoutMillis: number;
|
||||
maxRetries: number;
|
||||
};
|
||||
/**
|
||||
* Creates a new {@link BundleOptions}.
|
||||
*
|
||||
* @private
|
||||
* @param {Object} options - An object to hold optional parameters. See
|
||||
* properties for the content of options.
|
||||
 * @return {BundleOptions} - A new options object.
|
||||
*/
|
||||
export declare function createBundleOptions(options: BundlingConfig): BundleOptions;
|
||||
export interface ServiceConfig {
|
||||
retry_codes: {
|
||||
[index: string]: string[];
|
||||
};
|
||||
retry_params: {
|
||||
[index: string]: RetryParamsConfig;
|
||||
};
|
||||
methods: {
|
||||
[index: string]: MethodConfig;
|
||||
};
|
||||
}
|
||||
export interface RetryParamsConfig {
|
||||
initial_retry_delay_millis: number;
|
||||
retry_delay_multiplier: number;
|
||||
max_retry_delay_millis: number;
|
||||
initial_rpc_timeout_millis: number;
|
||||
rpc_timeout_multiplier: number;
|
||||
max_rpc_timeout_millis: number;
|
||||
total_timeout_millis: number;
|
||||
}
|
||||
export interface MethodConfig {
|
||||
retry_codes_name: string;
|
||||
retry_params_name: string;
|
||||
bundling: BundlingConfig;
|
||||
timeout_millis?: number;
|
||||
}
|
||||
export interface BundlingConfig {
|
||||
[index: string]: number;
|
||||
element_count_threshold: number;
|
||||
element_count_limit: number;
|
||||
request_byte_threshold: number;
|
||||
request_byte_limit: number;
|
||||
delay_threshold_millis: number;
|
||||
}
|
||||
export interface ClientConfig {
|
||||
interfaces?: {
|
||||
[index: string]: ServiceConfig;
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Constructs a dictionary mapping method names to {@link CallSettings}.
|
||||
*
|
||||
* The `clientConfig` parameter is parsed from a client configuration JSON
|
||||
* file of the form:
|
||||
*
|
||||
* {
|
||||
* "interfaces": {
|
||||
* "google.fake.v1.ServiceName": {
|
||||
* "retry_codes": {
|
||||
* "idempotent": ["UNAVAILABLE", "DEADLINE_EXCEEDED"],
|
||||
* "non_idempotent": []
|
||||
* },
|
||||
* "retry_params": {
|
||||
* "default": {
|
||||
* "initial_retry_delay_millis": 100,
|
||||
* "retry_delay_multiplier": 1.2,
|
||||
* "max_retry_delay_millis": 1000,
|
||||
* "initial_rpc_timeout_millis": 2000,
|
||||
* "rpc_timeout_multiplier": 1.5,
|
||||
* "max_rpc_timeout_millis": 30000,
|
||||
* "total_timeout_millis": 45000
|
||||
* }
|
||||
* },
|
||||
* "methods": {
|
||||
* "CreateFoo": {
|
||||
* "retry_codes_name": "idempotent",
|
||||
* "retry_params_name": "default"
|
||||
* },
|
||||
* "Publish": {
|
||||
* "retry_codes_name": "non_idempotent",
|
||||
* "retry_params_name": "default",
|
||||
* "bundling": {
|
||||
* "element_count_threshold": 40,
|
||||
* "element_count_limit": 200,
|
||||
* "request_byte_threshold": 90000,
|
||||
* "request_byte_limit": 100000,
|
||||
* "delay_threshold_millis": 100
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
*
|
||||
* @param {String} serviceName - The fully-qualified name of this
|
||||
* service, used as a key into the client config file (in the
|
||||
* example above, this value should be 'google.fake.v1.ServiceName').
|
||||
* @param {Object} clientConfig - A dictionary parsed from the
|
||||
* standard API client config file.
|
||||
* @param {Object} configOverrides - A dictionary in the same structure of
|
||||
* client_config to override the settings.
|
||||
* @param {Object.<string, string[]>} retryNames - A dictionary mapping the strings
|
||||
* referring to response status codes to objects representing
|
||||
* those codes.
|
||||
* @param {Object} otherArgs - the non-request arguments to be passed to the API
|
||||
* calls.
|
||||
* @param {Function=} promise - A constructor for a promise that implements the
|
||||
* ES6 specification of promise. If not provided, native promises will be used.
|
||||
* @return {Object} A mapping from method name to CallSettings, or null if the
|
||||
* service is not found in the config.
|
||||
*/
|
||||
export declare function constructSettings(serviceName: string, clientConfig: ClientConfig, configOverrides: ClientConfig, retryNames: {
|
||||
[index: string]: number;
|
||||
}, otherArgs?: {}, promise?: PromiseConstructor): any;
|
||||
503
express-server/node_modules/google-gax/build/src/gax.js
generated
vendored
Normal file
503
express-server/node_modules/google-gax/build/src/gax.js
generated
vendored
Normal file
@@ -0,0 +1,503 @@
|
||||
"use strict";
|
||||
/**
|
||||
* Copyright 2016, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
/**
|
||||
* Encapsulates the overridable settings for a particular API call.
|
||||
*
|
||||
* ``CallOptions`` is an optional arg for all GAX API calls. It is used to
|
||||
* configure the settings of a specific API call.
|
||||
*
|
||||
* When provided, its values override the GAX service defaults for that
|
||||
* particular call.
|
||||
*
|
||||
* Typically the API clients will accept this as the second to the last
|
||||
* argument. See the examples below.
|
||||
* @typedef {Object} CallOptions
|
||||
* @property {number=} timeout - The client-side timeout for API calls.
|
||||
* @property {RetryOptions=} retry - determines whether and how to retry
|
||||
* on transient errors. When set to null, the call will not retry.
|
||||
* @property {boolean=} autoPaginate - If set to false and the call is
|
||||
* configured for paged iteration, page unrolling is not performed, instead
|
||||
* the callback will be called with the response object.
|
||||
* @property {Object=} pageToken - If set and the call is configured for
|
||||
* paged iteration, paged iteration is not performed and requested with this
|
||||
* pageToken.
|
||||
* @property {number} maxResults - If set and the call is configured for
|
||||
* paged iteration, the call will stop when the number of response elements
|
||||
 * reaches the specified size. By default, it will unroll the page to
|
||||
* the end of the list.
|
||||
* @property {boolean=} isBundling - If set to false and the call is configured
|
||||
* for bundling, bundling is not performed.
|
||||
* @property {BackoffSettings=} longrunning - BackoffSettings used for polling.
|
||||
* @property {Function=} promise - A constructor for a promise that implements the ES6
|
||||
* specification of promise which will be used to create promises. If not
|
||||
* provided, native promises will be used.
|
||||
* @example
|
||||
* // suppress bundling for bundled method.
|
||||
* api.bundlingMethod(
|
||||
* param, {optParam: aValue, isBundling: false}, function(err, response) {
|
||||
* // handle response.
|
||||
* });
|
||||
* @example
|
||||
* // suppress streaming for page-streaming method.
|
||||
* api.pageStreamingMethod(
|
||||
* param, {optParam: aValue, autoPaginate: false}, function(err, page) {
|
||||
* // not returning a stream, but callback is called with the paged response.
|
||||
* });
|
||||
*/
|
||||
/**
|
||||
* Per-call configurable settings for retrying upon transient failure.
|
||||
* @typedef {Object} RetryOptions
|
||||
* @property {String[]} retryCodes
|
||||
* @property {BackoffSettings} backoffSettings
|
||||
*/
|
||||
class RetryOptions {
|
||||
constructor(retryCodes, backoffSettings) {
|
||||
this.retryCodes = retryCodes;
|
||||
this.backoffSettings = backoffSettings;
|
||||
}
|
||||
}
|
||||
exports.RetryOptions = RetryOptions;
|
||||
class CallSettings {
|
||||
/**
|
||||
* @param {Object} settings - An object containing parameters of this settings.
|
||||
* @param {number} settings.timeout - The client-side timeout for API calls.
|
||||
* This parameter is ignored for retrying calls.
|
||||
* @param {RetryOptions} settings.retry - The configuration for retrying upon
|
||||
* transient error. If set to null, this call will not retry.
|
||||
* @param {boolean} settings.autoPaginate - If there is no `pageDescriptor`,
|
||||
 * this attribute has no meaning. Otherwise, determines whether a page
|
||||
* streamed response should make the page structure transparent to the user by
|
||||
* flattening the repeated field in the returned generator.
|
||||
* @param {number} settings.pageToken - If there is no `pageDescriptor`,
|
||||
* this attribute has no meaning. Otherwise, determines the page token used
|
||||
* in the page streaming request.
|
||||
* @param {Object} settings.otherArgs - Additional arguments to be passed to
|
||||
* the API calls.
|
||||
* @param {Function=} settings.promise - A constructor for a promise that
|
||||
* implements the ES6 specification of promise. If not provided, native
|
||||
* promises will be used.
|
||||
*
|
||||
* @constructor
|
||||
*/
|
||||
constructor(settings) {
|
||||
settings = settings || {};
|
||||
this.timeout = settings.timeout || 30 * 1000;
|
||||
this.retry = settings.retry;
|
||||
this.autoPaginate =
|
||||
'autoPaginate' in settings ? settings.autoPaginate : true;
|
||||
this.pageToken = settings.pageToken;
|
||||
this.maxResults = settings.maxResults;
|
||||
this.otherArgs = settings.otherArgs || {};
|
||||
this.bundleOptions = settings.bundleOptions;
|
||||
this.isBundling = 'isBundling' in settings ? settings.isBundling : true;
|
||||
this.longrunning = 'longrunning' in settings ? settings.longrunning : null;
|
||||
this.promise = 'promise' in settings ? settings.promise : Promise;
|
||||
}
|
||||
/**
|
||||
* Returns a new CallSettings merged from this and a CallOptions object.
|
||||
*
|
||||
* @param {CallOptions} options - an instance whose values override
|
||||
* those in this object. If null, ``merge`` returns a copy of this
|
||||
 * object.
|
||||
* @return {CallSettings} The merged CallSettings instance.
|
||||
*/
|
||||
merge(options) {
|
||||
if (!options) {
|
||||
return new CallSettings(this);
|
||||
}
|
||||
let timeout = this.timeout;
|
||||
let retry = this.retry;
|
||||
let autoPaginate = this.autoPaginate;
|
||||
let pageToken = this.pageToken;
|
||||
let maxResults = this.maxResults;
|
||||
let otherArgs = this.otherArgs;
|
||||
let isBundling = this.isBundling;
|
||||
let longrunning = this.longrunning;
|
||||
let promise = this.promise;
|
||||
if ('timeout' in options) {
|
||||
timeout = options.timeout;
|
||||
}
|
||||
if ('retry' in options) {
|
||||
retry = options.retry;
|
||||
}
|
||||
if ('autoPaginate' in options && !options.autoPaginate) {
|
||||
autoPaginate = false;
|
||||
}
|
||||
if ('pageToken' in options) {
|
||||
autoPaginate = false;
|
||||
pageToken = options.pageToken;
|
||||
}
|
||||
if ('maxResults' in options) {
|
||||
maxResults = options.maxResults;
|
||||
}
|
||||
if ('otherArgs' in options) {
|
||||
otherArgs = {};
|
||||
// tslint:disable-next-line forin
|
||||
for (const key in this.otherArgs) {
|
||||
otherArgs[key] = this.otherArgs[key];
|
||||
}
|
||||
// tslint:disable-next-line forin
|
||||
for (const optionsKey in options.otherArgs) {
|
||||
otherArgs[optionsKey] = options.otherArgs[optionsKey];
|
||||
}
|
||||
}
|
||||
if ('isBundling' in options) {
|
||||
isBundling = options.isBundling;
|
||||
}
|
||||
if ('maxRetries' in options) {
|
||||
retry.backoffSettings.maxRetries = options.maxRetries;
|
||||
delete retry.backoffSettings.totalTimeoutMillis;
|
||||
}
|
||||
if ('longrunning' in options) {
|
||||
longrunning = options.longrunning;
|
||||
}
|
||||
if ('promise' in options) {
|
||||
promise = options.promise;
|
||||
}
|
||||
return new CallSettings({
|
||||
timeout,
|
||||
retry,
|
||||
bundleOptions: this.bundleOptions,
|
||||
longrunning,
|
||||
autoPaginate,
|
||||
pageToken,
|
||||
maxResults,
|
||||
otherArgs,
|
||||
isBundling,
|
||||
promise,
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.CallSettings = CallSettings;
|
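A short sketch of merge semantics under the defaults set in the constructor; the numbers are illustrative.

const base = new CallSettings({timeout: 30000, isBundling: true});
const merged = base.merge({timeout: 10000, isBundling: false});
// merged.timeout === 10000, merged.isBundling === false;
// fields not present in the overrides (e.g. autoPaginate) keep the base values.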
||||
/**
|
||||
* Per-call configurable settings for retrying upon transient failure.
|
||||
*
|
||||
* @param {String[]} retryCodes - a list of Google API canonical error codes
|
||||
* upon which a retry should be attempted.
|
||||
* @param {BackoffSettings} backoffSettings - configures the retry
|
||||
* exponential backoff algorithm.
|
||||
* @return {RetryOptions} A new RetryOptions object.
|
||||
*
|
||||
*/
|
||||
function createRetryOptions(retryCodes, backoffSettings) {
|
||||
return {
|
||||
retryCodes,
|
||||
backoffSettings,
|
||||
};
|
||||
}
|
||||
exports.createRetryOptions = createRetryOptions;
|
||||
/**
|
||||
* Parameters to the exponential backoff algorithm for retrying.
|
||||
*
|
||||
* @param {number} initialRetryDelayMillis - the initial delay time,
|
||||
* in milliseconds, between the completion of the first failed request and the
|
||||
* initiation of the first retrying request.
|
||||
* @param {number} retryDelayMultiplier - the multiplier by which to
|
||||
* increase the delay time between the completion of failed requests, and the
|
||||
* initiation of the subsequent retrying request.
|
||||
* @param {number} maxRetryDelayMillis - the maximum delay time, in
|
||||
* milliseconds, between requests. When this value is reached,
|
||||
* ``retryDelayMultiplier`` will no longer be used to increase delay time.
|
||||
* @param {number} initialRpcTimeoutMillis - the initial timeout parameter
|
||||
* to the request.
|
||||
* @param {number} rpcTimeoutMultiplier - the multiplier by which to
|
||||
* increase the timeout parameter between failed requests.
|
||||
* @param {number} maxRpcTimeoutMillis - the maximum timeout parameter, in
|
||||
* milliseconds, for a request. When this value is reached,
|
||||
* ``rpcTimeoutMultiplier`` will no longer be used to increase the timeout.
|
||||
* @param {number} totalTimeoutMillis - the total time, in milliseconds,
|
||||
* starting from when the initial request is sent, after which an error will
|
||||
* be returned, regardless of the retrying attempts made meanwhile.
|
||||
 * @return {BackoffSettings} a new settings object.
|
||||
*
|
||||
*/
|
||||
function createBackoffSettings(initialRetryDelayMillis, retryDelayMultiplier, maxRetryDelayMillis, initialRpcTimeoutMillis, rpcTimeoutMultiplier, maxRpcTimeoutMillis, totalTimeoutMillis) {
|
||||
return {
|
||||
initialRetryDelayMillis,
|
||||
retryDelayMultiplier,
|
||||
maxRetryDelayMillis,
|
||||
initialRpcTimeoutMillis,
|
||||
rpcTimeoutMultiplier,
|
||||
maxRpcTimeoutMillis,
|
||||
totalTimeoutMillis,
|
||||
};
|
||||
}
|
||||
exports.createBackoffSettings = createBackoffSettings;
|
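For illustration, a backoff equivalent to the 'default' retry_params in the constructSettings example below, combined with retry codes; 14 (UNAVAILABLE) and 4 (DEADLINE_EXCEEDED) are the gRPC canonical code numbers, used here as placeholders.

const backoff = createBackoffSettings(100, 1.2, 1000, 2000, 1.5, 30000, 45000);
const retry = createRetryOptions([14, 4], backoff);  // retry on UNAVAILABLE, DEADLINE_EXCEEDED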
||||
/**
|
||||
* Parameters to the exponential backoff algorithm for retrying.
|
||||
* This function is unsupported, and intended for internal use only.
|
||||
*
|
||||
* @param {number} initialRetryDelayMillis - the initial delay time,
|
||||
* in milliseconds, between the completion of the first failed request and the
|
||||
* initiation of the first retrying request.
|
||||
* @param {number} retryDelayMultiplier - the multiplier by which to
|
||||
* increase the delay time between the completion of failed requests, and the
|
||||
* initiation of the subsequent retrying request.
|
||||
* @param {number} maxRetryDelayMillis - the maximum delay time, in
|
||||
* milliseconds, between requests. When this value is reached,
|
||||
* ``retryDelayMultiplier`` will no longer be used to increase delay time.
|
||||
* @param {number} initialRpcTimeoutMillis - the initial timeout parameter
|
||||
* to the request.
|
||||
* @param {number} rpcTimeoutMultiplier - the multiplier by which to
|
||||
* increase the timeout parameter between failed requests.
|
||||
* @param {number} maxRpcTimeoutMillis - the maximum timeout parameter, in
|
||||
* milliseconds, for a request. When this value is reached,
|
||||
* ``rpcTimeoutMultiplier`` will no longer be used to increase the timeout.
|
||||
* @param {number} maxRetries - the maximum number of retrying attempts that
|
||||
* will be made. If reached, an error will be returned.
|
||||
 * @return {BackoffSettings} a new settings object.
|
||||
*
|
||||
*/
|
||||
function createMaxRetriesBackoffSettings(initialRetryDelayMillis, retryDelayMultiplier, maxRetryDelayMillis, initialRpcTimeoutMillis, rpcTimeoutMultiplier, maxRpcTimeoutMillis, maxRetries) {
|
||||
return {
|
||||
initialRetryDelayMillis,
|
||||
retryDelayMultiplier,
|
||||
maxRetryDelayMillis,
|
||||
initialRpcTimeoutMillis,
|
||||
rpcTimeoutMultiplier,
|
||||
maxRpcTimeoutMillis,
|
||||
maxRetries,
|
||||
};
|
||||
}
|
||||
exports.createMaxRetriesBackoffSettings = createMaxRetriesBackoffSettings;
|
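A sketch of this internal maxRetries variant, which caps the number of attempts instead of bounding total elapsed time (compare the 'maxRetries' branch of CallSettings.merge above); the values are illustrative.

const cappedBackoff =
    createMaxRetriesBackoffSettings(100, 1.2, 1000, 2000, 1.5, 30000, 5);
// cappedBackoff.maxRetries === 5; no totalTimeoutMillis is set.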
||||
/**
|
||||
* Creates a new {@link BundleOptions}.
|
||||
*
|
||||
* @private
|
||||
* @param {Object} options - An object to hold optional parameters. See
|
||||
* properties for the content of options.
|
||||
 * @return {BundleOptions} - A new options object.
|
||||
*/
|
||||
function createBundleOptions(options) {
|
||||
const params = [
|
||||
'element_count_threshold',
|
||||
'element_count_limit',
|
||||
'request_byte_threshold',
|
||||
'request_byte_limit',
|
||||
'delay_threshold_millis',
|
||||
];
|
||||
params.forEach(param => {
|
||||
if (param in options && typeof options[param] !== 'number') {
|
||||
throw new Error(`${param} should be a number`);
|
||||
}
|
||||
});
|
||||
const elementCountThreshold = options.element_count_threshold || 0;
|
||||
const elementCountLimit = options.element_count_limit || 0;
|
||||
const requestByteThreshold = options.request_byte_threshold || 0;
|
||||
const requestByteLimit = options.request_byte_limit || 0;
|
||||
const delayThreshold = options.delay_threshold_millis || 0;
|
||||
if (elementCountThreshold === 0 && requestByteThreshold === 0 &&
|
||||
delayThreshold === 0) {
|
||||
throw new Error('one threshold should be > 0');
|
||||
}
|
||||
return {
|
||||
elementCountThreshold,
|
||||
elementCountLimit,
|
||||
requestByteThreshold,
|
||||
requestByteLimit,
|
||||
delayThreshold,
|
||||
};
|
||||
}
|
||||
exports.createBundleOptions = createBundleOptions;
|
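A sketch using the snake_case keys validated above; the numbers mirror the Publish bundling block in the constructSettings example below.

const bundleOptions = createBundleOptions({
  element_count_threshold: 40,
  element_count_limit: 200,
  request_byte_threshold: 90000,
  request_byte_limit: 100000,
  delay_threshold_millis: 100,
});
// => {elementCountThreshold: 40, elementCountLimit: 200, requestByteThreshold: 90000,
//     requestByteLimit: 100000, delayThreshold: 100}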
||||
/**
|
||||
* Helper for {@link constructSettings}
|
||||
*
|
||||
* @private
|
||||
*
|
||||
* @param {Object} methodConfig - A dictionary representing a single
|
||||
* `methods` entry of the standard API client config file. (See
|
||||
* {@link constructSettings} for information on this yaml.)
|
||||
* @param {?Object} retryCodes - A dictionary parsed from the
|
||||
* `retry_codes_def` entry of the standard API client config
|
||||
* file. (See {@link constructSettings} for information on this yaml.)
|
||||
* @param {Object} retryParams - A dictionary parsed from the
|
||||
* `retry_params` entry of the standard API client config
|
||||
* file. (See {@link constructSettings} for information on this yaml.)
|
||||
* @param {Object} retryNames - A dictionary mapping the string names
|
||||
* used in the standard API client config file to API response
|
||||
* status codes.
|
||||
* @return {?RetryOptions} The new retry options.
|
||||
*/
|
||||
function constructRetry(methodConfig, retryCodes, retryParams, retryNames) {
|
||||
if (!methodConfig) {
|
||||
return null;
|
||||
}
|
||||
let codes = null;
|
||||
if (retryCodes && 'retry_codes_name' in methodConfig) {
|
||||
const retryCodesName = methodConfig['retry_codes_name'];
|
||||
codes = (retryCodes[retryCodesName] || []).map(name => {
|
||||
return Number(retryNames[name]);
|
||||
});
|
||||
}
|
||||
let backoffSettings = null;
|
||||
if (retryParams && 'retry_params_name' in methodConfig) {
|
||||
const params = retryParams[methodConfig.retry_params_name];
|
||||
backoffSettings = createBackoffSettings(params.initial_retry_delay_millis, params.retry_delay_multiplier, params.max_retry_delay_millis, params.initial_rpc_timeout_millis, params.rpc_timeout_multiplier, params.max_rpc_timeout_millis, params.total_timeout_millis);
|
||||
}
|
||||
return createRetryOptions(codes, backoffSettings);
|
||||
}
|
||||
/**
|
||||
* Helper for {@link constructSettings}
|
||||
*
|
||||
* Takes two retry options, and merges them into a single RetryOption instance.
|
||||
*
|
||||
* @private
|
||||
*
|
||||
* @param {RetryOptions} retry - The base RetryOptions.
|
||||
* @param {RetryOptions} overrides - The RetryOptions used for overriding
|
||||
 * `retry`. Its non-null values are used. If `overrides` itself is null,
|
||||
 * the base retry is ignored and null is returned.
|
||||
* @return {?RetryOptions} The merged RetryOptions.
|
||||
*/
|
||||
function mergeRetryOptions(retry, overrides) {
|
||||
if (!overrides) {
|
||||
return null;
|
||||
}
|
||||
if (!overrides.retryCodes && !overrides.backoffSettings) {
|
||||
return retry;
|
||||
}
|
||||
let codes = retry.retryCodes;
|
||||
if (overrides.retryCodes) {
|
||||
codes = overrides.retryCodes;
|
||||
}
|
||||
let backoffSettings = retry.backoffSettings;
|
||||
if (overrides.backoffSettings) {
|
||||
backoffSettings = overrides.backoffSettings;
|
||||
}
|
||||
return createRetryOptions(codes, backoffSettings);
|
||||
}
|
||||
/**
|
||||
* Constructs a dictionary mapping method names to {@link CallSettings}.
|
||||
*
|
||||
* The `clientConfig` parameter is parsed from a client configuration JSON
|
||||
* file of the form:
|
||||
*
|
||||
* {
|
||||
* "interfaces": {
|
||||
* "google.fake.v1.ServiceName": {
|
||||
* "retry_codes": {
|
||||
* "idempotent": ["UNAVAILABLE", "DEADLINE_EXCEEDED"],
|
||||
* "non_idempotent": []
|
||||
* },
|
||||
* "retry_params": {
|
||||
* "default": {
|
||||
* "initial_retry_delay_millis": 100,
|
||||
* "retry_delay_multiplier": 1.2,
|
||||
* "max_retry_delay_millis": 1000,
|
||||
* "initial_rpc_timeout_millis": 2000,
|
||||
* "rpc_timeout_multiplier": 1.5,
|
||||
* "max_rpc_timeout_millis": 30000,
|
||||
* "total_timeout_millis": 45000
|
||||
* }
|
||||
* },
|
||||
* "methods": {
|
||||
* "CreateFoo": {
|
||||
* "retry_codes_name": "idempotent",
|
||||
* "retry_params_name": "default"
|
||||
* },
|
||||
* "Publish": {
|
||||
* "retry_codes_name": "non_idempotent",
|
||||
* "retry_params_name": "default",
|
||||
* "bundling": {
|
||||
* "element_count_threshold": 40,
|
||||
* "element_count_limit": 200,
|
||||
* "request_byte_threshold": 90000,
|
||||
* "request_byte_limit": 100000,
|
||||
* "delay_threshold_millis": 100
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
*
|
||||
* @param {String} serviceName - The fully-qualified name of this
|
||||
* service, used as a key into the client config file (in the
|
||||
* example above, this value should be 'google.fake.v1.ServiceName').
|
||||
* @param {Object} clientConfig - A dictionary parsed from the
|
||||
* standard API client config file.
|
||||
* @param {Object} configOverrides - A dictionary in the same structure of
|
||||
* client_config to override the settings.
|
||||
* @param {Object.<string, string[]>} retryNames - A dictionary mapping the strings
|
||||
* referring to response status codes to objects representing
|
||||
* those codes.
|
||||
* @param {Object} otherArgs - the non-request arguments to be passed to the API
|
||||
* calls.
|
||||
* @param {Function=} promise - A constructor for a promise that implements the
|
||||
* ES6 specification of promise. If not provided, native promises will be used.
|
||||
* @return {Object} A mapping from method name to CallSettings, or null if the
|
||||
* service is not found in the config.
|
||||
*/
|
||||
function constructSettings(serviceName, clientConfig, configOverrides, retryNames, otherArgs, promise) {
|
||||
otherArgs = otherArgs || {};
|
||||
// tslint:disable-next-line no-any
|
||||
const defaults = {};
|
||||
const serviceConfig = (clientConfig.interfaces || {})[serviceName];
|
||||
if (!serviceConfig) {
|
||||
return null;
|
||||
}
|
||||
const overrides = (configOverrides.interfaces || {})[serviceName] || {};
|
||||
const methods = serviceConfig.methods;
|
||||
const overridingMethods = overrides.methods || {};
|
||||
// tslint:disable-next-line forin
|
||||
for (const methodName in methods) {
|
||||
const methodConfig = methods[methodName];
|
||||
const jsName = methodName[0].toLowerCase() + methodName.slice(1);
|
||||
let retry = constructRetry(methodConfig, serviceConfig.retry_codes, serviceConfig.retry_params, retryNames);
|
||||
let bundlingConfig = methodConfig.bundling;
|
||||
let timeout = methodConfig.timeout_millis;
|
||||
if (methodName in overridingMethods) {
|
||||
const overridingMethod = overridingMethods[methodName];
|
||||
if (overridingMethod) {
|
||||
if ('bundling' in overridingMethod) {
|
||||
bundlingConfig = overridingMethod.bundling;
|
||||
}
|
||||
if ('timeout_millis' in overridingMethod) {
|
||||
timeout = overridingMethod.timeout_millis;
|
||||
}
|
||||
}
|
||||
retry = mergeRetryOptions(retry, constructRetry(overridingMethod, overrides.retry_codes, overrides.retry_params, retryNames));
|
||||
}
|
||||
defaults[jsName] = new CallSettings({
|
||||
timeout,
|
||||
retry,
|
||||
bundleOptions: bundlingConfig ? createBundleOptions(bundlingConfig) :
|
||||
null,
|
||||
otherArgs,
|
||||
promise: promise || Promise,
|
||||
});
|
||||
}
|
||||
return defaults;
|
||||
}
|
||||
exports.constructSettings = constructSettings;
|
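A hedged sketch of calling constructSettings with the 'google.fake.v1.ServiceName' config from the comment above; clientConfig is assumed to be that parsed JSON, and the retryNames map uses the gRPC canonical code numbers.

const settings = constructSettings(
    'google.fake.v1.ServiceName',
    clientConfig,                       // the parsed client_config JSON shown above
    {},                                 // no overrides
    {UNAVAILABLE: 14, DEADLINE_EXCEEDED: 4});
// settings.createFoo and settings.publish are CallSettings instances;
// settings.publish.bundleOptions is built from the "bundling" block.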
||||
//# sourceMappingURL=gax.js.map
|
||||
1
express-server/node_modules/google-gax/build/src/gax.js.map
generated
vendored
Normal file
1
express-server/node_modules/google-gax/build/src/gax.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
133
express-server/node_modules/google-gax/build/src/grpc.d.ts
generated
vendored
Normal file
133
express-server/node_modules/google-gax/build/src/grpc.d.ts
generated
vendored
Normal file
@@ -0,0 +1,133 @@
|
||||
/// <reference types="node" />
|
||||
import * as grpcProtoLoaderTypes from '@grpc/proto-loader';
|
||||
import { GoogleAuth, GoogleAuthOptions } from 'google-auth-library';
|
||||
import * as grpcTypes from 'grpc';
|
||||
import { OutgoingHttpHeaders } from 'http';
|
||||
import * as protobuf from 'protobufjs';
|
||||
import * as gax from './gax';
|
||||
export { GrpcObject } from 'grpc';
|
||||
export interface GrpcClientOptions extends GoogleAuthOptions {
|
||||
auth?: GoogleAuth;
|
||||
promise?: PromiseConstructor;
|
||||
grpc?: GrpcModule;
|
||||
}
|
||||
export interface MetadataValue {
|
||||
equals: Function;
|
||||
}
|
||||
export interface Metadata {
|
||||
new (): Metadata;
|
||||
set: (key: {}, value?: {} | null) => void;
|
||||
clone: () => Metadata;
|
||||
value: MetadataValue;
|
||||
get: (key: {}) => {};
|
||||
}
|
||||
export declare type GrpcModule = typeof grpcTypes & {
|
||||
status: {
|
||||
[index: string]: number;
|
||||
};
|
||||
};
|
||||
export interface ClientStubOptions {
|
||||
servicePath: string;
|
||||
port: number;
|
||||
sslCreds: grpcTypes.ChannelCredentials;
|
||||
}
|
||||
export declare class ClientStub extends grpcTypes.Client {
|
||||
[name: string]: Function;
|
||||
}
|
||||
export declare class GrpcClient {
|
||||
auth: GoogleAuth;
|
||||
promise: PromiseConstructor;
|
||||
grpc: GrpcModule;
|
||||
grpcVersion: string;
|
||||
grpcProtoLoader: typeof grpcProtoLoaderTypes;
|
||||
/**
|
||||
 * A class which keeps the gRPC context and its auth configuration.
|
||||
*
|
||||
* @param {Object=} options - The optional parameters. It will be directly
|
||||
* passed to google-auth-library library, so parameters like keyFile or
|
||||
* credentials will be valid.
|
||||
* @param {Object=} options.auth - An instance of google-auth-library.
|
||||
* When specified, this auth instance will be used instead of creating
|
||||
* a new one.
|
||||
* @param {Object=} options.grpc - When specified, this will be used
|
||||
* for the 'grpc' module in this context. By default, it will load the grpc
|
||||
* module in the standard way.
|
||||
* @param {Function=} options.promise - A constructor for a promise that
|
||||
* implements the ES6 specification of promise. If not provided, native
|
||||
* promises will be used.
|
||||
* @constructor
|
||||
*/
|
||||
constructor(options?: GrpcClientOptions);
|
||||
/**
|
||||
 * Creates gRPC credentials, requesting auth data if necessary.
|
||||
* @private
|
||||
* @param {Object} opts - options values for configuring credentials.
|
||||
* @param {Object=} opts.sslCreds - when specified, this is used instead
|
||||
* of default channel credentials.
|
||||
* @return {Promise} The promise which will be resolved to the gRPC credential.
|
||||
*/
|
||||
_getCredentials(opts: ClientStubOptions): Promise<grpcTypes.ChannelCredentials>;
|
||||
/**
|
||||
* Loads the gRPC service from the proto file at the given path and with the
|
||||
* given options.
|
||||
* @param filename The path to the proto file.
|
||||
* @param options Options for loading the proto file.
|
||||
*/
|
||||
loadFromProto(filename: string, options: grpcProtoLoaderTypes.Options): grpcTypes.GrpcObject;
|
||||
/**
|
||||
 * Loads a gRPC proto service from a filename, hooking in googleapis common protos
|
||||
* when necessary.
|
||||
* @param {String} protoPath - The directory to search for the protofile.
|
||||
* @param {String} filename - The filename of the proto to be loaded.
|
||||
* @return {Object<string, *>} The gRPC loaded result (the toplevel namespace
|
||||
* object).
|
||||
*/
|
||||
loadProto(protoPath: string, filename: string): grpcTypes.GrpcObject;
|
||||
static _resolveFile(protoPath: string, filename: string): string;
|
||||
metadataBuilder(headers: OutgoingHttpHeaders): (abTests?: {} | undefined, moreHeaders?: OutgoingHttpHeaders | undefined) => grpcTypes.Metadata;
|
||||
/**
|
||||
* A wrapper of {@link constructSettings} function under the gRPC context.
|
||||
*
|
||||
 * Most parameters are the same as for constructSettings; see that function for details.
|
||||
 * @param {string} serviceName - The fully-qualified name of the service.
|
||||
* @param {Object} clientConfig - A dictionary of the client config.
|
||||
* @param {Object} configOverrides - A dictionary of overriding configs.
|
||||
* @param {Object} headers - A dictionary of additional HTTP header name to
|
||||
* its value.
|
||||
* @return {Object} A mapping of method names to CallSettings.
|
||||
*/
|
||||
constructSettings(serviceName: string, clientConfig: gax.ClientConfig, configOverrides: gax.ClientConfig, headers: OutgoingHttpHeaders): any;
|
||||
/**
|
||||
* Creates a gRPC stub with current gRPC and auth.
|
||||
* @param {function} CreateStub - The constructor function of the stub.
|
||||
* @param {Object} options - The optional arguments to customize
|
||||
 * gRPC connection. These options will also be passed to the constructor of the
|
||||
 * gRPC client.
|
||||
* @param {string} options.servicePath - The name of the server of the service.
|
||||
* @param {number} options.port - The port of the service.
|
||||
* @param {grpcTypes.ClientCredentials=} options.sslCreds - The credentials to be used
|
||||
* to set up gRPC connection.
|
||||
 * @return {Promise} A promise which resolves to a gRPC stub instance.
|
||||
*/
|
||||
createStub(CreateStub: typeof ClientStub, options: ClientStubOptions): Promise<ClientStub>;
|
||||
/**
|
||||
* Creates a 'bytelength' function for a given proto message class.
|
||||
*
|
||||
* See {@link BundleDescriptor} about the meaning of the return value.
|
||||
*
|
||||
* @param {function} message - a constructor function that is generated by
|
||||
* protobuf.js. Assumes 'encoder' field in the message.
|
||||
* @return {function(Object):number} - a function to compute the byte length
|
||||
* for an object.
|
||||
*/
|
||||
static createByteLengthFunction(message: {
|
||||
encode: (obj: {}) => {
|
||||
finish: () => Array<{}>;
|
||||
};
|
||||
}): (obj: {}) => number;
|
||||
}
|
||||
export declare class GoogleProtoFilesRoot extends protobuf.Root {
|
||||
constructor(...args: Array<{}>);
|
||||
resolvePath(originPath: string, includePath: string): string;
|
||||
static _findIncludePath(originPath: string, includePath: string): string;
|
||||
}
|
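A hedged sketch of typical GrpcClient usage; the proto path, filename, service namespace, and host below are placeholders, and sslCreds is deliberately omitted so that default channel credentials are used (in application code GrpcClient is normally obtained from the google-gax package).

const gaxGrpc = new GrpcClient();  // default auth and the bundled grpc module
const protos = gaxGrpc.loadProto('/path/to/protos', 'myservice.proto');
gaxGrpc.createStub(protos.my.service.v1.MyService, {
  servicePath: 'myservice.googleapis.com',
  port: 443,
}).then(stub => {
  // stub methods correspond to the RPCs defined in myservice.proto
});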
||||
308
express-server/node_modules/google-gax/build/src/grpc.js
generated
vendored
Normal file
308
express-server/node_modules/google-gax/build/src/grpc.js
generated
vendored
Normal file
@@ -0,0 +1,308 @@
|
||||
"use strict";
|
||||
/*
|
||||
* Copyright 2016, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
*/
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const grpcProtoLoaderTypes = require("@grpc/proto-loader"); // for types only
|
||||
const fs = require("fs");
|
||||
const google_auth_library_1 = require("google-auth-library");
|
||||
const google_proto_files_1 = require("google-proto-files");
|
||||
const grpcTypes = require("grpc"); // for types only
|
||||
const path = require("path");
|
||||
const protobuf = require("protobufjs");
|
||||
const semver = require("semver");
|
||||
const walk = require("walkdir");
|
||||
const gax = require("./gax");
|
||||
const googleProtoFilesDir = path.normalize(google_proto_files_1.getProtoPath('..'));
|
||||
// INCLUDE_DIRS is passed to @grpc/proto-loader
|
||||
const INCLUDE_DIRS = [];
|
||||
INCLUDE_DIRS.push(googleProtoFilesDir);
|
||||
// COMMON_PROTO_FILES logic is here for protobufjs loads (see
|
||||
// GoogleProtoFilesRoot below)
|
||||
const COMMON_PROTO_DIRS = [
|
||||
// This list of directories is defined here:
|
||||
// https://github.com/googleapis/googleapis/blob/master/gapic/packaging/common_protos.yaml
|
||||
'api',
|
||||
path.join('iam', 'v1'),
|
||||
path.join('logging', 'type'),
|
||||
'longrunning',
|
||||
'protobuf',
|
||||
'rpc',
|
||||
'type',
|
||||
].map(dir => path.join(googleProtoFilesDir, 'google', dir));
|
||||
const COMMON_PROTO_FILES = COMMON_PROTO_DIRS
|
||||
.map(dir => {
|
||||
return walk.sync(dir)
|
||||
.filter(f => path.extname(f) === '.proto')
|
||||
.map(f => path.normalize(f).substring(googleProtoFilesDir.length + 1));
|
||||
})
|
||||
.reduce((a, c) => a.concat(c), []);
|
||||
class ClientStub extends grpcTypes.Client {
|
||||
}
|
||||
exports.ClientStub = ClientStub;
|
||||
class GrpcClient {
|
||||
/**
|
||||
 * A class which keeps the context of gRPC and auth for gRPC clients.
|
||||
*
|
||||
* @param {Object=} options - The optional parameters. It will be directly
|
||||
 * passed to the google-auth-library, so parameters like keyFile or
|
||||
* credentials will be valid.
|
||||
* @param {Object=} options.auth - An instance of google-auth-library.
|
||||
* When specified, this auth instance will be used instead of creating
|
||||
* a new one.
|
||||
* @param {Object=} options.grpc - When specified, this will be used
|
||||
* for the 'grpc' module in this context. By default, it will load the grpc
|
||||
* module in the standard way.
|
||||
* @param {Function=} options.promise - A constructor for a promise that
|
||||
* implements the ES6 specification of promise. If not provided, native
|
||||
* promises will be used.
|
||||
* @constructor
|
||||
*/
|
||||
constructor(options = {}) {
|
||||
this.auth = options.auth || new google_auth_library_1.GoogleAuth(options);
|
||||
this.promise = options.promise || Promise;
|
||||
if ('grpc' in options) {
|
||||
this.grpc = options.grpc;
|
||||
this.grpcVersion = '';
|
||||
}
|
||||
else {
|
||||
// EXPERIMENTAL: If GOOGLE_CLOUD_USE_GRPC_JS is set, use the JS-based
|
||||
// implementation of the gRPC client instead. Requires http2 (Node 8+).
|
||||
if (semver.satisfies(process.version, '8.x') &&
|
||||
process.env.GOOGLE_CLOUD_USE_GRPC_JS) {
|
||||
this.grpc = require('@grpc/grpc-js');
|
||||
this.grpcVersion = require('@grpc/grpc-js/package.json').version;
|
||||
}
|
||||
else {
|
||||
this.grpc = require('grpc');
|
||||
this.grpcVersion = require('grpc/package.json').version;
|
||||
}
|
||||
}
|
||||
this.grpcProtoLoader = require('@grpc/proto-loader');
|
||||
}
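A minimal usage sketch for this constructor (editorial illustration, not part of the vendored file; the key file path is a placeholder):

const { GrpcClient } = require('google-gax');
// Options are forwarded to google-auth-library, so auth parameters such as
// keyFilename or credentials are accepted here.
const gaxGrpc = new GrpcClient({ keyFilename: '/path/to/service-account.json' });
console.log('gRPC implementation version:', gaxGrpc.grpcVersion);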
|
||||
/**
|
||||
 * Creates gRPC credentials, asking for auth data if necessary.
|
||||
* @private
|
||||
* @param {Object} opts - options values for configuring credentials.
|
||||
* @param {Object=} opts.sslCreds - when specified, this is used instead
|
||||
* of default channel credentials.
|
||||
* @return {Promise} The promise which will be resolved to the gRPC credential.
|
||||
*/
|
||||
_getCredentials(opts) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (opts.sslCreds) {
|
||||
return opts.sslCreds;
|
||||
}
|
||||
const grpc = this.grpc;
|
||||
const sslCreds = grpc.credentials.createSsl();
|
||||
const client = yield this.auth.getClient();
|
||||
const credentials = grpc.credentials.combineChannelCredentials(sslCreds, grpc.credentials.createFromGoogleCredential(client));
|
||||
return credentials;
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Loads the gRPC service from the proto file at the given path and with the
|
||||
* given options.
|
||||
* @param filename The path to the proto file.
|
||||
* @param options Options for loading the proto file.
|
||||
*/
|
||||
loadFromProto(filename, options) {
|
||||
const packageDef = grpcProtoLoaderTypes.loadSync(filename, options);
|
||||
return this.grpc.loadPackageDefinition(packageDef);
|
||||
}
|
||||
/**
|
||||
 * Loads a gRPC proto service from a filename, hooking in googleapis common protos
|
||||
* when necessary.
|
||||
 * @param {String} protoPath - The directory to search for the proto file.
|
||||
* @param {String} filename - The filename of the proto to be loaded.
|
||||
* @return {Object<string, *>} The gRPC loaded result (the toplevel namespace
|
||||
* object).
|
||||
*/
|
||||
loadProto(protoPath, filename) {
|
||||
const resolvedPath = GrpcClient._resolveFile(protoPath, filename);
|
||||
const retval = this.grpc.loadObject(protobuf.loadSync(resolvedPath, new GoogleProtoFilesRoot()));
|
||||
return retval;
|
||||
}
|
||||
static _resolveFile(protoPath, filename) {
|
||||
if (fs.existsSync(path.join(protoPath, filename))) {
|
||||
return path.join(protoPath, filename);
|
||||
}
|
||||
else if (COMMON_PROTO_FILES.indexOf(filename) > -1) {
|
||||
return path.join(googleProtoFilesDir, filename);
|
||||
}
|
||||
throw new Error(filename + ' could not be found in ' + protoPath);
|
||||
}
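An illustrative call using the gaxGrpc instance from the sketch above (the directory path is hypothetical): because google/longrunning/operations.proto is one of the bundled common protos, it resolves even when it is absent from the given directory.

const protos = gaxGrpc.loadProto('/tmp/my-protos', 'google/longrunning/operations.proto');
// The returned namespace exposes the service constructor, e.g.
// protos.google.longrunning.Operations.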
|
||||
metadataBuilder(headers) {
|
||||
const Metadata = this.grpc.Metadata;
|
||||
const baseMetadata = new Metadata();
|
||||
// tslint:disable-next-line forin
|
||||
for (const key in headers) {
|
||||
const value = headers[key];
|
||||
if (Array.isArray(value)) {
|
||||
value.forEach(v => baseMetadata.add(key, v));
|
||||
}
|
||||
else {
|
||||
baseMetadata.set(key, `${value}`);
|
||||
}
|
||||
}
|
||||
return function buildMetadata(abTests, moreHeaders) {
|
||||
// TODO: bring the A/B testing info into the metadata.
|
||||
let copied = false;
|
||||
let metadata = baseMetadata;
|
||||
if (moreHeaders) {
|
||||
for (const key in moreHeaders) {
|
||||
if (key.toLowerCase() !== 'x-goog-api-client' &&
|
||||
moreHeaders.hasOwnProperty(key)) {
|
||||
if (!copied) {
|
||||
copied = true;
|
||||
metadata = metadata.clone();
|
||||
}
|
||||
const value = moreHeaders[key];
|
||||
if (Array.isArray(value)) {
|
||||
value.forEach(v => metadata.add(key, v));
|
||||
}
|
||||
else {
|
||||
metadata.set(key, `${value}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return metadata;
|
||||
};
|
||||
}
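A short sketch of the returned builder (editorial; the header values are placeholders). Per-call headers are merged into a clone, so the base Metadata stays untouched:

const buildMetadata = gaxGrpc.metadataBuilder({ 'x-goog-api-client': 'gax/0.22.1' });
const perCallMetadata = buildMetadata(null, { 'x-goog-request-params': 'name=projects/my-project' });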
|
||||
/**
|
||||
* A wrapper of {@link constructSettings} function under the gRPC context.
|
||||
*
|
||||
 * Most parameters are shared with constructSettings; please refer to it for details.
|
||||
 * @param {string} serviceName - The fully-qualified name of the service.
|
||||
* @param {Object} clientConfig - A dictionary of the client config.
|
||||
* @param {Object} configOverrides - A dictionary of overriding configs.
|
||||
 * @param {Object} headers - A dictionary of additional HTTP header names mapped to
|
||||
 * their values.
|
||||
* @return {Object} A mapping of method names to CallSettings.
|
||||
*/
|
||||
constructSettings(serviceName, clientConfig, configOverrides, headers) {
|
||||
return gax.constructSettings(serviceName, clientConfig, configOverrides, this.grpc.status, { metadataBuilder: this.metadataBuilder(headers) }, this.promise);
|
||||
}
|
||||
/**
|
||||
* Creates a gRPC stub with current gRPC and auth.
|
||||
* @param {function} CreateStub - The constructor function of the stub.
|
||||
* @param {Object} options - The optional arguments to customize
|
||||
 * the gRPC connection. These options will also be passed to the constructor of
|
||||
 * the gRPC client.
|
||||
 * @param {string} options.servicePath - The domain name of the service.
|
||||
* @param {number} options.port - The port of the service.
|
||||
* @param {grpcTypes.ClientCredentials=} options.sslCreds - The credentials to be used
|
||||
* to set up gRPC connection.
|
||||
 * @return {Promise} A promise which resolves to a gRPC stub instance.
|
||||
*/
|
||||
// tslint:disable-next-line variable-name
|
||||
createStub(CreateStub, options) {
|
||||
const serviceAddress = options.servicePath + ':' + options.port;
|
||||
return this._getCredentials(options).then(credentials => {
|
||||
const grpcOptions = {};
|
||||
Object.keys(options).forEach(key => {
|
||||
if (key.indexOf('grpc.') === 0) {
|
||||
grpcOptions[key] = options[key];
|
||||
}
|
||||
else if (key.indexOf('grpc_gcp.') === 0) {
|
||||
// This prefix is used to pass additional arguments that aren't
|
||||
// options for grpc. Strip the prefix before passing.
|
||||
const prefixLength = 'grpc_gcp.'.length;
|
||||
grpcOptions[key.substr(prefixLength)] = options[key];
|
||||
}
|
||||
});
|
||||
return new CreateStub(serviceAddress, credentials, grpcOptions);
|
||||
});
|
||||
}
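For illustration only (the stub constructor and option values are placeholders): options prefixed with 'grpc.' are forwarded to the channel unchanged, while 'grpc_gcp.' options are forwarded with the prefix stripped.

gaxGrpc.createStub(protos.google.longrunning.Operations, {
  servicePath: 'longrunning.googleapis.com',
  port: 443,
  'grpc.max_receive_message_length': 4 * 1024 * 1024,
}).then(stub => {
  // stub is a connected gRPC client instance.
});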
|
||||
/**
|
||||
* Creates a 'bytelength' function for a given proto message class.
|
||||
*
|
||||
* See {@link BundleDescriptor} about the meaning of the return value.
|
||||
*
|
||||
* @param {function} message - a constructor function that is generated by
|
||||
 * protobuf.js. Assumes an 'encode' method on the message.
|
||||
* @return {function(Object):number} - a function to compute the byte length
|
||||
* for an object.
|
||||
*/
|
||||
static createByteLengthFunction(message) {
|
||||
return function getByteLength(obj) {
|
||||
return message.encode(obj).finish().length;
|
||||
};
|
||||
}
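For example, given a protobuf.js-generated message class (here a hypothetical LogEntry):

const byteLength = GrpcClient.createByteLengthFunction(LogEntry);
const size = byteLength({ textPayload: 'hello' }); // serialized length in bytes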
|
||||
}
|
||||
exports.GrpcClient = GrpcClient;
|
||||
class GoogleProtoFilesRoot extends protobuf.Root {
|
||||
constructor(...args) {
|
||||
super(...args);
|
||||
}
|
||||
// Causes the loading of an included proto to check if it is a common
|
||||
// proto. If it is a common proto, use the google-proto-files proto.
|
||||
resolvePath(originPath, includePath) {
|
||||
originPath = path.normalize(originPath);
|
||||
includePath = path.normalize(includePath);
|
||||
// Fully qualified paths don't need to be resolved.
|
||||
if (path.isAbsolute(includePath)) {
|
||||
if (!fs.existsSync(includePath)) {
|
||||
throw new Error('The include `' + includePath + '` was not found.');
|
||||
}
|
||||
return includePath;
|
||||
}
|
||||
if (COMMON_PROTO_FILES.indexOf(includePath) > -1) {
|
||||
return path.join(googleProtoFilesDir, includePath);
|
||||
}
|
||||
return GoogleProtoFilesRoot._findIncludePath(originPath, includePath);
|
||||
}
|
||||
static _findIncludePath(originPath, includePath) {
|
||||
originPath = path.normalize(originPath);
|
||||
includePath = path.normalize(includePath);
|
||||
let current = originPath;
|
||||
let found = fs.existsSync(path.join(current, includePath));
|
||||
while (!found && current.length > 0) {
|
||||
current = current.substring(0, current.lastIndexOf(path.sep));
|
||||
found = fs.existsSync(path.join(current, includePath));
|
||||
}
|
||||
if (!found) {
|
||||
throw new Error('The include `' + includePath + '` was not found.');
|
||||
}
|
||||
return path.join(current, includePath);
|
||||
}
|
||||
}
|
||||
exports.GoogleProtoFilesRoot = GoogleProtoFilesRoot;
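A brief sketch of the resolution behaviour (editorial; the origin path is a placeholder): includes that match a bundled common proto resolve to the copy shipped with google-proto-files.

const root = new GoogleProtoFilesRoot();
const resolved = root.resolvePath('/tmp/my-protos/api.proto', 'google/api/annotations.proto');
// resolved points into the google-proto-files package directory.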
|
||||
//# sourceMappingURL=grpc.js.map
|
||||
1
express-server/node_modules/google-gax/build/src/grpc.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
52
express-server/node_modules/google-gax/build/src/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
/**
|
||||
* Copyright 2016, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
 * * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
import { GrpcClient, GrpcClientOptions } from './grpc';
|
||||
import * as operationsClient from './operations_client';
|
||||
import * as routingHeader from './routing_header';
|
||||
export { GoogleAuth, GoogleAuthOptions } from 'google-auth-library';
|
||||
export { createApiCall } from './api_callable';
|
||||
export { BundleDescriptor, BundleExecutor } from './bundling';
|
||||
export { CallOptions, ClientConfig, constructSettings } from './gax';
|
||||
export { GoogleError } from './GoogleError';
|
||||
export { ClientStub, ClientStubOptions, GoogleProtoFilesRoot, GrpcClient, GrpcClientOptions, GrpcModule, GrpcObject, Metadata, MetadataValue } from './grpc';
|
||||
export { LongrunningDescriptor, operation } from './longrunning';
|
||||
export { PageDescriptor } from './paged_iteration';
|
||||
export { PathTemplate } from './path_template';
|
||||
export { StreamDescriptor, StreamType } from './streaming';
|
||||
export { routingHeader };
|
||||
declare function lro(options: GrpcClientOptions): operationsClient.OperationsClientBuilder;
|
||||
declare namespace lro {
|
||||
var SERVICE_ADDRESS: string;
|
||||
var ALL_SCOPES: string[];
|
||||
}
|
||||
export { lro };
|
||||
export declare const createByteLengthFunction: typeof GrpcClient.createByteLengthFunction;
|
||||
export declare const version: any;
|
||||
72
express-server/node_modules/google-gax/build/src/index.js
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
"use strict";
|
||||
/**
|
||||
* Copyright 2016, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
 * * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const grpc_1 = require("./grpc");
|
||||
const operationsClient = require("./operations_client");
|
||||
const routingHeader = require("./routing_header");
|
||||
exports.routingHeader = routingHeader;
|
||||
var google_auth_library_1 = require("google-auth-library");
|
||||
exports.GoogleAuth = google_auth_library_1.GoogleAuth;
|
||||
var api_callable_1 = require("./api_callable");
|
||||
exports.createApiCall = api_callable_1.createApiCall;
|
||||
var bundling_1 = require("./bundling");
|
||||
exports.BundleDescriptor = bundling_1.BundleDescriptor;
|
||||
exports.BundleExecutor = bundling_1.BundleExecutor;
|
||||
var gax_1 = require("./gax");
|
||||
exports.constructSettings = gax_1.constructSettings;
|
||||
var GoogleError_1 = require("./GoogleError");
|
||||
exports.GoogleError = GoogleError_1.GoogleError;
|
||||
var grpc_2 = require("./grpc");
|
||||
exports.ClientStub = grpc_2.ClientStub;
|
||||
exports.GoogleProtoFilesRoot = grpc_2.GoogleProtoFilesRoot;
|
||||
exports.GrpcClient = grpc_2.GrpcClient;
|
||||
var longrunning_1 = require("./longrunning");
|
||||
exports.LongrunningDescriptor = longrunning_1.LongrunningDescriptor;
|
||||
exports.operation = longrunning_1.operation;
|
||||
var paged_iteration_1 = require("./paged_iteration");
|
||||
exports.PageDescriptor = paged_iteration_1.PageDescriptor;
|
||||
var path_template_1 = require("./path_template");
|
||||
exports.PathTemplate = path_template_1.PathTemplate;
|
||||
var streaming_1 = require("./streaming");
|
||||
exports.StreamDescriptor = streaming_1.StreamDescriptor;
|
||||
exports.StreamType = streaming_1.StreamType;
|
||||
function lro(options) {
|
||||
options = Object.assign({ scopes: lro.ALL_SCOPES }, options);
|
||||
const gaxGrpc = new grpc_1.GrpcClient(options);
|
||||
return new operationsClient.OperationsClientBuilder(gaxGrpc);
|
||||
}
|
||||
exports.lro = lro;
|
||||
lro.SERVICE_ADDRESS = operationsClient.SERVICE_ADDRESS;
|
||||
lro.ALL_SCOPES = operationsClient.ALL_SCOPES;
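A usage sketch for the lro helper above (editorial illustration):

const gax = require('google-gax');
// Builds an OperationsClientBuilder backed by a fresh GrpcClient; scopes
// default to lro.ALL_SCOPES when not supplied.
const builder = gax.lro({});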
|
||||
exports.createByteLengthFunction = grpc_1.GrpcClient.createByteLengthFunction;
|
||||
exports.version = require('../../package.json').version;
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
express-server/node_modules/google-gax/build/src/index.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;;AAEH,iCAAqD;AACrD,wDAAwD;AACxD,kDAAkD;AAY1C,sCAAa;AAVrB,2DAAkE;AAA1D,2CAAA,UAAU,CAAA;AAClB,+CAA6C;AAArC,uCAAA,aAAa,CAAA;AACrB,uCAA4D;AAApD,sCAAA,gBAAgB,CAAA;AAAE,oCAAA,cAAc,CAAA;AACxC,6BAAmE;AAAhC,kCAAA,iBAAiB,CAAA;AACpD,6CAA0C;AAAlC,oCAAA,WAAW,CAAA;AACnB,+BAA2J;AAAnJ,4BAAA,UAAU,CAAA;AAAqB,sCAAA,oBAAoB,CAAA;AAAE,4BAAA,UAAU,CAAA;AACvE,6CAA+D;AAAvD,8CAAA,qBAAqB,CAAA;AAAE,kCAAA,SAAS,CAAA;AACxC,qDAAiD;AAAzC,2CAAA,cAAc,CAAA;AACtB,iDAA6C;AAArC,uCAAA,YAAY,CAAA;AACpB,yCAAyD;AAAjD,uCAAA,gBAAgB,CAAA;AAAE,iCAAA,UAAU,CAAA;AAGpC,SAAS,GAAG,CAAC,OAA0B;IACrC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAC,MAAM,EAAE,GAAG,CAAC,UAAU,EAAC,EAAE,OAAO,CAAC,CAAC;IAC3D,MAAM,OAAO,GAAG,IAAI,iBAAU,CAAC,OAAO,CAAC,CAAC;IACxC,OAAO,IAAI,gBAAgB,CAAC,uBAAuB,CAAC,OAAO,CAAC,CAAC;AAC/D,CAAC;AAKO,kBAAG;AAHX,GAAG,CAAC,eAAe,GAAG,gBAAgB,CAAC,eAAe,CAAC;AACvD,GAAG,CAAC,UAAU,GAAG,gBAAgB,CAAC,UAAU,CAAC;AAGhC,QAAA,wBAAwB,GAAG,iBAAU,CAAC,wBAAwB,CAAC;AAC/D,QAAA,OAAO,GAAG,OAAO,CAAC,oBAAoB,CAAC,CAAC,OAAO,CAAC"}
|
||||
163
express-server/node_modules/google-gax/build/src/longrunning.d.ts
generated
vendored
Normal file
@@ -0,0 +1,163 @@
|
||||
/// <reference types="node" />
|
||||
import { EventEmitter } from 'events';
|
||||
import { APICall, APICallback, CancellablePromise, NormalApiCaller, PromiseCanceller } from './api_callable';
|
||||
import { BackoffSettings, CallOptions } from './gax';
|
||||
import { GoogleError } from './GoogleError';
|
||||
import { Metadata } from './grpc';
|
||||
import { OperationsClient } from './operations_client';
|
||||
/**
|
||||
 * A callback to unpack a google.protobuf.Any message.
|
||||
* @callback anyDecoder
|
||||
 * @param {google.protobuf.Any} message - The message to be unpacked.
|
||||
* @return {Object} - The unpacked message.
|
||||
*/
|
||||
export interface AnyDecoder {
|
||||
(message: {}): Metadata;
|
||||
}
|
||||
/**
|
||||
* @callback GetOperationCallback
|
||||
* @param {?Error} error
|
||||
* @param {?Object} result
|
||||
* @param {?Object} metadata
|
||||
* @param {?google.longrunning.Operation} rawResponse
|
||||
*/
|
||||
export interface GetOperationCallback {
|
||||
(err?: Error | null, result?: {}, metadata?: {}, rawResponse?: Operation): void;
|
||||
}
|
||||
export declare class LongrunningDescriptor {
|
||||
operationsClient: OperationsClient;
|
||||
responseDecoder: AnyDecoder;
|
||||
metadataDecoder: AnyDecoder;
|
||||
/**
|
||||
 * Describes the structure of a long-running operation call.
|
||||
*
|
||||
* @property {OperationsClient} operationsClient
|
||||
* @property {anyDecoder} responseDecoder
|
||||
* @property {anyDecoder} metadataDecoder
|
||||
*
|
||||
* @param {OperationsClient} operationsClient - The client used to poll or
|
||||
* cancel an operation.
|
||||
* @param {anyDecoder=} responseDecoder - The decoder to unpack
|
||||
* the response message.
|
||||
* @param {anyDecoder=} metadataDecoder - The decoder to unpack
|
||||
* the metadata message.
|
||||
*
|
||||
* @constructor
|
||||
*/
|
||||
constructor(operationsClient: OperationsClient, responseDecoder: AnyDecoder, metadataDecoder: AnyDecoder);
|
||||
apiCaller(): LongrunningApiCaller;
|
||||
}
|
||||
export declare class LongrunningApiCaller extends NormalApiCaller {
|
||||
longrunningDescriptor: LongrunningDescriptor;
|
||||
/**
|
||||
* Creates an API caller that performs polling on a long running operation.
|
||||
*
|
||||
* @private
|
||||
* @constructor
|
||||
* @param {LongrunningDescriptor} longrunningDescriptor - Holds the
|
||||
* decoders used for unpacking responses and the operationsClient
|
||||
* used for polling the operation.
|
||||
*/
|
||||
constructor(longrunningDescriptor: LongrunningDescriptor);
|
||||
call(apiCall: APICall, argument: {}, settings: CallOptions, canceller: PromiseCanceller): void;
|
||||
_wrapOperation(apiCall: APICall, settings: CallOptions, argument: {}, callback: APICallback): any;
|
||||
}
|
||||
export declare class Operation extends EventEmitter {
|
||||
completeListeners: number;
|
||||
hasActiveListeners: boolean;
|
||||
latestResponse: Operation;
|
||||
longrunningDescriptor: LongrunningDescriptor;
|
||||
result: {} | null;
|
||||
metadata: Metadata | null;
|
||||
backoffSettings: BackoffSettings;
|
||||
_callOptions?: CallOptions;
|
||||
currentCallPromise_?: CancellablePromise;
|
||||
name?: string;
|
||||
done?: boolean;
|
||||
error?: GoogleError;
|
||||
response?: {
|
||||
value: {};
|
||||
};
|
||||
/**
|
||||
 * Wrapper for a google.longrunning.Operation.
|
||||
*
|
||||
* @constructor
|
||||
*
|
||||
* @param {google.longrunning.Operation} grpcOp - The operation to be wrapped.
|
||||
* @param {LongrunningDescriptor} longrunningDescriptor - This defines the
|
||||
* operations service client and unpacking mechanisms for the operation.
|
||||
* @param {BackoffSettings} backoffSettings - The backoff settings used in
|
||||
 * polling the operation.
|
||||
* @param {CallOptions=} callOptions - CallOptions used in making get operation
|
||||
* requests.
|
||||
*/
|
||||
constructor(grpcOp: Operation, longrunningDescriptor: LongrunningDescriptor, backoffSettings: BackoffSettings, callOptions?: CallOptions);
|
||||
/**
|
||||
* Begin listening for events on the operation. This method keeps track of how
|
||||
* many "complete" listeners are registered and removed, making sure polling
|
||||
* is handled automatically.
|
||||
*
|
||||
* As long as there is one active "complete" listener, the connection is open.
|
||||
* When there are no more listeners, the polling stops.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
_listenForEvents(): void;
|
||||
/**
|
||||
* Cancels current polling api call and cancels the operation.
|
||||
*
|
||||
* @return {Promise} the promise of the OperationsClient#cancelOperation api
|
||||
* request.
|
||||
*/
|
||||
cancel(): any;
|
||||
/**
|
||||
* Get the updated status of the operation. If the Operation has previously
|
||||
* completed, this will use the status of the cached completed operation.
|
||||
*
|
||||
* - callback(err): Operation failed
|
||||
* - callback(null, result, metadata, rawResponse): Operation complete
|
||||
* - callback(null, null, metadata, rawResponse): Operation incomplete
|
||||
*
|
||||
* @param {getOperationCallback} callback - Callback to handle the polled
|
||||
* operation result and metadata.
|
||||
* @return {Promise|undefined} - This returns a promise if a callback is not specified.
|
||||
* The promise resolves to an array where the first element is the unpacked
|
||||
* result, the second element is the metadata, and the third element is the
|
||||
* raw response of the api call. The promise rejects if the operation returns
|
||||
* an error.
|
||||
*/
|
||||
getOperation(): Promise<{}>;
|
||||
getOperation(callback: GetOperationCallback): void;
|
||||
_unpackResponse(op: Operation, callback?: GetOperationCallback): void;
|
||||
/**
|
||||
* Poll `getOperation` to check the operation's status. This runs a loop to
|
||||
* ping using the backoff strategy specified at initialization.
|
||||
*
|
||||
* Note: This method is automatically called once a "complete" event handler
|
||||
* is registered on the operation.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
startPolling_(): void;
|
||||
/**
|
||||
* Wraps the `complete` and `error` events in a Promise.
|
||||
*
|
||||
* @return {promise} - Promise that resolves on operation completion and rejects
|
||||
* on operation error.
|
||||
*/
|
||||
promise(): Promise<{}>;
|
||||
}
|
||||
/**
|
||||
* Method used to create Operation objects.
|
||||
*
|
||||
* @constructor
|
||||
*
|
||||
* @param {google.longrunning.Operation} op - The operation to be wrapped.
|
||||
* @param {LongrunningDescriptor} longrunningDescriptor - This defines the
|
||||
* operations service client and unpacking mechanisms for the operation.
|
||||
* @param {BackoffSettings} backoffSettings - The backoff settings used in
|
||||
 * polling the operation.
|
||||
* @param {CallOptions=} callOptions - CallOptions used in making get operation
|
||||
* requests.
|
||||
*/
|
||||
export declare function operation(op: Operation, longrunningDescriptor: LongrunningDescriptor, backoffSettings: BackoffSettings, callOptions?: CallOptions): Operation;
|
||||
325
express-server/node_modules/google-gax/build/src/longrunning.js
generated
vendored
Normal file
@@ -0,0 +1,325 @@
|
||||
"use strict";
|
||||
/*
|
||||
* Copyright 2016, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const events_1 = require("events");
|
||||
const api_callable_1 = require("./api_callable");
|
||||
const gax_1 = require("./gax");
|
||||
const GoogleError_1 = require("./GoogleError");
|
||||
class LongrunningDescriptor {
|
||||
/**
|
||||
 * Describes the structure of a long-running operation call.
|
||||
*
|
||||
* @property {OperationsClient} operationsClient
|
||||
* @property {anyDecoder} responseDecoder
|
||||
* @property {anyDecoder} metadataDecoder
|
||||
*
|
||||
* @param {OperationsClient} operationsClient - The client used to poll or
|
||||
* cancel an operation.
|
||||
* @param {anyDecoder=} responseDecoder - The decoder to unpack
|
||||
* the response message.
|
||||
* @param {anyDecoder=} metadataDecoder - The decoder to unpack
|
||||
* the metadata message.
|
||||
*
|
||||
* @constructor
|
||||
*/
|
||||
constructor(operationsClient, responseDecoder, metadataDecoder) {
|
||||
this.operationsClient = operationsClient;
|
||||
this.responseDecoder = responseDecoder;
|
||||
this.metadataDecoder = metadataDecoder;
|
||||
}
|
||||
apiCaller() {
|
||||
return new LongrunningApiCaller(this);
|
||||
}
|
||||
}
|
||||
exports.LongrunningDescriptor = LongrunningDescriptor;
|
||||
class LongrunningApiCaller extends api_callable_1.NormalApiCaller {
|
||||
/**
|
||||
* Creates an API caller that performs polling on a long running operation.
|
||||
*
|
||||
* @private
|
||||
* @constructor
|
||||
* @param {LongrunningDescriptor} longrunningDescriptor - Holds the
|
||||
* decoders used for unpacking responses and the operationsClient
|
||||
* used for polling the operation.
|
||||
*/
|
||||
constructor(longrunningDescriptor) {
|
||||
super();
|
||||
this.longrunningDescriptor = longrunningDescriptor;
|
||||
}
|
||||
call(apiCall, argument, settings, canceller) {
|
||||
canceller.call((argument, callback) => {
|
||||
return this._wrapOperation(apiCall, settings, argument, callback);
|
||||
}, argument);
|
||||
}
|
||||
_wrapOperation(apiCall, settings, argument, callback) {
|
||||
// TODO: this code defies all logic, and just can't be accurate.
|
||||
// tslint:disable-next-line no-any
|
||||
let backoffSettings = settings.longrunning;
|
||||
if (!backoffSettings) {
|
||||
backoffSettings =
|
||||
gax_1.createBackoffSettings(100, 1.3, 60000, null, null, null, null);
|
||||
}
|
||||
const longrunningDescriptor = this.longrunningDescriptor;
|
||||
return apiCall(argument, (err, rawResponse) => {
|
||||
if (err) {
|
||||
callback(err, null, rawResponse);
|
||||
return;
|
||||
}
|
||||
const operation = new Operation(rawResponse, longrunningDescriptor, backoffSettings, settings);
|
||||
callback(null, operation, rawResponse);
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.LongrunningApiCaller = LongrunningApiCaller;
|
||||
class Operation extends events_1.EventEmitter {
|
||||
/**
|
||||
 * Wrapper for a google.longrunning.Operation.
|
||||
*
|
||||
* @constructor
|
||||
*
|
||||
* @param {google.longrunning.Operation} grpcOp - The operation to be wrapped.
|
||||
* @param {LongrunningDescriptor} longrunningDescriptor - This defines the
|
||||
* operations service client and unpacking mechanisms for the operation.
|
||||
* @param {BackoffSettings} backoffSettings - The backoff settings used in
|
||||
 * polling the operation.
|
||||
* @param {CallOptions=} callOptions - CallOptions used in making get operation
|
||||
* requests.
|
||||
*/
|
||||
constructor(grpcOp, longrunningDescriptor, backoffSettings, callOptions) {
|
||||
super();
|
||||
this.completeListeners = 0;
|
||||
this.hasActiveListeners = false;
|
||||
this.latestResponse = grpcOp;
|
||||
this.longrunningDescriptor = longrunningDescriptor;
|
||||
this.result = null;
|
||||
this.metadata = null;
|
||||
this.backoffSettings = backoffSettings;
|
||||
this._unpackResponse(grpcOp);
|
||||
this._listenForEvents();
|
||||
this._callOptions = callOptions;
|
||||
}
|
||||
/**
|
||||
* Begin listening for events on the operation. This method keeps track of how
|
||||
* many "complete" listeners are registered and removed, making sure polling
|
||||
* is handled automatically.
|
||||
*
|
||||
* As long as there is one active "complete" listener, the connection is open.
|
||||
* When there are no more listeners, the polling stops.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
_listenForEvents() {
|
||||
this.on('newListener', event => {
|
||||
if (event === 'complete') {
|
||||
this.completeListeners++;
|
||||
if (!this.hasActiveListeners) {
|
||||
this.hasActiveListeners = true;
|
||||
this.startPolling_();
|
||||
}
|
||||
}
|
||||
});
|
||||
this.on('removeListener', event => {
|
||||
if (event === 'complete' && --this.completeListeners === 0) {
|
||||
this.hasActiveListeners = false;
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Cancels current polling api call and cancels the operation.
|
||||
*
|
||||
* @return {Promise} the promise of the OperationsClient#cancelOperation api
|
||||
* request.
|
||||
*/
|
||||
cancel() {
|
||||
if (this.currentCallPromise_) {
|
||||
this.currentCallPromise_.cancel();
|
||||
}
|
||||
const operationsClient = this.longrunningDescriptor.operationsClient;
|
||||
return operationsClient.cancelOperation({ name: this.latestResponse.name });
|
||||
}
|
||||
getOperation(callback) {
|
||||
const self = this;
|
||||
const operationsClient = this.longrunningDescriptor.operationsClient;
|
||||
function promisifyResponse() {
|
||||
if (!callback) {
|
||||
// tslint:disable-next-line variable-name
|
||||
const PromiseCtor = self._callOptions.promise;
|
||||
return new PromiseCtor((resolve, reject) => {
|
||||
if (self.latestResponse.error) {
|
||||
const error = new GoogleError_1.GoogleError(self.latestResponse.error.message);
|
||||
error.code = self.latestResponse.error.code;
|
||||
reject(error);
|
||||
}
|
||||
else {
|
||||
resolve([self.result, self.metadata, self.latestResponse]);
|
||||
}
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (this.latestResponse.done) {
|
||||
this._unpackResponse(this.latestResponse, callback);
|
||||
return promisifyResponse();
|
||||
}
|
||||
this.currentCallPromise_ = operationsClient.getOperation({ name: this.latestResponse.name }, this._callOptions);
|
||||
const noCallbackPromise = this.currentCallPromise_.then(responses => {
|
||||
self.latestResponse = responses[0];
|
||||
self._unpackResponse(responses[0], callback);
|
||||
return promisifyResponse();
|
||||
});
|
||||
if (!callback) {
|
||||
return noCallbackPromise;
|
||||
}
|
||||
}
|
||||
_unpackResponse(op, callback) {
|
||||
const responseDecoder = this.longrunningDescriptor.responseDecoder;
|
||||
const metadataDecoder = this.longrunningDescriptor.metadataDecoder;
|
||||
let response;
|
||||
let metadata;
|
||||
if (op.done) {
|
||||
if (op.result === 'error') {
|
||||
const error = new GoogleError_1.GoogleError(op.error.message);
|
||||
error.code = op.error.code;
|
||||
if (callback) {
|
||||
callback(error);
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (responseDecoder && op.response) {
|
||||
response = responseDecoder(op.response.value);
|
||||
this.result = response;
|
||||
}
|
||||
}
|
||||
if (metadataDecoder && op.metadata) {
|
||||
metadata = metadataDecoder(op.metadata.value);
|
||||
this.metadata = metadata;
|
||||
}
|
||||
if (callback) {
|
||||
callback(null, response, metadata, op);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Poll `getOperation` to check the operation's status. This runs a loop to
|
||||
* ping using the backoff strategy specified at initialization.
|
||||
*
|
||||
* Note: This method is automatically called once a "complete" event handler
|
||||
* is registered on the operation.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
startPolling_() {
|
||||
const self = this;
|
||||
let now = new Date();
|
||||
const delayMult = this.backoffSettings.retryDelayMultiplier;
|
||||
const maxDelay = this.backoffSettings.maxRetryDelayMillis;
|
||||
let delay = this.backoffSettings.initialRetryDelayMillis;
|
||||
let deadline = Infinity;
|
||||
if (this.backoffSettings.totalTimeoutMillis) {
|
||||
deadline = now.getTime() + this.backoffSettings.totalTimeoutMillis;
|
||||
}
|
||||
let previousMetadataBytes;
|
||||
if (this.latestResponse.metadata) {
|
||||
previousMetadataBytes = this.latestResponse.metadata.value;
|
||||
}
|
||||
function emit() {
|
||||
self.emit.apply(self, Array.prototype.slice.call(arguments, 0));
|
||||
}
|
||||
function retry() {
|
||||
if (!self.hasActiveListeners) {
|
||||
return;
|
||||
}
|
||||
if (now.getTime() >= deadline) {
|
||||
setImmediate(emit, 'error', new Error('Total timeout exceeded before ' +
|
||||
'any response was received'));
|
||||
return;
|
||||
}
|
||||
self.getOperation((err, result, metadata, rawResponse) => {
|
||||
if (err) {
|
||||
setImmediate(emit, 'error', err);
|
||||
return;
|
||||
}
|
||||
if (!result) {
|
||||
if (rawResponse.metadata &&
|
||||
(!previousMetadataBytes ||
|
||||
!rawResponse.metadata.value.equals(previousMetadataBytes))) {
|
||||
setImmediate(emit, 'progress', metadata, rawResponse);
|
||||
previousMetadataBytes = rawResponse.metadata.value;
|
||||
}
|
||||
setTimeout(() => {
|
||||
now = new Date();
|
||||
delay = Math.min(delay * delayMult, maxDelay);
|
||||
retry();
|
||||
}, delay);
|
||||
return;
|
||||
}
|
||||
setImmediate(emit, 'complete', result, metadata, rawResponse);
|
||||
});
|
||||
}
|
||||
retry();
|
||||
}
|
||||
/**
|
||||
* Wraps the `complete` and `error` events in a Promise.
|
||||
*
|
||||
* @return {promise} - Promise that resolves on operation completion and rejects
|
||||
* on operation error.
|
||||
*/
|
||||
promise() {
|
||||
const self = this;
|
||||
// tslint:disable-next-line variable-name
|
||||
const PromiseCtor = this._callOptions.promise;
|
||||
return new PromiseCtor((resolve, reject) => {
|
||||
self.on('error', reject)
|
||||
.on('complete', (result, metadata, rawResponse) => {
|
||||
resolve([result, metadata, rawResponse]);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.Operation = Operation;
|
||||
/**
|
||||
* Method used to create Operation objects.
|
||||
*
|
||||
* @constructor
|
||||
*
|
||||
* @param {google.longrunning.Operation} op - The operation to be wrapped.
|
||||
* @param {LongrunningDescriptor} longrunningDescriptor - This defines the
|
||||
* operations service client and unpacking mechanisms for the operation.
|
||||
* @param {BackoffSettings} backoffSettings - The backoff settings used in
|
||||
 * polling the operation.
|
||||
* @param {CallOptions=} callOptions - CallOptions used in making get operation
|
||||
* requests.
|
||||
*/
|
||||
function operation(op, longrunningDescriptor, backoffSettings, callOptions) {
|
||||
return new Operation(op, longrunningDescriptor, backoffSettings, callOptions);
|
||||
}
|
||||
exports.operation = operation;
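An illustrative sketch of consuming the wrapper (editorial; rawOperation, descriptor, backoff, and callOptions would come from a long-running API call and its settings):

const op = operation(rawOperation, descriptor, backoff, callOptions);
op.on('progress', metadata => console.log('still running', metadata));
op.promise().then(([result, metadata, rawResponse]) => console.log('done', result));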
|
||||
//# sourceMappingURL=longrunning.js.map
|
||||
1
express-server/node_modules/google-gax/build/src/longrunning.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
289
express-server/node_modules/google-gax/build/src/operations_client.d.ts
generated
vendored
Normal file
@@ -0,0 +1,289 @@
|
||||
export declare const SERVICE_ADDRESS = "longrunning.googleapis.com";
|
||||
/**
|
||||
* The scopes needed to make gRPC calls to all of the methods defined in
|
||||
* this service.
|
||||
*/
|
||||
export declare const ALL_SCOPES: string[];
|
||||
/**
|
||||
* Manages long-running operations with an API service.
|
||||
*
|
||||
 * When an API method normally takes a long time to complete, it can be designed
|
||||
* to return {@link Operation} to the client, and the client can use this
|
||||
* interface to receive the real response asynchronously by polling the
|
||||
* operation resource, or pass the operation resource to another API (such as
|
||||
* Google Cloud Pub/Sub API) to receive the response. Any API service that
|
||||
* returns long-running operations should implement the `Operations` interface
|
||||
* so developers can have a consistent client experience.
|
||||
*
|
||||
* This will be created through a builder function which can be obtained by the
|
||||
* module. See the following example of how to initialize the module and how to
|
||||
 * access the builder.
|
||||
* @see {@link operationsClient}
|
||||
*
|
||||
* @class
|
||||
*/
|
||||
export declare class OperationsClient {
|
||||
auth: any;
|
||||
constructor(gaxGrpc: any, grpcClients: any, opts: any);
|
||||
/**
|
||||
* Get the project ID used by this class.
|
||||
 * @param {function(Error, string)} callback - the callback to be called with
|
||||
* the current project Id.
|
||||
*/
|
||||
getProjectId(callback: (err: Error | null, projectId?: string) => void): any;
|
||||
/**
|
||||
* Gets the latest state of a long-running operation. Clients can use this
|
||||
* method to poll the operation result at intervals as recommended by the API
|
||||
* service.
|
||||
*
|
||||
* @param {Object} request
|
||||
* The request object that will be sent.
|
||||
* @param {string} request.name
|
||||
* The name of the operation resource.
|
||||
* @param {Object=} options
|
||||
* Optional parameters. You can override the default settings for this call,
|
||||
 * e.g. timeout, retries, pagination, etc. See [gax.CallOptions]{@link
|
||||
* https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the
|
||||
* details.
|
||||
* @param {function(?Error, ?Object)=} callback
|
||||
* The function which will be called with the result of the API call.
|
||||
*
|
||||
* The second parameter to the callback is an object representing
|
||||
* [google.longrunning.Operation]{@link
|
||||
* external:"google.longrunning.Operation"}.
|
||||
* @return {Promise} - The promise which resolves to an array.
|
||||
* The first element of the array is an object representing
|
||||
* [google.longrunning.Operation]{@link
|
||||
* external:"google.longrunning.Operation"}. The promise has a method named
|
||||
* "cancel" which cancels the ongoing API call.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var client = longrunning.operationsClient();
|
||||
* var name = '';
|
||||
* client.getOperation({name: name}).then(function(responses) {
|
||||
* var response = responses[0];
|
||||
* // doThingsWith(response)
|
||||
* }).catch(function(err) {
|
||||
* console.error(err);
|
||||
* });
|
||||
*/
|
||||
getOperation(request: {}, options: {}, callback?: any): any;
|
||||
/**
|
||||
* Lists operations that match the specified filter in the request. If the
|
||||
* server doesn't support this method, it returns `UNIMPLEMENTED`.
|
||||
*
|
||||
* NOTE: the `name` binding below allows API services to override the binding
|
||||
* to use different resource name schemes.
|
||||
*
|
||||
* @param {Object} request
|
||||
* The request object that will be sent.
|
||||
* @param {string} request.name
|
||||
* The name of the operation collection.
|
||||
* @param {string} request.filter
|
||||
* The standard list filter.
|
||||
* @param {number=} request.pageSize
|
||||
* The maximum number of resources contained in the underlying API
|
||||
* response. If page streaming is performed per-resource, this
|
||||
* parameter does not affect the return value. If page streaming is
|
||||
* performed per-page, this determines the maximum number of
|
||||
* resources in a page.
|
||||
* @param {Object=} options
|
||||
* Optional parameters. You can override the default settings for this call,
|
||||
 * e.g. timeout, retries, pagination, etc. See [gax.CallOptions]{@link
|
||||
* https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the
|
||||
* details.
|
||||
* @param {function(?Error, ?Array, ?Object, ?Object)=} callback
|
||||
* The function which will be called with the result of the API call.
|
||||
*
|
||||
* The second parameter to the callback is Array of
|
||||
* [google.longrunning.Operation]{@link
|
||||
* external:"google.longrunning.Operation"}.
|
||||
*
|
||||
* When autoPaginate: false is specified through options, it contains the
|
||||
* result in a single response. If the response indicates the next page
|
||||
* exists, the third parameter is set to be used for the next request object.
|
||||
* The fourth parameter keeps the raw response object of an object
|
||||
* representing [google.longrunning.ListOperationsResponse]{@link
|
||||
* external:"google.longrunning.ListOperationsResponse"}.
|
||||
* @return {Promise} - The promise which resolves to an array.
|
||||
* The first element of the array is Array of
|
||||
* [google.longrunning.Operation]{@link
|
||||
* external:"google.longrunning.Operation"}.
|
||||
*
|
||||
* When autoPaginate: false is specified through options, the array has
|
||||
* three elements. The first element is Array of
|
||||
* [google.longrunning.Operation]{@link
|
||||
* external:"google.longrunning.Operation"} in a single response. The second
|
||||
* element is the next request object if the response indicates the next page
|
||||
* exists, or null. The third element is an object representing
|
||||
* [google.longrunning.ListOperationsResponse]{@link
|
||||
* external:"google.longrunning.ListOperationsResponse"}.
|
||||
*
|
||||
* The promise has a method named "cancel" which cancels the ongoing API
|
||||
* call.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var client = longrunning.operationsClient();
|
||||
* var name = '';
|
||||
* var filter = '';
|
||||
* var request = {
|
||||
* name: name,
|
||||
* filter: filter
|
||||
* };
|
||||
* // Iterate over all elements.
|
||||
* client.listOperations(request).then(function(responses) {
|
||||
* var resources = responses[0];
|
||||
* for (var i = 0; i < resources.length; ++i) {
|
||||
* // doThingsWith(resources[i])
|
||||
* }
|
||||
* }).catch(function(err) {
|
||||
* console.error(err);
|
||||
* });
|
||||
*
|
||||
* // Or obtain the paged response.
|
||||
* var options = {autoPaginate: false};
|
||||
* function callback(responses) {
|
||||
* // The actual resources in a response.
|
||||
* var resources = responses[0];
|
||||
* // The next request if the response shows there's more responses.
|
||||
* var nextRequest = responses[1];
|
||||
* // The actual response object, if necessary.
|
||||
* // var rawResponse = responses[2];
|
||||
* for (var i = 0; i < resources.length; ++i) {
|
||||
* // doThingsWith(resources[i]);
|
||||
* }
|
||||
* if (nextRequest) {
|
||||
* // Fetch the next page.
|
||||
* return client.listOperations(nextRequest, options).then(callback);
|
||||
* }
|
||||
* }
|
||||
* client.listOperations(request, options)
|
||||
* .then(callback)
|
||||
* .catch(function(err) {
|
||||
* console.error(err);
|
||||
* });
|
||||
*/
|
||||
listOperations(request: any, options: any, callback: any): any;
|
||||
/**
|
||||
* Equivalent to {@link listOperations}, but returns a NodeJS Stream object.
|
||||
*
|
||||
* This fetches the paged responses for {@link listOperations} continuously
|
||||
* and invokes the callback registered for 'data' event for each element in
|
||||
* the responses.
|
||||
*
|
||||
 * The returned object has an 'end' method to call when no more elements are required.
|
||||
*
|
||||
 * The autoPaginate option will be ignored.
|
||||
*
|
||||
* @see {@link https://nodejs.org/api/stream.html}
|
||||
*
|
||||
* @param {Object} request
|
||||
* The request object that will be sent.
|
||||
* @param {string} request.name
|
||||
* The name of the operation collection.
|
||||
* @param {string} request.filter
|
||||
* The standard list filter.
|
||||
* @param {number=} request.pageSize
|
||||
* The maximum number of resources contained in the underlying API
|
||||
* response. If page streaming is performed per-resource, this
|
||||
* parameter does not affect the return value. If page streaming is
|
||||
* performed per-page, this determines the maximum number of
|
||||
* resources in a page.
|
||||
* @param {Object=} options
|
||||
* Optional parameters. You can override the default settings for this call,
|
||||
 * e.g. timeout, retries, pagination, etc. See [gax.CallOptions]{@link
|
||||
* https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the
|
||||
* details.
|
||||
* @return {Stream}
|
||||
* An object stream which emits an object representing
|
||||
* [google.longrunning.Operation]{@link
|
||||
* external:"google.longrunning.Operation"} on 'data' event.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var client = longrunning.operationsClient();
|
||||
* var name = '';
|
||||
* var filter = '';
|
||||
* var request = {
|
||||
* name: name,
|
||||
* filter: filter
|
||||
* };
|
||||
* client.listOperationsStream(request).on('data', function(element) {
|
||||
* // doThingsWith(element)
|
||||
* }).on('error', function(err) {
|
||||
* console.error(err);
|
||||
* });
|
||||
*/
|
||||
listOperationsStream(request: any, options: any): any;
|
||||
/**
|
||||
* Starts asynchronous cancellation on a long-running operation. The server
|
||||
* makes a best effort to cancel the operation, but success is not
|
||||
* guaranteed. If the server doesn't support this method, it returns
|
||||
* `google.rpc.Code.UNIMPLEMENTED`. Clients can use
|
||||
* {@link Operations.GetOperation} or
|
||||
* other methods to check whether the cancellation succeeded or whether the
|
||||
* operation completed despite cancellation. On successful cancellation,
|
||||
* the operation is not deleted; instead, it becomes an operation with
|
||||
* an {@link Operation.error} value with a {@link google.rpc.Status.code} of
|
||||
* 1, corresponding to `Code.CANCELLED`.
|
||||
*
|
||||
* @param {Object} request
|
||||
* The request object that will be sent.
|
||||
* @param {string} request.name
|
||||
* The name of the operation resource to be cancelled.
|
||||
* @param {Object=} options
|
||||
* Optional parameters. You can override the default settings for this call,
|
||||
 * e.g. timeout, retries, pagination, etc. See [gax.CallOptions]{@link
|
||||
* https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the
|
||||
* details.
|
||||
* @param {function(?Error)=} callback
|
||||
* The function which will be called with the result of the API call.
|
||||
* @return {Promise} - The promise which resolves when API call finishes.
|
||||
* The promise has a method named "cancel" which cancels the ongoing API
|
||||
* call.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var client = longrunning.operationsClient();
|
||||
* var name = '';
|
||||
* client.cancelOperation({name: name}).catch(function(err) {
|
||||
* console.error(err);
|
||||
* });
|
||||
*/
|
||||
cancelOperation(request: any, options?: any, callback?: any): any;
|
||||
/**
|
||||
* Deletes a long-running operation. This method indicates that the client is
|
||||
* no longer interested in the operation result. It does not cancel the
|
||||
* operation. If the server doesn't support this method, it returns
|
||||
* `google.rpc.Code.UNIMPLEMENTED`.
|
||||
*
|
||||
* @param {Object} request
|
||||
* The request object that will be sent.
|
||||
* @param {string} request.name
|
||||
* The name of the operation resource to be deleted.
|
||||
* @param {Object=} options
|
||||
* Optional parameters. You can override the default settings for this call,
|
||||
 * e.g. timeout, retries, pagination, etc. See [gax.CallOptions]{@link
|
||||
* https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the
|
||||
* details.
|
||||
* @param {function(?Error)=} callback
|
||||
* The function which will be called with the result of the API call.
|
||||
* @return {Promise} - The promise which resolves when API call finishes.
|
||||
* The promise has a method named "cancel" which cancels the ongoing API
|
||||
* call.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var client = longrunning.operationsClient();
|
||||
* var name = '';
|
||||
* client.deleteOperation({name: name}).catch(function(err) {
|
||||
* console.error(err);
|
||||
* });
|
||||
*/
|
||||
deleteOperation(request: any, options: any, callback: any): any;
|
||||
}
|
||||
export declare class OperationsClientBuilder {
|
||||
constructor(gaxGrpc: any);
|
||||
}
|
||||
426
express-server/node_modules/google-gax/build/src/operations_client.js
generated
vendored
Normal file
@@ -0,0 +1,426 @@
|
||||
"use strict";
|
||||
/*
|
||||
* Copyright 2016 Google Inc. All rights reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* EDITING INSTRUCTIONS
|
||||
* This file was generated from the file
|
||||
* https://github.com/googleapis/googleapis/blob/master/google/longrunning/operations.proto,
|
||||
* and updates to that file get reflected here through a refresh process.
|
||||
* For the short term, the refresh process will only be runnable by Google
|
||||
* engineers.
|
||||
*
|
||||
* The only allowed edits are to method and file documentation. A 3-way
|
||||
* merge preserves those additions if the generated source changes.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const google_proto_files_1 = require("google-proto-files");
|
||||
const apiCallable = require("./api_callable");
|
||||
const gax = require("./gax");
|
||||
const pagedIteration = require("./paged_iteration");
|
||||
const pathTemplate = require("./path_template");
|
||||
const configData = require('./operations_client_config');
|
||||
Object.assign(gax, apiCallable);
|
||||
Object.assign(gax, pathTemplate);
|
||||
Object.assign(gax, pagedIteration);
|
||||
exports.SERVICE_ADDRESS = 'longrunning.googleapis.com';
|
||||
const DEFAULT_SERVICE_PORT = 443;
|
||||
const CODE_GEN_NAME_VERSION = 'gapic/0.7.1';
|
||||
const PAGE_DESCRIPTORS = {
|
||||
listOperations: new gax['PageDescriptor']('pageToken', 'nextPageToken', 'operations'),
|
||||
};
|
||||
/**
|
||||
* The scopes needed to make gRPC calls to all of the methods defined in
|
||||
* this service.
|
||||
*/
|
||||
exports.ALL_SCOPES = [];
|
||||
/**
|
||||
* Manages long-running operations with an API service.
|
||||
*
|
||||
* When an API method normally takes a long time to complete, it can be designed
|
||||
* to return {@link Operation} to the client, and the client can use this
|
||||
* interface to receive the real response asynchronously by polling the
|
||||
* operation resource, or pass the operation resource to another API (such as
|
||||
* Google Cloud Pub/Sub API) to receive the response. Any API service that
|
||||
* returns long-running operations should implement the `Operations` interface
|
||||
* so developers can have a consistent client experience.
|
||||
*
|
||||
* This will be created through a builder function which can be obtained by the
|
||||
* module. See the following example of how to initialize the module and how to
|
||||
* access the builder.
|
||||
* @see {@link operationsClient}
|
||||
*
|
||||
* @class
|
||||
*/
|
||||
class OperationsClient {
|
||||
constructor(gaxGrpc, grpcClients, opts) {
|
||||
opts = Object.assign({
|
||||
servicePath: exports.SERVICE_ADDRESS,
|
||||
port: DEFAULT_SERVICE_PORT,
|
||||
clientConfig: {},
|
||||
}, opts);
|
||||
const googleApiClient = ['gl-node/' + process.versions.node];
|
||||
if (opts.libName && opts.libVersion) {
|
||||
googleApiClient.push(opts.libName + '/' + opts.libVersion);
|
||||
}
|
||||
googleApiClient.push(CODE_GEN_NAME_VERSION, 'gax/' + gax['version'], 'grpc/' + gaxGrpc.grpcVersion);
|
||||
const defaults = gaxGrpc.constructSettings('google.longrunning.Operations', configData, opts.clientConfig, { 'x-goog-api-client': googleApiClient.join(' ') });
|
||||
const self = this;
|
||||
this.auth = gaxGrpc.auth;
|
||||
const operationsStub = gaxGrpc.createStub(grpcClients.google.longrunning.Operations, opts);
|
||||
const operationsStubMethods = [
|
||||
'getOperation',
|
||||
'listOperations',
|
||||
'cancelOperation',
|
||||
'deleteOperation',
|
||||
];
|
||||
operationsStubMethods.forEach(methodName => {
|
||||
self['_' + methodName] = gax['createApiCall'](operationsStub.then(operationsStub => {
|
||||
return (...args) => {
|
||||
return operationsStub[methodName].apply(operationsStub, args);
|
||||
};
|
||||
}), defaults[methodName], PAGE_DESCRIPTORS[methodName]);
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Get the project ID used by this class.
|
||||
* @param {function(Error, string)} callback - the callback to be called with
|
||||
* the current project Id.
|
||||
*/
|
||||
getProjectId(callback) {
|
||||
return this.auth.getProjectId(callback);
|
||||
}
|
||||
// Service calls
|
||||
/**
|
||||
* Gets the latest state of a long-running operation. Clients can use this
|
||||
* method to poll the operation result at intervals as recommended by the API
|
||||
* service.
|
||||
*
|
||||
* @param {Object} request
|
||||
* The request object that will be sent.
|
||||
* @param {string} request.name
|
||||
* The name of the operation resource.
|
||||
* @param {Object=} options
|
||||
* Optional parameters. You can override the default settings for this call,
|
||||
* e.g., timeout, retries, pagination, etc. See [gax.CallOptions]{@link
|
||||
* https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the
|
||||
* details.
|
||||
* @param {function(?Error, ?Object)=} callback
|
||||
* The function which will be called with the result of the API call.
|
||||
*
|
||||
* The second parameter to the callback is an object representing
|
||||
* [google.longrunning.Operation]{@link
|
||||
* external:"google.longrunning.Operation"}.
|
||||
* @return {Promise} - The promise which resolves to an array.
|
||||
* The first element of the array is an object representing
|
||||
* [google.longrunning.Operation]{@link
|
||||
* external:"google.longrunning.Operation"}. The promise has a method named
|
||||
* "cancel" which cancels the ongoing API call.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var client = longrunning.operationsClient();
|
||||
* var name = '';
|
||||
* client.getOperation({name: name}).then(function(responses) {
|
||||
* var response = responses[0];
|
||||
* // doThingsWith(response)
|
||||
* }).catch(function(err) {
|
||||
* console.error(err);
|
||||
* });
|
||||
*/
|
||||
getOperation(request, options, callback) {
|
||||
if (options instanceof Function && callback === undefined) {
|
||||
callback = options;
|
||||
options = {};
|
||||
}
|
||||
if (options === undefined) {
|
||||
options = {};
|
||||
}
|
||||
return this['_getOperation'](request, options, callback);
|
||||
}
|
||||
/**
|
||||
* Lists operations that match the specified filter in the request. If the
|
||||
* server doesn't support this method, it returns `UNIMPLEMENTED`.
|
||||
*
|
||||
* NOTE: the `name` binding below allows API services to override the binding
|
||||
* to use different resource name schemes.
|
||||
*
|
||||
* @param {Object} request
|
||||
* The request object that will be sent.
|
||||
* @param {string} request.name
|
||||
* The name of the operation collection.
|
||||
* @param {string} request.filter
|
||||
* The standard list filter.
|
||||
* @param {number=} request.pageSize
|
||||
* The maximum number of resources contained in the underlying API
|
||||
* response. If page streaming is performed per-resource, this
|
||||
* parameter does not affect the return value. If page streaming is
|
||||
* performed per-page, this determines the maximum number of
|
||||
* resources in a page.
|
||||
* @param {Object=} options
|
||||
* Optional parameters. You can override the default settings for this call,
|
||||
* e.g., timeout, retries, pagination, etc. See [gax.CallOptions]{@link
|
||||
* https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the
|
||||
* details.
|
||||
* @param {function(?Error, ?Array, ?Object, ?Object)=} callback
|
||||
* The function which will be called with the result of the API call.
|
||||
*
|
||||
* The second parameter to the callback is Array of
|
||||
* [google.longrunning.Operation]{@link
|
||||
* external:"google.longrunning.Operation"}.
|
||||
*
|
||||
* When autoPaginate: false is specified through options, it contains the
|
||||
* result in a single response. If the response indicates the next page
|
||||
* exists, the third parameter is set to be used for the next request object.
|
||||
* The fourth parameter keeps the raw response object of an object
|
||||
* representing [google.longrunning.ListOperationsResponse]{@link
|
||||
* external:"google.longrunning.ListOperationsResponse"}.
|
||||
* @return {Promise} - The promise which resolves to an array.
|
||||
* The first element of the array is Array of
|
||||
* [google.longrunning.Operation]{@link
|
||||
* external:"google.longrunning.Operation"}.
|
||||
*
|
||||
* When autoPaginate: false is specified through options, the array has
|
||||
* three elements. The first element is Array of
|
||||
* [google.longrunning.Operation]{@link
|
||||
* external:"google.longrunning.Operation"} in a single response. The second
|
||||
* element is the next request object if the response indicates the next page
|
||||
* exists, or null. The third element is an object representing
|
||||
* [google.longrunning.ListOperationsResponse]{@link
|
||||
* external:"google.longrunning.ListOperationsResponse"}.
|
||||
*
|
||||
* The promise has a method named "cancel" which cancels the ongoing API
|
||||
* call.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var client = longrunning.operationsClient();
|
||||
* var name = '';
|
||||
* var filter = '';
|
||||
* var request = {
|
||||
* name: name,
|
||||
* filter: filter
|
||||
* };
|
||||
* // Iterate over all elements.
|
||||
* client.listOperations(request).then(function(responses) {
|
||||
* var resources = responses[0];
|
||||
* for (var i = 0; i < resources.length; ++i) {
|
||||
* // doThingsWith(resources[i])
|
||||
* }
|
||||
* }).catch(function(err) {
|
||||
* console.error(err);
|
||||
* });
|
||||
*
|
||||
* // Or obtain the paged response.
|
||||
* var options = {autoPaginate: false};
|
||||
* function callback(responses) {
|
||||
* // The actual resources in a response.
|
||||
* var resources = responses[0];
|
||||
* // The next request if the response shows there are more responses.
|
||||
* var nextRequest = responses[1];
|
||||
* // The actual response object, if necessary.
|
||||
* // var rawResponse = responses[2];
|
||||
* for (var i = 0; i < resources.length; ++i) {
|
||||
* // doThingsWith(resources[i]);
|
||||
* }
|
||||
* if (nextRequest) {
|
||||
* // Fetch the next page.
|
||||
* return client.listOperations(nextRequest, options).then(callback);
|
||||
* }
|
||||
* }
|
||||
* client.listOperations(request, options)
|
||||
* .then(callback)
|
||||
* .catch(function(err) {
|
||||
* console.error(err);
|
||||
* });
|
||||
*/
|
||||
listOperations(request, options, callback) {
|
||||
if (options instanceof Function && callback === undefined) {
|
||||
callback = options;
|
||||
options = {};
|
||||
}
|
||||
if (options === undefined) {
|
||||
options = {};
|
||||
}
|
||||
return this['_listOperations'](request, options, callback);
|
||||
}
|
||||
/**
|
||||
* Equivalent to {@link listOperations}, but returns a NodeJS Stream object.
|
||||
*
|
||||
* This fetches the paged responses for {@link listOperations} continuously
|
||||
* and invokes the callback registered for 'data' event for each element in
|
||||
* the responses.
|
||||
*
|
||||
* The returned object has an 'end' method, which can be called when no more
* elements are required.
|
||||
*
|
||||
* The autoPaginate option will be ignored.
|
||||
*
|
||||
* @see {@link https://nodejs.org/api/stream.html}
|
||||
*
|
||||
* @param {Object} request
|
||||
* The request object that will be sent.
|
||||
* @param {string} request.name
|
||||
* The name of the operation collection.
|
||||
* @param {string} request.filter
|
||||
* The standard list filter.
|
||||
* @param {number=} request.pageSize
|
||||
* The maximum number of resources contained in the underlying API
|
||||
* response. If page streaming is performed per-resource, this
|
||||
* parameter does not affect the return value. If page streaming is
|
||||
* performed per-page, this determines the maximum number of
|
||||
* resources in a page.
|
||||
* @param {Object=} options
|
||||
* Optional parameters. You can override the default settings for this call,
|
||||
* e.g., timeout, retries, pagination, etc. See [gax.CallOptions]{@link
|
||||
* https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the
|
||||
* details.
|
||||
* @return {Stream}
|
||||
* An object stream which emits an object representing
|
||||
* [google.longrunning.Operation]{@link
|
||||
* external:"google.longrunning.Operation"} on 'data' event.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var client = longrunning.operationsClient();
|
||||
* var name = '';
|
||||
* var filter = '';
|
||||
* var request = {
|
||||
* name: name,
|
||||
* filter: filter
|
||||
* };
|
||||
* client.listOperationsStream(request).on('data', function(element) {
|
||||
* // doThingsWith(element)
|
||||
* }).on('error', function(err) {
|
||||
* console.error(err);
|
||||
* });
|
||||
*/
|
||||
listOperationsStream(request, options) {
|
||||
if (options === undefined) {
|
||||
options = {};
|
||||
}
|
||||
return PAGE_DESCRIPTORS.listOperations.createStream(this['_listOperations'], request, options);
|
||||
}
|
||||
/**
|
||||
* Starts asynchronous cancellation on a long-running operation. The server
|
||||
* makes a best effort to cancel the operation, but success is not
|
||||
* guaranteed. If the server doesn't support this method, it returns
|
||||
* `google.rpc.Code.UNIMPLEMENTED`. Clients can use
|
||||
* {@link Operations.GetOperation} or
|
||||
* other methods to check whether the cancellation succeeded or whether the
|
||||
* operation completed despite cancellation. On successful cancellation,
|
||||
* the operation is not deleted; instead, it becomes an operation with
|
||||
* an {@link Operation.error} value with a {@link google.rpc.Status.code} of
|
||||
* 1, corresponding to `Code.CANCELLED`.
|
||||
*
|
||||
* @param {Object} request
|
||||
* The request object that will be sent.
|
||||
* @param {string} request.name
|
||||
* The name of the operation resource to be cancelled.
|
||||
* @param {Object=} options
|
||||
* Optional parameters. You can override the default settings for this call,
|
||||
* e.g., timeout, retries, pagination, etc. See [gax.CallOptions]{@link
|
||||
* https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the
|
||||
* details.
|
||||
* @param {function(?Error)=} callback
|
||||
* The function which will be called with the result of the API call.
|
||||
* @return {Promise} - The promise which resolves when API call finishes.
|
||||
* The promise has a method named "cancel" which cancels the ongoing API
|
||||
* call.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var client = longrunning.operationsClient();
|
||||
* var name = '';
|
||||
* client.cancelOperation({name: name}).catch(function(err) {
|
||||
* console.error(err);
|
||||
* });
|
||||
*/
|
||||
cancelOperation(request, options, callback) {
|
||||
if (options instanceof Function && callback === undefined) {
|
||||
callback = options;
|
||||
options = {};
|
||||
}
|
||||
if (options === undefined) {
|
||||
options = {};
|
||||
}
|
||||
return this['_cancelOperation'](request, options, callback);
|
||||
}
|
||||
/**
|
||||
* Deletes a long-running operation. This method indicates that the client is
|
||||
* no longer interested in the operation result. It does not cancel the
|
||||
* operation. If the server doesn't support this method, it returns
|
||||
* `google.rpc.Code.UNIMPLEMENTED`.
|
||||
*
|
||||
* @param {Object} request
|
||||
* The request object that will be sent.
|
||||
* @param {string} request.name
|
||||
* The name of the operation resource to be deleted.
|
||||
* @param {Object=} options
|
||||
* Optional parameters. You can override the default settings for this call,
|
||||
* e.g., timeout, retries, pagination, etc. See [gax.CallOptions]{@link
|
||||
* https://googleapis.github.io/gax-nodejs/global.html#CallOptions} for the
|
||||
* details.
|
||||
* @param {function(?Error)=} callback
|
||||
* The function which will be called with the result of the API call.
|
||||
* @return {Promise} - The promise which resolves when API call finishes.
|
||||
* The promise has a method named "cancel" which cancels the ongoing API
|
||||
* call.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* var client = longrunning.operationsClient();
|
||||
* var name = '';
|
||||
* client.deleteOperation({name: name}).catch(function(err) {
|
||||
* console.error(err);
|
||||
* });
|
||||
*/
|
||||
deleteOperation(request, options, callback) {
|
||||
if (options instanceof Function && callback === undefined) {
|
||||
callback = options;
|
||||
options = {};
|
||||
}
|
||||
if (options === undefined) {
|
||||
options = {};
|
||||
}
|
||||
return this['_deleteOperation'](request, options, callback);
|
||||
}
|
||||
}
|
||||
exports.OperationsClient = OperationsClient;
|
||||
class OperationsClientBuilder {
|
||||
constructor(gaxGrpc) {
|
||||
const protoFilesRoot = google_proto_files_1.getProtoPath('..');
|
||||
const operationsClient = gaxGrpc.loadProto(protoFilesRoot, 'google/longrunning/operations.proto');
|
||||
Object.assign(this, operationsClient.google.longrunning);
|
||||
/**
|
||||
* Build a new instance of {@link OperationsClient}.
|
||||
*
|
||||
* @param {Object=} opts - The optional parameters.
|
||||
* @param {String=} opts.servicePath
|
||||
* The domain name of the API remote host.
|
||||
* @param {number=} opts.port
|
||||
* The port on which to connect to the remote host.
|
||||
* @param {grpc.ClientCredentials=} opts.sslCreds
|
||||
* A ClientCredentials for use with an SSL-enabled channel.
|
||||
* @param {Object=} opts.clientConfig
|
||||
* The customized config to build the call settings. See
|
||||
* {@link gax.constructSettings} for the format.
|
||||
*/
|
||||
this['operationsClient'] = opts => {
|
||||
return new OperationsClient(gaxGrpc, operationsClient, opts);
|
||||
};
|
||||
Object.assign(this['operationsClient'], OperationsClient);
|
||||
}
|
||||
}
|
||||
exports.OperationsClientBuilder = OperationsClientBuilder;
|
||||
//# sourceMappingURL=operations_client.js.map
|
||||
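The class and builder above are normally wired up by a GAPIC-generated client, so this commit never calls them directly. Purely as an informal sketch of that wiring, something like the following should work; the GrpcClient entry point, option values, and operation name are assumptions for illustration, not anything this diff defines (the deep require path simply mirrors the file shown above).

// Sketch only: driving OperationsClientBuilder by hand (entry points assumed).
const gax = require('google-gax');                        // assumes GrpcClient is exported here
const { OperationsClientBuilder } =
    require('google-gax/build/src/operations_client');

const gaxGrpc = new gax.GrpcClient({});                   // default application credentials
const builder = new OperationsClientBuilder(gaxGrpc);
const client = builder.operationsClient({ clientConfig: {} });

// Per the JSDoc above, the promise resolves to an array whose first element
// is the google.longrunning.Operation message.
client.getOperation({ name: 'operations/example-id' })    // hypothetical operation name
  .then(([operation]) => console.log(operation.done))
  .catch(console.error);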
1
express-server/node_modules/google-gax/build/src/operations_client.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"operations_client.js","sourceRoot":"","sources":["../../src/operations_client.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;;AAEH,2DAAgD;AAEhD,8CAA8C;AAC9C,6BAA6B;AAC7B,oDAAoD;AACpD,gDAAgD;AAEhD,MAAM,UAAU,GAAG,OAAO,CAAC,4BAA4B,CAAC,CAAC;AAEzD,MAAM,CAAC,MAAM,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;AAChC,MAAM,CAAC,MAAM,CAAC,GAAG,EAAE,YAAY,CAAC,CAAC;AACjC,MAAM,CAAC,MAAM,CAAC,GAAG,EAAE,cAAc,CAAC,CAAC;AAEtB,QAAA,eAAe,GAAG,4BAA4B,CAAC;AAE5D,MAAM,oBAAoB,GAAG,GAAG,CAAC;AAEjC,MAAM,qBAAqB,GAAG,aAAa,CAAC;AAE5C,MAAM,gBAAgB,GAAG;IACvB,cAAc,EACV,IAAI,GAAG,CAAC,gBAAgB,CAAC,CAAC,WAAW,EAAE,eAAe,EAAE,YAAY,CAAC;CAC1E,CAAC;AAEF;;;GAGG;AACU,QAAA,UAAU,GAAa,EAAE,CAAC;AAEvC;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAa,gBAAgB;IAG3B,YAAY,OAAO,EAAE,WAAW,EAAE,IAAI;QACpC,IAAI,GAAG,MAAM,CAAC,MAAM,CAChB;YACE,WAAW,EAAE,uBAAe;YAC5B,IAAI,EAAE,oBAAoB;YAC1B,YAAY,EAAE,EAAE;SACjB,EACD,IAAI,CAAC,CAAC;QAEV,MAAM,eAAe,GAAG,CAAC,UAAU,GAAG,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC7D,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,UAAU,EAAE;YACnC,eAAe,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,GAAG,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC;SAC5D;QACD,eAAe,CAAC,IAAI,CAChB,qBAAqB,EAAE,MAAM,GAAG,GAAG,CAAC,SAAS,CAAC,EAC9C,OAAO,GAAG,OAAO,CAAC,WAAW,CAAC,CAAC;QAEnC,MAAM,QAAQ,GAAG,OAAO,CAAC,iBAAiB,CACtC,+BAA+B,EAAE,UAAU,EAAE,IAAI,CAAC,YAAY,EAC9D,EAAC,mBAAmB,EAAE,eAAe,CAAC,IAAI,CAAC,GAAG,CAAC,EAAC,CAAC,CAAC;QAEtD,MAAM,IAAI,GAAG,IAAI,CAAC;QAElB,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;QACzB,MAAM,cAAc,GAChB,OAAO,CAAC,UAAU,CAAC,WAAW,CAAC,MAAM,CAAC,WAAW,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;QACxE,MAAM,qBAAqB,GAAG;YAC5B,cAAc;YACd,gBAAgB;YAChB,iBAAiB;YACjB,iBAAiB;SAClB,CAAC;QACF,qBAAqB,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;YACzC,IAAI,CAAC,GAAG,GAAG,UAAU,CAAC,GAAG,GAAG,CAAC,eAAe,CAAC,CACzC,cAAc,CAAC,IAAI,CAAC,cAAc,CAAC,EAAE;gBACnC,OAAO,CAAC,GAAG,IAAe,EAAE,EAAE;oBAC5B,OAAO,cAAc,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,cAAc,EAAE,IAAI,CAAC,CAAC;gBAChE,CAAC,CAAC;YACJ,CAAC,CAAC,EACF,QAAQ,CAAC,UAAU,CAAC,EAAE,gBAAgB,CAAC,UAAU,CAAC,CAAC,CAAC;QAC1D,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;;OAIG;IACH,YAAY,CAAC,QAAuD;QAClE,OAAO,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IAC1C,CAAC;IAED,gBAAgB;IAEhB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAoCG;IACH,YAAY,CAAC,OAAW,EAAE,OAAW,EAAE,QAAS;QAC9C,IAAI,OAAO,YAAY,QAAQ,IAAI,QAAQ,KAAK,SAAS,EAAE;YACzD,QAAQ,GAAG,OAAO,CAAC;YACnB,OAAO,GAAG,EAAE,CAAC;SACd;QACD,IAAI,OAAO,KAAK,SAAS,EAAE;YACzB,OAAO,GAAG,EAAE,CAAC;SACd;QAED,OAAO,IAAI,CAAC,eAAe,CAAC,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC;IAC3D,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+FG;IACH,cAAc,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ;QACvC,IAAI,OAAO,YAAY,QAAQ,IAAI,QAAQ,KAAK,SAAS,EAAE;YACzD,QAAQ,GAAG,OAAO,CAAC;YACnB,OAAO,GAAG,EAAE,CAAC;SACd;QACD,IAAI,OAAO,KAAK,SAAS,EAAE;YACzB,OAAO,GAAG,EAAE,CAAC;SACd;QAED,OAAO,IAAI,CAAC,iBAAiB,CAAC,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC;IAC7D,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAiDG;IACH,oBAAoB,CAAC,OAAO,EAAE,OAAO;QACnC,IAAI,OAAO,KAAK,SAAS,EAAE;YACzB,OAAO,GAAG,EAAE,CAAC;SACd;QAED,OAAO,gBAAgB,CAAC,cAAc,CAAC,YAAY,CAC/C,IAAI,CAAC,iBAAiB,CAAC,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IACjD,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAkCG;IACH,eAAe,CAAC,OAAO,EAAE,OAAQ,EAAE,QAAS;QAC1C,IAAI,OAAO,YAAY,QAAQ,IAAI,QAAQ,KAAK,SAAS,EAAE;YACzD,QAAQ,GAAG,OAAO,CAAC;YACnB,OAAO,GAAG,EAAE,CAAC;SACd;QACD,IAAI,OAAO,KAAK,SAAS,EAAE;YACzB,OAAO,GAAG,EAAE,CAAC;SACd;QAED,OAAO,IAAI,CAAC,kBAAkB,CAAC,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC;IAC9D,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA4BG;IACH,eAAe,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ;QACxC,IAAI,OAAO,YAAY,QAAQ,IAAI,QAAQ,KAAK,SAA
S,EAAE;YACzD,QAAQ,GAAG,OAAO,CAAC;YACnB,OAAO,GAAG,EAAE,CAAC;SACd;QACD,IAAI,OAAO,KAAK,SAAS,EAAE;YACzB,OAAO,GAAG,EAAE,CAAC;SACd;QAED,OAAO,IAAI,CAAC,kBAAkB,CAAC,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC;IAC9D,CAAC;CACF;AAxWD,4CAwWC;AAED,MAAa,uBAAuB;IAClC,YAAY,OAAO;QACjB,MAAM,cAAc,GAAG,iCAAY,CAAC,IAAI,CAAC,CAAC;QAC1C,MAAM,gBAAgB,GAAG,OAAO,CAAC,SAAS,CACtC,cAAc,EAAE,qCAAqC,CAAC,CAAC;QAC3D,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,gBAAgB,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;QAEzD;;;;;;;;;;;;;WAaG;QACH,IAAI,CAAC,kBAAkB,CAAC,GAAG,IAAI,CAAC,EAAE;YAChC,OAAO,IAAI,gBAAgB,CAAC,OAAO,EAAE,gBAAgB,EAAE,IAAI,CAAC,CAAC;QAC/D,CAAC,CAAC;QACF,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,kBAAkB,CAAC,EAAE,gBAAgB,CAAC,CAAC;IAC5D,CAAC;CACF;AA1BD,0DA0BC"}
|
||||
46
express-server/node_modules/google-gax/build/src/operations_client_config.json
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
{
|
||||
"interfaces": {
|
||||
"google.longrunning.Operations": {
|
||||
"retry_codes": {
|
||||
"idempotent": [
|
||||
"DEADLINE_EXCEEDED",
|
||||
"UNAVAILABLE"
|
||||
],
|
||||
"non_idempotent": []
|
||||
},
|
||||
"retry_params": {
|
||||
"default": {
|
||||
"initial_retry_delay_millis": 100,
|
||||
"retry_delay_multiplier": 1.3,
|
||||
"max_retry_delay_millis": 60000,
|
||||
"initial_rpc_timeout_millis": 90000,
|
||||
"rpc_timeout_multiplier": 1.0,
|
||||
"max_rpc_timeout_millis": 90000,
|
||||
"total_timeout_millis": 600000
|
||||
}
|
||||
},
|
||||
"methods": {
|
||||
"GetOperation": {
|
||||
"timeout_millis": 60000,
|
||||
"retry_codes_name": "idempotent",
|
||||
"retry_params_name": "default"
|
||||
},
|
||||
"ListOperations": {
|
||||
"timeout_millis": 60000,
|
||||
"retry_codes_name": "idempotent",
|
||||
"retry_params_name": "default"
|
||||
},
|
||||
"CancelOperation": {
|
||||
"timeout_millis": 60000,
|
||||
"retry_codes_name": "idempotent",
|
||||
"retry_params_name": "default"
|
||||
},
|
||||
"DeleteOperation": {
|
||||
"timeout_millis": 60000,
|
||||
"retry_codes_name": "idempotent",
|
||||
"retry_params_name": "default"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
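This JSON is the standard gax client-config shape: each RPC is mapped to a named retry-code set and a named set of backoff parameters. As a rough, non-authoritative illustration of what the "default" parameters imply, retry delays grow geometrically from 100 ms by a factor of 1.3 up to a 60 s cap, and retrying stops once the 600 s total timeout would be exceeded (the real library also applies jitter and per-RPC timeouts, which this sketch ignores).

// Sketch: approximate delay schedule implied by the "default" retry_params above.
const params = {
  initialDelayMs: 100,
  delayMultiplier: 1.3,
  maxDelayMs: 60000,
  totalTimeoutMs: 600000,
};

const schedule = [];
let delay = params.initialDelayMs;
let elapsed = 0;
while (elapsed + delay <= params.totalTimeoutMs) {
  schedule.push(Math.round(delay));
  elapsed += delay;
  delay = Math.min(delay * params.delayMultiplier, params.maxDelayMs);
}
console.log(schedule.slice(0, 5)); // [ 100, 130, 169, 220, 286 ]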
92
express-server/node_modules/google-gax/build/src/paged_iteration.d.ts
generated
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
/**
|
||||
* Copyright 2016, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
/// <reference types="node" />
|
||||
import { Transform } from 'stream';
|
||||
import { APICall, APICallback, NormalApiCaller } from './api_callable';
|
||||
export declare class PagedIteration extends NormalApiCaller {
|
||||
pageDescriptor: PageDescriptor;
|
||||
/**
|
||||
* Creates an API caller that returns a stream to perform page-streaming.
|
||||
*
|
||||
* @private
|
||||
* @constructor
|
||||
* @param {PageDescriptor} pageDescriptor - indicates the structure
|
||||
* of page streaming to be performed.
|
||||
*/
|
||||
constructor(pageDescriptor: PageDescriptor);
|
||||
createActualCallback(request: {
|
||||
[index: string]: {};
|
||||
}, callback: APICallback): (err: Error | null, response: {
|
||||
[index: string]: {};
|
||||
}) => void;
|
||||
wrap(func: Function): (argument: any, metadata: any, options: any, callback: any) => any;
|
||||
init(settings: {}, callback: APICallback): any;
|
||||
call(apiCall: APICall, argument: {
|
||||
[index: string]: {};
|
||||
}, settings: any, canceller: any): void;
|
||||
}
|
||||
export declare class PageDescriptor {
|
||||
requestPageTokenField: string;
|
||||
responsePageTokenField: string;
|
||||
requestPageSizeField?: string;
|
||||
resourceField: string;
|
||||
/**
|
||||
* Describes the structure of a page-streaming call.
|
||||
*
|
||||
* @property {String} requestPageTokenField
|
||||
* @property {String} responsePageTokenField
|
||||
* @property {String} resourceField
|
||||
*
|
||||
* @param {String} requestPageTokenField - The field name of the page token in
|
||||
* the request.
|
||||
* @param {String} responsePageTokenField - The field name of the page token in
|
||||
* the response.
|
||||
* @param {String} resourceField - The resource field name.
|
||||
*
|
||||
* @constructor
|
||||
*/
|
||||
constructor(requestPageTokenField: string, responsePageTokenField: string, resourceField: string);
|
||||
/**
|
||||
* Creates a new object Stream which emits the resource on 'data' event.
|
||||
* @private
|
||||
* @param {ApiCall} apiCall - the callable object.
|
||||
* @param {Object} request - the request object.
|
||||
* @param {CallOptions=} options - the call options to customize the api call.
|
||||
* @return {Stream} - a new object Stream.
|
||||
*/
|
||||
createStream(apiCall: any, request: any, options: any): Transform;
|
||||
/**
|
||||
* Returns a new API caller.
|
||||
* @private
|
||||
* @return {PageStreamable} - the page streaming caller.
|
||||
*/
|
||||
apiCaller(): PagedIteration;
|
||||
}
|
||||
203
express-server/node_modules/google-gax/build/src/paged_iteration.js
generated
vendored
Normal file
@@ -0,0 +1,203 @@
|
||||
"use strict";
|
||||
/**
|
||||
* Copyright 2016, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ended = require("is-stream-ended");
|
||||
const stream_1 = require("stream");
|
||||
const api_callable_1 = require("./api_callable");
|
||||
class PagedIteration extends api_callable_1.NormalApiCaller {
|
||||
/**
|
||||
* Creates an API caller that returns a stream to perform page-streaming.
|
||||
*
|
||||
* @private
|
||||
* @constructor
|
||||
* @param {PageDescriptor} pageDescriptor - indicates the structure
|
||||
* of page streaming to be performed.
|
||||
*/
|
||||
constructor(pageDescriptor) {
|
||||
super();
|
||||
this.pageDescriptor = pageDescriptor;
|
||||
}
|
||||
createActualCallback(request, callback) {
|
||||
const self = this;
|
||||
return function fetchNextPageToken(err, response) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
return;
|
||||
}
|
||||
const resources = response[self.pageDescriptor.resourceField];
|
||||
const pageToken = response[self.pageDescriptor.responsePageTokenField];
|
||||
if (pageToken) {
|
||||
request[self.pageDescriptor.requestPageTokenField] = pageToken;
|
||||
callback(err, resources, request, response);
|
||||
}
|
||||
else {
|
||||
callback(err, resources, null, response);
|
||||
}
|
||||
};
|
||||
}
|
||||
wrap(func) {
|
||||
const self = this;
|
||||
return function wrappedCall(argument, metadata, options, callback) {
|
||||
return func(argument, metadata, options, self.createActualCallback(argument, callback));
|
||||
};
|
||||
}
|
||||
init(settings, callback) {
|
||||
return api_callable_1.NormalApiCaller.prototype.init.call(this, settings, callback);
|
||||
}
|
||||
call(apiCall, argument, settings, canceller) {
|
||||
argument = Object.assign({}, argument);
|
||||
if (settings.pageToken) {
|
||||
argument[this.pageDescriptor.requestPageTokenField] = settings.pageToken;
|
||||
}
|
||||
if (settings.pageSize) {
|
||||
argument[this.pageDescriptor.requestPageSizeField] = settings.pageSize;
|
||||
}
|
||||
if (!settings.autoPaginate) {
|
||||
api_callable_1.NormalApiCaller.prototype.call.call(this, apiCall, argument, settings, canceller);
|
||||
return;
|
||||
}
|
||||
const maxResults = settings.maxResults || -1;
|
||||
const allResources = [];
|
||||
function pushResources(err, resources, next) {
|
||||
if (err) {
|
||||
canceller.callback(err);
|
||||
return;
|
||||
}
|
||||
for (let i = 0; i < resources.length; ++i) {
|
||||
allResources.push(resources[i]);
|
||||
if (allResources.length === maxResults) {
|
||||
next = null;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!next) {
|
||||
canceller.callback(null, allResources);
|
||||
return;
|
||||
}
|
||||
setImmediate(apiCall, next, pushResources);
|
||||
}
|
||||
setImmediate(apiCall, argument, pushResources);
|
||||
}
|
||||
}
|
||||
exports.PagedIteration = PagedIteration;
|
||||
class PageDescriptor {
|
||||
/**
|
||||
* Describes the structure of a page-streaming call.
|
||||
*
|
||||
* @property {String} requestPageTokenField
|
||||
* @property {String} responsePageTokenField
|
||||
* @property {String} resourceField
|
||||
*
|
||||
* @param {String} requestPageTokenField - The field name of the page token in
|
||||
* the request.
|
||||
* @param {String} responsePageTokenField - The field name of the page token in
|
||||
* the response.
|
||||
* @param {String} resourceField - The resource field name.
|
||||
*
|
||||
* @constructor
|
||||
*/
|
||||
constructor(requestPageTokenField, responsePageTokenField, resourceField) {
|
||||
this.requestPageTokenField = requestPageTokenField;
|
||||
this.responsePageTokenField = responsePageTokenField;
|
||||
this.resourceField = resourceField;
|
||||
}
|
||||
/**
|
||||
* Creates a new object Stream which emits the resource on 'data' event.
|
||||
* @private
|
||||
* @param {ApiCall} apiCall - the callable object.
|
||||
* @param {Object} request - the request object.
|
||||
* @param {CallOptions=} options - the call options to customize the api call.
|
||||
* @return {Stream} - a new object Stream.
|
||||
*/
|
||||
createStream(apiCall, request, options) {
|
||||
const stream = new stream_1.PassThrough({ objectMode: true });
|
||||
options = Object.assign({}, options, { autoPaginate: false });
|
||||
const maxResults = 'maxResults' in options ? options.maxResults : -1;
|
||||
let pushCount = 0;
|
||||
let started = false;
|
||||
function callback(err, resources, next) {
|
||||
if (err) {
|
||||
stream.emit('error', err);
|
||||
return;
|
||||
}
|
||||
for (let i = 0; i < resources.length; ++i) {
|
||||
if (ended(stream)) {
|
||||
return;
|
||||
}
|
||||
if (resources[i] === null) {
|
||||
continue;
|
||||
}
|
||||
stream.push(resources[i]);
|
||||
pushCount++;
|
||||
if (pushCount === maxResults) {
|
||||
stream.end();
|
||||
}
|
||||
}
|
||||
if (ended(stream)) {
|
||||
return;
|
||||
}
|
||||
if (!next) {
|
||||
stream.end();
|
||||
return;
|
||||
}
|
||||
// When pageToken is specified in the original options, it will overwrite
|
||||
// the page token field in the next request. Therefore it must be cleared.
|
||||
if ('pageToken' in options) {
|
||||
delete options.pageToken;
|
||||
}
|
||||
if (stream.isPaused()) {
|
||||
request = next;
|
||||
started = false;
|
||||
}
|
||||
else {
|
||||
setImmediate(apiCall, next, options, callback);
|
||||
}
|
||||
}
|
||||
stream.on('resume', () => {
|
||||
if (!started) {
|
||||
started = true;
|
||||
apiCall(request, options, callback);
|
||||
}
|
||||
});
|
||||
return stream;
|
||||
}
|
||||
/**
|
||||
* Returns a new API caller.
|
||||
* @private
|
||||
* @return {PageStreamable} - the page streaming caller.
|
||||
*/
|
||||
apiCaller() {
|
||||
return new PagedIteration(this);
|
||||
}
|
||||
}
|
||||
exports.PageDescriptor = PageDescriptor;
|
||||
//# sourceMappingURL=paged_iteration.js.map
|
||||
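To tie the descriptor and the stream together: createStream repeatedly invokes an apiCall whose callback receives (err, resources, nextRequest, rawResponse) and flattens the resources onto an object stream, which is exactly what OperationsClient does for listOperations. Below is a self-contained toy run using a fake transport; the page contents and field names are invented for illustration only.

// Sketch: PageDescriptor turning a paged call into a flat object stream.
const { PageDescriptor } = require('google-gax/build/src/paged_iteration');

const pages = {
  '':   { items: [1, 2], nextPageToken: 'p2' },
  'p2': { items: [3],    nextPageToken: '' },
};
const descriptor = new PageDescriptor('pageToken', 'nextPageToken', 'items');

// Minimal apiCall(request, options, callback) shape expected by createStream.
function fakeList(request, options, callback) {
  const page = pages[request.pageToken || ''];
  const nextRequest = page.nextPageToken
      ? Object.assign({}, request, { pageToken: page.nextPageToken })
      : null;
  setImmediate(callback, null, page.items, nextRequest, page);
}

descriptor.createStream(fakeList, {}, {})
    .on('data', item => console.log(item))   // 1, 2, 3
    .on('error', console.error);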
1
express-server/node_modules/google-gax/build/src/paged_iteration.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"paged_iteration.js","sourceRoot":"","sources":["../../src/paged_iteration.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;;AAEH,yCAAyC;AACzC,mCAA8C;AAE9C,iDAAqE;AAErE,MAAa,cAAe,SAAQ,8BAAe;IAEjD;;;;;;;OAOG;IACH,YAAY,cAA8B;QACxC,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,cAAc,GAAG,cAAc,CAAC;IACvC,CAAC;IAED,oBAAoB,CAAC,OAA8B,EAAE,QAAqB;QACxE,MAAM,IAAI,GAAG,IAAI,CAAC;QAClB,OAAO,SAAS,kBAAkB,CAC9B,GAAe,EAAE,QAA+B;YAClD,IAAI,GAAG,EAAE;gBACP,QAAQ,CAAC,GAAG,CAAC,CAAC;gBACd,OAAO;aACR;YACD,MAAM,SAAS,GAAG,QAAQ,CAAC,IAAI,CAAC,cAAc,CAAC,aAAa,CAAC,CAAC;YAC9D,MAAM,SAAS,GAAG,QAAQ,CAAC,IAAI,CAAC,cAAc,CAAC,sBAAsB,CAAC,CAAC;YACvE,IAAI,SAAS,EAAE;gBACb,OAAO,CAAC,IAAI,CAAC,cAAc,CAAC,qBAAqB,CAAC,GAAG,SAAS,CAAC;gBAC/D,QAAQ,CAAC,GAAG,EAAE,SAAS,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC;aAC7C;iBAAM;gBACL,QAAQ,CAAC,GAAG,EAAE,SAAS,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC;aAC1C;QACH,CAAC,CAAC;IACJ,CAAC;IAED,IAAI,CAAC,IAAc;QACjB,MAAM,IAAI,GAAG,IAAI,CAAC;QAClB,OAAO,SAAS,WAAW,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,EAAE,QAAQ;YAC/D,OAAO,IAAI,CACP,QAAQ,EAAE,QAAQ,EAAE,OAAO,EAC3B,IAAI,CAAC,oBAAoB,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC,CAAC;QACrD,CAAC,CAAC;IACJ,CAAC;IAED,IAAI,CAAC,QAAY,EAAE,QAAqB;QACtC,OAAO,8BAAe,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,EAAE,QAAQ,CAAC,CAAC;IACvE,CAAC;IAED,IAAI,CAAC,OAAgB,EAAE,QAA+B,EAAE,QAAQ,EAAE,SAAS;QACzE,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,QAAQ,CAAC,CAAC;QACvC,IAAI,QAAQ,CAAC,SAAS,EAAE;YACtB,QAAQ,CAAC,IAAI,CAAC,cAAc,CAAC,qBAAqB,CAAC,GAAG,QAAQ,CAAC,SAAS,CAAC;SAC1E;QACD,IAAI,QAAQ,CAAC,QAAQ,EAAE;YACrB,QAAQ,CAAC,IAAI,CAAC,cAAc,CAAC,oBAAqB,CAAC,GAAG,QAAQ,CAAC,QAAQ,CAAC;SACzE;QACD,IAAI,CAAC,QAAQ,CAAC,YAAY,EAAE;YAC1B,8BAAe,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAC/B,IAAI,EAAE,OAAO,EAAE,QAAQ,EAAE,QAAQ,EAAE,SAAS,CAAC,CAAC;YAClD,OAAO;SACR;QAED,MAAM,UAAU,GAAG,QAAQ,CAAC,UAAU,IAAI,CAAC,CAAC,CAAC;QAC7C,MAAM,YAAY,GAAc,EAAE,CAAC;QACnC,SAAS,aAAa,CAAC,GAAG,EAAE,SAAS,EAAE,IAAI;YACzC,IAAI,GAAG,EAAE;gBACP,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;gBACxB,OAAO;aACR;YAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACzC,YAAY,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;gBAChC,IAAI,YAAY,CAAC,MAAM,KAAK,UAAU,EAAE;oBACtC,IAAI,GAAG,IAAI,CAAC;oBACZ,MAAM;iBACP;aACF;YACD,IAAI,CAAC,IAAI,EAAE;gBACT,SAAS,CAAC,QAAQ,CAAC,IAAI,EAAE,YAAY,CAAC,CAAC;gBACvC,OAAO;aACR;YACD,YAAY,CAAC,OAAO,EAAE,IAAI,EAAE,aAAa,CAAC,CAAC;QAC7C,CAAC;QAED,YAAY,CAAC,OAAO,EAAE,QAAQ,EAAE,aAAa,CAAC,CAAC;IACjD,CAAC;CACF;AArFD,wCAqFC;AAED,MAAa,cAAc;IAKzB;;;;;;;;;;;;;;OAcG;IACH,YACI,qBAA6B,EAAE,sBAA8B,EAC7D,aAAqB;QACvB,IAAI,CAAC,qBAAqB,GAAG,qBAAqB,CAAC;QACnD,IAAI,CAAC,sBAAsB,GAAG,sBAAsB,CAAC;QACrD,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;IACrC,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO;QACpC,MAAM,MAAM,GAAG,IAAI,oBAAW,CAAC,EAAC,UAAU,EAAE,IAAI,EAAC,CAAC,CAAC;QACnD,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,EAAE,EAAC,YAAY,EAAE,KAAK,EAAC,CAAC,CAAC;QAC5D,MAAM,UAAU,GAAG,YAAY,IAAI,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QACrE,IAAI,SAAS,GAAG,CAAC,CAAC;QAClB,IAAI,OAAO,GAAG,KAAK,CAAC;QACpB,SAAS,QAAQ,CAAC,GAAG,EAAE,SAAS,EAAE,IAAI;YACpC,IAAI,GAAG,EAAE;gBACP,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;gBAC1B,OAAO;aACR;YACD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACzC,IAAI,KAAK,CAAC,MAAM,CAAC,EAAE;oBACjB,OAAO;iBACR;gBACD,IAAI,SAAS,CAAC,CAAC,CAAC,KAAK,IAAI,EAAE;oBACzB,SAAS;iBACV;gBACD,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;gBAC1B,SAAS,EAAE,CAAC;gBACZ,IAAI,SAAS,KAAK,UAAU,EAAE;oBAC5B,MAAM,CAAC,GAAG,EAAE,CAAC;iBACd;aACF;YACD,IAAI,KAAK,CAAC,MAAM,CAAC,EAAE;gBACjB,OAAO;aACR;YACD,IAAI,CAAC
,IAAI,EAAE;gBACT,MAAM,CAAC,GAAG,EAAE,CAAC;gBACb,OAAO;aACR;YACD,yEAAyE;YACzE,0EAA0E;YAC1E,IAAI,WAAW,IAAI,OAAO,EAAE;gBAC1B,OAAO,OAAO,CAAC,SAAS,CAAC;aAC1B;YACD,IAAI,MAAM,CAAC,QAAQ,EAAE,EAAE;gBACrB,OAAO,GAAG,IAAI,CAAC;gBACf,OAAO,GAAG,KAAK,CAAC;aACjB;iBAAM;gBACL,YAAY,CAAC,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC;aAChD;QACH,CAAC;QACD,MAAM,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,EAAE;YACvB,IAAI,CAAC,OAAO,EAAE;gBACZ,OAAO,GAAG,IAAI,CAAC;gBACf,OAAO,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC;aACrC;QACH,CAAC,CAAC,CAAC;QACH,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;OAIG;IACH,SAAS;QACP,OAAO,IAAI,cAAc,CAAC,IAAI,CAAC,CAAC;IAClC,CAAC;CACF;AAhGD,wCAgGC"}
|
||||
21
express-server/node_modules/google-gax/build/src/parser_extras.d.ts
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
import { Segment } from './path_template';
|
||||
export declare const BINDING = 1;
|
||||
export declare const END_BINDING = 2;
|
||||
export declare const TERMINAL = 3;
|
||||
/**
|
||||
* Completes the parsing of the segments
|
||||
*
|
||||
* Validates them, and transforms them into the object used by the
|
||||
* PathTemplate class.
|
||||
*
|
||||
* @private
|
||||
*
|
||||
* @param {Segments[]} segments the parsed segments
|
||||
* @param {Object} initializes the attributes of a PathTemplate
|
||||
* @return {Object} Returns segments and size
|
||||
* @throws {TypeError} if multiple path wildcards exist
|
||||
*/
|
||||
export declare function finishParse(segments: Segment[]): {
|
||||
segments: Segment[];
|
||||
size: number;
|
||||
};
|
||||
111
express-server/node_modules/google-gax/build/src/parser_extras.js
generated
vendored
Normal file
@@ -0,0 +1,111 @@
|
||||
"use strict";
|
||||
/*
|
||||
*
|
||||
* Copyright 2016, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const util = require("util");
|
||||
/* constants used in the pegjs parser */
|
||||
exports.BINDING = 1;
|
||||
exports.END_BINDING = 2;
|
||||
exports.TERMINAL = 3;
|
||||
/**
|
||||
* Checks that the segments contain at most one terminal segment that is a path wildcard.
|
||||
*
|
||||
* @private
|
||||
*
|
||||
* @param {Segments[]} segments the parsed segments
|
||||
* @throws {TypeError} if there are too many
|
||||
*/
|
||||
function allowOnePathWildcard(segments) {
|
||||
let hasPathWildcard = false;
|
||||
for (let i = 0; i < segments.length; i++) {
|
||||
const s = segments[i];
|
||||
if (s.kind !== exports.TERMINAL || s.literal !== '**') {
|
||||
continue;
|
||||
}
|
||||
if (hasPathWildcard) {
|
||||
const tooManyWildcards = 'cannot contain more than one path wildcard';
|
||||
throw new TypeError(tooManyWildcards);
|
||||
}
|
||||
hasPathWildcard = true;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Counts the number of terminal segments.
|
||||
*
|
||||
* @private
|
||||
*
|
||||
* @param {Segments[]} segments the parsed segments
|
||||
* @return {number} the number of terminal segments in the template
|
||||
*/
|
||||
function countTerminals(segments) {
|
||||
return segments.filter(x => x.kind === exports.TERMINAL).length;
|
||||
}
|
||||
/**
|
||||
* Updates missing literals of each of the binding segments.
|
||||
*
|
||||
* @private
|
||||
*
|
||||
* @param {Segments[]} segments the parsed segments
|
||||
*/
|
||||
function updateBindingLiterals(segments) {
|
||||
let bindingIndex = 0;
|
||||
segments.forEach(s => {
|
||||
if (s.kind === exports.BINDING && !s.literal) {
|
||||
s.literal = util.format('$%d', bindingIndex);
|
||||
bindingIndex += 1;
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Completes the parsing of the segments
|
||||
*
|
||||
* Validates them, and transforms them into the object used by the
|
||||
* PathTemplate class.
|
||||
*
|
||||
* @private
|
||||
*
|
||||
* @param {Segments[]} segments the parsed segments
|
||||
* @param {Object} initializes the attributes of a PathTemplate
|
||||
* @return {Object} Returns segments and size
|
||||
* @throws {TypeError} if multiple path wildcards exist
|
||||
*/
|
||||
function finishParse(segments) {
|
||||
allowOnePathWildcard(segments);
|
||||
updateBindingLiterals(segments);
|
||||
return {
|
||||
segments,
|
||||
size: countTerminals(segments),
|
||||
};
|
||||
}
|
||||
exports.finishParse = finishParse;
|
||||
//# sourceMappingURL=parser_extras.js.map
|
||||
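In short, finishParse validates that at most one '**' wildcard is present, gives anonymous bindings positional names ($0, $1, ...), and records the terminal count as the template size. A toy run with hand-written segments follows; the real segment arrays come from the pegjs parser in the next file, so the shape below is only an approximation.

// Sketch: what finishParse does to a hand-built parse of 'shelves/*'.
const extras = require('google-gax/build/src/parser_extras');

const segments = [
  { kind: extras.TERMINAL,    literal: 'shelves' },
  { kind: extras.BINDING,     literal: '' },       // anonymous binding
  { kind: extras.TERMINAL,    literal: '*' },
  { kind: extras.END_BINDING, literal: '' },
];

const result = extras.finishParse(segments);
console.log(result.size);                // 2 terminal segments
console.log(segments[1].literal);        // '$0' (filled in by finishParse)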
1
express-server/node_modules/google-gax/build/src/parser_extras.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"parser_extras.js","sourceRoot":"","sources":["../../src/parser_extras.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA+BG;;AAEH,6BAA6B;AAG7B,wCAAwC;AAC3B,QAAA,OAAO,GAAG,CAAC,CAAC;AACZ,QAAA,WAAW,GAAG,CAAC,CAAC;AAChB,QAAA,QAAQ,GAAG,CAAC,CAAC;AAE1B;;;;;;;GAOG;AACH,SAAS,oBAAoB,CAAC,QAAmB;IAC/C,IAAI,eAAe,GAAG,KAAK,CAAC;IAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACxC,MAAM,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;QACtB,IAAI,CAAC,CAAC,IAAI,KAAK,gBAAQ,IAAI,CAAC,CAAC,OAAO,KAAK,IAAI,EAAE;YAC7C,SAAS;SACV;QACD,IAAI,eAAe,EAAE;YACnB,MAAM,gBAAgB,GAAG,4CAA4C,CAAC;YACtE,MAAM,IAAI,SAAS,CAAC,gBAAgB,CAAC,CAAC;SACvC;QACD,eAAe,GAAG,IAAI,CAAC;KACxB;AACH,CAAC;AAED;;;;;;;GAOG;AACH,SAAS,cAAc,CAAC,QAAmB;IACzC,OAAO,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,gBAAQ,CAAC,CAAC,MAAM,CAAC;AAC1D,CAAC;AAED;;;;;;GAMG;AACH,SAAS,qBAAqB,CAAC,QAAmB;IAChD,IAAI,YAAY,GAAG,CAAC,CAAC;IACrB,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE;QACnB,IAAI,CAAC,CAAC,IAAI,KAAK,eAAO,IAAI,CAAC,CAAC,CAAC,OAAO,EAAE;YACpC,CAAC,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,YAAY,CAAC,CAAC;YAC7C,YAAY,IAAI,CAAC,CAAC;SACnB;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;;;;;GAYG;AACH,SAAgB,WAAW,CAAC,QAAmB;IAC7C,oBAAoB,CAAC,QAAQ,CAAC,CAAC;IAC/B,qBAAqB,CAAC,QAAQ,CAAC,CAAC;IAChC,OAAO;QACL,QAAQ;QACR,IAAI,EAAE,cAAc,CAAC,QAAQ,CAAC;KAC/B,CAAC;AACJ,CAAC;AAPD,kCAOC"}
|
||||
45
express-server/node_modules/google-gax/build/src/path_template.d.ts
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
export interface ParseResult {
|
||||
size: number;
|
||||
segments: Segment[];
|
||||
}
|
||||
export interface Segment {
|
||||
kind: number;
|
||||
literal: string;
|
||||
}
|
||||
export declare type Bindings = {
|
||||
[index: string]: string;
|
||||
};
|
||||
export declare class PathTemplate {
|
||||
private readonly parseResult;
|
||||
readonly size: number;
|
||||
readonly segments: Segment[];
|
||||
/**
|
||||
* @param {String} data the path template string
|
||||
*
|
||||
* @constructor
|
||||
*/
|
||||
constructor(data: string);
|
||||
/**
|
||||
* Matches a fully-qualified path template string.
|
||||
*
|
||||
* @param {String} path a fully-qualified path template string
|
||||
* @return {Object} contains const names matched to binding values
|
||||
* @throws {TypeError} if path can't be matched to this template
|
||||
*/
|
||||
match(path: string): Bindings;
|
||||
/**
|
||||
* Renders a path template using the provided bindings.
|
||||
*
|
||||
* @param {Object} bindings a mapping of const names to binding strings
|
||||
* @return {String} a rendered representation of the path template
|
||||
* @throws {TypeError} if a key is missing, or if a sub-template cannot be
|
||||
* parsed
|
||||
*/
|
||||
render(bindings: Bindings): string;
|
||||
/**
|
||||
* Renders the path template.
|
||||
*
|
||||
* @return {string} contains const names matched to binding values
|
||||
*/
|
||||
inspect(): string;
|
||||
}
|
||||
176
express-server/node_modules/google-gax/build/src/path_template.js
generated
vendored
Normal file
@@ -0,0 +1,176 @@
|
||||
"use strict";
|
||||
/*
|
||||
*
|
||||
* Copyright 2016, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
/*
|
||||
* Path template utility.
|
||||
*/
|
||||
const has = require("lodash.has");
|
||||
const util = require("util");
|
||||
const extras = require("./parser_extras");
|
||||
const parser = require('./path_template_parser');
|
||||
class PathTemplate {
|
||||
get size() {
|
||||
return this.parseResult.size;
|
||||
}
|
||||
get segments() {
|
||||
return this.parseResult.segments;
|
||||
}
|
||||
/**
|
||||
* @param {String} data the path template string
|
||||
*
|
||||
* @constructor
|
||||
*/
|
||||
constructor(data) {
|
||||
this.parseResult = extras.finishParse(parser.parse(data));
|
||||
}
|
||||
/**
|
||||
* Matches a fully-qualified path template string.
|
||||
*
|
||||
* @param {String} path a fully-qualified path template string
|
||||
* @return {Object} contains const names matched to binding values
|
||||
* @throws {TypeError} if path can't be matched to this template
|
||||
*/
|
||||
match(path) {
|
||||
const pathSegments = path.split('/');
|
||||
const bindings = {};
|
||||
let segmentCount = this.size;
|
||||
let current;
|
||||
let index = 0;
|
||||
this.segments.forEach(segment => {
|
||||
if (index > pathSegments.length) {
|
||||
return;
|
||||
}
|
||||
if (segment.kind === extras.BINDING) {
|
||||
current = segment.literal;
|
||||
}
|
||||
else if (segment.kind === extras.TERMINAL) {
|
||||
if (segment.literal === '*') {
|
||||
bindings[current] = pathSegments[index];
|
||||
index += 1;
|
||||
}
|
||||
else if (segment.literal === '**') {
|
||||
const size = pathSegments.length - segmentCount + 1;
|
||||
segmentCount += size - 1;
|
||||
bindings[current] = pathSegments.slice(index, index + size).join('/');
|
||||
index += size;
|
||||
}
|
||||
else if (segment.literal === pathSegments[index]) {
|
||||
index += 1;
|
||||
}
|
||||
else {
|
||||
const msg = util.format('mismatched literal (index=%d): \'%s\' != \'%s\'', index, segment.literal, pathSegments[index]);
|
||||
throw new TypeError(msg);
|
||||
}
|
||||
}
|
||||
});
|
||||
if (index !== pathSegments.length || index !== segmentCount) {
|
||||
const msg = util.format('match error: could not instantiate a path template from %s', path);
|
||||
throw new TypeError(msg);
|
||||
}
|
||||
return bindings;
|
||||
}
|
||||
/**
|
||||
* Renders a path template using the provided bindings.
|
||||
*
|
||||
* @param {Object} bindings a mapping of const names to binding strings
|
||||
* @return {String} a rendered representation of the path template
|
||||
* @throws {TypeError} if a key is missing, or if a sub-template cannot be
|
||||
* parsed
|
||||
*/
|
||||
render(bindings) {
|
||||
const out = [];
|
||||
let inABinding = false;
|
||||
this.segments.forEach(segment => {
|
||||
if (segment.kind === extras.BINDING) {
|
||||
if (!has(bindings, segment.literal)) {
|
||||
const msg = util.format('Value for key %s is not provided in %s', segment.literal, bindings);
|
||||
throw new TypeError(msg);
|
||||
}
|
||||
const tmp = new PathTemplate(bindings[segment.literal]);
|
||||
Array.prototype.push.apply(out, tmp.segments);
|
||||
inABinding = true;
|
||||
}
|
||||
else if (segment.kind === extras.END_BINDING) {
|
||||
inABinding = false;
|
||||
}
|
||||
else if (inABinding) {
|
||||
return;
|
||||
}
|
||||
else {
|
||||
out.push(segment);
|
||||
}
|
||||
});
|
||||
const result = formatSegments(out);
|
||||
this.match(result);
|
||||
return result;
|
||||
}
|
||||
/**
|
||||
* Renders the path template.
|
||||
*
|
||||
* @return {string} contains const names matched to binding values
|
||||
*/
|
||||
inspect() {
|
||||
return formatSegments(this.segments);
|
||||
}
|
||||
}
|
||||
exports.PathTemplate = PathTemplate;
|
||||
/**
|
||||
* Creates the string representation for the segments.
|
||||
* @param {Object[]} segments - The array of segments.
|
||||
* @return {string} - A string representing segments in the path template
|
||||
* format.
|
||||
*/
|
||||
function formatSegments(segments) {
|
||||
let out = '';
|
||||
let slash = true;
|
||||
segments.forEach(segment => {
|
||||
if (segment.kind === extras.TERMINAL) {
|
||||
if (slash) {
|
||||
out += '/';
|
||||
}
|
||||
out += segment.literal;
|
||||
return;
|
||||
}
|
||||
slash = true;
|
||||
if (segment.kind === extras.BINDING) {
|
||||
out += '/{' + segment.literal + '=';
|
||||
slash = false;
|
||||
}
|
||||
else {
|
||||
out += segment.literal + '}';
|
||||
}
|
||||
});
|
||||
return out.substring(1);
|
||||
}
|
||||
//# sourceMappingURL=path_template.js.map
|
||||
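For a quick sense of the match/render round trip implemented above, here is an informal sketch; the template and resource names are made up, and the deep require path simply mirrors this file.

// Sketch: PathTemplate round trip with an illustrative template.
const { PathTemplate } = require('google-gax/build/src/path_template');

const tmpl = new PathTemplate('projects/{project}/operations/{operation=**}');
console.log(tmpl.size);                 // 4 terminal segments

const bindings = tmpl.match('projects/p1/operations/ops/123');
console.log(bindings);                  // { project: 'p1', operation: 'ops/123' }

console.log(tmpl.render(bindings));     // 'projects/p1/operations/ops/123'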
1
express-server/node_modules/google-gax/build/src/path_template.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"path_template.js","sourceRoot":"","sources":["../../src/path_template.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA+BG;;AAEH;;GAEG;AAEH,kCAAmC;AACnC,6BAA6B;AAC7B,0CAA0C;AAC1C,MAAM,MAAM,GAAG,OAAO,CAAC,wBAAwB,CAAC,CAAC;AAgBjD,MAAa,YAAY;IAGvB,IAAI,IAAI;QACN,OAAO,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC;IAC/B,CAAC;IAED,IAAI,QAAQ;QACV,OAAO,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;IACnC,CAAC;IAED;;;;OAIG;IACH,YAAY,IAAY;QACtB,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;IAC5D,CAAC;IAED;;;;;;OAMG;IACH,KAAK,CAAC,IAAY;QAChB,MAAM,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QACrC,MAAM,QAAQ,GAAa,EAAE,CAAC;QAC9B,IAAI,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC;QAC7B,IAAI,OAAe,CAAC;QACpB,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;YAC9B,IAAI,KAAK,GAAG,YAAY,CAAC,MAAM,EAAE;gBAC/B,OAAO;aACR;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,CAAC,OAAO,EAAE;gBACnC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;aAC3B;iBAAM,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,CAAC,QAAQ,EAAE;gBAC3C,IAAI,OAAO,CAAC,OAAO,KAAK,GAAG,EAAE;oBAC3B,QAAQ,CAAC,OAAO,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;oBACxC,KAAK,IAAI,CAAC,CAAC;iBACZ;qBAAM,IAAI,OAAO,CAAC,OAAO,KAAK,IAAI,EAAE;oBACnC,MAAM,IAAI,GAAG,YAAY,CAAC,MAAM,GAAG,YAAY,GAAG,CAAC,CAAC;oBACpD,YAAY,IAAI,IAAI,GAAG,CAAC,CAAC;oBACzB,QAAQ,CAAC,OAAO,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,KAAK,EAAE,KAAK,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;oBACtE,KAAK,IAAI,IAAI,CAAC;iBACf;qBAAM,IAAI,OAAO,CAAC,OAAO,KAAK,YAAY,CAAC,KAAK,CAAC,EAAE;oBAClD,KAAK,IAAI,CAAC,CAAC;iBACZ;qBAAM;oBACL,MAAM,GAAG,GAAG,IAAI,CAAC,MAAM,CACnB,iDAAiD,EAAE,KAAK,EACxD,OAAO,CAAC,OAAO,EAAE,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC;oBAC1C,MAAM,IAAI,SAAS,CAAC,GAAG,CAAC,CAAC;iBAC1B;aACF;QACH,CAAC,CAAC,CAAC;QACH,IAAI,KAAK,KAAK,YAAY,CAAC,MAAM,IAAI,KAAK,KAAK,YAAY,EAAE;YAC3D,MAAM,GAAG,GAAG,IAAI,CAAC,MAAM,CACnB,4DAA4D,EAAE,IAAI,CAAC,CAAC;YACxE,MAAM,IAAI,SAAS,CAAC,GAAG,CAAC,CAAC;SAC1B;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED;;;;;;;OAOG;IACH,MAAM,CAAC,QAAkB;QACvB,MAAM,GAAG,GAAc,EAAE,CAAC;QAC1B,IAAI,UAAU,GAAG,KAAK,CAAC;QACvB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;YAC9B,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,CAAC,OAAO,EAAE;gBACnC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,OAAO,CAAC,EAAE;oBACnC,MAAM,GAAG,GAAG,IAAI,CAAC,MAAM,CACnB,wCAAwC,EAAE,OAAO,CAAC,OAAO,EACzD,QAAQ,CAAC,CAAC;oBACd,MAAM,IAAI,SAAS,CAAC,GAAG,CAAC,CAAC;iBAC1B;gBACD,MAAM,GAAG,GAAG,IAAI,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;gBACxD,KAAK,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,EAAE,GAAG,CAAC,QAAQ,CAAC,CAAC;gBAC9C,UAAU,GAAG,IAAI,CAAC;aACnB;iBAAM,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,CAAC,WAAW,EAAE;gBAC9C,UAAU,GAAG,KAAK,CAAC;aACpB;iBAAM,IAAI,UAAU,EAAE;gBACrB,OAAO;aACR;iBAAM;gBACL,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;aACnB;QACH,CAAC,CAAC,CAAC;QAEH,MAAM,MAAM,GAAG,cAAc,CAAC,GAAG,CAAC,CAAC;QACnC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;QACnB,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;OAIG;IACH,OAAO;QACL,OAAO,cAAc,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;IACvC,CAAC;CACF;AA9GD,oCA8GC;AAED;;;;;GAKG;AACH,SAAS,cAAc,CAAC,QAAmB;IACzC,IAAI,GAAG,GAAG,EAAE,CAAC;IACb,IAAI,KAAK,GAAG,IAAI,CAAC;IACjB,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QACzB,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,CAAC,QAAQ,EAAE;YACpC,IAAI,KAAK,EAAE;gBACT,GAAG,IAAI,GAAG,CAAC;aACZ;YACD,GAAG,IAAI,OAAO,CAAC,OAAO,CAAC;YACvB,OAAO;SACR;QACD,KAAK,GAAG,IAAI,CAAC;QACb,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,CAAC,OAAO,EAAE;YACnC,GAAG,IAAI,IAAI,GAAG,OAAO,CAAC,OAAO,GAAG,GAAG,CAAC;YACpC,KAAK,GAAG,KAAK,CAAC;SACf;aAAM;YACL,GAAG,IAAI,OAAO,CAAC,OAAO,GAAG,GAAG,CAAC;SAC9B;IACH,CAAC,CAAC,CAAC;IACH,OAAO,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;AAC1B,CAAC"}
645
express-server/node_modules/google-gax/build/src/path_template_parser.js
generated
vendored
Normal file
@@ -0,0 +1,645 @@
|
||||
module.exports = (() => {
|
||||
/*
|
||||
* Generated by PEG.js 0.9.0.
|
||||
*
|
||||
* http://pegjs.org/
|
||||
*/
|
||||
|
||||
function peg$subclass(child, parent) {
|
||||
function ctor() {
|
||||
this.constructor = child;
|
||||
}
|
||||
ctor.prototype = parent.prototype;
|
||||
child.prototype = new ctor();
|
||||
}
|
||||
|
||||
function peg$SyntaxError(message, expected, found, location) {
|
||||
this.message = message;
|
||||
this.expected = expected;
|
||||
this.found = found;
|
||||
this.location = location;
|
||||
this.name = 'SyntaxError';
|
||||
|
||||
if (typeof Error.captureStackTrace === 'function') {
|
||||
Error.captureStackTrace(this, peg$SyntaxError);
|
||||
}
|
||||
}
|
||||
|
||||
peg$subclass(peg$SyntaxError, Error);
|
||||
|
||||
function peg$parse(input) {
|
||||
const options = arguments.length > 1 ? arguments[1] : {};
|
||||
const parser = this;
|
||||
const peg$FAILED = {};
|
||||
const peg$startRuleFunctions = {template: peg$parsetemplate};
|
||||
let peg$startRuleFunction = peg$parsetemplate;
|
||||
const peg$c0 = '/';
|
||||
const peg$c1 = {type: 'literal', value: '/', description: '"/"'};
|
||||
const peg$c2 = (segments) => {
|
||||
return segments;
|
||||
};
|
||||
const peg$c3 = (s, segments) => {
|
||||
return s.concat(segments);
|
||||
};
|
||||
const peg$c4 = s => {
|
||||
return s;
|
||||
};
|
||||
const peg$c5 = '{';
|
||||
const peg$c6 = {type: 'literal', value: '{', description: '"{"'};
|
||||
const peg$c7 = '=';
|
||||
const peg$c8 = {type: 'literal', value: '=', description: '"="'};
|
||||
const peg$c9 = '}';
|
||||
const peg$c10 = {type: 'literal', value: '}', description: '"}"'};
|
||||
const peg$c11 = (l, segments) => {
|
||||
return ([
|
||||
{kind: extras.BINDING, literal: l},
|
||||
segments,
|
||||
{kind: extras.END_BINDING, literal: ''},
|
||||
])
|
||||
.reduce((a, b) => a.concat(b), []);
|
||||
};
|
||||
const peg$c12 = l => {
|
||||
return [
|
||||
{kind: extras.BINDING, literal: l},
|
||||
{kind: extras.TERMINAL, literal: '*'},
|
||||
{kind: extras.END_BINDING, literal: ''},
|
||||
];
|
||||
};
|
||||
const peg$c13 = (t, segments) => {
|
||||
return t.concat(segments);
|
||||
};
|
||||
const peg$c14 = t => {
|
||||
if (t[0].literal === '*' || t[0].literal === '**') {
|
||||
return [
|
||||
{
|
||||
kind: extras.BINDING,
|
||||
},
|
||||
t[0],
|
||||
{kind: extras.END_BINDING, literal: ''},
|
||||
];
|
||||
} else {
|
||||
return t;
|
||||
}
|
||||
};
|
||||
const peg$c15 = '**';
|
||||
const peg$c16 = {type: 'literal', value: '**', description: '"**"'};
|
||||
const peg$c17 = '*';
|
||||
const peg$c18 = {type: 'literal', value: '*', description: '"*"'};
|
||||
const peg$c19 = l => {
|
||||
return [{kind: extras.TERMINAL, literal: l}];
|
||||
};
|
||||
const peg$c20 = /^[^*=}{\/]/;
|
||||
const peg$c21 = {type: 'class', value: '[^*=}{/]', description: '[^*=}{/]'};
|
||||
const peg$c22 = cs => {
|
||||
return cs.join('');
|
||||
};
|
||||
let peg$currPos = 0;
|
||||
let peg$savedPos = 0;
|
||||
const peg$posDetailsCache = [{line: 1, column: 1, seenCR: false}];
|
||||
let peg$maxFailPos = 0;
|
||||
let peg$maxFailExpected = [];
|
||||
const peg$silentFails = 0;
|
||||
let peg$result;
|
||||
|
||||
if ('startRule' in options) {
|
||||
if (!(options.startRule in peg$startRuleFunctions)) {
|
||||
throw new Error(
|
||||
'Can\'t start parsing from rule "' + options.startRule + '".');
|
||||
}
|
||||
|
||||
peg$startRuleFunction = peg$startRuleFunctions[options.startRule];
|
||||
}
|
||||
|
||||
function text() {
|
||||
return input.substring(peg$savedPos, peg$currPos);
|
||||
}
|
||||
|
||||
function location() {
|
||||
return peg$computeLocation(peg$savedPos, peg$currPos);
|
||||
}
|
||||
|
||||
function expected(description) {
|
||||
throw peg$buildException(
|
||||
null, [{type: 'other', description}],
|
||||
input.substring(peg$savedPos, peg$currPos),
|
||||
peg$computeLocation(peg$savedPos, peg$currPos));
|
||||
}
|
||||
|
||||
function error(message) {
|
||||
throw peg$buildException(
|
||||
message, null, input.substring(peg$savedPos, peg$currPos),
|
||||
peg$computeLocation(peg$savedPos, peg$currPos));
|
||||
}
|
||||
|
||||
function peg$computePosDetails(pos) {
|
||||
let details = peg$posDetailsCache[pos], p, ch;
|
||||
|
||||
if (details) {
|
||||
return details;
|
||||
} else {
|
||||
p = pos - 1;
|
||||
while (!peg$posDetailsCache[p]) {
|
||||
p--;
|
||||
}
|
||||
|
||||
details = peg$posDetailsCache[p];
|
||||
details = {
|
||||
line: details.line,
|
||||
column: details.column,
|
||||
seenCR: details.seenCR,
|
||||
};
|
||||
|
||||
while (p < pos) {
|
||||
ch = input.charAt(p);
|
||||
if (ch === '\n') {
|
||||
if (!details.seenCR) {
|
||||
details.line++;
|
||||
}
|
||||
details.column = 1;
|
||||
details.seenCR = false;
|
||||
} else if (ch === '\r' || ch === '\u2028' || ch === '\u2029') {
|
||||
details.line++;
|
||||
details.column = 1;
|
||||
details.seenCR = true;
|
||||
} else {
|
||||
details.column++;
|
||||
details.seenCR = false;
|
||||
}
|
||||
|
||||
p++;
|
||||
}
|
||||
|
||||
peg$posDetailsCache[pos] = details;
|
||||
return details;
|
||||
}
|
||||
}
|
||||
|
||||
function peg$computeLocation(startPos, endPos) {
|
||||
const startPosDetails = peg$computePosDetails(startPos),
|
||||
endPosDetails = peg$computePosDetails(endPos);
|
||||
|
||||
return {
|
||||
start: {
|
||||
offset: startPos,
|
||||
line: startPosDetails.line,
|
||||
column: startPosDetails.column,
|
||||
},
|
||||
end: {
|
||||
offset: endPos,
|
||||
line: endPosDetails.line,
|
||||
column: endPosDetails.column,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function peg$fail(expected) {
|
||||
if (peg$currPos < peg$maxFailPos) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (peg$currPos > peg$maxFailPos) {
|
||||
peg$maxFailPos = peg$currPos;
|
||||
peg$maxFailExpected = [];
|
||||
}
|
||||
|
||||
peg$maxFailExpected.push(expected);
|
||||
}
|
||||
|
||||
function peg$buildException(message, expected, found, location) {
|
||||
function cleanupExpected(expected) {
|
||||
let i = 1;
|
||||
|
||||
expected.sort((a, b) => {
|
||||
if (a.description < b.description) {
|
||||
return -1;
|
||||
} else if (a.description > b.description) {
|
||||
return 1;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
});
|
||||
|
||||
while (i < expected.length) {
|
||||
if (expected[i - 1] === expected[i]) {
|
||||
expected.splice(i, 1);
|
||||
} else {
|
||||
i++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function buildMessage(expected, found) {
|
||||
function stringEscape(s) {
|
||||
function hex(ch) {
|
||||
return ch.charCodeAt(0).toString(16).toUpperCase();
|
||||
}
|
||||
|
||||
return s.replace(/\\/g, '\\\\')
|
||||
.replace(/"/g, '\\"')
|
||||
.replace(/\x08/g, '\\b')
|
||||
.replace(/\t/g, '\\t')
|
||||
.replace(/\n/g, '\\n')
|
||||
.replace(/\f/g, '\\f')
|
||||
.replace(/\r/g, '\\r')
|
||||
.replace(
|
||||
/[\x00-\x07\x0B\x0E\x0F]/g,
|
||||
ch => {
|
||||
return '\\x0' + hex(ch);
|
||||
})
|
||||
.replace(
|
||||
/[\x10-\x1F\x80-\xFF]/g,
|
||||
ch => {
|
||||
return '\\x' + hex(ch);
|
||||
})
|
||||
.replace(
|
||||
/[\u0100-\u0FFF]/g,
|
||||
ch => {
|
||||
return '\\u0' + hex(ch);
|
||||
})
|
||||
.replace(/[\u1000-\uFFFF]/g, ch => {
|
||||
return '\\u' + hex(ch);
|
||||
});
|
||||
}
|
||||
|
||||
const expectedDescs = new Array(expected.length);
|
||||
let expectedDesc, foundDesc, i;
|
||||
|
||||
for (i = 0; i < expected.length; i++) {
|
||||
expectedDescs[i] = expected[i].description;
|
||||
}
|
||||
|
||||
expectedDesc = expected.length > 1 ?
|
||||
expectedDescs.slice(0, -1).join(', ') + ' or ' +
|
||||
expectedDescs[expected.length - 1] :
|
||||
expectedDescs[0];
|
||||
|
||||
foundDesc = found ? '"' + stringEscape(found) + '"' : 'end of input';
|
||||
|
||||
return 'Expected ' + expectedDesc + ' but ' + foundDesc + ' found.';
|
||||
}
|
||||
|
||||
if (expected !== null) {
|
||||
cleanupExpected(expected);
|
||||
}
|
||||
|
||||
return new peg$SyntaxError(
|
||||
message !== null ? message : buildMessage(expected, found), expected,
|
||||
found, location);
|
||||
}
|
||||
|
||||
function peg$parsetemplate() {
|
||||
let s0, s1, s2;
|
||||
|
||||
s0 = peg$currPos;
|
||||
if (input.charCodeAt(peg$currPos) === 47) {
|
||||
s1 = peg$c0;
|
||||
peg$currPos++;
|
||||
} else {
|
||||
s1 = peg$FAILED;
|
||||
if (peg$silentFails === 0) {
|
||||
peg$fail(peg$c1);
|
||||
}
|
||||
}
|
||||
if (s1 !== peg$FAILED) {
|
||||
s2 = peg$parsebound_segments();
|
||||
if (s2 !== peg$FAILED) {
|
||||
peg$savedPos = s0;
|
||||
s1 = peg$c2(s2);
|
||||
s0 = s1;
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
if (s0 === peg$FAILED) {
|
||||
s0 = peg$currPos;
|
||||
s1 = peg$parsebound_segments();
|
||||
if (s1 !== peg$FAILED) {
|
||||
peg$savedPos = s0;
|
||||
s1 = peg$c2(s1);
|
||||
}
|
||||
s0 = s1;
|
||||
}
|
||||
|
||||
return s0;
|
||||
}
|
||||
|
||||
function peg$parsebound_segments() {
|
||||
let s0, s1, s2, s3;
|
||||
|
||||
s0 = peg$currPos;
|
||||
s1 = peg$parsebound_segment();
|
||||
if (s1 !== peg$FAILED) {
|
||||
if (input.charCodeAt(peg$currPos) === 47) {
|
||||
s2 = peg$c0;
|
||||
peg$currPos++;
|
||||
} else {
|
||||
s2 = peg$FAILED;
|
||||
if (peg$silentFails === 0) {
|
||||
peg$fail(peg$c1);
|
||||
}
|
||||
}
|
||||
if (s2 !== peg$FAILED) {
|
||||
s3 = peg$parsebound_segments();
|
||||
if (s3 !== peg$FAILED) {
|
||||
peg$savedPos = s0;
|
||||
s1 = peg$c3(s1, s3);
|
||||
s0 = s1;
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
if (s0 === peg$FAILED) {
|
||||
s0 = peg$parsebound_segment();
|
||||
}
|
||||
|
||||
return s0;
|
||||
}
|
||||
|
||||
function peg$parsebound_segment() {
|
||||
let s0, s1;
|
||||
|
||||
s0 = peg$currPos;
|
||||
s1 = peg$parsebound_terminal();
|
||||
if (s1 === peg$FAILED) {
|
||||
s1 = peg$parsevariable();
|
||||
}
|
||||
if (s1 !== peg$FAILED) {
|
||||
peg$savedPos = s0;
|
||||
s1 = peg$c4(s1);
|
||||
}
|
||||
s0 = s1;
|
||||
|
||||
return s0;
|
||||
}
|
||||
|
||||
function peg$parsevariable() {
|
||||
let s0, s1, s2, s3, s4, s5;
|
||||
|
||||
s0 = peg$currPos;
|
||||
if (input.charCodeAt(peg$currPos) === 123) {
|
||||
s1 = peg$c5;
|
||||
peg$currPos++;
|
||||
} else {
|
||||
s1 = peg$FAILED;
|
||||
if (peg$silentFails === 0) {
|
||||
peg$fail(peg$c6);
|
||||
}
|
||||
}
|
||||
if (s1 !== peg$FAILED) {
|
||||
s2 = peg$parseliteral();
|
||||
if (s2 !== peg$FAILED) {
|
||||
if (input.charCodeAt(peg$currPos) === 61) {
|
||||
s3 = peg$c7;
|
||||
peg$currPos++;
|
||||
} else {
|
||||
s3 = peg$FAILED;
|
||||
if (peg$silentFails === 0) {
|
||||
peg$fail(peg$c8);
|
||||
}
|
||||
}
|
||||
if (s3 !== peg$FAILED) {
|
||||
s4 = peg$parseunbound_segments();
|
||||
if (s4 !== peg$FAILED) {
|
||||
if (input.charCodeAt(peg$currPos) === 125) {
|
||||
s5 = peg$c9;
|
||||
peg$currPos++;
|
||||
} else {
|
||||
s5 = peg$FAILED;
|
||||
if (peg$silentFails === 0) {
|
||||
peg$fail(peg$c10);
|
||||
}
|
||||
}
|
||||
if (s5 !== peg$FAILED) {
|
||||
peg$savedPos = s0;
|
||||
s1 = peg$c11(s2, s4);
|
||||
s0 = s1;
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
if (s0 === peg$FAILED) {
|
||||
s0 = peg$currPos;
|
||||
if (input.charCodeAt(peg$currPos) === 123) {
|
||||
s1 = peg$c5;
|
||||
peg$currPos++;
|
||||
} else {
|
||||
s1 = peg$FAILED;
|
||||
if (peg$silentFails === 0) {
|
||||
peg$fail(peg$c6);
|
||||
}
|
||||
}
|
||||
if (s1 !== peg$FAILED) {
|
||||
s2 = peg$parseliteral();
|
||||
if (s2 !== peg$FAILED) {
|
||||
if (input.charCodeAt(peg$currPos) === 125) {
|
||||
s3 = peg$c9;
|
||||
peg$currPos++;
|
||||
} else {
|
||||
s3 = peg$FAILED;
|
||||
if (peg$silentFails === 0) {
|
||||
peg$fail(peg$c10);
|
||||
}
|
||||
}
|
||||
if (s3 !== peg$FAILED) {
|
||||
peg$savedPos = s0;
|
||||
s1 = peg$c12(s2);
|
||||
s0 = s1;
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
}
|
||||
|
||||
return s0;
|
||||
}
|
||||
|
||||
function peg$parseunbound_segments() {
|
||||
let s0, s1, s2, s3;
|
||||
|
||||
s0 = peg$currPos;
|
||||
s1 = peg$parseunbound_terminal();
|
||||
if (s1 !== peg$FAILED) {
|
||||
if (input.charCodeAt(peg$currPos) === 47) {
|
||||
s2 = peg$c0;
|
||||
peg$currPos++;
|
||||
} else {
|
||||
s2 = peg$FAILED;
|
||||
if (peg$silentFails === 0) {
|
||||
peg$fail(peg$c1);
|
||||
}
|
||||
}
|
||||
if (s2 !== peg$FAILED) {
|
||||
s3 = peg$parseunbound_segments();
|
||||
if (s3 !== peg$FAILED) {
|
||||
peg$savedPos = s0;
|
||||
s1 = peg$c13(s1, s3);
|
||||
s0 = s1;
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
} else {
|
||||
peg$currPos = s0;
|
||||
s0 = peg$FAILED;
|
||||
}
|
||||
if (s0 === peg$FAILED) {
|
||||
s0 = peg$parseunbound_terminal();
|
||||
}
|
||||
|
||||
return s0;
|
||||
}
|
||||
|
||||
function peg$parsebound_terminal() {
|
||||
let s0, s1;
|
||||
|
||||
s0 = peg$currPos;
|
||||
s1 = peg$parseunbound_terminal();
|
||||
if (s1 !== peg$FAILED) {
|
||||
peg$savedPos = s0;
|
||||
s1 = peg$c14(s1);
|
||||
}
|
||||
s0 = s1;
|
||||
|
||||
return s0;
|
||||
}
|
||||
|
||||
function peg$parseunbound_terminal() {
|
||||
let s0, s1;
|
||||
|
||||
s0 = peg$currPos;
|
||||
if (input.substr(peg$currPos, 2) === peg$c15) {
|
||||
s1 = peg$c15;
|
||||
peg$currPos += 2;
|
||||
} else {
|
||||
s1 = peg$FAILED;
|
||||
if (peg$silentFails === 0) {
|
||||
peg$fail(peg$c16);
|
||||
}
|
||||
}
|
||||
if (s1 === peg$FAILED) {
|
||||
if (input.charCodeAt(peg$currPos) === 42) {
|
||||
s1 = peg$c17;
|
||||
peg$currPos++;
|
||||
} else {
|
||||
s1 = peg$FAILED;
|
||||
if (peg$silentFails === 0) {
|
||||
peg$fail(peg$c18);
|
||||
}
|
||||
}
|
||||
if (s1 === peg$FAILED) {
|
||||
s1 = peg$parseliteral();
|
||||
}
|
||||
}
|
||||
if (s1 !== peg$FAILED) {
|
||||
peg$savedPos = s0;
|
||||
s1 = peg$c19(s1);
|
||||
}
|
||||
s0 = s1;
|
||||
|
||||
return s0;
|
||||
}
|
||||
|
||||
function peg$parseliteral() {
|
||||
let s0, s1, s2;
|
||||
|
||||
s0 = peg$currPos;
|
||||
s1 = [];
|
||||
if (peg$c20.test(input.charAt(peg$currPos))) {
|
||||
s2 = input.charAt(peg$currPos);
|
||||
peg$currPos++;
|
||||
} else {
|
||||
s2 = peg$FAILED;
|
||||
if (peg$silentFails === 0) {
|
||||
peg$fail(peg$c21);
|
||||
}
|
||||
}
|
||||
if (s2 !== peg$FAILED) {
|
||||
while (s2 !== peg$FAILED) {
|
||||
s1.push(s2);
|
||||
if (peg$c20.test(input.charAt(peg$currPos))) {
|
||||
s2 = input.charAt(peg$currPos);
|
||||
peg$currPos++;
|
||||
} else {
|
||||
s2 = peg$FAILED;
|
||||
if (peg$silentFails === 0) {
|
||||
peg$fail(peg$c21);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
s1 = peg$FAILED;
|
||||
}
|
||||
if (s1 !== peg$FAILED) {
|
||||
peg$savedPos = s0;
|
||||
s1 = peg$c22(s1);
|
||||
}
|
||||
s0 = s1;
|
||||
|
||||
return s0;
|
||||
}
|
||||
|
||||
const extras = require('./parser_extras');
|
||||
|
||||
peg$result = peg$startRuleFunction();
|
||||
|
||||
if (peg$result !== peg$FAILED && peg$currPos === input.length) {
|
||||
return peg$result;
|
||||
} else {
|
||||
if (peg$result !== peg$FAILED && peg$currPos < input.length) {
|
||||
peg$fail({type: 'end', description: 'end of input'});
|
||||
}
|
||||
|
||||
throw peg$buildException(
|
||||
null, peg$maxFailExpected,
|
||||
peg$maxFailPos < input.length ? input.charAt(peg$maxFailPos) : null,
|
||||
peg$maxFailPos < input.length ?
|
||||
peg$computeLocation(peg$maxFailPos, peg$maxFailPos + 1) :
|
||||
peg$computeLocation(peg$maxFailPos, peg$maxFailPos));
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
SyntaxError: peg$SyntaxError,
|
||||
parse: peg$parse,
|
||||
};
|
||||
})();
|
||||
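For orientation only: a minimal sketch of the kind of path-template strings the generated grammar above accepts, driven through google-gax's PathTemplate wrapper. The require path and the match/render method names are assumptions based on the google-gax documentation, not part of this diff.

```js
// Sketch only: exercises the PEG.js grammar above via google-gax's PathTemplate.
// The require path and method names are assumptions from the google-gax docs.
const {PathTemplate} = require('google-gax');

const template = new PathTemplate('buckets/{bucket}/objects/{name=**}');

// match() runs the parsed template against a concrete path and returns bindings.
const bindings = template.match('buckets/b1/objects/photos/img.png');
// -> { bucket: 'b1', name: 'photos/img.png' }

// render() is the inverse: it substitutes bindings back into the template.
console.log(template.render(bindings)); // buckets/b1/objects/photos/img.png
```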
17
express-server/node_modules/google-gax/build/src/routing_header.d.ts
generated
vendored
Normal file
@@ -0,0 +1,17 @@
/**
 * Helpers for constructing routing headers.
 *
 * These headers are used by Google infrastructure to determine how to route
 * requests, especially for services that are regional.
 *
 * Generally, these headers are specified as gRPC metadata.
 */
/**
 * Constructs the routing header from the given params
 *
 * @param {Object} params - the request header parameters.
 * @return {string} the routing header value.
 */
export declare function fromParams(params: {
    [index: string]: {};
}): string;
51
express-server/node_modules/google-gax/build/src/routing_header.js
generated
vendored
Normal file
@@ -0,0 +1,51 @@
"use strict";
/*
 * Copyright 2017, Google Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 *     * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following disclaimer
 * in the documentation and/or other materials provided with the
 * distribution.
 *     * Neither the name of Google Inc. nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
Object.defineProperty(exports, "__esModule", { value: true });
/**
 * Helpers for constructing routing headers.
 *
 * These headers are used by Google infrastructure to determine how to route
 * requests, especially for services that are regional.
 *
 * Generally, these headers are specified as gRPC metadata.
 */
/**
 * Constructs the routing header from the given params
 *
 * @param {Object} params - the request header parameters.
 * @return {string} the routing header value.
 */
function fromParams(params) {
    return Object.keys(params).map(key => `${key}=${params[key]}`).join('&');
}
exports.fromParams = fromParams;
//# sourceMappingURL=routing_header.js.map
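A short usage sketch for the fromParams helper above; the require path, the parameter name, and the metadata key mentioned in the comment are illustrative assumptions, not part of the vendored file.

```js
// Sketch only: fromParams just joins key/value pairs with '&', as implemented above.
const routingHeader = require('./routing_header'); // illustrative relative path

const header = routingHeader.fromParams({database: 'projects/p1/databases/d1'});
console.log(header); // database=projects/p1/databases/d1
// Typically this value is attached as 'x-goog-request-params' gRPC metadata.
```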
1
express-server/node_modules/google-gax/build/src/routing_header.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"routing_header.js","sourceRoot":"","sources":["../../src/routing_header.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;;AAEH;;;;;;;GAOG;AAEH;;;;;GAKG;AACH,SAAgB,UAAU,CAAC,MAA6B;IACtD,OAAO,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,IAAI,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC3E,CAAC;AAFD,gCAEC"}
102
express-server/node_modules/google-gax/build/src/streaming.d.ts
generated
vendored
Normal file
@@ -0,0 +1,102 @@
|
||||
/**
|
||||
* Copyright 2016, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
/// <reference types="node" />
|
||||
import * as Duplexify from 'duplexify';
|
||||
import { Duplex, Stream } from 'stream';
|
||||
import { APICall, APICallback } from './api_callable';
|
||||
/**
|
||||
* The type of gRPC streaming.
|
||||
* @enum {number}
|
||||
*/
|
||||
export declare enum StreamType {
|
||||
/** Client sends a single request, server streams responses. */
|
||||
SERVER_STREAMING = 1,
|
||||
/** Client streams requests, server returns a single response. */
|
||||
CLIENT_STREAMING = 2,
|
||||
/** Both client and server stream objects. */
|
||||
BIDI_STREAMING = 3
|
||||
}
|
||||
export declare class StreamProxy extends Duplexify {
|
||||
type: {};
|
||||
private _callback?;
|
||||
private _isCancelCalled;
|
||||
stream?: Duplex & {
|
||||
cancel: () => void;
|
||||
};
|
||||
/**
|
||||
* StreamProxy is a proxy to gRPC-streaming method.
|
||||
*
|
||||
* @private
|
||||
* @constructor
|
||||
* @param {StreamType} type - the type of gRPC stream.
|
||||
* @param {ApiCallback} callback - the callback for further API call.
|
||||
*/
|
||||
constructor(type: StreamType, callback: APICallback);
|
||||
cancel(): void;
|
||||
/**
|
||||
* Forward events from an API request stream to the user's stream.
|
||||
* @param {Stream} stream - The API request stream.
|
||||
*/
|
||||
forwardEvents(stream: Stream): void;
|
||||
/**
|
||||
* Specifies the target stream.
|
||||
* @param {ApiCall} apiCall - the API function to be called.
|
||||
* @param {Object} argument - the argument to be passed to the apiCall.
|
||||
*/
|
||||
setStream(apiCall: APICall, argument: {}): void;
|
||||
}
|
||||
export declare class GrpcStreamable {
|
||||
descriptor: StreamDescriptor;
|
||||
/**
|
||||
* An API caller for methods of gRPC streaming.
|
||||
* @private
|
||||
* @constructor
|
||||
* @param {StreamDescriptor} descriptor - the descriptor of the method structure.
|
||||
*/
|
||||
constructor(descriptor: StreamDescriptor);
|
||||
init(settings: {}, callback: APICallback): StreamProxy;
|
||||
wrap(func: Function): Function;
|
||||
call(apiCall: APICall, argument: {}, settings: {}, stream: StreamProxy): void;
|
||||
fail(stream: Stream, err: Error): void;
|
||||
result(stream: Stream): Stream;
|
||||
}
|
||||
export declare class StreamDescriptor {
|
||||
type: StreamType;
|
||||
/**
|
||||
* Describes the structure of gRPC streaming call.
|
||||
* @constructor
|
||||
* @param {StreamType} streamType - the type of streaming.
|
||||
*/
|
||||
constructor(streamType: StreamType);
|
||||
apiCaller(settings: {
|
||||
retry: null;
|
||||
}): GrpcStreamable;
|
||||
}
|
||||
205
express-server/node_modules/google-gax/build/src/streaming.js
generated
vendored
Normal file
@@ -0,0 +1,205 @@
|
||||
"use strict";
|
||||
/**
|
||||
* Copyright 2016, Google Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following disclaimer
|
||||
* in the documentation and/or other materials provided with the
|
||||
* distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived from
|
||||
* this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
/* This file describes the gRPC-streaming. */
|
||||
const Duplexify = require("duplexify");
|
||||
const retryRequest = require('retry-request');
|
||||
/**
|
||||
* The type of gRPC streaming.
|
||||
* @enum {number}
|
||||
*/
|
||||
var StreamType;
|
||||
(function (StreamType) {
|
||||
/** Client sends a single request, server streams responses. */
|
||||
StreamType[StreamType["SERVER_STREAMING"] = 1] = "SERVER_STREAMING";
|
||||
/** Client streams requests, server returns a single response. */
|
||||
StreamType[StreamType["CLIENT_STREAMING"] = 2] = "CLIENT_STREAMING";
|
||||
/** Both client and server stream objects. */
|
||||
StreamType[StreamType["BIDI_STREAMING"] = 3] = "BIDI_STREAMING";
|
||||
})(StreamType = exports.StreamType || (exports.StreamType = {}));
|
||||
class StreamProxy extends Duplexify {
|
||||
/**
|
||||
* StreamProxy is a proxy to gRPC-streaming method.
|
||||
*
|
||||
* @private
|
||||
* @constructor
|
||||
* @param {StreamType} type - the type of gRPC stream.
|
||||
* @param {ApiCallback} callback - the callback for further API call.
|
||||
*/
|
||||
constructor(type, callback) {
|
||||
super(undefined, undefined, {
|
||||
objectMode: true,
|
||||
readable: type !== StreamType.CLIENT_STREAMING,
|
||||
writable: type !== StreamType.SERVER_STREAMING,
|
||||
});
|
||||
this.type = type;
|
||||
this._callback = callback;
|
||||
this._isCancelCalled = false;
|
||||
}
|
||||
cancel() {
|
||||
if (this.stream) {
|
||||
this.stream.cancel();
|
||||
}
|
||||
else {
|
||||
this._isCancelCalled = true;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Forward events from an API request stream to the user's stream.
|
||||
* @param {Stream} stream - The API request stream.
|
||||
*/
|
||||
forwardEvents(stream) {
|
||||
const eventsToForward = ['metadata', 'response', 'status'];
|
||||
eventsToForward.forEach(event => {
|
||||
stream.on(event, this.emit.bind(this, event));
|
||||
});
|
||||
// We also want to supply the status data as 'response' event to support
|
||||
// the behavior of google-cloud-node expects.
|
||||
// see:
|
||||
// https://github.com/GoogleCloudPlatform/google-cloud-node/pull/1775#issuecomment-259141029
|
||||
// https://github.com/GoogleCloudPlatform/google-cloud-node/blob/116436fa789d8b0f7fc5100b19b424e3ec63e6bf/packages/common/src/grpc-service.js#L355
|
||||
stream.on('metadata', metadata => {
|
||||
// Create a response object with succeeds.
|
||||
// TODO: unify this logic with the decoration of gRPC response when it's
|
||||
// added. see: https://github.com/googleapis/gax-nodejs/issues/65
|
||||
stream.emit('response', {
|
||||
code: 200,
|
||||
details: '',
|
||||
message: 'OK',
|
||||
metadata,
|
||||
});
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Specifies the target stream.
|
||||
* @param {ApiCall} apiCall - the API function to be called.
|
||||
* @param {Object} argument - the argument to be passed to the apiCall.
|
||||
*/
|
||||
setStream(apiCall, argument) {
|
||||
if (this.type === StreamType.SERVER_STREAMING) {
|
||||
const retryStream = retryRequest(null, {
|
||||
objectMode: true,
|
||||
request: () => {
|
||||
if (this._isCancelCalled) {
|
||||
if (this.stream) {
|
||||
this.stream.cancel();
|
||||
}
|
||||
return;
|
||||
}
|
||||
const stream = apiCall(argument, this._callback);
|
||||
this.stream = stream;
|
||||
this.forwardEvents(stream);
|
||||
return stream;
|
||||
},
|
||||
});
|
||||
this.setReadable(retryStream);
|
||||
return;
|
||||
}
|
||||
const stream = apiCall(argument, this._callback);
|
||||
this.stream = stream;
|
||||
this.forwardEvents(stream);
|
||||
if (this.type === StreamType.CLIENT_STREAMING) {
|
||||
this.setWritable(stream);
|
||||
}
|
||||
if (this.type === StreamType.BIDI_STREAMING) {
|
||||
this.setReadable(stream);
|
||||
this.setWritable(stream);
|
||||
}
|
||||
if (this._isCancelCalled && this.stream) {
|
||||
this.stream.cancel();
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.StreamProxy = StreamProxy;
|
||||
class GrpcStreamable {
|
||||
/**
|
||||
* An API caller for methods of gRPC streaming.
|
||||
* @private
|
||||
* @constructor
|
||||
* @param {StreamDescriptor} descriptor - the descriptor of the method structure.
|
||||
*/
|
||||
constructor(descriptor) {
|
||||
this.descriptor = descriptor;
|
||||
}
|
||||
init(settings, callback) {
|
||||
return new StreamProxy(this.descriptor.type, callback);
|
||||
}
|
||||
wrap(func) {
|
||||
switch (this.descriptor.type) {
|
||||
case StreamType.SERVER_STREAMING:
|
||||
return (argument, metadata, options) => {
|
||||
return func(argument, metadata, options);
|
||||
};
|
||||
case StreamType.CLIENT_STREAMING:
|
||||
return (argument, metadata, options, callback) => {
|
||||
return func(metadata, options, callback);
|
||||
};
|
||||
case StreamType.BIDI_STREAMING:
|
||||
return (argument, metadata, options) => {
|
||||
return func(metadata, options);
|
||||
};
|
||||
default:
|
||||
console.error('Unknown stream type', this.descriptor.type);
|
||||
}
|
||||
return func;
|
||||
}
|
||||
call(apiCall, argument, settings, stream) {
|
||||
stream.setStream(apiCall, argument);
|
||||
}
|
||||
fail(stream, err) {
|
||||
stream.emit('error', err);
|
||||
}
|
||||
result(stream) {
|
||||
return stream;
|
||||
}
|
||||
}
|
||||
exports.GrpcStreamable = GrpcStreamable;
|
||||
class StreamDescriptor {
|
||||
/**
|
||||
* Describes the structure of gRPC streaming call.
|
||||
* @constructor
|
||||
* @param {StreamType} streamType - the type of streaming.
|
||||
*/
|
||||
constructor(streamType) {
|
||||
this.type = streamType;
|
||||
}
|
||||
apiCaller(settings) {
|
||||
// Right now retrying does not work with gRPC-streaming, because retryable
|
||||
// assumes an API call returns an event emitter while gRPC-streaming methods
|
||||
// return Stream.
|
||||
// TODO: support retrying.
|
||||
settings.retry = null;
|
||||
return new GrpcStreamable(this);
|
||||
}
|
||||
}
|
||||
exports.StreamDescriptor = StreamDescriptor;
|
||||
//# sourceMappingURL=streaming.js.map
|
||||
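For orientation only: a hedged sketch wiring together the classes defined in the vendored streaming.js above. The require path is illustrative and the real gRPC method is stubbed out.

```js
// Sketch only: uses the classes defined in the vendored streaming.js above.
const {StreamDescriptor, StreamType} = require('./streaming'); // illustrative path

// A descriptor for a server-streaming method; apiCaller() forces retry off,
// since retrying is not supported for gRPC streams (see the comment above).
const descriptor = new StreamDescriptor(StreamType.SERVER_STREAMING);
const caller = descriptor.apiCaller({retry: null});

// init() returns a StreamProxy (a Duplexify object stream) that the caller consumes;
// call() would later bind it to the real gRPC method via setStream().
const proxy = caller.init({}, err => { if (err) console.error(err); });
proxy.on('data', chunk => console.log('row:', chunk));
proxy.on('error', err => console.error(err));
```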
1
express-server/node_modules/google-gax/build/src/streaming.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"streaming.js","sourceRoot":"","sources":["../../src/streaming.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;;AAEH,6CAA6C;AAE7C,uCAAuC;AAKvC,MAAM,YAAY,GAAG,OAAO,CAAC,eAAe,CAAC,CAAC;AAE9C;;;GAGG;AACH,IAAY,UASX;AATD,WAAY,UAAU;IACpB,+DAA+D;IAC/D,mEAAoB,CAAA;IAEpB,iEAAiE;IACjE,mEAAoB,CAAA;IAEpB,6CAA6C;IAC7C,+DAAkB,CAAA;AACpB,CAAC,EATW,UAAU,GAAV,kBAAU,KAAV,kBAAU,QASrB;AAED,MAAa,WAAY,SAAQ,SAAS;IAKxC;;;;;;;OAOG;IACH,YAAY,IAAgB,EAAE,QAAqB;QACjD,KAAK,CAAC,SAAS,EAAE,SAAS,EAAE;YAC1B,UAAU,EAAE,IAAI;YAChB,QAAQ,EAAE,IAAI,KAAK,UAAU,CAAC,gBAAgB;YAC9C,QAAQ,EAAE,IAAI,KAAK,UAAU,CAAC,gBAAgB;SAC9B,CAAC,CAAC;QACpB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;QAC1B,IAAI,CAAC,eAAe,GAAG,KAAK,CAAC;IAC/B,CAAC;IAED,MAAM;QACJ,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;SACtB;aAAM;YACL,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC;SAC7B;IACH,CAAC;IAED;;;OAGG;IACH,aAAa,CAAC,MAAc;QAC1B,MAAM,eAAe,GAAG,CAAC,UAAU,EAAE,UAAU,EAAE,QAAQ,CAAC,CAAC;QAE3D,eAAe,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;YAC9B,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;QAChD,CAAC,CAAC,CAAC;QAEH,wEAAwE;QACxE,6CAA6C;QAC7C,OAAO;QACP,4FAA4F;QAC5F,kJAAkJ;QAClJ,MAAM,CAAC,EAAE,CAAC,UAAU,EAAE,QAAQ,CAAC,EAAE;YAC/B,0CAA0C;YAC1C,wEAAwE;YACxE,iEAAiE;YACjE,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE;gBACtB,IAAI,EAAE,GAAG;gBACT,OAAO,EAAE,EAAE;gBACX,OAAO,EAAE,IAAI;gBACb,QAAQ;aACT,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;;OAIG;IACH,SAAS,CAAC,OAAgB,EAAE,QAAY;QACtC,IAAI,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,gBAAgB,EAAE;YAC7C,MAAM,WAAW,GAAG,YAAY,CAAC,IAAI,EAAE;gBACrC,UAAU,EAAE,IAAI;gBAChB,OAAO,EAAE,GAAG,EAAE;oBACZ,IAAI,IAAI,CAAC,eAAe,EAAE;wBACxB,IAAI,IAAI,CAAC,MAAM,EAAE;4BACf,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;yBACtB;wBACD,OAAO;qBACR;oBACD,MAAM,MAAM,GAAG,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;oBACjD,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;oBACrB,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC;oBAC3B,OAAO,MAAM,CAAC;gBAChB,CAAC;aACF,CAAC,CAAC;YACH,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;YAC9B,OAAO;SACR;QAED,MAAM,MAAM,GAAG,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;QACjD,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC;QAE3B,IAAI,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,gBAAgB,EAAE;YAC7C,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;SAC1B;QAED,IAAI,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,cAAc,EAAE;YAC3C,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;YACzB,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;SAC1B;QAED,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,CAAC,MAAM,EAAE;YACvC,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;SACtB;IACH,CAAC;CACF;AAxGD,kCAwGC;AAED,MAAa,cAAc;IAGzB;;;;;OAKG;IACH,YAAY,UAA4B;QACtC,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC/B,CAAC;IAED,IAAI,CAAC,QAAY,EAAE,QAAqB;QACtC,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;IACzD,CAAC;IAED,IAAI,CAAC,IAAc;QACjB,QAAQ,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE;YAC5B,KAAK,UAAU,CAAC,gBAAgB;gBAC9B,OAAO,CAAC,QAAY,EAAE,QAAY,EAAE,OAAW,EAAE,EAAE;oBACjD,OAAO,IAAI,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC;gBAC3C,CAAC,CAAC;YACJ,KAAK,UAAU,CAAC,gBAAgB;gBAC9B,OAAO,CAAC,QAAY,EAAE,QAAY,EAAE,OAAW,EAAE,QAAY,EAAE,EAAE;oBAC/D,OAAO,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC;gBAC3C,CAAC,CAAC;YACJ,KAAK,UAAU,CAAC,cAAc;gBAC5B,OAAO,CAAC,QAAY,EAAE,QAAY,EAAE,OAAW,EAAE,EAAE;oBACjD,OAAO,IAAI,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;gBACjC,CAAC,CAAC;YACJ;gBACE,OAAO,CAAC,KAAK,CAAC,qBAAqB,EAAE,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;SAC9D;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,IAAI,CAAC,OAAgB,EAAE,QAAY,EAAE,QAAY,EAAE,MAAmB;QACpE,MAAM,CAAC,SAAS,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;IACtC,CAAC;IAED,IAAI,CAAC,MAAc,EAAE,GAAU;QAC7B,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,C
AAC;IAC5B,CAAC;IAED,MAAM,CAAC,MAAc;QACnB,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAhDD,wCAgDC;AAED,MAAa,gBAAgB;IAE3B;;;;OAIG;IACH,YAAY,UAAsB;QAChC,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;IACzB,CAAC;IAED,SAAS,CAAC,QAAuB;QAC/B,0EAA0E;QAC1E,4EAA4E;QAC5E,iBAAiB;QACjB,0BAA0B;QAC1B,QAAQ,CAAC,KAAK,GAAG,IAAI,CAAC;QACtB,OAAO,IAAI,cAAc,CAAC,IAAI,CAAC,CAAC;IAClC,CAAC;CACF;AAnBD,4CAmBC"}
15
express-server/node_modules/google-gax/node_modules/.bin/pbjs
generated
vendored
Normal file
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../protobufjs/bin/pbjs" "$@"
  ret=$?
else
  node "$basedir/../protobufjs/bin/pbjs" "$@"
  ret=$?
fi
exit $ret
7
express-server/node_modules/google-gax/node_modules/.bin/pbjs.cmd
generated
vendored
Normal file
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\protobufjs\bin\pbjs" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\protobufjs\bin\pbjs" %*
)
15
express-server/node_modules/google-gax/node_modules/.bin/pbts
generated
vendored
Normal file
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../protobufjs/bin/pbts" "$@"
  ret=$?
else
  node "$basedir/../protobufjs/bin/pbts" "$@"
  ret=$?
fi
exit $ret
7
express-server/node_modules/google-gax/node_modules/.bin/pbts.cmd
generated
vendored
Normal file
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\protobufjs\bin\pbts" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\protobufjs\bin\pbts" %*
)
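The four wrapper scripts above only forward to protobufjs's pbjs and pbts binaries. As a reference, the same tooling can also be driven in-process; this sketch assumes the protobufjs/cli/pbjs module and its main() entry point as documented by protobufjs, and the .proto path is illustrative.

```js
// Sketch only: invokes protobufjs's pbjs the same way the shell wrapper above does,
// but in-process. 'protobufjs/cli/pbjs' and main() are assumptions from protobufjs docs.
const pbjs = require('protobufjs/cli/pbjs');

pbjs.main(['--target', 'json-module', '--wrap', 'commonjs', 'path/to/api.proto'],
  (err, output) => {
    if (err) throw err;
    console.log(output); // generated JavaScript module as a string
  });
```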
21
express-server/node_modules/google-gax/node_modules/@types/node/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) Microsoft Corporation. All rights reserved.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE
16
express-server/node_modules/google-gax/node_modules/@types/node/README.md
generated
vendored
Normal file
@@ -0,0 +1,16 @@
# Installation
> `npm install --save @types/node`

# Summary
This package contains type definitions for Node.js (http://nodejs.org/).

# Details
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node

Additional Details
 * Last updated: Wed, 19 Dec 2018 18:17:49 GMT
 * Dependencies: none
 * Global values: Buffer, NodeJS, SlowBuffer, Symbol, __dirname, __filename, clearImmediate, clearInterval, clearTimeout, console, exports, global, module, process, require, setImmediate, setInterval, setTimeout

# Credits
These definitions were written by Microsoft TypeScript <https://github.com/Microsoft>, DefinitelyTyped <https://github.com/DefinitelyTyped>, Alberto Schiabel <https://github.com/jkomyno>, Alexander T. <https://github.com/a-tarasyuk>, Alvis HT Tang <https://github.com/alvis>, Andrew Makarov <https://github.com/r3nya>, Bruno Scheufler <https://github.com/brunoscheufler>, Chigozirim C. <https://github.com/smac89>, Christian Vaagland Tellnes <https://github.com/tellnes>, Deividas Bakanas <https://github.com/DeividasBakanas>, Eugene Y. Q. Shen <https://github.com/eyqs>, Flarna <https://github.com/Flarna>, Hannes Magnusson <https://github.com/Hannes-Magnusson-CK>, Hoàng Văn Khải <https://github.com/KSXGitHub>, Huw <https://github.com/hoo29>, Kelvin Jin <https://github.com/kjin>, Klaus Meinhardt <https://github.com/ajafff>, Lishude <https://github.com/islishude>, Mariusz Wiktorczyk <https://github.com/mwiktorczyk>, Matthieu Sieben <https://github.com/matthieusieben>, Mohsen Azimi <https://github.com/mohsen1>, Nicolas Even <https://github.com/n-e>, Nicolas Voigt <https://github.com/octo-sniffle>, Parambir Singh <https://github.com/parambirs>, Sebastian Silbermann <https://github.com/eps1lon>, Simon Schick <https://github.com/SimonSchick>, Thomas den Hollander <https://github.com/ThomasdenH>, Wilco Bakker <https://github.com/WilcoBakker>, wwwy3y3 <https://github.com/wwwy3y3>, Zane Hannan AU <https://github.com/ZaneHannanAU>, Jeremie Rodriguez <https://github.com/jeremiergz>, Samuel Ainsworth <https://github.com/samuela>.
9219
express-server/node_modules/google-gax/node_modules/@types/node/index.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
3163
express-server/node_modules/google-gax/node_modules/@types/node/inspector.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
176
express-server/node_modules/google-gax/node_modules/@types/node/package.json
generated
vendored
Normal file
@@ -0,0 +1,176 @@
|
||||
{
|
||||
"_from": "@types/node@^10.1.0",
|
||||
"_id": "@types/node@10.12.18",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-fh+pAqt4xRzPfqA6eh3Z2y6fyZavRIumvjhaCL753+TVkGKGhpPeyrJG2JftD0T9q4GF00KjefsQ+PQNDdWQaQ==",
|
||||
"_location": "/google-gax/@types/node",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "range",
|
||||
"registry": true,
|
||||
"raw": "@types/node@^10.1.0",
|
||||
"name": "@types/node",
|
||||
"escapedName": "@types%2fnode",
|
||||
"scope": "@types",
|
||||
"rawSpec": "^10.1.0",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "^10.1.0"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/google-gax/protobufjs"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.18.tgz",
|
||||
"_shasum": "1d3ca764718915584fcd9f6344621b7672665c67",
|
||||
"_spec": "@types/node@^10.1.0",
|
||||
"_where": "D:\\Desktop\\Git\\Firebase\\SmartShopperFirebase\\node_modules\\google-gax\\node_modules\\protobufjs",
|
||||
"bugs": {
|
||||
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped/issues"
|
||||
},
|
||||
"bundleDependencies": false,
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Microsoft TypeScript",
|
||||
"url": "https://github.com/Microsoft"
|
||||
},
|
||||
{
|
||||
"name": "DefinitelyTyped",
|
||||
"url": "https://github.com/DefinitelyTyped"
|
||||
},
|
||||
{
|
||||
"name": "Alberto Schiabel",
|
||||
"url": "https://github.com/jkomyno"
|
||||
},
|
||||
{
|
||||
"name": "Alexander T.",
|
||||
"url": "https://github.com/a-tarasyuk"
|
||||
},
|
||||
{
|
||||
"name": "Alvis HT Tang",
|
||||
"url": "https://github.com/alvis"
|
||||
},
|
||||
{
|
||||
"name": "Andrew Makarov",
|
||||
"url": "https://github.com/r3nya"
|
||||
},
|
||||
{
|
||||
"name": "Bruno Scheufler",
|
||||
"url": "https://github.com/brunoscheufler"
|
||||
},
|
||||
{
|
||||
"name": "Chigozirim C.",
|
||||
"url": "https://github.com/smac89"
|
||||
},
|
||||
{
|
||||
"name": "Christian Vaagland Tellnes",
|
||||
"url": "https://github.com/tellnes"
|
||||
},
|
||||
{
|
||||
"name": "Deividas Bakanas",
|
||||
"url": "https://github.com/DeividasBakanas"
|
||||
},
|
||||
{
|
||||
"name": "Eugene Y. Q. Shen",
|
||||
"url": "https://github.com/eyqs"
|
||||
},
|
||||
{
|
||||
"name": "Flarna",
|
||||
"url": "https://github.com/Flarna"
|
||||
},
|
||||
{
|
||||
"name": "Hannes Magnusson",
|
||||
"url": "https://github.com/Hannes-Magnusson-CK"
|
||||
},
|
||||
{
|
||||
"name": "Hoàng Văn Khải",
|
||||
"url": "https://github.com/KSXGitHub"
|
||||
},
|
||||
{
|
||||
"name": "Huw",
|
||||
"url": "https://github.com/hoo29"
|
||||
},
|
||||
{
|
||||
"name": "Kelvin Jin",
|
||||
"url": "https://github.com/kjin"
|
||||
},
|
||||
{
|
||||
"name": "Klaus Meinhardt",
|
||||
"url": "https://github.com/ajafff"
|
||||
},
|
||||
{
|
||||
"name": "Lishude",
|
||||
"url": "https://github.com/islishude"
|
||||
},
|
||||
{
|
||||
"name": "Mariusz Wiktorczyk",
|
||||
"url": "https://github.com/mwiktorczyk"
|
||||
},
|
||||
{
|
||||
"name": "Matthieu Sieben",
|
||||
"url": "https://github.com/matthieusieben"
|
||||
},
|
||||
{
|
||||
"name": "Mohsen Azimi",
|
||||
"url": "https://github.com/mohsen1"
|
||||
},
|
||||
{
|
||||
"name": "Nicolas Even",
|
||||
"url": "https://github.com/n-e"
|
||||
},
|
||||
{
|
||||
"name": "Nicolas Voigt",
|
||||
"url": "https://github.com/octo-sniffle"
|
||||
},
|
||||
{
|
||||
"name": "Parambir Singh",
|
||||
"url": "https://github.com/parambirs"
|
||||
},
|
||||
{
|
||||
"name": "Sebastian Silbermann",
|
||||
"url": "https://github.com/eps1lon"
|
||||
},
|
||||
{
|
||||
"name": "Simon Schick",
|
||||
"url": "https://github.com/SimonSchick"
|
||||
},
|
||||
{
|
||||
"name": "Thomas den Hollander",
|
||||
"url": "https://github.com/ThomasdenH"
|
||||
},
|
||||
{
|
||||
"name": "Wilco Bakker",
|
||||
"url": "https://github.com/WilcoBakker"
|
||||
},
|
||||
{
|
||||
"name": "wwwy3y3",
|
||||
"url": "https://github.com/wwwy3y3"
|
||||
},
|
||||
{
|
||||
"name": "Zane Hannan AU",
|
||||
"url": "https://github.com/ZaneHannanAU"
|
||||
},
|
||||
{
|
||||
"name": "Jeremie Rodriguez",
|
||||
"url": "https://github.com/jeremiergz"
|
||||
},
|
||||
{
|
||||
"name": "Samuel Ainsworth",
|
||||
"url": "https://github.com/samuela"
|
||||
}
|
||||
],
|
||||
"dependencies": {},
|
||||
"deprecated": false,
|
||||
"description": "TypeScript definitions for Node.js",
|
||||
"homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped#readme",
|
||||
"license": "MIT",
|
||||
"main": "",
|
||||
"name": "@types/node",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/DefinitelyTyped/DefinitelyTyped.git"
|
||||
},
|
||||
"scripts": {},
|
||||
"typeScriptVersion": "2.0",
|
||||
"types": "index",
|
||||
"typesPublisherContentHash": "2ab4e2583634afae0837756aa0330daeca55b67e8b9947d540c0efdd33becf3d",
|
||||
"version": "10.12.18"
|
||||
}
|
||||
202
express-server/node_modules/google-gax/node_modules/long/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
246
express-server/node_modules/google-gax/node_modules/long/README.md
generated
vendored
Normal file
246
express-server/node_modules/google-gax/node_modules/long/README.md
generated
vendored
Normal file
@@ -0,0 +1,246 @@
|
||||
long.js
|
||||
=======
|
||||
|
||||
A Long class for representing a 64 bit two's-complement integer value derived from the [Closure Library](https://github.com/google/closure-library)
|
||||
for stand-alone use and extended with unsigned support.
|
||||
|
||||
[](https://travis-ci.org/dcodeIO/long.js)
|
||||
|
||||
Background
|
||||
----------
|
||||
|
||||
As of [ECMA-262 5th Edition](http://ecma262-5.com/ELS5_HTML.htm#Section_8.5), "all the positive and negative integers
|
||||
whose magnitude is no greater than 2<sup>53</sup> are representable in the Number type", which is "representing the
|
||||
doubleprecision 64-bit format IEEE 754 values as specified in the IEEE Standard for Binary Floating-Point Arithmetic".
|
||||
The [maximum safe integer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)
|
||||
in JavaScript is 2<sup>53</sup>-1.
|
||||
|
||||
Example: 2<sup>64</sup>-1 is 1844674407370955**1615** but in JavaScript it evaluates to 1844674407370955**2000**.
|
||||
|
||||
Furthermore, bitwise operators in JavaScript "deal only with integers in the range −2<sup>31</sup> through
|
||||
2<sup>31</sup>−1, inclusive, or in the range 0 through 2<sup>32</sup>−1, inclusive. These operators accept any value of
|
||||
the Number type but first convert each such value to one of 2<sup>32</sup> integer values."
|
||||
|
||||
In some use cases, however, it is required to be able to reliably work with and perform bitwise operations on the full
|
||||
64 bits. This is where long.js comes into play.
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
The class is compatible with CommonJS and AMD loaders and is exposed globally as `Long` if neither is available.
|
||||
|
||||
```javascript
|
||||
var Long = require("long");
|
||||
|
||||
var longVal = new Long(0xFFFFFFFF, 0x7FFFFFFF);
|
||||
|
||||
console.log(longVal.toString());
|
||||
...
|
||||
```
|
||||
|
||||
API
|
||||
---
|
||||
|
||||
### Constructor
|
||||
|
||||
* new **Long**(low: `number`, high: `number`, unsigned?: `boolean`)<br />
|
||||
Constructs a 64 bit two's-complement integer, given its low and high 32 bit values as *signed* integers. See the from* functions below for more convenient ways of constructing Longs.
|
||||
|
||||
### Fields
|
||||
|
||||
* Long#**low**: `number`<br />
|
||||
The low 32 bits as a signed value.
|
||||
|
||||
* Long#**high**: `number`<br />
|
||||
The high 32 bits as a signed value.
|
||||
|
||||
* Long#**unsigned**: `boolean`<br />
|
||||
Whether unsigned or not.
|
||||
|
||||
### Constants
|
||||
|
||||
* Long.**ZERO**: `Long`<br />
|
||||
Signed zero.
|
||||
|
||||
* Long.**ONE**: `Long`<br />
|
||||
Signed one.
|
||||
|
||||
* Long.**NEG_ONE**: `Long`<br />
|
||||
Signed negative one.
|
||||
|
||||
* Long.**UZERO**: `Long`<br />
|
||||
Unsigned zero.
|
||||
|
||||
* Long.**UONE**: `Long`<br />
|
||||
Unsigned one.
|
||||
|
||||
* Long.**MAX_VALUE**: `Long`<br />
|
||||
Maximum signed value.
|
||||
|
||||
* Long.**MIN_VALUE**: `Long`<br />
|
||||
Minimum signed value.
|
||||
|
||||
* Long.**MAX_UNSIGNED_VALUE**: `Long`<br />
|
||||
Maximum unsigned value.
|
||||
|
||||
### Utility
|
||||
|
||||
* Long.**isLong**(obj: `*`): `boolean`<br />
|
||||
Tests if the specified object is a Long.
|
||||
|
||||
* Long.**fromBits**(lowBits: `number`, highBits: `number`, unsigned?: `boolean`): `Long`<br />
|
||||
Returns a Long representing the 64 bit integer that comes by concatenating the given low and high bits. Each is assumed to use 32 bits.
|
||||
|
||||
* Long.**fromBytes**(bytes: `number[]`, unsigned?: `boolean`, le?: `boolean`): `Long`<br />
|
||||
Creates a Long from its byte representation.
|
||||
|
||||
* Long.**fromBytesLE**(bytes: `number[]`, unsigned?: `boolean`): `Long`<br />
|
||||
Creates a Long from its little endian byte representation.
|
||||
|
||||
* Long.**fromBytesBE**(bytes: `number[]`, unsigned?: `boolean`): `Long`<br />
|
||||
Creates a Long from its big endian byte representation.
|
||||
|
||||
* Long.**fromInt**(value: `number`, unsigned?: `boolean`): `Long`<br />
|
||||
Returns a Long representing the given 32 bit integer value.
|
||||
|
||||
* Long.**fromNumber**(value: `number`, unsigned?: `boolean`): `Long`<br />
|
||||
Returns a Long representing the given value, provided that it is a finite number. Otherwise, zero is returned.
|
||||
|
||||
* Long.**fromString**(str: `string`, unsigned?: `boolean`, radix?: `number`)<br />
|
||||
Long.**fromString**(str: `string`, radix: `number`)<br />
|
||||
Returns a Long representation of the given string, written using the specified radix.
|
||||
|
||||
* Long.**fromValue**(val: `*`, unsigned?: `boolean`): `Long`<br />
|
||||
Converts the specified value to a Long using the appropriate from* function for its type.
|
||||
|
||||
### Methods
|
||||
|
||||
* Long#**add**(addend: `Long | number | string`): `Long`<br />
|
||||
Returns the sum of this and the specified Long.
|
||||
|
||||
* Long#**and**(other: `Long | number | string`): `Long`<br />
|
||||
Returns the bitwise AND of this Long and the specified.
|
||||
|
||||
* Long#**compare**/**comp**(other: `Long | number | string`): `number`<br />
|
||||
Compares this Long's value with the specified's. Returns `0` if they are the same, `1` if the this is greater and `-1` if the given one is greater.
|
||||
|
||||
* Long#**divide**/**div**(divisor: `Long | number | string`): `Long`<br />
|
||||
Returns this Long divided by the specified.
|
||||
|
||||
* Long#**equals**/**eq**(other: `Long | number | string`): `boolean`<br />
|
||||
Tests if this Long's value equals the specified's.
|
||||
|
||||
* Long#**getHighBits**(): `number`<br />
|
||||
Gets the high 32 bits as a signed integer.
|
||||
|
||||
* Long#**getHighBitsUnsigned**(): `number`<br />
|
||||
Gets the high 32 bits as an unsigned integer.
|
||||
|
||||
* Long#**getLowBits**(): `number`<br />
|
||||
Gets the low 32 bits as a signed integer.
|
||||
|
||||
* Long#**getLowBitsUnsigned**(): `number`<br />
|
||||
Gets the low 32 bits as an unsigned integer.
|
||||
|
||||
* Long#**getNumBitsAbs**(): `number`<br />
|
||||
Gets the number of bits needed to represent the absolute value of this Long.
|
||||
|
||||
* Long#**greaterThan**/**gt**(other: `Long | number | string`): `boolean`<br />
|
||||
Tests if this Long's value is greater than the specified's.
|
||||
|
||||
* Long#**greaterThanOrEqual**/**gte**/**ge**(other: `Long | number | string`): `boolean`<br />
|
||||
Tests if this Long's value is greater than or equal the specified's.
|
||||
|
||||
* Long#**isEven**(): `boolean`<br />
|
||||
Tests if this Long's value is even.
|
||||
|
||||
* Long#**isNegative**(): `boolean`<br />
|
||||
Tests if this Long's value is negative.
|
||||
|
||||
* Long#**isOdd**(): `boolean`<br />
|
||||
Tests if this Long's value is odd.
|
||||
|
||||
* Long#**isPositive**(): `boolean`<br />
|
||||
Tests if this Long's value is positive.
|
||||
|
||||
* Long#**isZero**/**eqz**(): `boolean`<br />
|
||||
Tests if this Long's value equals zero.
|
||||
|
||||
* Long#**lessThan**/**lt**(other: `Long | number | string`): `boolean`<br />
|
||||
Tests if this Long's value is less than the specified's.
|
||||
|
||||
* Long#**lessThanOrEqual**/**lte**/**le**(other: `Long | number | string`): `boolean`<br />
|
||||
Tests if this Long's value is less than or equal the specified's.
|
||||
|
||||
* Long#**modulo**/**mod**/**rem**(divisor: `Long | number | string`): `Long`<br />
|
||||
Returns this Long modulo the specified.
|
||||
|
||||
* Long#**multiply**/**mul**(multiplier: `Long | number | string`): `Long`<br />
|
||||
Returns the product of this and the specified Long.
|
||||
|
||||
* Long#**negate**/**neg**(): `Long`<br />
|
||||
Negates this Long's value.
|
||||
|
||||
* Long#**not**(): `Long`<br />
|
||||
Returns the bitwise NOT of this Long.
|
||||
|
||||
* Long#**notEquals**/**neq**/**ne**(other: `Long | number | string`): `boolean`<br />
|
||||
Tests if this Long's value differs from the specified's.
|
||||
|
||||
* Long#**or**(other: `Long | number | string`): `Long`<br />
|
||||
Returns the bitwise OR of this Long and the specified.
|
||||
|
||||
* Long#**shiftLeft**/**shl**(numBits: `Long | number | string`): `Long`<br />
|
||||
Returns this Long with bits shifted to the left by the given amount.
|
||||
|
||||
* Long#**shiftRight**/**shr**(numBits: `Long | number | string`): `Long`<br />
|
||||
Returns this Long with bits arithmetically shifted to the right by the given amount.
|
||||
|
||||
* Long#**shiftRightUnsigned**/**shru**/**shr_u**(numBits: `Long | number | string`): `Long`<br />
|
||||
Returns this Long with bits logically shifted to the right by the given amount.
|
||||
|
||||
* Long#**subtract**/**sub**(subtrahend: `Long | number | string`): `Long`<br />
|
||||
Returns the difference of this and the specified Long.
|
||||
|
||||
* Long#**toBytes**(le?: `boolean`): `number[]`<br />
|
||||
Converts this Long to its byte representation.
|
||||
|
||||
* Long#**toBytesLE**(): `number[]`<br />
|
||||
Converts this Long to its little endian byte representation.
|
||||
|
||||
* Long#**toBytesBE**(): `number[]`<br />
|
||||
Converts this Long to its big endian byte representation.
|
||||
|
||||
* Long#**toInt**(): `number`<br />
|
||||
Converts the Long to a 32 bit integer, assuming it is a 32 bit integer.
|
||||
|
||||
* Long#**toNumber**(): `number`<br />
|
||||
Converts the Long to a the nearest floating-point representation of this value (double, 53 bit mantissa).
|
||||
|
||||
* Long#**toSigned**(): `Long`<br />
|
||||
Converts this Long to signed.
|
||||
|
||||
* Long#**toString**(radix?: `number`): `string`<br />
|
||||
Converts the Long to a string written in the specified radix.
|
||||
|
||||
* Long#**toUnsigned**(): `Long`<br />
|
||||
Converts this Long to unsigned.
|
||||
|
||||
* Long#**xor**(other: `Long | number | string`): `Long`<br />
|
||||
Returns the bitwise XOR of this Long and the given one.
|
||||
|
||||
Building
|
||||
--------
|
||||
|
||||
To build an UMD bundle to `dist/long.js`, run:
|
||||
|
||||
```
|
||||
$> npm install
|
||||
$> npm run build
|
||||
```
|
||||
|
||||
Running the [tests](./tests):
|
||||
|
||||
```
|
||||
$> npm test
|
||||
```
|
||||
2
express-server/node_modules/google-gax/node_modules/long/dist/long.js
generated
vendored
Normal file
2
express-server/node_modules/google-gax/node_modules/long/dist/long.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
express-server/node_modules/google-gax/node_modules/long/dist/long.js.map
generated
vendored
Normal file
1
express-server/node_modules/google-gax/node_modules/long/dist/long.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
express-server/node_modules/google-gax/node_modules/long/index.js
generated
vendored
Normal file
1
express-server/node_modules/google-gax/node_modules/long/index.js
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
module.exports = require("./src/long");
|
||||
63
express-server/node_modules/google-gax/node_modules/long/package.json
generated
vendored
Normal file
63
express-server/node_modules/google-gax/node_modules/long/package.json
generated
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
{
|
||||
"_from": "long@^4.0.0",
|
||||
"_id": "long@4.0.0",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==",
|
||||
"_location": "/google-gax/long",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "range",
|
||||
"registry": true,
|
||||
"raw": "long@^4.0.0",
|
||||
"name": "long",
|
||||
"escapedName": "long",
|
||||
"rawSpec": "^4.0.0",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "^4.0.0"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/google-gax/protobufjs"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
|
||||
"_shasum": "9a7b71cfb7d361a194ea555241c92f7468d5bf28",
|
||||
"_spec": "long@^4.0.0",
|
||||
"_where": "D:\\Desktop\\Git\\Firebase\\SmartShopperFirebase\\node_modules\\google-gax\\node_modules\\protobufjs",
|
||||
"author": {
|
||||
"name": "Daniel Wirtz",
|
||||
"email": "dcode@dcode.io"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/dcodeIO/long.js/issues"
|
||||
},
|
||||
"bundleDependencies": false,
|
||||
"dependencies": {},
|
||||
"deprecated": false,
|
||||
"description": "A Long class for representing a 64-bit two's-complement integer value.",
|
||||
"devDependencies": {
|
||||
"webpack": "^3.10.0"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"LICENSE",
|
||||
"README.md",
|
||||
"src/long.js",
|
||||
"dist/long.js",
|
||||
"dist/long.js.map"
|
||||
],
|
||||
"homepage": "https://github.com/dcodeIO/long.js#readme",
|
||||
"keywords": [
|
||||
"math"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"main": "src/long.js",
|
||||
"name": "long",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/dcodeIO/long.js.git"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "webpack",
|
||||
"test": "node tests"
|
||||
},
|
||||
"version": "4.0.0"
|
||||
}
|
||||
1323
express-server/node_modules/google-gax/node_modules/long/src/long.js
generated
vendored
Normal file
1323
express-server/node_modules/google-gax/node_modules/long/src/long.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
935
express-server/node_modules/google-gax/node_modules/protobufjs/CHANGELOG.md
generated
vendored
Normal file
935
express-server/node_modules/google-gax/node_modules/protobufjs/CHANGELOG.md
generated
vendored
Normal file
@@ -0,0 +1,935 @@
|
||||
# [6.8.8](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.8)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3001425b0d896d14188307cd0cc84ce195ad9e04) Persist recent index.d.ts changes in JSDoc<br />
|
||||
|
||||
# [6.8.7](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.7)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e8449c4bf1269a2cc423708db6f0b47a383d33f0) Fix package browser field descriptor ([#1046](https://github.com/dcodeIO/protobuf.js/issues/1046))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/996b3fa0c598ecc73302bfc39208c44830f07b1a) Fix static codegen issues with uglifyjs3<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a06317139b92fdd8c6b3b188fb7b9704dc8ccbf1) Fix lint issues / pbts on windows<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a927a6646e8fdddebcb3e13bc8b28b041b3ee40a) Fix empty 'bytes' field decoding, now using Buffer where applicable ([#1020](https://github.com/dcodeIO/protobuf.js/issues/1020))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f13a81fb41fbef2ce9dcee13f23b7276c83fbcfd) Fix circular dependency of Namespace and Enum ([#994](https://github.com/dcodeIO/protobuf.js/issues/994))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c05c58fad61c16e5ce20ca19758e4782cdd5d2e3) Ignore optional commas in aggregate options ([#999](https://github.com/dcodeIO/protobuf.js/issues/999))<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/36fc964b8db1e4372c76b1baf9f03857cd875b07) Make Message<T> have a default type param ([#1086](https://github.com/dcodeIO/protobuf.js/issues/1086))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/996b3fa0c598ecc73302bfc39208c44830f07b1a) Explicitly define service method names when generating static code, see [#857](https://github.com/dcodeIO/protobuf.js/issues/857)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/07c5d59e1da8c5533a39007ba332928206281408) Also handle services in ext/descriptor ([#1001](https://github.com/dcodeIO/protobuf.js/issues/1001))<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2c5ef95818a310243f88ffba0331cd47ee603c0a) Extend list of ignored ESLint rules for pbjs, fixes [#1085](https://github.com/dcodeIO/protobuf.js/issues/1085)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8576b49ad3e55b8beae2a8f044c51040484eef12) Fix declared return type of pbjs/pbts callback ([#1025](https://github.com/dcodeIO/protobuf.js/issues/1025))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9fceaa69667895e609a3ed78eb2efa7a0ecfb890) Added an option to pbts to allow custom imports ([#1038](https://github.com/dcodeIO/protobuf.js/issues/1038))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/65d113b0079fa2570837f3cf95268ce24714a248) Get node executable path from process.execPath ([#1018](https://github.com/dcodeIO/protobuf.js/issues/1018))<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b611875cfbc1f98d8973a2e86f1506de84f00049) Slim down CI testing and remove some not ultimately necesssary dependencies with audit issues<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/812b38ddabb35e154f9ff94f32ad8ce2a70310f1) Move global handling to util, see [#995](https://github.com/dcodeIO/protobuf.js/issues/995)<br />
|
||||
|
||||
# [6.8.6](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.6)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2ee1028d631a328e152d7e09f2a0e0c5c83dc2aa) Fix typeRefRe being vulnerable to ReDoS<br />
|
||||
|
||||
# [6.8.5](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.6)
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/462132f222d8febb8211d839635aad5b82dc6315) Preserve comments when serializing/deserializing with toJSON and fromJSON. ([#983](https://github.com/dcodeIO/protobuf.js/issues/983))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d29c0caa715a14214fc755b3cf10ac119cdaf199) Add more details to some frequent error messages ([#962](https://github.com/dcodeIO/protobuf.js/issues/962))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8400f87ad8ed2b47e659bc8bb6c3cf2467802425) Add IParseOptions#alternateCommentMode ([#968](https://github.com/dcodeIO/protobuf.js/issues/968))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d6e3b9e218896ec1910e02448b5ee87e4d96ede6) Added field_mask to built-in common wrappers ([#982](https://github.com/dcodeIO/protobuf.js/issues/982))<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/635fef013fbb3523536d92c690ffd7d84829db35) Remove code climate config in order to use 'in-app' config instead<br />
|
||||
|
||||
# [6.8.4](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.4)
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/69440c023e6962c644715a0c95363ddf19db648f) Update jsdoc dependency (pinned vulnerable marked)<br />
|
||||
|
||||
# [6.8.3](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.3)
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cc991a058b0636f3454166c76de7b664cf23a8f4) Use correct safeProp in json-module target, see [#956](https://github.com/dcodeIO/protobuf.js/issues/956)<br />
|
||||
|
||||
# [6.8.2](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.2)
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6fc6481d790648e9e2169a961ad31a732398c911) Include dist files in npm package, see [#955](https://github.com/dcodeIO/protobuf.js/issues/955)<br />
|
||||
|
||||
# [6.8.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.1)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/db2dd49f6aab6ecd606eee334b95cc0969e483c2) Prevent invalid JSDoc names when generating service methods, see [#870](https://github.com/dcodeIO/protobuf.js/issues/870)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/62297998d681357ada70fb370b99bac5573e5054) Prevent parse errors when generating service method names, see [#870](https://github.com/dcodeIO/protobuf.js/issues/870)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/478f332e0fc1d0c318a70b1514b1d59c8c200c37) Support parsing nested option-values with or without ':' ([#951](https://github.com/dcodeIO/protobuf.js/issues/951), fixes [#946](https://github.com/dcodeIO/protobuf.js/issues/946))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/83477ca8e0e1f814ac79a642ea656f047563613a) Add support for reserved keyword in enums ([#950](https://github.com/dcodeIO/protobuf.js/issues/950), fixes [#949](https://github.com/dcodeIO/protobuf.js/issues/949))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c482a5b76fd57769eae4308793e3ff8725264664) Unified safe property escapes and added a test for [#834](https://github.com/dcodeIO/protobuf.js/issues/834)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1724581c36ecc4fc166ea14a9dd57af5e093a467) Fix codegen if type name starts with "Object"<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/adecd544c5fcbeba28d502645f895024e3552970) Fixed dependency for json-module to use "light".<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2a8dd74fca70d4e6fb41328a7cee81d1d50ad7ad) Basic support for URL prefixes in google.protobuf.Any types.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/be78a3d9bc8d9618950c77f9e261b422670042ce) fixed 'error is not defined linter warning when using static/static-module and es6<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c712447b309ae81134c7afd60f8dfa5ecd3be230) Fixed wrong type_url for any type (no leading '.' allowed).<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/145bda25ee1de2c0678ce7b8a093669ec2526b1d) Fixed fromObject() for google.protobuf.Any types.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7dec43d9d847481ad93fca498fd970b3a4a14b11) Handle case where 'extendee' is undefined in ext/descriptor<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/20a26271423319085d321878edc5166a5449e68a) Sanitize CR-only line endings (coming from jsdoc?)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/19d2af12b5db5a0f668f50b0cae3ee0f8a7affc2) Make sure enum typings become generated ([#884](https://github.com/dcodeIO/protobuf.js/issues/884) didn't solve this)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a2c72c08b0265b112d367fa3d33407ff0de955b9) Remove exclude and include patterns from jsdoc config<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9afb8a2ff27c1e0a999d7331f3f65f568f5cced5) Skip defaults when generating proto3<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/952c7d1b478cc7c6de82475a17a1387992e8651f) Wait for both the 'end' and 'close' event to happen before finishing in pbts, see [#863](https://github.com/dcodeIO/protobuf.js/issues/863)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ed7e2e71f5cde27c4128f4f2e3f4782cc51fbec7) Accept null for optional fields in generated static code<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/27cc66a539251216ef10aea04652d58113949df9) Annotate TS classes with @implements<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/05e7e0636727008c72549459b8594fa0442d346f) Annotate virtual oneofs as string literal unions<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/685adb0e7ef0f50e4b93a105013547884957cc98) Also check for reserved ids and names in enums<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/843d0d5b927968025ca11babff28495dd3bb2863) Also support 'reserved' in enum descriptors<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a8376b57fb0a858adff9dc8a1d1b5372eff9d85c) Include just relevant files in npm package, fixes [#781](https://github.com/dcodeIO/protobuf.js/issues/781)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bda1bc6917c681516f6be8be8f0e84ba1262c4ce) Fix travis build<br />
|
||||
|
||||
# [6.8.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.0)
|
||||
|
||||
## Breaking
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ff858003f525db542cbb270777b6fab3a230c9bb) Replaced Buffer and Long types with interfaces and removed stubs<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22f907c49adbbdf09b72bde5299271dbe0ee9cbe) Removed Message#toObject in favor of having just the static version (unnecessary static code otherwise)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c97b61811248df002f1fb93557b982bc0aa27309) Everything uses interfaces now instead of typedefs (SomethingProperties is now ISomething)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b9f179064f3ddf683f13e0d4e17840301be64010) ReflectionObject#toJSON properly omits explicit undefined values<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a6f98b5e74f9e9142f9be3ba0683caeaff916c4) Initial implementation of TypeScript decorators<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a6f98b5e74f9e9142f9be3ba0683caeaff916c4) Refactored protobuf.Class away<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a6f98b5e74f9e9142f9be3ba0683caeaff916c4) TypeScript definitions now have (a lot of) generics<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a6f98b5e74f9e9142f9be3ba0683caeaff916c4) Removed deprecated features<br />
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c306d19d806eb697913ffa2b8613f650127a4c50) Added 'undefined' besides 'null' as a valid value of an optional field, fixes [#826](https://github.com/dcodeIO/protobuf.js/issues/826)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c5518c3bac0da9c2045e6f1baf0dee915afb4221) Fixed an issue with codegen typings, see [#819](https://github.com/dcodeIO/protobuf.js/issues/819)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/66d149e92ff1baddfdfd4b6a88ca9bcea6fc6195) Ported utf8 chunking mechanism to base64 as well, fixes [#800](https://github.com/dcodeIO/protobuf.js/issues/800)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e1f9d9856c98a0f0eb1aa8bdf4ac0df467bee8b9) Also be more verbose when defining properties for ES6, fixes [#820](https://github.com/dcodeIO/protobuf.js/issues/820)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cf36097305ab02047be5014eabeccc3154e18bde) Generate more verbose JSDoc comments for ES6 support, fixes [#820](https://github.com/dcodeIO/protobuf.js/issues/820)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f2959795330966f13cb65bbb6034c88a01fc0bcc) Emit a maximum of one error var when generating verifiers, fixes [#786](https://github.com/dcodeIO/protobuf.js/issues/786)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3b848a10b39c1897ca1ea3b5149ef72ae43fcd11) Fixed missing semicolon after 'extensions' and 'reserved' when generating proto files, fixes [#810](https://github.com/dcodeIO/protobuf.js/issues/810)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/eb1b40497e14a09facbc370676f486bed1376f52) Call npm with '--no-bin-links' when installing CLI deps, fixes [#823](https://github.com/dcodeIO/protobuf.js/issues/823)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/429de19d851477f1df2804d5bc0be30228cd0924) Fix Reader argument conversion in static module<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/03194c203d6ff61ae825e66f8a29ca204fa503b9) Use JSDoc, they said, it documents code, they said. Fixes [#770](https://github.com/dcodeIO/protobuf.js/issues/770)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ec6a133ff541c638517e00f47b772990207c8640) parser should not confuse previous trailing line comments with comments for the next declaration, see [#762](https://github.com/dcodeIO/protobuf.js/issues/762)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0589ace4dc9e5c565ff996cf6e6bf94e63f43c4e) Types should not clear constructor with cache (fixes decorators)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/056ecc3834a3b323aaaa676957efcbe3f52365a0) Namespace#lookup should also check in nested namespaces (wtf)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ed34b093839652db2ff7b84db87857fc57d96038) Reader#bytes should also support plain arrays<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/514afcfa890aa598e93254576c4fd6062e0eff3b) Fix markdown for pipe in code in table<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/17c2797592bc4effd9aaae3ba9777c9550bb75ac) Upgrade to codegen 2<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/57d7d35ddbb9e3a28c396b4ef1ae3b150eeb8035) ext/descriptor enables interoperability between reflection and descriptor.proto (experimental), see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3939667ef1f37b025bd7f9476015890496d50e00) Added 'json' conversion option for proto3 JSON mapping compatibility of NaN and Infinity + additional documentation of util.toJSONOptions, see [#351](https://github.com/dcodeIO/protobuf.js/issues/351)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4eac28c7d3acefb0af7b82c62cf8d19bf3e7d37b) Use protobuf/minimal when pbjs target is static-module<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3a959453fe63706c38ebbacda208e1f25f27dc99) Added closure wrapper<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/13bf9c2635e6a1a2711670fc8e28ae9d7b8d1c8f) Various improvements to statically generated JSDoc, also fixes [#772](https://github.com/dcodeIO/protobuf.js/issues/772)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ffdc93c7cf7c8a716316b00864ea7c510e05b0c8) Check incompatible properties for namespaces only in tsd-jsdoc<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fb3f9c70436d4f81bcd0bf62b71af4d253390e4f) Additional tsd-jsdoc handling of properties inside of namespaces and TS specific API exposure<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2dcae25c99e2ed8afd01e27d21b106633b8c31b9) Several improvements to tsd-jsdoc emitted comments<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ff858003f525db542cbb270777b6fab3a230c9bb) Further TypeScript definition improvements<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22f907c49adbbdf09b72bde5299271dbe0ee9cbe) Relieved tsd files from unnecessary comments<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22f907c49adbbdf09b72bde5299271dbe0ee9cbe) Generate TS namespaces for vars and functions with properties<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b355115e619c6595ac9d91897cfe628ef0e46054) Prefer @tstype over @type when generating typedefs (tsd-jsdoc)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/23f4b990375efcac2c144592cf4ca558722dcf2d) Replaced nullable types with explicit type|null for better tooling compatibility, also fixes [#766](https://github.com/dcodeIO/protobuf.js/issues/766) and fixes 767<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6493f52013c92a34b8305a25068ec7b8c4c29d54) Added more info to ext/descriptor README, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ef92da3768d8746dbfe72e77232f78b879fc811d) Additional notes on ext/descriptor<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b646cf7499791a41b75eef2de1a80fb558d4159e) Updated CHANGELOG so everyone knows what's going on (and soon, breaking)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/35a663757efe188bea552aef017837bc6c6a481a) Additional docs on TS/decorators usage<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9726be0888a9461721447677e9dece16a682b9f6) Updated dist files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9726be0888a9461721447677e9dece16a682b9f6) Added package-lock.json<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/114f7ea9fa3813003afc3ebb453b2dd2262808e1) Minor formatting<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8a6e464954b472fdbb4d46d9270fe3b4b3c7272d) Generate files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/42f8a97630bcb30d197b0f1d6cbdd96879d27f96) Remove the no-constructor arg<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6446247cd7edbb77f03dc42c557f568811286a39) Remove the ctor option.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2059ee0f6f951575d5c5d2dc5eb06b6fa34e27aa) Add support to generate types for JSON object.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7445da0f8cb2e450eff17723f25f366daaf3bbbb) aspromise performance pass<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3f8b74ba6726567eaf68c4d447c120f75eac042f) codegen 2 performance pass, [#653](https://github.com/dcodeIO/protobuf.js/issues/653) might benefit<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d44a7eec2fd393e5cb24196fb5818c8c278a0f34) Fixed minimal library including reflection functionality<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a18e6db9f02696c66032bce7ef4c0eb0568a8048) Minor compression ratio tuning<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b49a4edd38395e209bedac2e0bfb7b9d5c4e980b) Fixed failing test case + coverage<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8f7111cacd236501b7e26791b9747b1974a2d9eb) Improved fromObject wrapper for google.protobuf.Any.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0e471a2516bde3cd3c27b2691afa0dcfbb01f042) Fixed failing tokenize test case<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5867f076d8510fa97e3bd6642bbe61960f7fd196) Removed debug build, made it an extension<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22f907c49adbbdf09b72bde5299271dbe0ee9cbe) Regenerated dist files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5bc3541d2da19e2857dc884f743d37c27e8e21f2) Even more documentation and typings for ext/descriptor<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/773e6347b57e4a5236b1ef0bb8d361e4b233caf7) ext/descriptor docs<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/773e6347b57e4a5236b1ef0bb8d361e4b233caf7) Decorators coverage<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9a23ded94729ceeea2f87cb7e8460eaaaf1c8269) ext/descriptor support for various standard options, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2d8ce6ec0abd261f9b261a44a0a258fdf57ecec3) ext/descriptor passes descriptor.proto test with no differences, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3a20968c6d676312e4f2a510f7e079e0e0819daf) Properly remove unnecessary (packed) options from JSON descriptors<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2a30df8bd5f20d91143a38c2232dafc3a6f3a7bd) Use typedefs in ext/descriptor (like everywhere else), see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1fc911cef01e081c04fb82ead685f49dde1403bb) Fixed obvious issues with ext/descriptor, does not throw anymore when throwing descriptor.proto itself at it, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6c37dbd14f39dad687f2f89f1558a875f7dcc882) Added still missing root traversal to ext/descriptor, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7ab136daa5eb2769b616b6b7522e45a4e33a59f6) Initial map fields support for ext/descriptor, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/708552bb84508364b6e6fdf73906aa69e83854e1) Added infrastructure for TypeScript support of extensions<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/23f26defa793b371c16b5f920fbacb3fb66bdf22) TypeScript generics improvements<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e49bef863c0fb10257ec1001a3c5561755f2ec6b) More ext/descriptor progress, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6b94336c1e6eec0f2eb1bd5dca73a7a8e71a2153) Just export the relevant namespace in ext/descriptor<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fbb99489ed0c095174feff8f53431d30fb6c34a0) Initial descriptor.proto extension for reflection interoperability, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/48e66d975bf7b4e6bdbb68ec24386c98b16c54c5) Moved custom wrappers to its own module instead, also makes the API easier to use manually, see [#677](https://github.com/dcodeIO/protobuf.js/issues/677)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0c6e639d08fdf9be12677bf678563ea631bafb2c) Added infrastructure for custom wrapping/unwrapping of special types, see [#677](https://github.com/dcodeIO/protobuf.js/issues/677)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0425b584f49841d87a8249fef30c78cc31c1c742) More decorator progress (MapField.d, optional Type.d)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a6f98b5e74f9e9142f9be3ba0683caeaff916c4) tsd-jsdoc now has limited generics support<br />
|
||||
|
||||
# [6.7.3](https://github.com/dcodeIO/protobuf.js/releases/tag/6.7.3)
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/57f1da64945f2dc5537c6eaa53e08e8fdd477b67) long, @types/long and @types/node are just dependencies, see [#753](https://github.com/dcodeIO/protobuf.js/issues/753)<br />
|
||||
|
||||
# [6.7.2](https://github.com/dcodeIO/protobuf.js/releases/tag/6.7.2)
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a7621be0a56585defc72d863f4e891e476905692) Split up NamespaceDescriptor to make nested plain namespaces a thing, see [#749](https://github.com/dcodeIO/protobuf.js/issues/749)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e980e72ae3d4697ef0426c8a51608d31f516a2c4) More README<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1f76749d0b9a780c7b6cb56be304f7327d74ebdb) Replaced 'runtime message' with 'message instance' for clarity<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e6b6dedb550edbd0e54e212799e42aae2f1a87f1) Rephrased the Usage section around the concept of valid messages<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0d8100ba87be768ebdec834ca2759693e0bf4325) Added toolset diagram to README<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3405ae8d1ea775c96c30d1ef5cde666c9c7341b3) Touched benchmark output metrics once more<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e36b228f4bb8b1cd835bf31f8605b759a7f1f501) Fixed failing browser test<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7b3bdb562ee7d30c1a557d7b7851d55de3091da4) Output more human friendly metrics from benchmark<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/59e447889057c4575f383630942fd308a35c12e6) Stripped down static bench code to what's necessary<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f88dad098282ece65f5d6e224ca38305a8431829) Revamped benchmark, now also covers Google's JS implementation<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/45356be81ba7796faee0d4d8ad324abdd9f301fb) Updated dependencies and dist files<br />
|
||||
|
||||
# [6.7.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.7.1)
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3d23eed6f7c79007969672f06c1a9ccd691e2411) Made .verify behave more like .encode, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bed514290c105c3b606f760f2abba80510721c77) With null/undefined eliminated by constructors and .create, document message fields as non-optional where applicable (ideally used with TS & strictNullChecks), see [#743](https://github.com/dcodeIO/protobuf.js/issues/743)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/007b2329842679ddf994df7ec0f9c70e73ee3caf) Renamed --strict-long/message to --force-long/message with backward compatible aliases, see [#741](https://github.com/dcodeIO/protobuf.js/issues/741)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6aae71f75e82ffd899869b0c952daf98991421b8) Keep $Properties with --strict-message but require actual instances within, see [#741](https://github.com/dcodeIO/protobuf.js/issues/741)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c812cef0eff26998f14c9d58d4486464ad7b2bbc) Added --strict-message option to pbjs to strictly reference message instances instead of $Properties, see [#741](https://github.com/dcodeIO/protobuf.js/issues/741)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/412407de9afb7ec3a999c4c9a3a1f388f971fce7) Restructured README<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1c4d9d7f024bfa096ddc24aabbdf39211ed8637a) Added more information on typings usage, see [#744](https://github.com/dcodeIO/protobuf.js/issues/744)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/602065e16862751c515c2f3391ee8b880e8140b1) Clarified typescript example in README, see [#744](https://github.com/dcodeIO/protobuf.js/issues/744)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/79d0ba2cc71a156910a9d937683af164df694f08) Clarified that the service API targets clients consuming a service, see [#742](https://github.com/dcodeIO/protobuf.js/issues/742)<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a66f76452ba050088efd1aaebf3c503a55e6287c) Omit copying of undefined or null in constructors and .create, see [#743](https://github.com/dcodeIO/protobuf.js/issues/743)<br />
|
||||
|
||||
# [6.7.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.7.0)
|
||||
|
||||
## Breaking
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c1bbf10e445c3495b23a354f9cbee951b4b20f0) Namespace#lookupEnum should actually look up the reflected enum and not just its values<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/44a8d3af5da578c2e6bbe0a1b948d469bbe27ca1) Decoder now throws if required fields are missing, see [#695](https://github.com/dcodeIO/protobuf.js/issues/695) / [#696](https://github.com/dcodeIO/protobuf.js/issues/696)<br />
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d1e3122e326480fdd44e96afd76ee72e9744b246) Added functionality to filter for multiple types at once in lookup(), used by lookupTypeOrEnum(), fixes [#740](https://github.com/dcodeIO/protobuf.js/issues/740)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8aa21268aa5e0f568cb39e99a83b99ccb4084381) Ensure that fields have been resolved when looking up js types in static target, see [#731](https://github.com/dcodeIO/protobuf.js/issues/731)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f755d36829b9f1effd7960fab3a86a141aeb9fea) Properly copy fields array before sorting in toObject, fixes [#729](https://github.com/dcodeIO/protobuf.js/issues/729)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a06691f5b87f7e90fed0115b78ce6febc4479206) Actually emit TS compatible enums in static target if not aliases, see [#720](https://github.com/dcodeIO/protobuf.js/issues/720)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b01bb58dec92ebf6950846d9b8d8e3df5442b15d) Hardened tokenize/parse, esp. comment parsing, see [#713](https://github.com/dcodeIO/protobuf.js/issues/713)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bc76ad732fc0689cb0a2aeeb91b06ec5331d7972) Exclude any fields part of some oneof when populating defaults in toObject, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/68cdb5f11fdbb950623be089f98e1356cb7b1ea3) Most of the parser is not case insensitive, see [#705](https://github.com/dcodeIO/protobuf.js/issues/705)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3e930b907a834a7da759478b8d3f52fef1da22d8) Retain options argument in Root#load when used with promises, see [#684](https://github.com/dcodeIO/protobuf.js/issues/684)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3c14ef42b3c8f2fef2d96d65d6e288211f86c9ef) Created a micromodule from (currently still bundled) float support<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7ecae9e9f2e1324ef72bf5073463e01deff50cd6) util.isset(obj, prop) can be used to test if a message property is considered to be set, see [#728](https://github.com/dcodeIO/protobuf.js/issues/728)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c04d4a5ab8f91899bd3e1b17fe4407370ef8abb7) Implemented stubs for long.js / node buffers to be used where either one isn't wanted, see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b9574ad02521a31ebd509cdaa269e7807da78d7c) Simplified reusing / replacing internal constructors<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1f97b7af05b49ef69bd6e9d54906d1b7583f42c4) Constructors/.create always initialize proper mutable objects/arrays, see [#700](https://github.com/dcodeIO/protobuf.js/issues/700)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/adb4bb001a894dd8d00bcfe03457497eb994f6ba) Verifiers return an error if multiple fields part of the same oneof are set, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fe93d436b430d01b563318bff591e0dd408c06a4) Added `oneofs: true` to ConversionOptions, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/228c882410d47a26576f839b15f1601e8aa7914d) Optional fields handle null just like undefined regardless of type see [#709](https://github.com/dcodeIO/protobuf.js/issues/709)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/da6af8138afa5343a47c12a8beedb99889c0dd51) Encoders no longer examine virtual oneof properties but encode whatever is present, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ac26a7aa60359a37dbddaad139c0134b592b3325) pbjs now generates multiple exports when using ES6 syntax, see [#686](https://github.com/dcodeIO/protobuf.js/issues/686)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c1ca65dc6987384af6f9fac2fbd7700fcf5765b2) Sequentially serialize fields ordered by id, as of the spec.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/26d9fadb21a85ca0b5609156c26453ae875e4933) decode throws specific ProtocolError with a reference to the so far decoded message if required fields are missing + example<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2b5577b238a452ae86aa395fb2ad3a3f45d755dc) Reader.create asserts that `buffer` is a valid buffer, see [#695](https://github.com/dcodeIO/protobuf.js/issues/695)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6f74d30f059e33a4678f28e7a50dc4878c54bed2) Exclude JSDoc on typedefs from generated d.ts files because typescript@next, see [#737](https://github.com/dcodeIO/protobuf.js/issues/737)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2ebb1b781812e77de914cd260e7ab69612ffd99e) Prepare static code with estraverse instead of regular expressions, see [#732](https://github.com/dcodeIO/protobuf.js/issues/732)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/29ce6cae0cacc0f1d87ca47e64be6a81325aaa55) Moved tsd-jsdoc to future cli package, see [#716](https://github.com/dcodeIO/protobuf.js/issues/716)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8de21e1a947ddb50a167147dd63ad29d37b6a891) $Properties are just a type that's satisfied, not implemented, by classes, see [#723](https://github.com/dcodeIO/protobuf.js/issues/723)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4bfe0c239b9c337f8fa64ea64f6a71baf5639b84) More progress on decoupling the CLI<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8a60174932d15198883ac3f07000ab4e7179a695) Fixed computed array indexes not being renamed in static code, see [#726](https://github.com/dcodeIO/protobuf.js/issues/726)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8d9981588d17709791846de63f1f3bfd09433b03) Check upfront if key-var is required in static decoders with maps, see [#726](https://github.com/dcodeIO/protobuf.js/issues/726)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/16adff0c7b67c69a2133b6aac375365c5f2bdbf7) Fixed handling of stdout if callback is specified, see [#724](https://github.com/dcodeIO/protobuf.js/issues/724)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6423a419fe45e648593833bf535ba1736b31ef63) Preparations for moving the CLI to its own package, see [#716](https://github.com/dcodeIO/protobuf.js/issues/716)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/afefa3de09620f50346bdcfa04d52952824c3c8d) Properly implement $Properties interface in JSDoc, see [#723](https://github.com/dcodeIO/protobuf.js/issues/723)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a1f23e09fb5635275bb7646dfafc70caef74c6b8) Recursively use $Properties inside of $Properties in static code, see [#717](https://github.com/dcodeIO/protobuf.js/issues/717)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c3f0a2124c661bb9ba35f92c21a98a4405d30b47) Added --strict-long option to pbjs to always emit 'Long' instead of 'number|Long' (only relevant with long.js), see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0bc4a14501f84f93afd6ce2933ad00749c82f4df) Statically emitted long type is 'Long' now instead of '$protobuf.Long', see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a75625d176b7478e0e506f05e2cee5e3d7a0d89a) Decoupled message properties as an interface in static code for TS intellisense support, see [#717](https://github.com/dcodeIO/protobuf.js/issues/717)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/23f14a61e8c2f68b06d1bb4ed20b938764c78860) Static code statically resolves types[..], see [#715](https://github.com/dcodeIO/protobuf.js/issues/715)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ef71e77726b6bf5978b948d598c18bf8b237ade4) Added type definitions for all possible JSON descriptors<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4bfe0c239b9c337f8fa64ea64f6a71baf5639b84) Explained the JSON structure in README and moved CLI specific information to the CLI package<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3ba3ad762f7486b4806ad1c45764e92a81ca24dd) Added information on how to use the stubs to README, see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a5dbba41341bf44876cd4226f08044f88148f37d) Added 'What is a valid message' section to README<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6f8f2c1fdf92e6f81363d77bc059820b2376fe32) Added a hint on using .create to initial example<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3ad28ec920e0fe8d0223db28804a7b3f8a6880c2) Even more usage for README<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5a1f861a0f6b582faae7a4cc5c6ca7e4418086da) Additional information on general usage (README)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/320dea5a1d1387c72759e10a17afd77dc48c3de0) Restructured README to Installation, Usage and Examples sections<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1c9055dd69f7696d2582942b307a1ac8ac0f5533) Added a longish section on the correct use of the toolset to README<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/99667c8e1ff0fd3dac83ce8c0cff5d0b1e347310) Added a few additional notes on core methods to README, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2130bc97e44567e766ea8efacb365383c909dbd4) Extended traverse-types example, see [#693](https://github.com/dcodeIO/protobuf.js/issues/693)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/13e4aa3ff274ab42f1302e16fd59d074c5587b5b) Better explain how .verify, .encode and .decode are connected<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7502dd2dfdaea111e5c1a902c524ad0a51ff9bd4) Documented that Type#encode and Message.encode do not implicitly .verify, see [#696](https://github.com/dcodeIO/protobuf.js/issues/696) [ci-skip]<br />
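
For context, the relationship documented above can be sketched roughly as follows (a minimal sketch in JavaScript; `AwesomeMessage` stands for a hypothetical message type obtained via `root.lookupType(...)` and is not part of this changelog). The point is that verification is an explicit, separate step:

```js
// Minimal sketch: .verify, .encode and .decode are separate steps.
const payload = { awesomeField: "hello" };            // plain object (hypothetical field)
const errMsg  = AwesomeMessage.verify(payload);       // returns an error string or null
if (errMsg) throw Error(errMsg);                      // encode() would not have caught this

const message = AwesomeMessage.create(payload);       // runtime message instance
const buffer  = AwesomeMessage.encode(message).finish(); // Uint8Array
const decoded = AwesomeMessage.decode(buffer);        // throws if the wire format is invalid
```
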
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e7e123aa0b6c05eb4156a761739e37c008a3cbc1) Documented throwing behavior of Reader.create and Message.decode<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0fcde32306da77f02cb1ea81ed18a32cee01f17b) Added error handling notes to README, see [#696](https://github.com/dcodeIO/protobuf.js/issues/696)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fef924e5f708f14dac5713aedc484535d36bfb47) Use @protobufjs/float<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fef924e5f708f14dac5713aedc484535d36bfb47) Rebuilt dist files for 6.7.0<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ca0dce2d7f34cd45e4c1cc753a97c58e05b3b9d2) Updated deps, ts fixes and regenerated dist files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2c2d4002d6776f3edde608bd813c37d798d87e6b) Manually merged gentests improvements, fixes [#733](https://github.com/dcodeIO/protobuf.js/issues/733)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e4a6b6f81fa492a63b12f0da0c381612deff1973) Make sure that util.Long is overridden by AMD loaders only if present, see [#730](https://github.com/dcodeIO/protobuf.js/issues/730)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fff1eb297a728ed6d334c591e7d796636859aa9a) Coverage for util.isset and service as a namespace<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8401a47d030214a54b5ee30426ebc7a9d9c3773d) Shortened !== undefined && !== null to equivalent != null in static code<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e1dd1bc2667de73bb65d876162131be2a4d9fef4) With stubs in place, 'number|Long' return values can be just 'Long' instead, see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/404ba8e03a63f708a70a72f0208e0ca9826fe20b) Just alias as the actual ideal type when using stubs, see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/270cc94c7c4b8ad84d19498672bfc854b55130c9) General cleanup + regenerated dist/test files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/017161ce97ceef3b2d0ce648651a4636f187d78b) Simplified camel case regex, see [#714](https://github.com/dcodeIO/protobuf.js/issues/714)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d410fd20f35d2a35eb314783b17b6570a40a99e8) Regenerated dist files and changelog for 6.7.0<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/88ca8f0d1eb334646ca2625c78e63fdd57221408) Retain alias order in static code for what it's worth, see [#712](https://github.com/dcodeIO/protobuf.js/issues/712)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2a74fbf551e934b3212273e6a28ad65ac4436faf) Everything can be block- or line-style when parsing, see [#713](https://github.com/dcodeIO/protobuf.js/issues/713)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/47bb95a31784b935b9ced52aa773b9d66236105e) Determine necessary aliases depending on config, see [#712](https://github.com/dcodeIO/protobuf.js/issues/712)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/588ffd9b129869de0abcef1d69bfa18f2f25d8e1) Use more precise types for message-like plain objects<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/37b39c8d1a5307eea09aa24d7fd9233a8df5b7b6) Regenerated dist files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c94813f9a5f1eb114d7c6112f7e87cb116fe9da) Regenerated relevant files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d7493efe1a86a60f6cdcf7976523e69523d3f7a3) Moved field comparer to util<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fe917652f88df17d4dbaae1cd74f470385342be2) Updated tests to use new simplified encoder logic<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b69173b4e7b514c40bb4a85b54ca5465492a235b) Updated path to tsd-jsdoc template used by pbts, see [#707](https://github.com/dcodeIO/protobuf.js/issues/707)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5041fad9defdb0bc8131560e92f3b454d8e45273) Additional restructuring for moving configuration files out of the root folder<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c0b7c9fa6309d345c4ce8e06fd86f27528f4ea66) Added codegen support for constructor functions, see [#700](https://github.com/dcodeIO/protobuf.js/issues/700)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4573f9aabd7e8f883e530f4d0b055e5ec9b75219) Attempted to fix broken custom error test<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4b49f500fce156b164c757d8f17be2338f767c82) Trying out a more aggressive approach for custom error subclasses<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/95cd64ee514dc60d10daac5180726ff39594e8e8) Moved a few things out of the root folder<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/db1030ed257f9699a0bcf3bad0bbe8acccf5d766) Coverage for encoder compat. / protocolerror<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/948a4caf5092453fa091ac7a594ccd1cc5b503d2) Updated dist and generated test files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3ead13e83ecdc8715fbab916f7ccaf3fbfdf59ed) Added tslint<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/364e7d457ed4c11328e609f600a57b7bc4888554) Exclude dist/ from codeclimate checks<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6e81fcb05f25386e3997399e6596e9d9414f0286) Also lint cli utilities<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e7e123aa0b6c05eb4156a761739e37c008a3cbc1) Cache any regexp instance (perf)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d89c45f8af0293fb34e6f12b37ceca49083e1faa) Use code climate badges<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e70fbe3492c37f009dbaccf910c1e0f81e8f0f44) Updated travis to pipe to codeclimate, coverage<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a7ab1036906bb7638193a9e991cb62c86108880a) More precise linter configuration<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/58688c178257051ceb2dfea8a63eb6be7dcf1cf1) Added codeclimate<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4b21e00adc6fae42e6a88deaeb0b7c077c6ca50e) Moved cli deps placeholder creation to post install script<br />
# [6.6.5](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.5)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/478ee51194878f24be8607e42e5259952607bd44) sfixed64 is not zig-zag encoded, see [#692](https://github.com/dcodeIO/protobuf.js/issues/692)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a944538c89492abbed147915acea611f11c03a2) Added a placeholder to cli deps node_modules folder to make sure node can load from it<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/83142e420eb1167b2162063a092ae8d89c9dd4b2) Restructured a few failing tests<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/367d55523a3ae88f21d47aa96447ec3e943d4620) Traversal example + minimalistic documentation<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8eeffcbcd027c929e2a76accad588c61dfa2e37c) Added a custom getters/setters example for gRPC<br />
# [6.6.4](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.4)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/88eb7a603a21643d5012a374c7d246f4c27620f3) Made sure that LongBits ctor is always called with unsigned 32 bits + static codegen compat., fixes [#690](https://github.com/dcodeIO/protobuf.js/issues/690)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/50e82fa7759be035a67c7818a1e3ebe0d6f453b6) Properly handle multiple ../.. in path.normalize, see [#688](https://github.com/dcodeIO/protobuf.js/issues/688)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7c3506b3f0c5a08a887e97313828af0c21effc61) Post-merge, also tackles [#683](https://github.com/dcodeIO/protobuf.js/issues/683) (packed option for repeated enum values)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7f3f4600bcae6f2e4dadd5cdb055886193a539b7) Verify accepts non-null objects only, see [#685](https://github.com/dcodeIO/protobuf.js/issues/685)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d65c22936183d04014d6a8eb880ae0ec33aeba6d) allow_alias enum option was not being honored. This case is now handled and a test case was added<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2ddb76b6e93174787a68f68fb28d26b8ece7cc56) Added an experimental --sparse option to limit pbjs output to actually referenced types within main files<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/33d14c97600ed954193301aecbf8492076dd0179) Added explicit hint on Uint8Array to initial example, see [#670](https://github.com/dcodeIO/protobuf.js/issues/670)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cbd4c622912688b47658fea00fd53603049b5104) Ranges and names support for reserved fields, see [#676](https://github.com/dcodeIO/protobuf.js/issues/676)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/487f8922d879955ba22f89b036f897b9753b0355) Updated dependencies / rebuilt dist files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/37536e5fa7a15fbc851040e09beb465bc22d9cf3) Use ?: instead of |undefined in .d.ts files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f8b415a2fc2d1b1eff19333600a010bcaaebf890) Mark optional fields as possibly being undefined<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2ddb76b6e93174787a68f68fb28d26b8ece7cc56) Added a few more common google types from google/api, see [#433](https://github.com/dcodeIO/protobuf.js/issues/433)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d246024f4c7d13ca970c91a757e2f47432a619df) Minor optimizations to dependencies, build process and tsd<br />
# [6.6.3](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.3)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0be01a14915e3e510038808fedbc67192a182d9b) Support node 4.2.0 to 4.4.7 buffers + travis case, see [#665](https://github.com/dcodeIO/protobuf.js/issues/665)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a0920b2c32e7963741693f5a773b89f4b262688) Added ES6 syntax flag to pbjs, see [#667](https://github.com/dcodeIO/protobuf.js/issues/667)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c365242bdc28a47f5c6ab91bae34c277d1044eb3) Reference Buffer for BufferReader/Writer, see [#668](https://github.com/dcodeIO/protobuf.js/issues/668)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/43976072d13bb760a0689b54cc35bdea6817ca0d) Slightly shortened README<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e64cf65b09047755899ec2330ca0fc2f4d7932c2) Additional notes on the distinction of different use cases / distributions, see [#666](https://github.com/dcodeIO/protobuf.js/issues/666)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/83758c99275c2bbd30f63ea1661284578f5c9d91) Extended README with additional information on JSON format<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fdc3102689e8a3e8345eee5ead07ba3c9c3fe80c) Added extended usage instructions for TypeScript and custom classes to README, see [#666](https://github.com/dcodeIO/protobuf.js/issues/666)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3701488cca6bc56ce6b7ad93c7b80e16de2571a7) Updated dist files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/579068a45e285c7d2c69b359716dd6870352f46f) Updated test cases to use new buffer util<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0be01a14915e3e510038808fedbc67192a182d9b) Added fetch test cases + some test cleanup<br />
# [6.6.2](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.2)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3aea1bf3d4920dc01603fda25b86e6436ae45ec2) Properly replace short vars when beautifying static code, see [#663](https://github.com/dcodeIO/protobuf.js/issues/663)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b6cf228a82152f72f21b1b307983126395313470) Use custom prelude in order to exclude any module loader code from source (for webpack), see [#658](https://github.com/dcodeIO/protobuf.js/issues/658)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2b12fb7db9d4eaa3b76b7198539946e97db684c4) Make sure to check optional inner messages for null when encoding, see [#658](https://github.com/dcodeIO/protobuf.js/issues/658)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/276a594771329da8334984771cb536de7322d5b4) Initial attempt on a backwards compatible fetch implementation with binary support, see [#661](https://github.com/dcodeIO/protobuf.js/issues/661)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2d81864fa5c4dac75913456d582e0bea9cf0dd80) Root#resolvePath skips files when returning null, see [#368](https://github.com/dcodeIO/protobuf.js/issues/368)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/aab3ec1a757aff0f11402c3fb943c003f092c1af) Changed the rpc service callback to pass the actual error instead of the string 'error' when response decoding fails<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9044178c052299670108f10621d6e9b3d56e8a40) Travis should exit with the respective error when running sauce tests<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/73721f12072d77263e72a3b27cd5cf9409db9f8b) Moved checks whether a test case is applicable to parent case<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3fcd88c3f9b1a084b06cab2d5881cb5bb895869d) Added eventemitter tests and updated micromodule dependencies (so far)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2db4305ca67d003d57aa14eb23f25eb6c3672034) Added lib/path tests and updated a few dependencies<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2b12fb7db9d4eaa3b76b7198539946e97db684c4) Moved micro modules to lib so they can have their own tests etc.<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b6dfa9f0a4c899b5c217d60d1c2bb835e06b2122) Updated travis<br />
# [6.6.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.1)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/039ac77b062ee6ebf4ec84a5e6c6ece221e63401) Properly set up reflection when using light build<br />
# [6.6.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.0) ([release](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.0))
## Breaking
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cdfe6bfba27fa1a1d0e61887597ad4bb16d7e5ed) Inlined / refactored away .testJSON, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a483a529ef9345ed217a23394a136db0d9f7771) Refactored util.extend away<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/27b16351f3286468e539c2ab382de4b52667cf5e) Reflected and statically generated services use common utility, now work exactly the same<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/dca26badfb843a597f81e98738e2fda3f66c7341) fromObject now throws for entirely bogus values (repeated, map and inner message fields), fixes [#601](https://github.com/dcodeIO/protobuf.js/issues/601)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4bff9c356ef5c10b4aa34d1921a3b513e03dbb3d) Cleaned up library distributions, now is full / light / minimal with proper browserify support for each<br />
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/301f7762ef724229cd1df51e496eed8cfd2f10eb) Do not randomly remove slashes from comments, fixes [#656](https://github.com/dcodeIO/protobuf.js/issues/656)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ef7be352baaec26bdcdce01a71fbee47bbdeec15) Properly parse nested textformat options, also tackles [#655](https://github.com/dcodeIO/protobuf.js/issues/655)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b4f4f48f1949876ae92808b0a5ca5f2b29cc011c) Relieved the requirement to call .resolveAll() on roots in order to populate static code-compatible properties, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/56c8ec4196d461383c3e1f271da02553d877ae81) Added a (highly experimental) debug build as a starting point for [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c5d291f9bab045385c5938ba0f6cdf50a315461f) Full build depends on light build depends on minimal build, shares all relevant code<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/735da4315a98a6960f3b5089115e308548b91c07) Also reuse specified root in pbjs for JSON modules, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3a056244d3acf339722d56549469a8df018e682e) Reuse specified root name in pbjs to be able to split definitions over multiple files more easily, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/28ddf756ab83cc890761ef2bd84a0788d2ad040d) Improved pbjs/pbts examples, better covers reflection with definitions for static modules<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6f0b44aea6cf72d23042810f05a7cede85239eb3) Fixed centered formatting on npm<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/dd96dcdacb8eae94942f7016b8dc37a2569fe420) Various other minor improvements / assertions refactored away, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3317a76fb56b9b31bb07ad672d6bdda94b79b6c3) Fixed some common reflection deopt sites, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a483a529ef9345ed217a23394a136db0d9f7771) Reflection performance pass, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a483a529ef9345ed217a23394a136db0d9f7771) Added TS definitions to alternative builds' index files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a483a529ef9345ed217a23394a136db0d9f7771) Removed unnecessary prototype aliases, improves gzip ratio<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/641625fd64aca55b1163845e6787b58054ac36ec) Unified behaviour of and docs on Class constructor / Class.create<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7299929b37267af2100237d4f8b4ed8610b9f7e1) Statically generated services actually inherit from rpc.Service<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f4cf75e4e4192910b52dd5864a32ee138bd4e508) Do not try to run sauce tests for PRs<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/33da148e2b750ce06591c1c66ce4c46ccecc3c8f) Added utility to enable/disable debugging extensions to experimental debug build<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fdb1a729ae5f8ab762c51699bc4bb721102ef0c8) Fixed node 0.12 tests<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6bc5bb4a7649d6b91a5944a9ae20178d004c8856) Fixed coverage<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6f0b44aea6cf72d23042810f05a7cede85239eb3) Added a test case for [#652](https://github.com/dcodeIO/protobuf.js/issues/652)<br />
# [6.5.3](https://github.com/dcodeIO/protobuf.js/releases/tag/6.5.3)
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/799d0303bf289bb720f2b27af59e44c3197f3fb7) In fromObject, check if object is already a runtime message, see [#652](https://github.com/dcodeIO/protobuf.js/issues/652)<br />
# [6.5.2](https://github.com/dcodeIO/protobuf.js/releases/tag/6.5.2)
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8cff92fe3b7ddb1930371edb4937cd0db9216e52) Added coverage reporting<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cbaaae99b4e39a859664df0e6d20f0491169f489) Added a version scheme warning to all CLI commands so that it does not need to be spelled out in the README<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6877b3399f1a4c33568221bffb4e298b01b14439) Coverage progress, 100%<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/711a9eb55cb796ec1e51af7d56ef2ebbd5903063) Coverage progress<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e7526283ee4dd82231235afefbfad6af54ba8970) Attempted to fix badges once and for all<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5aa296c901c2b460ee3be4530ede394e2a45e0ea) Coverage progress<br />
# [6.5.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.5.1)
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9719fd2fa8fd97899c54712a238091e8fd1c57b2) Reuse module paths when looking up cli dependencies, see [#648](https://github.com/dcodeIO/protobuf.js/issues/648)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6302655d1304cf662f556be5d9fe7a016fcedc3c) Check actual module directories to determine if cli dependencies are present and bootstrap semver, see [#648](https://github.com/dcodeIO/protobuf.js/issues/648)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/dfc7c4323bf98fb26ddcfcfbb6896a6d6e8450a4) Added a note on semver-incompatibility, see [#649](https://github.com/dcodeIO/protobuf.js/issues/649)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/49053ffa0ea8a4ba5ae048706dba1ab6f3bc803b) Coverage progress<br />
# [6.5.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.5.0) ([release](https://github.com/dcodeIO/protobuf.js/releases/tag/6.5.0))
## Breaking
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3946e0fefea415f52a16ea7a74109ff40eee9643) Initial upgrade of converters to real generated functions, see [#620](https://github.com/dcodeIO/protobuf.js/issues/620)<br />
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/08cda241a3e095f3123f8a991bfd80aa3eae9400) An enum default value given as a string is now looked up via typeDefault, not defaultValue (which is an array if the field is repeated)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c7e14b1d684aaba2080195cc83900288c5019bbc) Use common utility for virtual oneof getters and setters in both reflection and static code, see [#644](https://github.com/dcodeIO/protobuf.js/issues/644)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/508984b7ff9529906be282375d36fdbada66b8e6) Properly use Type.toObject/Message.toObject within converters, see [#641](https://github.com/dcodeIO/protobuf.js/issues/641)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5bca18f2d32e8687986e23edade7c2aeb6b6bac1) Generate null/undefined assertion in fromObject if actually NOT an enum, see [#620](https://github.com/dcodeIO/protobuf.js/issues/620)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/508984b7ff9529906be282375d36fdbada66b8e6) Replace ALL occurrences of types[%d].values in static code, see [#641](https://github.com/dcodeIO/protobuf.js/issues/641)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9b090bb1673aeb9b8f1d7162316fce4d7a3348f0) Switched to own property-aware encoders for compatibility, see [#639](https://github.com/dcodeIO/protobuf.js/issues/639)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/340d6aa82ac17c4a761c681fa71d5a0955032c8b) Now also parses comments, sets them on reflected objects and re-uses them when generating static code, see [#640](https://github.com/dcodeIO/protobuf.js/issues/640)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3cb82628159db4d2aa721b63619b16aadc5f1981) Further improved generated static code style<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cda5c5452fa0797f1e4c375471aef96f844711f1) Removed scoping iifes from generated static code<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/def7b45fb9b5e01028cfa3bf2ecd8272575feb4d) Removed even more clutter from generated static code<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/dbd19fd9d3a57d033aad1d7173f7f66db8f8db3e) Removed various clutter from generated static code<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1cc8a2460c7e161c9bc58fa441ec88e752df409c) Made sure that static target's replacement regexes don't match fields<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d4272dbf5d0b2577af8efb74a94d246e2e0d728e) Also accept (trailing) triple-slash comments for compatibility with protoc-gen-doc, see [#640](https://github.com/dcodeIO/protobuf.js/issues/640)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0a3862b75fa60ef732e0cd36d623f025acc2fb45) Use semver to validate that CLI dependencies actually satisfy the required version, see [#637](https://github.com/dcodeIO/protobuf.js/issues/637)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9e360ea6a74d41307483e51f18769df7f5b047b9) Added a hint on documenting .proto files for static code<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d2a97bb818474645cf7ce1832952b2c3c739b234) Documented internally used codegen partials for what it's worth<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/079388ca65dfd581d74188a6ae49cfa01b103809) Updated converter documentation<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/168e448dba723d98be05c55dd24769dfe3f43d35) Bundler provides useful stuff to uglify and a global var without extra bloat<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/32e0529387ef97182ad0b9ae135fd8b883ed66b4) Cleaned and categorized tests, coverage progress<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3325e86930a3cb70358c689cb3016c1be991628f) Properly removed builtins from bundle<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2c94b641fc5700c8781ac0b9fe796debac8d6893) Call hasOwnProperty builtin as late as possible decreasing the probability of having to call it at all (perf)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/818bcacde267be70a75e689f480a3caad6f80cf7) Slightly hardened codegen sprintf<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/818bcacde267be70a75e689f480a3caad6f80cf7) Significantly improved uint32 write performance<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b5daa272407cb31945fd38c34bbef7c9edd1db1c) Cleaned up test case data and removed unused files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c280a4a18c6d81c3468177b2ea58ae3bc4f25e73) Removed now useless trailing comment checks, see [#640](https://github.com/dcodeIO/protobuf.js/issues/640)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/44167db494c49d9e4b561a66ad9ce2d8ed865a21) Ensured that pbjs' beautify does not break regular expressions in generated verify functions<br />
# [6.4.6](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.6)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e11012ce047e8b231ba7d8cc896b8e3a88bcb902) Case-sensitively test for legacy group definitions, fixes [#638](https://github.com/dcodeIO/protobuf.js/issues/638)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7e57f4cdd284f886b936511b213a6468e4ddcdce) Properly parse text format options + simple test case, fixes [#636](https://github.com/dcodeIO/protobuf.js/issues/636)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fe4d97bbc4d33ce94352dde62ddcd44ead02d7ad) Added SVG logo, see [#629](https://github.com/dcodeIO/protobuf.js/issues/629)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/57990f7ed8ad5c512c28ad040908cee23bbf2aa8) Also refactored Service and Type to inherit from NamespaceBase, see [#635](https://github.com/dcodeIO/protobuf.js/issues/635)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fe4d97bbc4d33ce94352dde62ddcd44ead02d7ad) Moved TS-compatible Namespace features to a virtual NamespaceBase class, compiles with strictNullChecks by default now, see [#635](https://github.com/dcodeIO/protobuf.js/issues/635)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fe4d97bbc4d33ce94352dde62ddcd44ead02d7ad) Minor codegen enhancements<br />
# [6.4.5](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.5)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1154ce0867306e810cf62a5b41bdb0b765aa8ff3) Properly handle empty/noop Writer#ldelim, fixes [#625](https://github.com/dcodeIO/protobuf.js/issues/625)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f303049f92c53970619375653be46fbb4e3b7d78) Properly annotate map fields in pbjs, fixes [#624](https://github.com/dcodeIO/protobuf.js/issues/624)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4b786282a906387e071a5a28e4842a46df588c7d) Made sure that Writer#bytes is always able to handle plain arrays<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1e6a8d10f291a16631376dd85d5dd385937e6a55) Slightly restructured utility to better support static code default values<br />
# [6.4.4](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.4)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/26d68e36e438b590589e5beaec418c63b8f939cf) Dynamically resolve jsdoc when running pbts, fixes [#622](https://github.com/dcodeIO/protobuf.js/issues/622)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/69c04d7d374e70337352cec9b445301cd7fe60d6) Explain 6.4.2 vs 6.4.3 in changelog<br />
# [6.4.3](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.4)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c2c39fc7cec5634ecd1fbaebbe199bf097269097) Fixed invalid definition of Field#packed property, also introduced decoder.compat mode (packed fields, on by default)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/11fb1a66ae31af675d0d9ce0240cd8e920ae75e7) Always decode packed/non-packed based on wire format only, see [#602](https://github.com/dcodeIO/protobuf.js/issues/602)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c9a61e574f5a2b06f6b15b14c0c0ff56f8381d1f) Use full library for JSON modules and runtime dependency for static modules, fixes [#621](https://github.com/dcodeIO/protobuf.js/issues/621)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e88d13ca7ee971451b57d056f747215f37dfd3d7) Additional workarounds for on demand CLI dependencies, see [#618](https://github.com/dcodeIO/protobuf.js/issues/618)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/44f6357557ab3d881310024342bcc1e0d336a20c) Revised automatic setup of cli dependencies, see [#618](https://github.com/dcodeIO/protobuf.js/issues/618)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e027a3c7855368837e477ce074ac65f191bf774a) Removed Android 4.0 test (no longer supported by sauce)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8ba3c5efd182bc80fc36f9d5fe5e2b615b358236) Removed some unused utility, slightly more efficient codegen, additional comments<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f22a34a071753bca416732ec4d01892263f543fb) Updated tests for new package.json layout<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f22a34a071753bca416732ec4d01892263f543fb) Added break/continue label support to codegen<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f2ffa0731aea7c431c59e452e0f74247d815a352) Updated dependencies, rebuilt dist files and changed logo to use an absolute url<br />
6.4.2 had been accidentally published as 6.4.3.
# [6.4.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.1)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9035d4872e32d6402c8e4d8c915d4f24d5192ea9) Added more default value checks to converter, fixes [#616](https://github.com/dcodeIO/protobuf.js/issues/616)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/62eef58aa3b002115ebded0fa58acc770cd4e4f4) Respect long defaults in converters<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e3170a160079a3a7a99997a2661cdf654cb69e24) Convert inner messages and undefined/null values more thoroughly, fixes [#615](https://github.com/dcodeIO/protobuf.js/issues/615)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b52089efcb9827537012bebe83d1a15738e214f4) Always use first defined enum value as field default, fixes [#613](https://github.com/dcodeIO/protobuf.js/issues/613)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/64f95f9fa1bbe42717d261aeec5c16d1a7aedcfb) Install correct 'tmp' dependency when running pbts without dev dependencies installed, fixes [#612](https://github.com/dcodeIO/protobuf.js/issues/612)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cba46c389ed56737184e5bc2bcce07243d52e5ce) Generate named constructors for runtime messages, see [#588](https://github.com/dcodeIO/protobuf.js/issues/588)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ee20b81f9451c56dc106177bbf9758840b99d0f8) pbjs/pbts no longer generate any volatile headers, see [#614](https://github.com/dcodeIO/protobuf.js/issues/614)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ec9d517d0b87ebe489f02097c2fc8005fae38904) Attempted to make broken shields less annoying<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5cd4c2f2a94bc3c0f2c580040bce28dd42eaccec) Updated README<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0643f93f5c0d96ed0ece5b47f54993ac3a827f1b) Some cleanup and added a logo<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/169638382de9efe35a1079c5f2045c33b858059a) use $protobuf.Long<br />
# [6.4.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.0) ([release](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.0))
## Breaking
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a017bf8a2dbdb7f9e7ce4c026bb6845174feb3b1) Dropped IE8 support<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/39bc1031bb502f8b677b3736dd283736ea4d92c1) Removed now unused util.longNeq which was used by early static code<br />
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5915ff972482e7db2a73629244ab8a93685b2e55) Do not swallow errors in loadSync, also accept negative enum values in Enum#add, fixes [#609](https://github.com/dcodeIO/protobuf.js/issues/609)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fde56c0de69b480343931264a01a1ead1e3156ec) Improved bytes field support, also fixes [#606](https://github.com/dcodeIO/protobuf.js/issues/606)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0c03f327115d57c4cd5eea3a9a1fad672ed6bd44) Fall back to browser Reader when passing an Uint8Array under node, fixes [#605](https://github.com/dcodeIO/protobuf.js/issues/605)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7eb3d456370d7d66b0856e32b2d2602abf598516) Respect optional properties when writing interfaces in tsd-jsdoc, fixes [#598](https://github.com/dcodeIO/protobuf.js/issues/598)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bcadffecb3a8b98fbbd34b45bae0e6af58f9c810) Instead of protobuf.parse.keepCase, fall back to protobuf.parse.defaults holding all possible defaults, see [#608](https://github.com/dcodeIO/protobuf.js/issues/608)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a4d6a2af0d57a2e0cccf31e3462c8b2465239f8b) Added global ParseOptions#keepCase fallback as protobuf.parse.keepCase, see [#608](https://github.com/dcodeIO/protobuf.js/issues/608)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a017bf8a2dbdb7f9e7ce4c026bb6845174feb3b1) Converters use code generation and support custom implementations<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/28ce07d9812f5e1743afef95a94532d2c9488a84) Be more verbose when throwing invalid wire type errors, see [#602](https://github.com/dcodeIO/protobuf.js/issues/602)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/40074bb69c3ca4fcefe09d4cfe01f3a86844a7e8) Added an asJSON-option to always populate array fields, even if defaults=false, see [#597](https://github.com/dcodeIO/protobuf.js/issues/597)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a7d23240a278aac0bf01767b6096d692c09ae1ce) Attempt to improve TypeScript support by using explicit exports<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cec253fb9a177ac810ec96f4f87186506091fa37) Copy-pasted typescript definitions to micro modules, see [#599](https://github.com/dcodeIO/protobuf.js/issues/599)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1f18453c7bfcce65c258fa98a3e3d4577d2e550f) Emit an error on resolveAll() if any extension fields cannot be resolved, see [#595](https://github.com/dcodeIO/protobuf.js/issues/595) + test case<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/804739dbb75359b0034db0097fe82081e3870a53) Removed 'not recommend' label for --keep-case, see [#608](https://github.com/dcodeIO/protobuf.js/issues/608)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9681854526f1813a6ef08becf130ef2fbc28b638) Added customizable linter configuration to pbjs<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9681854526f1813a6ef08becf130ef2fbc28b638) Added stdin support to pbjs and pbts<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/407223b5ceca3304bc65cb48888abfdc917d5800) Static code no longer uses IE8 support utility<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a017bf8a2dbdb7f9e7ce4c026bb6845174feb3b1) Generated static code now supports asJSON/from<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3c775535517b8385a1d3c1bf056f3da3b4266f8c) Added support for TypeScript enums to pbts<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0cda72a55a1f2567a5d981dc5d924e55b8070513) Added a few helpful comments to static code<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/24b293c297feff8bda5ee7a2f8f3f83d77c156d0) Slightly beautify statically generated code<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/65637ffce20099df97ffbcdce50faccc8e97c366) Do not wrap main definition as a module and export directly instead<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/65637ffce20099df97ffbcdce50faccc8e97c366) Generate prettier definitions with --no-comments<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/20d8a2dd93d3bbb6990594286f992e703fc4e334) Added variable arguments support to tsd-jsdoc<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8493dbd9a923693e943f710918937d83ae3c4572) Reference dependency imports as a module to prevent name collisions, see [#596](https://github.com/dcodeIO/protobuf.js/issues/596)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/39a2ea361c50d7f4aaa0408a0d55bb13823b906c) Removed now unnecessary comment lines in generated static code<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a4e41b55471d83a8bf265c6641c3c6e0eee82e31) Added notes on CSP-restricted environments to README, see [#593](https://github.com/dcodeIO/protobuf.js/issues/593)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1a3effdad171ded0608e8da021ba8f9dd017f2ff) Added test case for asJSON with arrays=true, see [#597](https://github.com/dcodeIO/protobuf.js/issues/597)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/751a90f509b68a5f410d1f1844ccff2fc1fc056a) Added a tape adapter to assert message equality across browsers<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fde56c0de69b480343931264a01a1ead1e3156ec) Refactored some internal utility away<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/805291086f6212d1f108b3d8f36325cf1739c0bd) Reverted previous attempt on [#597](https://github.com/dcodeIO/protobuf.js/issues/597)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c5160217ea95996375460c5403dfe37b913d392e) Minor tsd-jsdoc refactor<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/961dd03061fc2c43ab3bf22b3f9f5165504c1002) Removed unused sandbox files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f625eb8b0762f8f5d35bcd5fc445e52b92d8e77d) Updated package.json of micro modules to reference types, see [#599](https://github.com/dcodeIO/protobuf.js/issues/599)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/46ec8209b21cf9ff09ae8674e2a5bbc49fd4991b) Reference dependencies as imports in generated typescript definitions, see [#596](https://github.com/dcodeIO/protobuf.js/issues/596)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3bab132b871798c7c50c60a4c14c2effdffa372e) Allow null values on optional long fields, see [#590](https://github.com/dcodeIO/protobuf.js/issues/590)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/31da56c177f1e11ffe0072ad5f58a55e3f8008fd) Various jsdoc improvements and a workaround for d.ts generation, see [#592](https://github.com/dcodeIO/protobuf.js/issues/592)<br />
# [6.3.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.3.1)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/95ed6e9e8268711db24f44f0d7e58dd278ddac4c) Empty inner messages are always present on the wire + test case + removed now unused Writer#ldelim parameter, see [#585](https://github.com/dcodeIO/protobuf.js/issues/585)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e8a4d5373b1a00cc6eafa5b201b91d0e250cc00b) Expose tsd-jsdoc's comments option to pbts as --no-comments, see [#587](https://github.com/dcodeIO/protobuf.js/issues/587)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6fe099259b5985d873ba5bec88c049d7491a11cc) Increase child process max buffer when running jsdoc from pbts, see [#587](https://github.com/dcodeIO/protobuf.js/issues/587)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3d84ecdb4788d71b5d3928e74db78e8e54695f0a) pbjs now generates more convenient dot-notation property accessors<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1e0ebc064e4f2566cebf525d526d0b701447bd6a) And fixed IE8 again (should probably just drop IE8 for good)<br />
# [6.3.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.3.0)
## Breaking
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a97956b1322b6ee62d4fc9af885658cd5855e521) Moved camelCase/underScore away from util to where actually used<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c144e7386529b53235a4a5bdd8383bdb322f2825) Renamed asJSON option keys (enum to enums, long to longs) because enum is a reserved keyword<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5b9ade428dca2df6a13277522f2916e22092a98b) Moved JSON/Message conversion to its own source file and added Message/Type.from + test case, see [#575](https://github.com/dcodeIO/protobuf.js/issues/575)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0b0de2458a1ade1ccd4ceb789697be13290f856b) Relicensed the library and its components to BSD-3-Clause to match the official implementation (again)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22a64c641d4897965035cc80e92667bd243f182f) Dropped support for browser buffer entirely (it is a Uint8Array anyway), which ensures performance and makes things simpler<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22a64c641d4897965035cc80e92667bd243f182f) Removed dead parts of the Reader API<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/964f65a9dd94ae0a18b8be3d9a9c1b0b1fdf6424) Refactored BufferReader/Writer to their own files and removed unnecessary operations (node always has FloatXXArray and browser buffer uses ieee anyway)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bfac0ea9afa3dbaf5caf79ddf0600c3c7772a538) Stripped out fallback encoder/decoder/verifier completely (even IE8 supports codegen), significantly reduces bundle size, can use static codegen elsewhere<br />
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c3023a2f51fc74547f6c6e53cf75feed60f3a25c) Actually concatenate mixed custom options when parsing<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0d66b839df0acec2aea0566d2c0bbcec46c3cd1d) Fixed a couple of issues with alternative browser builds<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/33706cdc201bc863774c4af6ac2c38ad96a276e6) Properly set long defaults on prototypes<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0ea2740f0774b4c5c349b9c303f3fb2c2743c37b) Fixed reference error in minimal runtime, see [#580](https://github.com/dcodeIO/protobuf.js/issues/580)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/741b6d8fde84d9574676a729a29a428d99f0a0a0) Non-repeated empty messages are always present on the wire, see [#581](https://github.com/dcodeIO/protobuf.js/issues/581)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7fac9d6a39bf42d316c1676082a2d0804bc55934) Properly check Buffer.prototype.set with node v4<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3ad8108eab57e2b061ee6f1fddf964abe3f4cbc7) Prevent NRE and properly annotate verify signature in tsd-jsdoc, fixed [#572](https://github.com/dcodeIO/protobuf.js/issues/572)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6c2415d599847cbdadc17dee3cdf369fc9facade) Fix directly using Buffer instead of util.Buffer<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/19e906c2a15acc6178b3bba6b19c2f021e681176) Added filter type to Namespace#lookup, fixes [#569](https://github.com/dcodeIO/protobuf.js/issues/569)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c9a66bf393d9d6927f35a9c18abf5d1c31db912) Fixed parsing enum inner options, see [#565](https://github.com/dcodeIO/protobuf.js/issues/565)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ea7ba8b83890084d61012cb5386dc11dadfb3908) Fixed release links in README files<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/442471363f99e67fa97044f234a47b3c9b929dfa) Added a noparse build for completeness<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bfee1cc3624d0fa21f9553c2f6ce2fcf7fcc09b7) Now compresses .gz files using zopfli to make them useful beyond being just a reference<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/aed134aa1cd7edd801de77c736cf5efe6fa61cb0) Updated non-bundled google types folder with missing descriptors and added wrappers to core<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0b0de2458a1ade1ccd4ceb789697be13290f856b) Replaced the ieee754 implementation for old browsers with a faster, use-case specific one + simple test case<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/99ad9cc08721b834a197d4bbb67fa152d7ad79aa) Added .create to statically generated types and uppercase nested elements to reflection namespaces, see [#576](https://github.com/dcodeIO/protobuf.js/issues/576)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/99ad9cc08721b834a197d4bbb67fa152d7ad79aa) Also added Namespace#getEnum for completeness, see [#576](https://github.com/dcodeIO/protobuf.js/issues/576)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ef43acff547c0cd84cfb7a892fe94504a586e491) Added Namespace#getEnum and changed #lookupEnum to the same behavior, see [#576](https://github.com/dcodeIO/protobuf.js/issues/576)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1fcfdfe21c1b321d975a8a96d133a452c9a9c0d8) Added a heap of coverage comments for usually unused code paths to open things up<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c234de7f0573ee30ed1ecb15aa82b74c0f994876) Added codegen test to determine if any ancient browsers don't actually support it<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fed2000e7e461efdb1c3a1a1aeefa8b255a7c20b) Added legacy groups support to pbjs, see [#568](https://github.com/dcodeIO/protobuf.js/issues/568)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/974a1321da3614832aa0a5b2e7c923f66e4ba8ae) Initial support for legacy groups + test case, see [#568](https://github.com/dcodeIO/protobuf.js/issues/568)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c9a66bf393d9d6927f35a9c18abf5d1c31db912) Added asJSON bytes as Buffer, see [#566](https://github.com/dcodeIO/protobuf.js/issues/566)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c60cd397e902ae6851c017f2c298520b8336cbee) Annotated callback types in pbjs-generated services, see [#582](https://github.com/dcodeIO/protobuf.js/issues/582)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3e7e4fc59e6d2d6c862410b4b427fbedccdb237b) Removed type/ns alias comment in static target to not confuse jsdoc unnecessarily<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/99ad9cc08721b834a197d4bbb67fa152d7ad79aa) Made pbjs use loadSync for deterministic outputs, see [#573](https://github.com/dcodeIO/protobuf.js/issues/573)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4d1f5facfcaaf5f2ab6a70b12443ff1b66e7b94e) Updated documentation on runtime and noparse builds<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c59647a7542cbc4292248787e5f32bb99a9b8d46) Fixed an issue with the changelog generator skipping some commits<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/24f2c03af9f13f5404259866fdc8fed33bfaae25) Added notes on how to use pbjs and pbts programmatically<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3544576116146b209246d71c7f7a9ed687950b26) Manually sorted old changelog entries<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d5812571f335bae68f924aa1098519683a9f3e44) Initial changelog generator, see [#574](https://github.com/dcodeIO/protobuf.js/issues/574)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ab3e236a967a032a98267a648f84d129fdb4d4a6) Added static/JSON module interchangeability to README<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7939a4bd8baca5f7e07530fc93f27911a6d91c6f) Updated README and bundler according to dynamic require calls<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/93e04f1db4a9ef3accff8d071c75be3d74c0cd4a) Added basic services test case<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b5a068f5b79b6f00c4b05d9ac458878650ffa09a) Just polyfill Buffer.from / .allocUnsafe for good<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4375a485789e14f7bf24bece819001154a03dca2) Added a test case to find out if all the fallbacks are just for IE8<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/deb2e82ed7eda41d065a09d120e91c0f7ecf1e6a) Commented out float assertions in float test including explanation<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d3ebd5745b024033fbc2410ecad4d4e02abd67db) Expose array implementation used with (older) browsers on util for tests<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b1b6a813c93da4c7459755186aa02ef2f3765c94) Updated test cases<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/99dc5faa7b39fdad8ebc102de4463f8deb7f48ff) Added assumptions to float test case<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/948ca2e3c5c62fedcd918d75539c261abf1a7347) Updated travis config to use C++11<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c59647a7542cbc4292248787e5f32bb99a9b8d46) Updated / added additional LICENSE files where appropriate<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/333f0221814be976874862dc83d0b216e07d4012) Integrated changelog into build process, now also has 'npm run make' for everything, see [#574](https://github.com/dcodeIO/protobuf.js/issues/574)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ab3e236a967a032a98267a648f84d129fdb4d4a6) Minor optimizations through providing type-hints<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ab3e236a967a032a98267a648f84d129fdb4d4a6) Reverted shortened switch statements in verifier<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ab3e236a967a032a98267a648f84d129fdb4d4a6) Enums can't be map key types<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8ef6975b0bd372b79e9b638f43940424824e7176) Use custom require (now a micromodule) for all optional modules, see [#571](https://github.com/dcodeIO/protobuf.js/issues/571)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e226f001e4e4633d64c52be4abc1915d7b7bd515) Support usage when size = 0<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/19e906c2a15acc6178b3bba6b19c2f021e681176) Reverted aliases frequently used in codegen for better gzip ratio<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/47b51ec95a540681cbed0bac1b2f02fc4cf0b73d) Shrank the bundle size a bit<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f8451f0058fdf7a1fac15ffc529e4e899c6b343c) Can finally run with --trace-deopt again without crashes<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c9a66bf393d9d6927f35a9c18abf5d1c31db912) Other minor optimizations<br />
|
||||
|
||||
# [6.2.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.2.1)
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1a6fdc9a11fb08506d09351f8e853384c2b8be25) Added ParseOptions to protobuf.parse and --keep-case for .proto sources to pbjs, see [#564](https://github.com/dcodeIO/protobuf.js/issues/564)<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fc383d0721d83f66b2d941f0d9361621839327e9) Better TypeScript definition support for @property-annotated objects<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4531d75cddee9a99adcac814d52613116ba789f3) Can't just inline longNeq but can be simplified<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8f25377cf99036794ba13b160a5060f312d1a7e7) Array abuse and varint optimization<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/90b201209a03e8022ada0ab9182f338fa0813651) Updated dependencies<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f1110b0993ec86e0a4aee1735bd75b901952cb36) Other minor improvements to short ifs<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c079c900e2d61c63d5508eafacbd00163d377482) Reader/Writer example<br />
|
||||
|
||||
# [6.2.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.2.0)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9b7b92a4c7f8caa460d687778dc0628a74cdde37) Fixed reserved names re, also ensure valid service method names, see [#559](https://github.com/dcodeIO/protobuf.js/issues/559)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a83425049c9a78c5607bc35e8089e08ce78a741e) Fix d.ts whitespace on empty lines, added tsd-jsdoc LICENSE<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5f9bede280aa998afb7898e8d2718b4a229e8e6f) Fix asJSON defaults option, make it work for repeated fields.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b0aef62191b65cbb305ece84a6652d76f98da259) Inlined any Reader/Writer#tag calls, also fixes [#556](https://github.com/dcodeIO/protobuf.js/issues/556)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4d091d41caad9e63cd64003a08210b78878e01dd) Fix building default dist files with explicit runtime=false<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/096dfb686f88db38ed2d8111ed7aac36f8ba658a) Apply asJSON recursively<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/19c269f1dce1b35fa190f264896d0865a54a4fff) Ensure working reflection class names with minified builds<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c769504e0ffa6cbe0b6f8cdc14f1231bed7ee34) Lazily resolve (some) cyclic dependencies, see [#560](https://github.com/dcodeIO/protobuf.js/issues/560)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/da07d8bbbede4175cc45ca46d883210c1082e295) Added protobuf.roots to minimal runtime, see [#554](https://github.com/dcodeIO/protobuf.js/issues/554)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8f407a18607334185afcc85ee98dc1478322bd01) Repo now includes a restructured version of tsd-jsdoc with our changes incorporated for issues/prs, see [#550](https://github.com/dcodeIO/protobuf.js/issues/550)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1b5e4250415c6169eadb405561242f847d75044b) Updated pbjs arguments<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4750e3111b9fdb107d0fc811e99904fbcdbb6de1) Pipe tsd-jsdoc output (requires dcodeIO/tsd-jsdoc/master) and respect cwd, see [#550](https://github.com/dcodeIO/protobuf.js/issues/550)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/75f4b6cb6325a3fc7cd8fed3de5dbe0b6b29c748) tsd-jsdoc progress<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/766171e4c8b6650ea9c6bc3e76c9c96973c2f546) README<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c33835cb1fe1872d823e94b0fff024dc624323e8) Added GH issue template<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6f9ffb6307476d48f45dc4f936744b82982d386b) Path micromodule, dependencies<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0b9b1d8505743995c5328daab1f1e124debc63bd) Test case for [#556](https://github.com/dcodeIO/protobuf.js/issues/556)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/74b2c5c5d33a46c3751ebeadc9d934d4ccb8286c) Raw alloc benchmark<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fb74223b7273530d8baa53437ee96c65a387436d) Other minor optimizations<br />
|
||||
|
||||
# [6.1.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.1.1)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/baea920fa6bf5746e0a7888cdbb089cd5d94fc90) Properly encode/decode map kv pairs as repeated messages (codegen and fallback), see [#547](https://github.com/dcodeIO/protobuf.js/issues/547)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/28a1d26f28daf855c949614ef485237c6bf316e5) Make genVerifyKey actually generate conditions for 32bit values and bool, fixes [#546](https://github.com/dcodeIO/protobuf.js/issues/546)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3e9d8ea9a5cbb2e029b5c892714edd6926d2e5a7) Fix to generation of verify methods for bytes<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e7893675ccdf18f0fdaea8f9a054a6b5402b060e) Take special care of oneofs when encoding (i.e. when explicitly set to defaults), see [#542](https://github.com/dcodeIO/protobuf.js/issues/542)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/52cd8b5a891ec8e11611127c8cfa6b3a91ff78e3) Added Message#asJSON option for bytes conversion<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/01365ba9116ca1649b682635bb29814657c4133c) Added Namespace#lookupType and Namespace#lookupService (throw instead of returning null), see [#544](https://github.com/dcodeIO/protobuf.js/issues/544)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a54fbc918ef6bd627113f05049ff704e07bf33b4) Provide prebuilt browser versions of the static runtime<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3783af7ca9187a1d9b1bb278ca69e0188c7e4c66) Initial pbts CLI for generating TypeScript definitions, see [#550](https://github.com/dcodeIO/protobuf.js/issues/550)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b8bce03405196b1779727f246229fd9217b4303d) Refactored json/static-module targets to use common wrappers<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/691231fbc453a243f48a97bfb86794ab5718ef49) Refactor cli to support multiple built-in wrappers, added named roots instead of always using global.root and added additionally necessary eslint comments, see [#540](https://github.com/dcodeIO/protobuf.js/issues/540)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e3e77d0c7dc973d3a5948a49d123bdaf8a048030) Annotate namespaces generated by static target, see [#550](https://github.com/dcodeIO/protobuf.js/issues/550)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/aff21a71e6bd949647b1b7721ea4e1fe16bcd933) static target: Basic support for oneof fields, see [#542](https://github.com/dcodeIO/protobuf.js/issues/542)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b6b00aa7b0cd35e0e8f3c16b322788e9942668d4) Fix to reflection documentation<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ed86f3acbeb6145be5f24dcd05efb287b539e61b) README on minimal runtime / available downloads<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d31590b82d8bafe6657bf877d403f01a034ab4ba) Notes on descriptors vs static modules<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ce41d0ef21cee2d918bdc5c3b542d3b7638b6ead) A lot of minor optimizations to performance and gzip ratio<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ecbb4a52fbab445e63bf23b91539e853efaefa47) Minimized base64 tables<br />
|
||||
|
||||
# [6.1.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.1.0)
|
||||
|
||||
## Breaking
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a46cc4934b7e888ae80e06fd7fdf91e5bc7f54f5) Removed as-function overload for Reader/Writer, profiler stub, optimized version of Reader#int32<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7983ee0ba15dc5c1daad82a067616865051848c9) Refactored Prototype and inherits away, is now Class and Message for more intuitive documentation and type refs<br />
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c3c70fe3a47fd4f7c85dc80e1af7d9403fe349cd) Fixed failing test case on node < 6<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/66be5983321dd06460382d045eb87ed72a186776) Fixed serialization order of sfixed64, fixes [#536](https://github.com/dcodeIO/protobuf.js/issues/536)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7def340833f9f1cc41f4835bd0d62e203b54d9eb) Fixed serialization order of fixed64, fallback to parseInt with no long lib, see [#534](https://github.com/dcodeIO/protobuf.js/issues/534)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/98a58d40ca7ee7afb1f76c5804e82619104644f6) Actually allow undefined as service method type, fixes [#528](https://github.com/dcodeIO/protobuf.js/issues/528)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/38d867fc50a4d7eb1ca07525c9e4c71b8782443e) Do not skip optional delimiter after aggregate options, fixes [#520](https://github.com/dcodeIO/protobuf.js/issues/520)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/67449db7c7416cbc59ad230c168cf6e6b6dba0c5) Verify empty base64 encoded strings for bytes fields, see [#535](https://github.com/dcodeIO/protobuf.js/issues/535)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ef0fcb6d525c5aab13a39b4f393adf03f751c8c9) Fixed misspelling: 'role' should be 'rule'<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/55db92e21a26c04f524aeecb2316968c000e744d) decodeDelimited always forks if writer is specified, see [#531](https://github.com/dcodeIO/protobuf.js/issues/531)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ebae1e18152617f11ac07827828f5740d4f2eb7e) Mimic spec-compliant behaviour in oneof getVirtual, see [#523](https://github.com/dcodeIO/protobuf.js/issues/523)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a0398f5880c434ff88fd8d420ba07cc29c5d39d3) Initial base64 string support for bytes fields, see [#535](https://github.com/dcodeIO/protobuf.js/issues/535)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a6c00c3e1def5d35c7fcaa1bbb6ce4e0fe67544) Initial type-checking verifier, see [#526](https://github.com/dcodeIO/protobuf.js/issues/526), added to bench out of competition<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3aa984e063cd73e4687102b4abd8adc16582dbc4) Initial loadSync (node only), see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f1370ff5b0db2ebb73b975a3d7c7bd5b901cbfac) Initial RPC service implementation, see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/090d8eaf10704a811a73e1becd52f2307cbcad48) added 'defaults' option to Prototype#asJSON, see [#521](https://github.com/dcodeIO/protobuf.js/issues/521)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7c28483d65cde148e61fe9993f1716960b39e049) Use Uint8Array pool in browsers, just like node does with buffers<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4157a0ec2e54c4d19794cb16edddcd8d4fbd3e76) Also validate map fields, see [#526](https://github.com/dcodeIO/protobuf.js/issues/526) (this really needs some tests)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0ce099bf4f4666fd00403a2839e6da628b8328a9) Added json-module target to pbjs, renamed static to static-module, see [#522](https://github.com/dcodeIO/protobuf.js/issues/522)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1d99442fe65fcaa2f9e33cc0186ef1336057e0cf) updated internals and static target to use immutable objects on prototypes<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e6eaa91b9fe021b3356d4d7e42033a877bc45871) Added a couple of alternative signatures, protobuf.load returns promise or undefined, aliased Reader/Writer-as-function signature with Reader/Writer.create for typed dialects, see [#518](https://github.com/dcodeIO/protobuf.js/issues/518)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9df6a3d4a654c3e122f97d9a594574c7bbb412da) Added variations for Root#load, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/193e65c006a8df8e9b72e0f23ace14a94952ee36) Added benchmark and profile related information to README<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/228a2027de35238feb867cb0485c78c755c4d17d) Added service example to README, see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1a8c720714bf867f1f0195b4690faefa4f65e66a) README on tests<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/014fb668dcf853874c67e3e0aeb7b488a149d35c) Update README/dist to reflect recent changes<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/11d844c010c5a22eff9d5824714fb67feca77b26) Minimal documentation for micromodules<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/47608dd8595b0df2b30dd18fef4b8207f73ed56a) Document all the callbacks, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3891ab07bbe20cf84701605aa62453a6dbdb6af2) Documented streaming-rpc example a bit<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5606cb1bc41bc90cb069de676650729186b38640) Removed the need for triple-slash references in .d.ts by providing a minimal Long interface, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527), see [#530](https://github.com/dcodeIO/protobuf.js/issues/530)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/adf3cc3d340f8b2a596c892c64457b15e42a771b) Transition to micromodules<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f3a9589b74af6a1bf175f2b1994badf703d7abc4) Refactored argument order of utf8 for plausibility<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/14c207ed6e05a61e756fa4192efb2fa219734dd6) Restructured reusable micromodules<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b510ba258986271f07007aebc5dcfea7cfd90cf4) Can't use Uint8Array#set on node < 6 buffers<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/78952a50ceee8e196b4f156eb01f7f693b5b8aac) Test case for [#531](https://github.com/dcodeIO/protobuf.js/issues/531)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/954577c6b421f7d7f4905bcc32f57e4ebaf548da) Safer signaling for synchronous load, see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9ea3766ff1b8fb7ccad028f44efe27d3b019eeb7) Proper end of stream signaling to rpcImpl, see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e4faf7fac9b34d4776f3c15dfef8d2ae54104567) Moved event emitter to util, also accepts listener context, see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9bdec62793ce77c954774cc19106bde4132f24fc) Probably the worst form of hiding require programmatically, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4462d8b05d3aba37c865cf53e09b3199cf051a92) Attempt to hide require('fs') from webpack, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7c3bf8d32cbf831b251730b3876c35c901926300) Trying out jsdoc variations, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bb4059467287fefda8f966de575fd0f8f9690bd3) by the way, why not include the json->proto functionality into "util"?<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f1008e6ee53ee50358e19c10df8608e950be4be3) Update proto.js<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fc9014822d9cdeae8c6e454ccb66ee28f579826c) Automatic profile generation and processing<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2a2f6dcab5beaaa98e55a005b3d02643c45504d6) Generalized buffer pool and moved it to util<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/53a16bf3ada4a60cc09757712e0046f3f2d9d094) Make shields visible on npm, yey<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9004b9d0c5135a7f6df208ea658258bf2f9e6fc9) More shields, I love shields, and maybe a workaround for travis timing out when sauce takes forever<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/060a7916a2715a9e4cd4d05d7c331bec33e60b7e) Trying SauceLabs with higher concurrency<br />
|
||||
|
||||
# [6.0.2](https://github.com/dcodeIO/protobuf.js/releases/tag/6.0.2)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/23d664384900eb65e44910def45f04be996fbba1) Fix packable float/double see [#513](https://github.com/dcodeIO/protobuf.js/issues/513)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/54283d39c4c955b6a84f7f53d4940eec39e4df5e) Handle oneofs in prototype ctor, add non-ES5 fallbacks, test case<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0ae66752362899b8407918a759b09938e82436e1) Be nice to AMD, allow reconfiguration of Reader/Writer interface<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/00f3574ef4ee8b237600e41839bf0066719c4469) Initial static codegen target for reference<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/81e36a7c14d89b487dfe7cfb2f8380fcdf0df392) pbjs static target services support<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4885b8239eb74c72e665787ea0ece3336e493d7f) pbjs static target progress, uses customizable wrapper template<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ad5abe7bac7885ba4f68df7eeb800d2e3b81750b) Static pbjs target progress, now generates usable CommonJS code, see [#512](https://github.com/dcodeIO/protobuf.js/issues/512)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d9634d218849fb49ff5dfb4597bbb2c2d43bbf08) TypeScript example<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fce8276193a5a9fabad5e5fbeb2ccd4f0f3294a9) Adjectives, notes on browserify<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/23d664384900eb65e44910def45f04be996fbba1) Refactor runtime util into separate file, reader/writer uses runtime util<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f91c432a498bebc0adecef1562061b392611f51a) Also optimize reader with what we have learned<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d83f799519fe69808c88e83d9ad66c645d15e963) More (shameless) writer over-optimization<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8a2dbc610a06fe3a1a2695a3ab032d073b77760d) Trading package size for float speed<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/95c5538cfaf1daf6b4990f6aa7599779aaacf99f) Skip defining getters and setters on IE8 entirely, automate defining fallbacks<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/09865d069303e795e475c82afe2b2267abaa59ea) Unified proto/reflection/classes/static encoding API to always return a writer<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/98d6ae186a48416e4ff3030987caed285f40a4f7) plain js utf8 is faster for short strings<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/79fbbf48b8e4dc9c41dcbdef2b73c5f2608b0318) improve TypeScript support. add simple test script.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/96fa07adec8b0ae05e07c2c40383267f25f2fc92) Use long.js dependency in tests, reference types instead of paths in .d.ts see [#503](https://github.com/dcodeIO/protobuf.js/issues/503)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5785dee15d07fbcd14025a96686707173bd649a0) Restructured encoder / decoder to better support static code gen<br />
|
||||
|
||||
# [6.0.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.0.1)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/799c1c1a84b255d1831cc84c3d24e61b36fa2530) Add support for long strings, fixes [#509](https://github.com/dcodeIO/protobuf.js/issues/509)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6e5fdb67cb34f90932e95a51370e1652acc55b4c) expose zero on LongBits, fixes [#508](https://github.com/dcodeIO/protobuf.js/issues/508)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/aa922c07490f185c5f97cf28ebbd65200fc5e377) Fixed issues with Root.fromJSON/#addJSON, search global for Long<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/51fe45656b530efbba6dad92f92db2300aa18761) Properly exclude browserify's annoying _process, again, fixes [#502](https://github.com/dcodeIO/protobuf.js/issues/502)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3c16e462a28c36abbc8a176eab9ac2e10ba68597) Remember loaded files earlier to prevent race conditions, fixes [#501](https://github.com/dcodeIO/protobuf.js/issues/501)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4012a00a0578185d92fb6e7d3babd059fee6d6ab) Allow negative enum ids even if super inefficient (encodes as 10 bytes), fixes [#499](https://github.com/dcodeIO/protobuf.js/issues/499), fixes [#500](https://github.com/dcodeIO/protobuf.js/issues/500)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/96dd8f1729ad72e29dbe08dd01bc0ba08446dbe6) set resolvedResponseType on resolve(), fixes [#497](https://github.com/dcodeIO/protobuf.js/issues/497)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d3ae961765e193ec11227d96d699463de346423f) Initial take on runtime services, see [#507](https://github.com/dcodeIO/protobuf.js/issues/507)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/90cd46b3576ddb2d0a6fc6ae55da512db4be3acc) Include dist/ in npm package for frontend use<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4affa1b7c0544229fb5f0d3948df6d832f6feadb) pbjs proto target field options, language-level compliance with jspb test.proto<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a06e95222d741c47a51bcec85cd20317de7c0b0) always use Uint8Array in docs for tsd, see [#503](https://github.com/dcodeIO/protobuf.js/issues/503)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/637698316e095fc35f62a304daaca22654974966) Notes on dist files<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/29ff3f10e367d6a2ae15fb4254f4073541559c65) Update eslint env<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/943be1749c7d37945c11d1ebffbed9112c528d9f) Browser field in package.json isn't required<br />
|
||||
39
express-server/node_modules/google-gax/node_modules/protobufjs/LICENSE
generated
vendored
Normal file
39
express-server/node_modules/google-gax/node_modules/protobufjs/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
This license applies to all parts of protobuf.js except those files
|
||||
either explicitly including or referencing a different license or
|
||||
located in a directory containing a different LICENSE file.
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2016, Daniel Wirtz All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
* Neither the name of its author, nor the names of its contributors
|
||||
may be used to endorse or promote products derived from this software
|
||||
without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
---
|
||||
|
||||
Code generated by the command line utilities is owned by the owner
|
||||
of the input file used when generating it. This code is not
|
||||
standalone and requires a support library to be linked with it. This
|
||||
support library is itself covered by the above license.
|
||||
879
express-server/node_modules/google-gax/node_modules/protobufjs/README.md
generated
vendored
Normal file
879
express-server/node_modules/google-gax/node_modules/protobufjs/README.md
generated
vendored
Normal file
@@ -0,0 +1,879 @@
|
||||
<h1><p align="center"><img alt="protobuf.js" src="https://github.com/dcodeIO/protobuf.js/raw/master/pbjs.png" width="120" height="104" /></p></h1>
|
||||
<p align="center"><a href="https://npmjs.org/package/protobufjs"><img src="https://img.shields.io/npm/v/protobufjs.svg" alt=""></a> <a href="https://travis-ci.org/dcodeIO/protobuf.js"><img src="https://travis-ci.org/dcodeIO/protobuf.js.svg?branch=master" alt=""></a> <a href="https://npmjs.org/package/protobufjs"><img src="https://img.shields.io/npm/dm/protobufjs.svg" alt=""></a> <a href="https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=dcode%40dcode.io&item_name=Open%20Source%20Software%20Donation&item_number=dcodeIO%2Fprotobuf.js"><img alt="donate ❤" src="https://img.shields.io/badge/donate-❤-ff2244.svg"></a></p>
|
||||
|
||||
**Protocol Buffers** are a language-neutral, platform-neutral, extensible way of serializing structured data for use in communications protocols, data storage, and more, originally designed at Google ([see](https://developers.google.com/protocol-buffers/)).
|
||||
|
||||
**protobuf.js** is a pure JavaScript implementation with [TypeScript](https://www.typescriptlang.org) support for [node.js](https://nodejs.org) and the browser. It's easy to use, blazingly fast and works out of the box with [.proto](https://developers.google.com/protocol-buffers/docs/proto) files!
|
||||
|
||||
Contents
|
||||
--------
|
||||
|
||||
* [Installation](#installation)<br />
|
||||
How to include protobuf.js in your project.
|
||||
|
||||
* [Usage](#usage)<br />
|
||||
A brief introduction to using the toolset.
|
||||
|
||||
* [Valid Message](#valid-message)
|
||||
* [Toolset](#toolset)<br />
|
||||
|
||||
* [Examples](#examples)<br />
|
||||
A few examples to get you started.
|
||||
|
||||
* [Using .proto files](#using-proto-files)
|
||||
* [Using JSON descriptors](#using-json-descriptors)
|
||||
* [Using reflection only](#using-reflection-only)
|
||||
* [Using custom classes](#using-custom-classes)
|
||||
* [Using services](#using-services)
|
||||
* [Usage with TypeScript](#usage-with-typescript)<br />
|
||||
|
||||
* [Command line](#command-line)<br />
|
||||
How to use the command line utility.
|
||||
|
||||
* [pbjs for JavaScript](#pbjs-for-javascript)
|
||||
* [pbts for TypeScript](#pbts-for-typescript)
|
||||
* [Reflection vs. static code](#reflection-vs-static-code)
|
||||
* [Command line API](#command-line-api)<br />
|
||||
|
||||
* [Additional documentation](#additional-documentation)<br />
|
||||
A list of available documentation resources.
|
||||
|
||||
* [Performance](#performance)<br />
|
||||
A few internals and a benchmark on performance.
|
||||
|
||||
* [Compatibility](#compatibility)<br />
|
||||
Notes on compatibility regarding browsers and optional libraries.
|
||||
|
||||
* [Building](#building)<br />
|
||||
How to build the library and its components yourself.
|
||||
|
||||
Installation
|
||||
---------------
|
||||
|
||||
### node.js
|
||||
|
||||
```
|
||||
$> npm install protobufjs [--save --save-prefix=~]
|
||||
```
|
||||
|
||||
```js
|
||||
var protobuf = require("protobufjs");
|
||||
```
|
||||
|
||||
**Note** that this library's versioning scheme is not semver-compatible for historical reasons. For guaranteed backward compatibility, always depend on `~6.A.B` instead of `^6.A.B` (hence the `--save-prefix` above).
|
||||
|
||||
### Browsers
|
||||
|
||||
Development:
|
||||
|
||||
```
|
||||
<script src="//cdn.rawgit.com/dcodeIO/protobuf.js/6.X.X/dist/protobuf.js"></script>
|
||||
```
|
||||
|
||||
Production:
|
||||
|
||||
```
|
||||
<script src="//cdn.rawgit.com/dcodeIO/protobuf.js/6.X.X/dist/protobuf.min.js"></script>
|
||||
```
|
||||
|
||||
**Remember** to replace the version tag with the exact [release](https://github.com/dcodeIO/protobuf.js/tags) your project depends upon.
|
||||
|
||||
The library supports CommonJS and AMD loaders and also exports globally as `protobuf`.
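For example, when included via one of the `<script>` tags above (no module loader involved), everything is accessible through the global `protobuf`:

```js
// browser global, no loader present
protobuf.load("awesome.proto", function(err, root) {
    if (err) throw err;
    // use root as shown in the examples below
});
```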
|
||||
|
||||
### Distributions
|
||||
|
||||
Where bundle size is a factor, there are additional stripped-down versions of the [full library][dist-full] (~19kb gzipped) available that exclude certain functionality:
|
||||
|
||||
* When working with JSON descriptors (i.e. generated by [pbjs](#pbjs-for-javascript)) and/or reflection only, see the [light library][dist-light] (~16kb gzipped) that excludes the parser. CommonJS entry point is:
|
||||
|
||||
```js
|
||||
var protobuf = require("protobufjs/light");
|
||||
```
|
||||
|
||||
* When working with statically generated code only, see the [minimal library][dist-minimal] (~6.5kb gzipped) that also excludes reflection. CommonJS entry point is:
|
||||
|
||||
```js
|
||||
var protobuf = require("protobufjs/minimal");
|
||||
```
|
||||
|
||||
[dist-full]: https://github.com/dcodeIO/protobuf.js/tree/master/dist
|
||||
[dist-light]: https://github.com/dcodeIO/protobuf.js/tree/master/dist/light
|
||||
[dist-minimal]: https://github.com/dcodeIO/protobuf.js/tree/master/dist/minimal
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
Because JavaScript is a dynamically typed language, protobuf.js introduces the concept of a **valid message** in order to provide the best possible [performance](#performance) (and, as a side product, proper typings):
|
||||
|
||||
### Valid message
|
||||
|
||||
> A valid message is an object (1) not missing any required fields and (2) exclusively composed of JS types understood by the wire format writer.
|
||||
|
||||
There are two possible types of valid messages and the encoder is able to work with both of these for convenience:
|
||||
|
||||
* **Message instances** (explicit instances of message classes with default values on their prototype) always (have to) satisfy the requirements of a valid message by design and
|
||||
* **Plain JavaScript objects** that just so happen to be composed in a way satisfying the requirements of a valid message as well.
|
||||
|
||||
In a nutshell, the wire format writer understands the following types:
|
||||
|
||||
| Field type | Expected JS type (create, encode) | Conversion (fromObject)
|
||||
|------------|-----------------------------------|------------------------
|
||||
| s-/u-/int32<br />s-/fixed32 | `number` (32 bit integer) | <code>value &#124; 0</code> if signed<br />`value >>> 0` if unsigned
|
||||
| s-/u-/int64<br />s-/fixed64 | `Long`-like (optimal)<br />`number` (53 bit integer) | `Long.fromValue(value)` with long.js<br />`parseInt(value, 10)` otherwise
|
||||
| float<br />double | `number` | `Number(value)`
|
||||
| bool | `boolean` | `Boolean(value)`
|
||||
| string | `string` | `String(value)`
|
||||
| bytes | `Uint8Array` (optimal)<br />`Buffer` (optimal under node)<br />`Array.<number>` (8 bit integers) | `base64.decode(value)` if a `string`<br />`Object` with non-zero `.length` is assumed to be buffer-like
|
||||
| enum | `number` (32 bit integer) | Looks up the numeric id if a `string`
|
||||
| message | Valid message | `Message.fromObject(value)`
|
||||
|
||||
* Explicit `undefined` and `null` are considered as not set if the field is optional.
|
||||
* Repeated fields are `Array.<T>`.
|
||||
* Map fields are `Object.<string,T>` with the key being the string representation of the respective value or an 8 characters long binary hash string for `Long`-likes.
|
||||
* Types marked as *optimal* provide the best performance because no conversion step (i.e. number to low and high bits or base64 string to buffer) is required.
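For illustration, here is a minimal sketch of a plain JavaScript object that already satisfies these requirements (the message type and field names are hypothetical; long.js is assumed to be installed for the 64 bit field):

```js
var Long = require("long"); // only needed for 64 bit fields

var payload = {
    bigValue: Long.fromString("9007199254740993"), // int64: a Long-like is optimal
    score: 1.5,                                    // double: plain number
    tags: ["a", "b"],                              // repeated string: plain array
    attributes: { "key": "value" },                // map<string,string>: plain object
    data: new Uint8Array([1, 2, 3])                // bytes: Uint8Array is optimal
};
// Such an object can be passed to Message.create or Message.encode as-is.
```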
|
||||
|
||||
### Toolset
|
||||
|
||||
With that in mind and again for performance reasons, each message class provides a distinct set of methods with each method doing just one thing. This avoids unnecessary assertions / redundant operations where performance is a concern but also forces a user to perform verification (of plain JavaScript objects that *might* just so happen to be a valid message) explicitly where necessary - for example when dealing with user input.
|
||||
|
||||
**Note** that `Message` below refers to any message class.
|
||||
|
||||
* **Message.verify**(message: `Object`): `null|string`<br />
|
||||
verifies that a **plain JavaScript object** satisfies the requirements of a valid message and thus can be encoded without issues. Instead of throwing, it returns the error message as a string, if any.
|
||||
|
||||
```js
|
||||
var payload = "invalid (not an object)";
|
||||
var err = AwesomeMessage.verify(payload);
|
||||
if (err)
|
||||
throw Error(err);
|
||||
```
|
||||
|
||||
* **Message.encode**(message: `Message|Object` [, writer: `Writer`]): `Writer`<br />
|
||||
encodes a **message instance** or valid **plain JavaScript object**. This method does not implicitly verify the message and it's up to the user to make sure that the payload is a valid message.
|
||||
|
||||
```js
|
||||
var buffer = AwesomeMessage.encode(message).finish();
|
||||
```
|
||||
|
||||
* **Message.encodeDelimited**(message: `Message|Object` [, writer: `Writer`]): `Writer`<br />
|
||||
works like `Message.encode` but additionally prepends the length of the message as a varint.
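A minimal sketch, mirroring the `encode` example above:

```js
var delimitedBuffer = AwesomeMessage.encodeDelimited(message).finish();
```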
|
||||
|
||||
* **Message.decode**(reader: `Reader|Uint8Array`): `Message`<br />
|
||||
decodes a buffer to a **message instance**. If required fields are missing, it throws a `util.ProtocolError` with an `instance` property set to the so far decoded message. If the wire format is invalid, it throws an `Error`.
|
||||
|
||||
```js
|
||||
try {
|
||||
var decodedMessage = AwesomeMessage.decode(buffer);
|
||||
} catch (e) {
|
||||
if (e instanceof protobuf.util.ProtocolError) {
|
||||
// e.instance holds the so far decoded message with missing required fields
|
||||
} else {
|
||||
// wire format is invalid
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
* **Message.decodeDelimited**(reader: `Reader|Uint8Array`): `Message`<br />
|
||||
works like `Message.decode` but additionally reads the length of the message prepended as a varint.
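For example, reading back a buffer produced by `encodeDelimited`:

```js
var decodedMessage = AwesomeMessage.decodeDelimited(delimitedBuffer);
```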
|
||||
|
||||
* **Message.create**(properties: `Object`): `Message`<br />
|
||||
creates a new **message instance** from a set of properties that satisfy the requirements of a valid message. Where applicable, it is recommended to prefer `Message.create` over `Message.fromObject` because it doesn't perform possibly redundant conversion.
|
||||
|
||||
```js
|
||||
var message = AwesomeMessage.create({ awesomeField: "AwesomeString" });
|
||||
```
|
||||
|
||||
* **Message.fromObject**(object: `Object`): `Message`<br />
|
||||
converts any non-valid **plain JavaScript object** to a **message instance** using the conversion steps outlined within the table above.
|
||||
|
||||
```js
|
||||
var message = AwesomeMessage.fromObject({ awesomeField: 42 });
|
||||
// converts awesomeField to a string
|
||||
```
|
||||
|
||||
* **Message.toObject**(message: `Message` [, options: `ConversionOptions`]): `Object`<br />
|
||||
converts a **message instance** to an arbitrary **plain JavaScript object** for interoperability with other libraries or storage. The resulting plain JavaScript object *might* still satisfy the requirements of a valid message depending on the actual conversion options specified, but most of the time it does not.
|
||||
|
||||
```js
|
||||
var object = AwesomeMessage.toObject(message, {
|
||||
enums: String, // enums as string names
|
||||
longs: String, // longs as strings (requires long.js)
|
||||
bytes: String, // bytes as base64 encoded strings
|
||||
defaults: true, // includes default values
|
||||
arrays: true, // populates empty arrays (repeated fields) even if defaults=false
|
||||
objects: true, // populates empty objects (map fields) even if defaults=false
|
||||
oneofs: true // includes virtual oneof fields set to the present field's name
|
||||
});
|
||||
```
|
||||
|
||||
For reference, the following diagram aims to display relationships between the different methods and the concept of a valid message:
|
||||
|
||||
<p align="center"><img alt="Toolset Diagram" src="http://dcode.io/protobuf.js/toolset.svg" /></p>
|
||||
|
||||
> In other words: `verify` indicates that calling `create` or `encode` directly on the plain object will [result in a valid message respectively] succeed. `fromObject`, on the other hand, does conversion from a broader range of plain objects to create valid messages. ([ref](https://github.com/dcodeIO/protobuf.js/issues/748#issuecomment-291925749))
|
||||
|
||||
Examples
|
||||
--------
|
||||
|
||||
### Using .proto files
|
||||
|
||||
It is possible to load existing .proto files using the full library, which parses and compiles the definitions to ready to use (reflection-based) message classes:
|
||||
|
||||
```protobuf
|
||||
// awesome.proto
|
||||
package awesomepackage;
|
||||
syntax = "proto3";
|
||||
|
||||
message AwesomeMessage {
|
||||
string awesome_field = 1; // becomes awesomeField
|
||||
}
|
||||
```
|
||||
|
||||
```js
|
||||
protobuf.load("awesome.proto", function(err, root) {
|
||||
if (err)
|
||||
throw err;
|
||||
|
||||
// Obtain a message type
|
||||
var AwesomeMessage = root.lookupType("awesomepackage.AwesomeMessage");
|
||||
|
||||
// Exemplary payload
|
||||
var payload = { awesomeField: "AwesomeString" };
|
||||
|
||||
// Verify the payload if necessary (i.e. when possibly incomplete or invalid)
|
||||
var errMsg = AwesomeMessage.verify(payload);
|
||||
if (errMsg)
|
||||
throw Error(errMsg);
|
||||
|
||||
// Create a new message
|
||||
var message = AwesomeMessage.create(payload); // or use .fromObject if conversion is necessary
|
||||
|
||||
// Encode a message to an Uint8Array (browser) or Buffer (node)
|
||||
var buffer = AwesomeMessage.encode(message).finish();
|
||||
// ... do something with buffer
|
||||
|
||||
// Decode an Uint8Array (browser) or Buffer (node) to a message
|
||||
var message = AwesomeMessage.decode(buffer);
|
||||
// ... do something with message
|
||||
|
||||
// If the application uses length-delimited buffers, there is also encodeDelimited and decodeDelimited.
|
||||
|
||||
// Maybe convert the message back to a plain object
|
||||
var object = AwesomeMessage.toObject(message, {
|
||||
longs: String,
|
||||
enums: String,
|
||||
bytes: String,
|
||||
// see ConversionOptions
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
Additionally, promise syntax can be used by omitting the callback, if preferred:
|
||||
|
||||
```js
|
||||
protobuf.load("awesome.proto")
|
||||
.then(function(root) {
|
||||
...
|
||||
});
|
||||
```
|
||||
|
||||
### Using JSON descriptors
|
||||
|
||||
The library utilizes JSON descriptors that are equivalent to a .proto definition. For example, the following is identical to the .proto definition seen above:
|
||||
|
||||
```json
|
||||
// awesome.json
|
||||
{
|
||||
"nested": {
|
||||
"AwesomeMessage": {
|
||||
"fields": {
|
||||
"awesomeField": {
|
||||
"type": "string",
|
||||
"id": 1
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
JSON descriptors closely resemble the internal reflection structure:
|
||||
|
||||
| Type (T) | Extends | Type-specific properties
|
||||
|--------------------|--------------------|-------------------------
|
||||
| *ReflectionObject* | | options
|
||||
| *Namespace* | *ReflectionObject* | nested
|
||||
| Root | *Namespace* | **nested**
|
||||
| Type | *Namespace* | **fields**
|
||||
| Enum | *ReflectionObject* | **values**
|
||||
| Field | *ReflectionObject* | rule, **type**, **id**
|
||||
| MapField | Field | **keyType**
|
||||
| OneOf | *ReflectionObject* | **oneof** (array of field names)
|
||||
| Service | *Namespace* | **methods**
|
||||
| Method | *ReflectionObject* | type, **requestType**, **responseType**, requestStream, responseStream
|
||||
|
||||
* **Bold properties** are required. *Italic types* are abstract.
|
||||
* `T.fromJSON(name, json)` creates the respective reflection object from a JSON descriptor
|
||||
* `T#toJSON()` creates a JSON descriptor from the respective reflection object (its name is used as the key within the parent)
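For example, a loaded root can be converted back and forth (a minimal sketch, assuming `root` is any `protobuf.Root` instance):

```js
var json = root.toJSON();                // plain JSON descriptor, safe to JSON.stringify
var copy = protobuf.Root.fromJSON(json); // reconstructs an equivalent root
```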
|
||||
|
||||
Exclusively using JSON descriptors instead of .proto files enables the use of just the light library (the parser isn't required in this case).
|
||||
|
||||
A JSON descriptor can either be loaded the usual way:
|
||||
|
||||
```js
|
||||
protobuf.load("awesome.json", function(err, root) {
|
||||
if (err) throw err;
|
||||
|
||||
// Continue at "Obtain a message type" above
|
||||
});
|
||||
```
|
||||
|
||||
Or it can be loaded inline:
|
||||
|
||||
```js
|
||||
var jsonDescriptor = require("./awesome.json"); // exemplary for node
|
||||
|
||||
var root = protobuf.Root.fromJSON(jsonDescriptor);
|
||||
|
||||
// Continue at "Obtain a message type" above
|
||||
```
|
||||
|
||||
### Using reflection only
|
||||
|
||||
Both the full and the light library include full reflection support. One could, for example, define the .proto definitions seen in the examples above using just reflection:
|
||||
|
||||
```js
|
||||
...
|
||||
var Root = protobuf.Root,
|
||||
Type = protobuf.Type,
|
||||
Field = protobuf.Field;
|
||||
|
||||
var AwesomeMessage = new Type("AwesomeMessage").add(new Field("awesomeField", 1, "string"));
|
||||
|
||||
var root = new Root().define("awesomepackage").add(AwesomeMessage);
|
||||
|
||||
// Continue at "Create a new message" above
|
||||
...
|
||||
```
|
||||
|
||||
Detailed information on the reflection structure is available within the [API documentation](#additional-documentation).
|
||||
|
||||
### Using custom classes
|
||||
|
||||
Message classes can also be extended with custom functionality and it is also possible to register a custom constructor with a reflected message type:
|
||||
|
||||
```js
|
||||
...
|
||||
|
||||
// Define a custom constructor
|
||||
function AwesomeMessage(properties) {
|
||||
// custom initialization code
|
||||
...
|
||||
}
|
||||
|
||||
// Register the custom constructor with its reflected type (*)
|
||||
root.lookupType("awesomepackage.AwesomeMessage").ctor = AwesomeMessage;
|
||||
|
||||
// Define custom functionality
|
||||
AwesomeMessage.customStaticMethod = function() { ... };
|
||||
AwesomeMessage.prototype.customInstanceMethod = function() { ... };
|
||||
|
||||
// Continue at "Create a new message" above
|
||||
```
|
||||
|
||||
(*) Besides referencing its reflected type through `AwesomeMessage.$type` and `AwesomeMessage#$type`, the respective custom class is automatically populated with:
|
||||
|
||||
* `AwesomeMessage.create`
|
||||
* `AwesomeMessage.encode` and `AwesomeMessage.encodeDelimited`
|
||||
* `AwesomeMessage.decode` and `AwesomeMessage.decodeDelimited`
|
||||
* `AwesomeMessage.verify`
|
||||
* `AwesomeMessage.fromObject`, `AwesomeMessage.toObject`, `AwesomeMessage#toObject` and `AwesomeMessage#toJSON`
|
||||
|
||||
Afterwards, decoded messages of this type are `instanceof AwesomeMessage`.
|
||||
|
||||
Alternatively, it is also possible to reuse and extend the internal constructor if custom initialization code is not required:
|
||||
|
||||
```js
|
||||
...
|
||||
|
||||
// Reuse the internal constructor
|
||||
var AwesomeMessage = root.lookupType("awesomepackage.AwesomeMessage").ctor;
|
||||
|
||||
// Define custom functionality
|
||||
AwesomeMessage.customStaticMethod = function() { ... };
|
||||
AwesomeMessage.prototype.customInstanceMethod = function() { ... };
|
||||
|
||||
// Continue at "Create a new message" above
|
||||
```
|
||||
|
||||
### Using services
|
||||
|
||||
The library also supports consuming services but it doesn't make any assumptions about the actual transport channel. Instead, a user must provide a suitable RPC implementation, which is an asynchronous function that takes the reflected service method, the binary request and a node-style callback as its parameters:
|
||||
|
||||
```js
|
||||
function rpcImpl(method, requestData, callback) {
|
||||
// perform the request using an HTTP request or a WebSocket for example
|
||||
var responseData = ...;
|
||||
// and call the callback with the binary response afterwards:
|
||||
callback(null, responseData);
|
||||
}
|
||||
```
|
||||
|
||||
Example:
|
||||
|
||||
```protobuf
|
||||
// greeter.proto
|
||||
syntax = "proto3";
|
||||
|
||||
service Greeter {
|
||||
rpc SayHello (HelloRequest) returns (HelloReply) {}
|
||||
}
|
||||
|
||||
message HelloRequest {
|
||||
string name = 1;
|
||||
}
|
||||
|
||||
message HelloReply {
|
||||
string message = 1;
|
||||
}
|
||||
```
|
||||
|
||||
```js
|
||||
...
|
||||
var Greeter = root.lookup("Greeter");
|
||||
var greeter = Greeter.create(/* see above */ rpcImpl, /* request delimited? */ false, /* response delimited? */ false);
|
||||
|
||||
greeter.sayHello({ name: 'you' }, function(err, response) {
|
||||
console.log('Greeting:', response.message);
|
||||
});
|
||||
```
|
||||
|
||||
Services also support promises:
|
||||
|
||||
```js
|
||||
greeter.sayHello({ name: 'you' })
|
||||
.then(function(response) {
|
||||
console.log('Greeting:', response.message);
|
||||
});
|
||||
```
|
||||
|
||||
There is also an [example for streaming RPC](https://github.com/dcodeIO/protobuf.js/blob/master/examples/streaming-rpc.js).
|
||||
|
||||
Note that the service API is meant for clients. Implementing a server-side endpoint pretty much always requires transport channel (i.e. http, websocket, etc.) specific code with the only common denominator being that it decodes and encodes messages.
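Still, as a rough sketch of what a server-side endpoint boils down to (the HTTP transport here is just an example; `HelloRequest` and `HelloReply` are assumed to be the message classes obtained from the greeter.proto definition above, e.g. via `root.lookupType`):

```js
var http = require("http");

http.createServer(function (req, res) {
    var chunks = [];
    req.on("data", function (chunk) { chunks.push(chunk); });
    req.on("end", function () {
        var request = HelloRequest.decode(Buffer.concat(chunks));      // binary request -> message
        var reply = HelloReply.create({ message: "Hello " + request.name });
        res.end(Buffer.from(HelloReply.encode(reply).finish()));       // message -> binary response
    });
}).listen(8080);
```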
|
||||
|
||||
### Usage with TypeScript
|
||||
|
||||
The library ships with its own [type definitions](https://github.com/dcodeIO/protobuf.js/blob/master/index.d.ts) and modern editors like [Visual Studio Code](https://code.visualstudio.com/) will automatically detect and use them for code completion.
|
||||
|
||||
The npm package depends on [@types/node](https://www.npmjs.com/package/@types/node) because of `Buffer` and [@types/long](https://www.npmjs.com/package/@types/long) because of `Long`. If you are not building for node and/or not using long.js, it should be safe to exclude them manually.
|
||||
|
||||
#### Using the JS API
|
||||
|
||||
The API shown above works pretty much the same with TypeScript. However, because everything is typed, accessing fields on instances of dynamically generated message classes requires either using bracket-notation (i.e. `message["awesomeField"]`) or explicit casts. Alternatively, it is possible to use a [typings file generated for its static counterpart](#pbts-for-typescript).
|
||||
|
||||
```ts
|
||||
import { load } from "protobufjs"; // respectively "./node_modules/protobufjs"
|
||||
|
||||
load("awesome.proto", function(err, root) {
|
||||
if (err)
|
||||
throw err;
|
||||
|
||||
// example code
|
||||
const AwesomeMessage = root.lookupType("awesomepackage.AwesomeMessage");
|
||||
|
||||
let message = AwesomeMessage.create({ awesomeField: "hello" });
|
||||
console.log(`message = ${JSON.stringify(message)}`);
|
||||
|
||||
let buffer = AwesomeMessage.encode(message).finish();
|
||||
console.log(`buffer = ${Array.prototype.toString.call(buffer)}`);
|
||||
|
||||
let decoded = AwesomeMessage.decode(buffer);
|
||||
console.log(`decoded = ${JSON.stringify(decoded)}`);
|
||||
});
|
||||
```
|
||||
|
||||
#### Using generated static code
|
||||
|
||||
If you generated static code to `bundle.js` using the CLI and its type definitions to `bundle.d.ts`, then you can just do:
|
||||
|
||||
```ts
|
||||
import { AwesomeMessage } from "./bundle.js";
|
||||
|
||||
// example code
|
||||
let message = AwesomeMessage.create({ awesomeField: "hello" });
|
||||
let buffer = AwesomeMessage.encode(message).finish();
|
||||
let decoded = AwesomeMessage.decode(buffer);
|
||||
```
|
||||
|
||||
#### Using decorators
|
||||
|
||||
The library also includes an early implementation of [decorators](https://www.typescriptlang.org/docs/handbook/decorators.html).
|
||||
|
||||
**Note** that decorators are an experimental feature in TypeScript and that declaration order is important depending on the JS target. For example, `@Field.d(2, AwesomeArrayMessage)` requires that `AwesomeArrayMessage` has been defined earlier when targeting `ES5`.
|
||||
|
||||
```ts
|
||||
import { Message, Type, Field, OneOf } from "protobufjs/light"; // respectively "./node_modules/protobufjs/light.js"
|
||||
|
||||
export class AwesomeSubMessage extends Message<AwesomeSubMessage> {
|
||||
|
||||
@Field.d(1, "string")
|
||||
public awesomeString: string;
|
||||
|
||||
}
|
||||
|
||||
export enum AwesomeEnum {
|
||||
ONE = 1,
|
||||
TWO = 2
|
||||
}
|
||||
|
||||
@Type.d("SuperAwesomeMessage")
|
||||
export class AwesomeMessage extends Message<AwesomeMessage> {
|
||||
|
||||
@Field.d(1, "string", "optional", "awesome default string")
|
||||
public awesomeField: string;
|
||||
|
||||
@Field.d(2, AwesomeSubMessage)
|
||||
public awesomeSubMessage: AwesomeSubMessage;
|
||||
|
||||
@Field.d(3, AwesomeEnum, "optional", AwesomeEnum.ONE)
|
||||
public awesomeEnum: AwesomeEnum;
|
||||
|
||||
@OneOf.d("awesomeSubMessage", "awesomeEnum")
|
||||
public which: string;
|
||||
|
||||
}
|
||||
|
||||
// example code
|
||||
let message = new AwesomeMessage({ awesomeField: "hello" });
|
||||
let buffer = AwesomeMessage.encode(message).finish();
|
||||
let decoded = AwesomeMessage.decode(buffer);
|
||||
```
|
||||
|
||||
Supported decorators are:
|
||||
|
||||
* **Type.d(typeName?: `string`)** *(optional)*<br />
|
||||
annotates a class as a protobuf message type. If `typeName` is not specified, the constructor's runtime function name is used for the reflected type.
|
||||
|
||||
* **Field.d<T>(fieldId: `number`, fieldType: `string | Constructor<T>`, fieldRule?: `"optional" | "required" | "repeated"`, defaultValue?: `T`)**<br />
|
||||
annotates a property as a protobuf field with the specified id and protobuf type.
|
||||
|
||||
* **MapField.d<T extends { [key: string]: any }>(fieldId: `number`, fieldKeyType: `string`, fieldValueType: `string | Constructor<{}>`)**<br />
|
||||
annotates a property as a protobuf map field with the specified id, protobuf key and value type.
|
||||
|
||||
* **OneOf.d<T extends string>(...fieldNames: `string[]`)**<br />
|
||||
annotates a property as a protobuf oneof covering the specified fields.
|
||||
|
||||
Other notes:
|
||||
|
||||
* Decorated types reside in `protobuf.roots["decorated"]` using a flat structure, so no duplicate names.
|
||||
* Enums are copied to a reflected enum with a generic name on decorator evaluation because referenced enum objects have no runtime name the decorator could use.
|
||||
* Default values must be specified as arguments to the decorator instead of using a property initializer for proper prototype behavior.
|
||||
* Property names on decorated classes must not be renamed on compile time (i.e. by a minifier) because decorators just receive the original field name as a string.
|
||||
|
||||
**ProTip!** Not as pretty, but you can [use decorators in plain JavaScript](https://github.com/dcodeIO/protobuf.js/blob/master/examples/js-decorators.js) as well.
|
||||
|
||||
Command line
|
||||
------------
|
||||
|
||||
**Note** that moving the CLI to [its own package](./cli) is a work in progress. At the moment, it's still part of the main package.
|
||||
|
||||
The command line interface (CLI) can be used to translate between file formats and to generate static code as well as TypeScript definitions.
|
||||
|
||||
### pbjs for JavaScript
|
||||
|
||||
```
|
||||
Translates between file formats and generates static code.
|
||||
|
||||
-t, --target Specifies the target format. Also accepts a path to require a custom target.
|
||||
|
||||
json JSON representation
|
||||
json-module JSON representation as a module
|
||||
proto2 Protocol Buffers, Version 2
|
||||
proto3 Protocol Buffers, Version 3
|
||||
static Static code without reflection (non-functional on its own)
|
||||
static-module Static code without reflection as a module
|
||||
|
||||
-p, --path Adds a directory to the include path.
|
||||
|
||||
-o, --out Saves to a file instead of writing to stdout.
|
||||
|
||||
--sparse Exports only those types referenced from a main file (experimental).
|
||||
|
||||
Module targets only:
|
||||
|
||||
-w, --wrap Specifies the wrapper to use. Also accepts a path to require a custom wrapper.
|
||||
|
||||
default Default wrapper supporting both CommonJS and AMD
|
||||
commonjs CommonJS wrapper
|
||||
amd AMD wrapper
|
||||
es6 ES6 wrapper (implies --es6)
|
||||
closure A closure adding to protobuf.roots where protobuf is a global
|
||||
|
||||
-r, --root Specifies an alternative protobuf.roots name.
|
||||
|
||||
-l, --lint Linter configuration. Defaults to protobuf.js-compatible rules:
|
||||
|
||||
eslint-disable block-scoped-var, no-redeclare, no-control-regex, no-prototype-builtins
|
||||
|
||||
--es6 Enables ES6 syntax (const/let instead of var)
|
||||
|
||||
Proto sources only:
|
||||
|
||||
--keep-case Keeps field casing instead of converting to camel case.
|
||||
|
||||
Static targets only:
|
||||
|
||||
--no-create Does not generate create functions used for reflection compatibility.
|
||||
--no-encode Does not generate encode functions.
|
||||
--no-decode Does not generate decode functions.
|
||||
--no-verify Does not generate verify functions.
|
||||
--no-convert Does not generate convert functions like from/toObject
|
||||
--no-delimited Does not generate delimited encode/decode functions.
|
||||
--no-beautify Does not beautify generated code.
|
||||
--no-comments Does not output any JSDoc comments.
|
||||
|
||||
--force-long Enforces the use of 'Long' for s-/u-/int64 and s-/fixed64 fields.
|
||||
--force-message Enforces the use of message instances instead of plain objects.
|
||||
|
||||
usage: pbjs [options] file1.proto file2.json ... (or pipe) other | pbjs [options] -
|
||||
```
|
||||
|
||||
For production environments it is recommended to bundle all your .proto files into a single .json file, which minimizes the number of network requests and avoids any parser overhead (hint: works with just the **light** library):
|
||||
|
||||
```
|
||||
$> pbjs -t json file1.proto file2.proto > bundle.json
|
||||
```
|
||||
|
||||
Now, either include this file in your final bundle:
|
||||
|
||||
```js
|
||||
var root = protobuf.Root.fromJSON(require("./bundle.json"));
|
||||
```
|
||||
|
||||
or load it the usual way:
|
||||
|
||||
```js
|
||||
protobuf.load("bundle.json", function(err, root) {
|
||||
...
|
||||
});
|
||||
```
|
||||
|
||||
Generated static code, on the other hand, works with just the **minimal** library. For example
|
||||
|
||||
```
|
||||
$> pbjs -t static-module -w commonjs -o compiled.js file1.proto file2.proto
|
||||
```
|
||||
|
||||
will generate static code for definitions within `file1.proto` and `file2.proto` to a CommonJS module `compiled.js`.
|
||||
|
||||
**ProTip!** Documenting your .proto files with `/** ... */`-blocks or (trailing) `/// ...` lines translates to generated static code.
|
||||
|
||||
|
||||
### pbts for TypeScript
|
||||
|
||||
```
|
||||
Generates TypeScript definitions from annotated JavaScript files.
|
||||
|
||||
-o, --out Saves to a file instead of writing to stdout.
|
||||
|
||||
-g, --global Name of the global object in browser environments, if any.
|
||||
|
||||
--no-comments Does not output any JSDoc comments.
|
||||
|
||||
Internal flags:
|
||||
|
||||
-n, --name Wraps everything in a module of the specified name.
|
||||
|
||||
-m, --main Whether building the main library without any imports.
|
||||
|
||||
usage: pbts [options] file1.js file2.js ... (or) other | pbts [options] -
|
||||
```
|
||||
|
||||
Picking up on the example above, the following not only generates static code to a CommonJS module `compiled.js` but also its respective TypeScript definitions to `compiled.d.ts`:
|
||||
|
||||
```
|
||||
$> pbjs -t static-module -w commonjs -o compiled.js file1.proto file2.proto
|
||||
$> pbts -o compiled.d.ts compiled.js
|
||||
```
|
||||
|
||||
Additionally, TypeScript definitions of static modules are compatible with their reflection-based counterparts (i.e. as exported by JSON modules), as long as the following conditions are met:
|
||||
|
||||
1. Instead of using `new SomeMessage(...)`, always use `SomeMessage.create(...)` because reflection objects do not provide a constructor (see the sketch after this list).
|
||||
2. Types, services and enums must start with an uppercase letter to become available as properties of the reflected types as well (i.e. to be able to use `MyMessage.MyEnum` instead of `root.lookup("MyMessage.MyEnum")`).
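
As a small illustration of the first rule, a sketch using placeholder names:

```js
// Assuming SomeMessage is exported by either the static module (compiled.js) or the
// JSON module (bundle.js); the field name is illustrative:
var message = SomeMessage.create({ someField: "hello" }); // instead of new SomeMessage(...)
var buffer  = SomeMessage.encode(message).finish();
```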
|
||||
|
||||
For example, the following generates a JSON module `bundle.js` and a `bundle.d.ts`, but no static code:
|
||||
|
||||
```
|
||||
$> pbjs -t json-module -w commonjs -o bundle.js file1.proto file2.proto
|
||||
$> pbjs -t static-module file1.proto file2.proto | pbts -o bundle.d.ts -
|
||||
```
|
||||
|
||||
### Reflection vs. static code
|
||||
|
||||
While using .proto files directly requires the full library, and pure reflection/JSON requires the light library, pretty much all code except the relatively short descriptors is shared.
|
||||
|
||||
Static code, on the other hand, requires just the minimal library, but generates additional source code without any reflection features. This also implies that there is a break-even point where statically generated code becomes larger than descriptor-based code, namely once the amount of generated code exceeds the size of the full or light library, respectively.
|
||||
|
||||
There is no significant difference performance-wise, as statically generated code is pretty much the same as the code generated at runtime, and both are largely interchangeable as seen in the previous section.
|
||||
|
||||
| Source | Library | Advantages | Tradeoffs
|
||||
|--------|---------|------------|-----------
|
||||
| .proto | full | Easily editable<br />Interoperability with other libraries<br />No compile step | Some parsing and possibly network overhead
|
||||
| JSON | light | Easily editable<br />No parsing overhead<br />Single bundle (no network overhead) | protobuf.js specific<br />Has a compile step
|
||||
| static | minimal | Works where `eval` access is restricted<br />Fully documented<br />Small footprint for small protos | Can be hard to edit<br />No reflection<br />Has a compile step
|
||||
|
||||
### Command line API
|
||||
|
||||
Both utilities can be used programmatically by providing command line arguments and a callback to their respective `main` functions:
|
||||
|
||||
```js
|
||||
var pbjs = require("protobufjs/cli/pbjs"); // or require("protobufjs/cli").pbjs / .pbts
|
||||
|
||||
pbjs.main([ "--target", "json-module", "path/to/myproto.proto" ], function(err, output) {
|
||||
if (err)
|
||||
throw err;
|
||||
// do something with output
|
||||
});
|
||||
```
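
A similar sketch for `pbts`, assuming the same callback convention and a previously generated `compiled.js` as input:

```js
var pbts = require("protobufjs/cli/pbts"); // or require("protobufjs/cli").pbts

pbts.main([ "compiled.js" ], function(err, output) {
    if (err)
        throw err;
    // output should contain the generated TypeScript definitions as a string
});
```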
|
||||
|
||||
Additional documentation
|
||||
------------------------
|
||||
|
||||
#### Protocol Buffers
|
||||
* [Google's Developer Guide](https://developers.google.com/protocol-buffers/docs/overview)
|
||||
|
||||
#### protobuf.js
|
||||
* [API Documentation](http://dcode.io/protobuf.js)
|
||||
* [CHANGELOG](https://github.com/dcodeIO/protobuf.js/blob/master/CHANGELOG.md)
|
||||
* [Frequently asked questions](https://github.com/dcodeIO/protobuf.js/wiki) on our wiki
|
||||
|
||||
#### Community
|
||||
* [Questions and answers](http://stackoverflow.com/search?tab=newest&q=protobuf.js) on StackOverflow
|
||||
|
||||
Performance
|
||||
-----------
|
||||
The package includes a benchmark that compares protobuf.js performance to native JSON (as far as this is possible) and [Google's JS implementation](https://github.com/google/protobuf/tree/master/js). On an i7-2600K running node 6.9.1 it yields:
|
||||
|
||||
```
|
||||
benchmarking encoding performance ...
|
||||
|
||||
protobuf.js (reflect) x 541,707 ops/sec ±1.13% (87 runs sampled)
|
||||
protobuf.js (static) x 548,134 ops/sec ±1.38% (89 runs sampled)
|
||||
JSON (string) x 318,076 ops/sec ±0.63% (93 runs sampled)
|
||||
JSON (buffer) x 179,165 ops/sec ±2.26% (91 runs sampled)
|
||||
google-protobuf x 74,406 ops/sec ±0.85% (86 runs sampled)
|
||||
|
||||
protobuf.js (static) was fastest
|
||||
protobuf.js (reflect) was 0.9% ops/sec slower (factor 1.0)
|
||||
JSON (string) was 41.5% ops/sec slower (factor 1.7)
|
||||
JSON (buffer) was 67.6% ops/sec slower (factor 3.1)
|
||||
google-protobuf was 86.4% ops/sec slower (factor 7.3)
|
||||
|
||||
benchmarking decoding performance ...
|
||||
|
||||
protobuf.js (reflect) x 1,383,981 ops/sec ±0.88% (93 runs sampled)
|
||||
protobuf.js (static) x 1,378,925 ops/sec ±0.81% (93 runs sampled)
|
||||
JSON (string) x 302,444 ops/sec ±0.81% (93 runs sampled)
|
||||
JSON (buffer) x 264,882 ops/sec ±0.81% (93 runs sampled)
|
||||
google-protobuf x 179,180 ops/sec ±0.64% (94 runs sampled)
|
||||
|
||||
protobuf.js (reflect) was fastest
|
||||
protobuf.js (static) was 0.3% ops/sec slower (factor 1.0)
|
||||
JSON (string) was 78.1% ops/sec slower (factor 4.6)
|
||||
JSON (buffer) was 80.8% ops/sec slower (factor 5.2)
|
||||
google-protobuf was 87.0% ops/sec slower (factor 7.7)
|
||||
|
||||
benchmarking combined performance ...
|
||||
|
||||
protobuf.js (reflect) x 275,900 ops/sec ±0.78% (90 runs sampled)
|
||||
protobuf.js (static) x 290,096 ops/sec ±0.96% (90 runs sampled)
|
||||
JSON (string) x 129,381 ops/sec ±0.77% (90 runs sampled)
|
||||
JSON (buffer) x 91,051 ops/sec ±0.94% (90 runs sampled)
|
||||
google-protobuf x 42,050 ops/sec ±0.85% (91 runs sampled)
|
||||
|
||||
protobuf.js (static) was fastest
|
||||
protobuf.js (reflect) was 4.7% ops/sec slower (factor 1.0)
|
||||
JSON (string) was 55.3% ops/sec slower (factor 2.2)
|
||||
JSON (buffer) was 68.6% ops/sec slower (factor 3.2)
|
||||
google-protobuf was 85.5% ops/sec slower (factor 6.9)
|
||||
```
|
||||
|
||||
These results are achieved by
|
||||
|
||||
* generating type-specific encoders, decoders, verifiers and converters at runtime
|
||||
* configuring the reader/writer interface according to the environment
|
||||
* using node-specific functionality where beneficial and, of course
|
||||
* avoiding unnecessary operations through splitting up [the toolset](#toolset).
|
||||
|
||||
You can also run [the benchmark](https://github.com/dcodeIO/protobuf.js/blob/master/bench/index.js) ...
|
||||
|
||||
```
|
||||
$> npm run bench
|
||||
```
|
||||
|
||||
and [the profiler](https://github.com/dcodeIO/protobuf.js/blob/master/bench/prof.js) yourself (the latter requires a recent version of node):
|
||||
|
||||
```
|
||||
$> npm run prof <encode|decode|encode-browser|decode-browser> [iterations=10000000]
|
||||
```
|
||||
|
||||
Note that as of this writing, the benchmark suite performs significantly slower on node 7.2.0 compared to 6.9.1 because moths.
|
||||
|
||||
Compatibility
|
||||
-------------
|
||||
|
||||
* Works in all modern and not-so-modern browsers except IE8.
|
||||
* Because the internals of this package do not rely on `google/protobuf/descriptor.proto`, options are parsed and presented literally.
|
||||
* If typed arrays are not supported by the environment, plain arrays will be used instead.
|
||||
* Support for pre-ES5 environments (except IE8) can be achieved by [using a polyfill](https://github.com/dcodeIO/protobuf.js/blob/master/scripts/polyfill.js).
|
||||
* Support for [Content Security Policy](https://w3c.github.io/webappsec-csp/)-restricted environments (like Chrome extensions without [unsafe-eval](https://developer.chrome.com/extensions/contentSecurityPolicy#relaxing-eval)) can be achieved by generating and using static code instead.
|
||||
* If a proper way to work with 64 bit values (uint64, int64 etc.) is required, just install [long.js](https://github.com/dcodeIO/long.js) alongside this library. All 64 bit numbers will then be returned as a `Long` instance instead of a possibly unsafe JavaScript number ([see](https://github.com/dcodeIO/long.js)), as sketched after this list.
|
||||
* For descriptor.proto interoperability, see [ext/descriptor](https://github.com/dcodeIO/protobuf.js/tree/master/ext/descriptor)
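
For illustration, a rough sketch of what that looks like in practice; the message, field and buffer names below are made up:

```js
var protobuf = require("protobufjs");

// Assume SomeMessage has an int64 field named "big" and buffer holds an encoded message.
// With long.js installed, 64 bit fields decode to Long instances instead of numbers:
var decoded = SomeMessage.decode(buffer);

if (protobuf.util.Long.isLong(decoded.big))
    console.log(decoded.big.toString()); // full 64 bit precision as a decimal string
```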
|
||||
|
||||
Building
|
||||
--------
|
||||
|
||||
To build the library or its components yourself, clone it from GitHub and install the development dependencies:
|
||||
|
||||
```
|
||||
$> git clone https://github.com/dcodeIO/protobuf.js.git
|
||||
$> cd protobuf.js
|
||||
$> npm install
|
||||
```
|
||||
|
||||
Building the development and production versions with their respective source maps to `dist/`:
|
||||
|
||||
```
|
||||
$> npm run build
|
||||
```
|
||||
|
||||
Building the documentation to `docs/`:
|
||||
|
||||
```
|
||||
$> npm run docs
|
||||
```
|
||||
|
||||
Building the TypeScript definition to `index.d.ts`:
|
||||
|
||||
```
|
||||
$> npm run types
|
||||
```
|
||||
|
||||
### Browserify integration
|
||||
|
||||
By default, protobuf.js integrates into any browserify build process without requiring any optional modules. Hence:
|
||||
|
||||
* If int64 support is required, explicitly require the `long` module somewhere in your project as it will be excluded otherwise. This assumes that a global `require` function is present that protobuf.js can call to obtain the long module.
|
||||
|
||||
If there is no global `require` function present after bundling, it's also possible to assign the long module programmatically:
|
||||
|
||||
```js
|
||||
var Long = ...;
|
||||
|
||||
protobuf.util.Long = Long;
|
||||
protobuf.configure();
|
||||
```
|
||||
|
||||
* If you have any special requirements, there is [the bundler](https://github.com/dcodeIO/protobuf.js/blob/master/scripts/bundle.js) for reference.
|
||||
|
||||
**License:** [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause)
|
||||
6
express-server/node_modules/google-gax/node_modules/protobufjs/bin/pbjs
generated
vendored
Normal file
6
express-server/node_modules/google-gax/node_modules/protobufjs/bin/pbjs
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env node
|
||||
var path = require("path"),
|
||||
cli = require(path.join(__dirname, "..", "cli", "pbjs.js"));
|
||||
var ret = cli.main(process.argv.slice(2));
|
||||
if (typeof ret === 'number')
|
||||
process.exit(ret);
|
||||
6
express-server/node_modules/google-gax/node_modules/protobufjs/bin/pbts
generated
vendored
Normal file
6
express-server/node_modules/google-gax/node_modules/protobufjs/bin/pbts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env node
|
||||
var path = require("path"),
|
||||
cli = require(path.join(__dirname, "..", "cli", "pbts.js"));
|
||||
var ret = cli.main(process.argv.slice(2));
|
||||
if (typeof ret === 'number')
|
||||
process.exit(ret);
|
||||
33
express-server/node_modules/google-gax/node_modules/protobufjs/cli/LICENSE
generated
vendored
Normal file
33
express-server/node_modules/google-gax/node_modules/protobufjs/cli/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
Copyright (c) 2016, Daniel Wirtz All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
* Neither the name of its author, nor the names of its contributors
|
||||
may be used to endorse or promote products derived from this software
|
||||
without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
---
|
||||
|
||||
Code generated by the command line utilities is owned by the owner
|
||||
of the input file used when generating it. This code is not
|
||||
standalone and requires a support library to be linked with it. This
|
||||
support library is itself covered by the above license.
|
||||
11
express-server/node_modules/google-gax/node_modules/protobufjs/cli/README.md
generated
vendored
Normal file
11
express-server/node_modules/google-gax/node_modules/protobufjs/cli/README.md
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
protobufjs-cli
|
||||
==============
|
||||
[npm](https://www.npmjs.com/package/protobufjs-cli)
|
||||
|
||||
Command line interface (CLI) for [protobuf.js](https://github.com/dcodeIO/protobuf.js). Translates between file formats and generates static code as well as TypeScript definitions.
|
||||
|
||||
* [CLI Documentation](https://github.com/dcodeIO/protobuf.js#command-line)
|
||||
|
||||
**Note** that moving the CLI to its own package is a work in progress. At the moment, it's still part of the main package.
|
||||
|
||||
**License:** [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause)
|
||||
6
express-server/node_modules/google-gax/node_modules/protobufjs/cli/bin/pbjs
generated
vendored
Normal file
6
express-server/node_modules/google-gax/node_modules/protobufjs/cli/bin/pbjs
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env node
|
||||
var path = require("path"),
|
||||
cli = require(path.join(__dirname, "..", "pbjs.js"));
|
||||
var ret = cli.main(process.argv.slice(2));
|
||||
if (typeof ret === 'number')
|
||||
process.exit(ret);
|
||||
6
express-server/node_modules/google-gax/node_modules/protobufjs/cli/bin/pbts
generated
vendored
Normal file
6
express-server/node_modules/google-gax/node_modules/protobufjs/cli/bin/pbts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env node
|
||||
var path = require("path"),
|
||||
cli = require(path.join(__dirname, "..", "pbts.js"));
|
||||
var ret = cli.main(process.argv.slice(2));
|
||||
if (typeof ret === 'number')
|
||||
process.exit(ret);
|
||||
3
express-server/node_modules/google-gax/node_modules/protobufjs/cli/index.d.ts
generated
vendored
Normal file
3
express-server/node_modules/google-gax/node_modules/protobufjs/cli/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
import * as pbjs from "./pbjs.js";
|
||||
import * as pbts from "./pbts.js";
|
||||
export { pbjs, pbts };
|
||||
3
express-server/node_modules/google-gax/node_modules/protobufjs/cli/index.js
generated
vendored
Normal file
3
express-server/node_modules/google-gax/node_modules/protobufjs/cli/index.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
"use strict";
|
||||
exports.pbjs = require("./pbjs");
|
||||
exports.pbts = require("./pbts");
|
||||
18
express-server/node_modules/google-gax/node_modules/protobufjs/cli/lib/tsd-jsdoc.json
generated
vendored
Normal file
18
express-server/node_modules/google-gax/node_modules/protobufjs/cli/lib/tsd-jsdoc.json
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"tags": {
|
||||
"allowUnknownTags": false
|
||||
},
|
||||
"plugins": [
|
||||
"./tsd-jsdoc/plugin"
|
||||
],
|
||||
"opts": {
|
||||
"encoding" : "utf8",
|
||||
"recurse" : true,
|
||||
"lenient" : true,
|
||||
"template" : "./tsd-jsdoc",
|
||||
|
||||
"private" : false,
|
||||
"comments" : true,
|
||||
"destination" : false
|
||||
}
|
||||
}
|
||||
21
express-server/node_modules/google-gax/node_modules/protobufjs/cli/lib/tsd-jsdoc/LICENSE
generated
vendored
Normal file
21
express-server/node_modules/google-gax/node_modules/protobufjs/cli/lib/tsd-jsdoc/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License
|
||||
|
||||
Copyright (c) 2016 Chad Engler
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
23
express-server/node_modules/google-gax/node_modules/protobufjs/cli/lib/tsd-jsdoc/README.md
generated
vendored
Normal file
23
express-server/node_modules/google-gax/node_modules/protobufjs/cli/lib/tsd-jsdoc/README.md
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
protobuf.js fork of tsd-jsdoc
|
||||
=============================
|
||||
|
||||
This is a modified version of [tsd-jsdoc](https://github.com/englercj/tsd-jsdoc) v1.0.1 for use with protobuf.js, parked here so we can process issues and pull requests. The ultimate goal is to switch back to a recent version of tsd-jsdoc once it meets our needs.
|
||||
|
||||
Options
|
||||
-------
|
||||
|
||||
* **module: `string`**<br />
|
||||
Wraps everything in a module of the specified name.
|
||||
|
||||
* **private: `boolean`**<br />
|
||||
Includes private members when set to `true`.
|
||||
|
||||
* **comments: `boolean`**<br />
|
||||
Skips comments when explicitly set to `false`.
|
||||
|
||||
* **destination: `string|boolean`**<br />
|
||||
Saves to the specified destination file or to console when set to `false`.
|
||||
|
||||
Setting options on the command line
|
||||
-----------------------------------
|
||||
Providing `-q, --query <queryString>` on the command line will set or override existing options. Example: `-q module=protobufjs`
|
||||
21
express-server/node_modules/google-gax/node_modules/protobufjs/cli/lib/tsd-jsdoc/plugin.js
generated
vendored
Normal file
21
express-server/node_modules/google-gax/node_modules/protobufjs/cli/lib/tsd-jsdoc/plugin.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
"use strict";
|
||||
exports.defineTags = function(dictionary) {
|
||||
|
||||
dictionary.defineTag("template", {
|
||||
mustHaveValue: true,
|
||||
canHaveType: false,
|
||||
canHaveName: false,
|
||||
onTagged: function(doclet, tag) {
|
||||
(doclet.templates || (doclet.templates = [])).push(tag.text);
|
||||
}
|
||||
});
|
||||
|
||||
dictionary.defineTag("tstype", {
|
||||
mustHaveValue: true,
|
||||
canHaveType: false,
|
||||
canHaveName: false,
|
||||
onTagged: function(doclet, tag) {
|
||||
doclet.tsType = tag.text;
|
||||
}
|
||||
});
|
||||
};
|
||||
693
express-server/node_modules/google-gax/node_modules/protobufjs/cli/lib/tsd-jsdoc/publish.js
generated
vendored
Normal file
693
express-server/node_modules/google-gax/node_modules/protobufjs/cli/lib/tsd-jsdoc/publish.js
generated
vendored
Normal file
@@ -0,0 +1,693 @@
|
||||
"use strict";
|
||||
|
||||
var fs = require("fs");
|
||||
|
||||
// output stream
|
||||
var out = null;
|
||||
|
||||
// documentation data
|
||||
var data = null;
|
||||
|
||||
// already handled objects, by name
|
||||
var seen = {};
|
||||
|
||||
// indentation level
|
||||
var indent = 0;
|
||||
|
||||
// whether indent has been written for the current line yet
|
||||
var indentWritten = false;
|
||||
|
||||
// provided options
|
||||
var options = {};
|
||||
|
||||
// queued interfaces
|
||||
var queuedInterfaces = [];
|
||||
|
||||
// whether writing the first line
|
||||
var firstLine = true;
|
||||
|
||||
// JSDoc hook
|
||||
exports.publish = function publish(taffy, opts) {
|
||||
options = opts || {};
|
||||
|
||||
// query overrides options
|
||||
if (options.query)
|
||||
Object.keys(options.query).forEach(function(key) {
|
||||
if (key !== "query")
|
||||
switch (options[key] = options.query[key]) {
|
||||
case "true":
|
||||
options[key] = true;
|
||||
break;
|
||||
case "false":
|
||||
options[key] = false;
|
||||
break;
|
||||
case "null":
|
||||
options[key] = null;
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
// remove undocumented
|
||||
taffy({ undocumented: true }).remove();
|
||||
taffy({ ignore: true }).remove();
|
||||
taffy({ inherited: true }).remove();
|
||||
|
||||
// remove private
|
||||
if (!options.private)
|
||||
taffy({ access: "private" }).remove();
|
||||
|
||||
// setup output
|
||||
out = options.destination
|
||||
? fs.createWriteStream(options.destination)
|
||||
: process.stdout;
|
||||
|
||||
try {
|
||||
// setup environment
|
||||
data = taffy().get();
|
||||
indent = 0;
|
||||
indentWritten = false;
|
||||
firstLine = true;
|
||||
|
||||
// wrap everything in a module if configured
|
||||
if (options.module) {
|
||||
writeln("export = ", options.module, ";");
|
||||
writeln();
|
||||
writeln("declare namespace ", options.module, " {");
|
||||
writeln();
|
||||
++indent;
|
||||
}
|
||||
|
||||
// handle all
|
||||
getChildrenOf(undefined).forEach(function(child) {
|
||||
handleElement(child, null);
|
||||
});
|
||||
|
||||
// process queued
|
||||
while (queuedInterfaces.length) {
|
||||
var element = queuedInterfaces.shift();
|
||||
begin(element);
|
||||
writeInterface(element);
|
||||
writeln(";");
|
||||
}
|
||||
|
||||
// end wrap
|
||||
if (options.module) {
|
||||
--indent;
|
||||
writeln("}");
|
||||
}
|
||||
|
||||
// close file output
|
||||
if (out !== process.stdout)
|
||||
out.end();
|
||||
|
||||
} finally {
|
||||
// gc environment objects
|
||||
out = data = null;
|
||||
seen = options = {};
|
||||
queuedInterfaces = [];
|
||||
}
|
||||
};
|
||||
|
||||
//
|
||||
// Utility
|
||||
//
|
||||
|
||||
// writes one or multiple strings
|
||||
function write() {
|
||||
var s = Array.prototype.slice.call(arguments).join("");
|
||||
if (!indentWritten) {
|
||||
for (var i = 0; i < indent; ++i)
|
||||
s = " " + s;
|
||||
indentWritten = true;
|
||||
}
|
||||
out.write(s);
|
||||
firstLine = false;
|
||||
}
|
||||
|
||||
// writes zero or multiple strings, followed by a new line
|
||||
function writeln() {
|
||||
var s = Array.prototype.slice.call(arguments).join("");
|
||||
if (s.length)
|
||||
write(s, "\n");
|
||||
else if (!firstLine)
|
||||
out.write("\n");
|
||||
indentWritten = false;
|
||||
}
|
||||
|
||||
var keepTags = [
|
||||
"param",
|
||||
"returns",
|
||||
"throws",
|
||||
"see"
|
||||
];
|
||||
|
||||
// parses a comment into text and tags
|
||||
function parseComment(comment) {
|
||||
var lines = comment.replace(/^ *\/\*\* *|^ *\*\/| *\*\/ *$|^ *\* */mg, "").trim().split(/\r?\n|\r/g); // property.description has just "\r" ?!
|
||||
var desc;
|
||||
var text = [];
|
||||
var tags = null;
|
||||
for (var i = 0; i < lines.length; ++i) {
|
||||
var match = /^@(\w+)\b/.exec(lines[i]);
|
||||
if (match) {
|
||||
if (!tags) {
|
||||
tags = [];
|
||||
desc = text;
|
||||
}
|
||||
text = [];
|
||||
tags.push({ name: match[1], text: text });
|
||||
lines[i] = lines[i].substring(match[1].length + 1).trim();
|
||||
}
|
||||
if (lines[i].length || text.length)
|
||||
text.push(lines[i]);
|
||||
}
|
||||
return {
|
||||
text: desc || text,
|
||||
tags: tags || []
|
||||
};
|
||||
}
|
||||
|
||||
// writes a comment
|
||||
function writeComment(comment, otherwiseNewline) {
|
||||
if (!comment || options.comments === false) {
|
||||
if (otherwiseNewline)
|
||||
writeln();
|
||||
return;
|
||||
}
|
||||
if (typeof comment !== "object")
|
||||
comment = parseComment(comment);
|
||||
comment.tags = comment.tags.filter(function(tag) {
|
||||
return keepTags.indexOf(tag.name) > -1 && (tag.name !== "returns" || tag.text[0] !== "{undefined}");
|
||||
});
|
||||
writeln();
|
||||
if (!comment.tags.length && comment.text.length < 2) {
|
||||
writeln("/** " + comment.text[0] + " */");
|
||||
return;
|
||||
}
|
||||
writeln("/**");
|
||||
comment.text.forEach(function(line) {
|
||||
if (line.length)
|
||||
writeln(" * ", line);
|
||||
else
|
||||
writeln(" *");
|
||||
});
|
||||
comment.tags.forEach(function(tag) {
|
||||
var started = false;
|
||||
if (tag.text.length) {
|
||||
tag.text.forEach(function(line, i) {
|
||||
if (i > 0)
|
||||
write(" * ");
|
||||
else if (tag.name !== "throws")
|
||||
line = line.replace(/^\{[^\s]*} ?/, "");
|
||||
if (!line.length)
|
||||
return;
|
||||
if (!started) {
|
||||
write(" * @", tag.name, " ");
|
||||
started = true;
|
||||
}
|
||||
writeln(line);
|
||||
});
|
||||
}
|
||||
});
|
||||
writeln(" */");
|
||||
}
|
||||
|
||||
// recursively replaces all occurrences of re's match
|
||||
function replaceRecursive(name, re, fn) {
|
||||
var found;
|
||||
|
||||
function replacer() {
|
||||
found = true;
|
||||
return fn.apply(null, arguments);
|
||||
}
|
||||
|
||||
do {
|
||||
found = false;
|
||||
name = name.replace(re, replacer);
|
||||
} while (found);
|
||||
return name;
|
||||
}
|
||||
|
||||
// tests if an element is considered to be a class or class-like
|
||||
function isClassLike(element) {
|
||||
return isClass(element) || isInterface(element);
|
||||
}
|
||||
|
||||
// tests if an element is considered to be a class
|
||||
function isClass(element) {
|
||||
return element && element.kind === "class";
|
||||
}
|
||||
|
||||
// tests if an element is considered to be an interface
|
||||
function isInterface(element) {
|
||||
return element && (element.kind === "interface" || element.kind === "mixin");
|
||||
}
|
||||
|
||||
// tests if an element is considered to be a namespace
|
||||
function isNamespace(element) {
|
||||
return element && (element.kind === "namespace" || element.kind === "module");
|
||||
}
|
||||
|
||||
// gets all children of the specified parent
|
||||
function getChildrenOf(parent) {
|
||||
var memberof = parent ? parent.longname : undefined;
|
||||
return data.filter(function(element) {
|
||||
return element.memberof === memberof;
|
||||
});
|
||||
}
|
||||
|
||||
// gets the literal type of an element
|
||||
function getTypeOf(element) {
|
||||
if (element.tsType)
|
||||
return element.tsType.replace(/\r?\n|\r/g, "\n");
|
||||
var name = "any";
|
||||
var type = element.type;
|
||||
if (type && type.names && type.names.length) {
|
||||
if (type.names.length === 1)
|
||||
name = element.type.names[0].trim();
|
||||
else
|
||||
name = "(" + element.type.names.join("|") + ")";
|
||||
} else
|
||||
return name;
|
||||
|
||||
// Replace catchalls with any
|
||||
name = name.replace(/\*|\bmixed\b/g, "any");
|
||||
|
||||
// Ensure upper case Object for map expressions below
|
||||
name = name.replace(/\bobject\b/g, "Object");
|
||||
|
||||
// Correct Something.<Something> to Something<Something>
|
||||
name = replaceRecursive(name, /\b(?!Object|Array)([\w$]+)\.<([^>]*)>/gi, function($0, $1, $2) {
|
||||
return $1 + "<" + $2 + ">";
|
||||
});
|
||||
|
||||
// Replace Array.<string> with string[]
|
||||
name = replaceRecursive(name, /\bArray\.?<([^>]*)>/gi, function($0, $1) {
|
||||
return $1 + "[]";
|
||||
});
|
||||
|
||||
// Replace Object.<string,number> with { [k: string]: number }
|
||||
name = replaceRecursive(name, /\bObject\.?<([^,]*), *([^>]*)>/gi, function($0, $1, $2) {
|
||||
return "{ [k: " + $1 + "]: " + $2 + " }";
|
||||
});
|
||||
|
||||
// Replace functions (there are no signatures) with Function
|
||||
name = name.replace(/\bfunction(?:\(\))?\b/g, "Function");
|
||||
|
||||
// Convert plain Object back to just object
|
||||
name = name.replace(/\b(Object\b(?!\.))/g, function($0, $1) {
|
||||
return $1.toLowerCase();
|
||||
});
|
||||
|
||||
return name;
|
||||
}
|
||||
|
||||
// begins writing the definition of the specified element
|
||||
function begin(element, is_interface) {
|
||||
if (!seen[element.longname]) {
|
||||
if (isClass(element)) {
|
||||
var comment = parseComment(element.comment);
|
||||
var classdesc = comment.tags.find(function(tag) { return tag.name === "classdesc"; });
|
||||
if (classdesc) {
|
||||
comment.text = classdesc.text;
|
||||
comment.tags = [];
|
||||
}
|
||||
writeComment(comment, true);
|
||||
} else
|
||||
writeComment(element.comment, is_interface || isClassLike(element) || isNamespace(element) || element.isEnum || element.scope === "global");
|
||||
seen[element.longname] = element;
|
||||
} else
|
||||
writeln();
|
||||
if (element.scope !== "global" || options.module)
|
||||
return;
|
||||
write("export ");
|
||||
}
|
||||
|
||||
// writes the function signature describing element
|
||||
function writeFunctionSignature(element, isConstructor, isTypeDef) {
|
||||
write("(");
|
||||
|
||||
var params = {};
|
||||
|
||||
// this type
|
||||
if (element.this)
|
||||
params["this"] = {
|
||||
type: element.this.replace(/^{|}$/g, ""),
|
||||
optional: false
|
||||
};
|
||||
|
||||
// parameter types
|
||||
if (element.params)
|
||||
element.params.forEach(function(param) {
|
||||
var path = param.name.split(/\./g);
|
||||
if (path.length === 1)
|
||||
params[param.name] = {
|
||||
type: getTypeOf(param),
|
||||
variable: param.variable === true,
|
||||
optional: param.optional === true,
|
||||
defaultValue: param.defaultvalue // Not used yet (TODO)
|
||||
};
|
||||
else // Property syntax (TODO)
|
||||
params[path[0]].type = "{ [k: string]: any }";
|
||||
});
|
||||
|
||||
var paramNames = Object.keys(params);
|
||||
paramNames.forEach(function(name, i) {
|
||||
var param = params[name];
|
||||
var type = param.type;
|
||||
if (param.variable) {
|
||||
name = "..." + name;
|
||||
type = param.type.charAt(0) === "(" ? "any[]" : param.type + "[]";
|
||||
}
|
||||
write(name, !param.variable && param.optional ? "?: " : ": ", type);
|
||||
if (i < paramNames.length - 1)
|
||||
write(", ");
|
||||
});
|
||||
|
||||
write(")");
|
||||
|
||||
// return type
|
||||
if (!isConstructor) {
|
||||
write(isTypeDef ? " => " : ": ");
|
||||
var typeName;
|
||||
if (element.returns && element.returns.length && (typeName = getTypeOf(element.returns[0])) !== "undefined")
|
||||
write(typeName);
|
||||
else
|
||||
write("void");
|
||||
}
|
||||
}
|
||||
|
||||
// writes (a typedef as) an interface
|
||||
function writeInterface(element) {
|
||||
write("interface ", element.name);
|
||||
writeInterfaceBody(element);
|
||||
writeln();
|
||||
}
|
||||
|
||||
function writeInterfaceBody(element) {
|
||||
writeln("{");
|
||||
++indent;
|
||||
if (element.tsType)
|
||||
writeln(element.tsType.replace(/\r?\n|\r/g, "\n"));
|
||||
else if (element.properties && element.properties.length)
|
||||
element.properties.forEach(writeProperty);
|
||||
--indent;
|
||||
write("}");
|
||||
}
|
||||
|
||||
function writeProperty(property, declare) {
|
||||
writeComment(property.description);
|
||||
if (declare)
|
||||
write("let ");
|
||||
write(property.name);
|
||||
if (property.optional)
|
||||
write("?");
|
||||
writeln(": ", getTypeOf(property), ";");
|
||||
}
|
||||
|
||||
//
|
||||
// Handlers
|
||||
//
|
||||
|
||||
// handles a single element of any understood type
|
||||
function handleElement(element, parent) {
|
||||
if (element.scope === "inner")
|
||||
return false;
|
||||
|
||||
if (element.optional !== true && element.type && element.type.names && element.type.names.length) {
|
||||
for (var i = 0; i < element.type.names.length; i++) {
|
||||
if (element.type.names[i].toLowerCase() === "undefined") {
|
||||
// This element is actually optional. Set optional to true and
|
||||
// remove the 'undefined' type
|
||||
element.optional = true;
|
||||
element.type.names.splice(i, 1);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (seen[element.longname])
|
||||
return true;
|
||||
if (isClassLike(element))
|
||||
handleClass(element, parent);
|
||||
else switch (element.kind) {
|
||||
case "module":
|
||||
case "namespace":
|
||||
handleNamespace(element, parent);
|
||||
break;
|
||||
case "constant":
|
||||
case "member":
|
||||
handleMember(element, parent);
|
||||
break;
|
||||
case "function":
|
||||
handleFunction(element, parent);
|
||||
break;
|
||||
case "typedef":
|
||||
handleTypeDef(element, parent);
|
||||
break;
|
||||
case "package":
|
||||
break;
|
||||
}
|
||||
seen[element.longname] = element;
|
||||
return true;
|
||||
}
|
||||
|
||||
// handles (just) a namespace
|
||||
function handleNamespace(element/*, parent*/) {
|
||||
var children = getChildrenOf(element);
|
||||
if (!children.length)
|
||||
return;
|
||||
var first = true;
|
||||
if (element.properties)
|
||||
element.properties.forEach(function(property) {
|
||||
if (!/^[$\w]+$/.test(property.name)) // incompatible in namespace
|
||||
return;
|
||||
if (first) {
|
||||
begin(element);
|
||||
writeln("namespace ", element.name, " {");
|
||||
++indent;
|
||||
first = false;
|
||||
}
|
||||
writeProperty(property, true);
|
||||
});
|
||||
children.forEach(function(child) {
|
||||
if (child.scope === "inner" || seen[child.longname])
|
||||
return;
|
||||
if (first) {
|
||||
begin(element);
|
||||
writeln("namespace ", element.name, " {");
|
||||
++indent;
|
||||
first = false;
|
||||
}
|
||||
handleElement(child, element);
|
||||
});
|
||||
if (!first) {
|
||||
--indent;
|
||||
writeln("}");
|
||||
}
|
||||
}
|
||||
|
||||
// a filter function to remove any module references
|
||||
function notAModuleReference(ref) {
|
||||
return ref.indexOf("module:") === -1;
|
||||
}
|
||||
|
||||
// handles a class or class-like
|
||||
function handleClass(element, parent) {
|
||||
var is_interface = isInterface(element);
|
||||
begin(element, is_interface);
|
||||
if (is_interface)
|
||||
write("interface ");
|
||||
else {
|
||||
if (element.virtual)
|
||||
write("abstract ");
|
||||
write("class ");
|
||||
}
|
||||
write(element.name);
|
||||
if (element.templates && element.templates.length)
|
||||
write("<", element.templates.join(", "), ">");
|
||||
write(" ");
|
||||
|
||||
// extended classes
|
||||
if (element.augments) {
|
||||
var augments = element.augments.filter(notAModuleReference);
|
||||
if (augments.length)
|
||||
write("extends ", augments[0], " ");
|
||||
}
|
||||
|
||||
// implemented interfaces
|
||||
var impls = [];
|
||||
if (element.implements)
|
||||
Array.prototype.push.apply(impls, element.implements);
|
||||
if (element.mixes)
|
||||
Array.prototype.push.apply(impls, element.mixes);
|
||||
impls = impls.filter(notAModuleReference);
|
||||
if (impls.length)
|
||||
write("implements ", impls.join(", "), " ");
|
||||
|
||||
writeln("{");
|
||||
++indent;
|
||||
|
||||
if (element.tsType)
|
||||
writeln(element.tsType.replace(/\r?\n|\r/g, "\n"));
|
||||
|
||||
// constructor
|
||||
if (!is_interface && !element.virtual)
|
||||
handleFunction(element, parent, true);
|
||||
|
||||
// properties
|
||||
if (is_interface && element.properties)
|
||||
element.properties.forEach(function(property) {
|
||||
writeProperty(property);
|
||||
});
|
||||
|
||||
// class-compatible members
|
||||
var incompatible = [];
|
||||
getChildrenOf(element).forEach(function(child) {
|
||||
if (isClassLike(child) || child.kind === "module" || child.kind === "typedef" || child.isEnum) {
|
||||
incompatible.push(child);
|
||||
return;
|
||||
}
|
||||
handleElement(child, element);
|
||||
});
|
||||
|
||||
--indent;
|
||||
writeln("}");
|
||||
|
||||
// class-incompatible members
|
||||
if (incompatible.length) {
|
||||
writeln();
|
||||
if (element.scope === "global" && !options.module)
|
||||
write("export ");
|
||||
writeln("namespace ", element.name, " {");
|
||||
++indent;
|
||||
incompatible.forEach(function(child) {
|
||||
handleElement(child, element);
|
||||
});
|
||||
--indent;
|
||||
writeln("}");
|
||||
}
|
||||
}
|
||||
|
||||
// handles a namespace or class member
|
||||
function handleMember(element, parent) {
|
||||
begin(element);
|
||||
|
||||
if (element.isEnum) {
|
||||
var stringEnum = false;
|
||||
element.properties.forEach(function(property) {
|
||||
if (isNaN(property.defaultvalue)) {
|
||||
stringEnum = true;
|
||||
}
|
||||
});
|
||||
if (stringEnum) {
|
||||
writeln("type ", element.name, " =");
|
||||
++indent;
|
||||
element.properties.forEach(function(property, i) {
|
||||
write(i === 0 ? "" : "| ", JSON.stringify(property.defaultvalue));
|
||||
});
|
||||
--indent;
|
||||
writeln(";");
|
||||
} else {
|
||||
writeln("enum ", element.name, " {");
|
||||
++indent;
|
||||
element.properties.forEach(function(property, i) {
|
||||
write(property.name);
|
||||
if (property.defaultvalue !== undefined)
|
||||
write(" = ", JSON.stringify(property.defaultvalue));
|
||||
if (i < element.properties.length - 1)
|
||||
writeln(",");
|
||||
else
|
||||
writeln();
|
||||
});
|
||||
--indent;
|
||||
writeln("}");
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
var inClass = isClassLike(parent);
|
||||
if (inClass) {
|
||||
write(element.access || "public", " ");
|
||||
if (element.scope === "static")
|
||||
write("static ");
|
||||
if (element.readonly)
|
||||
write("readonly ");
|
||||
} else
|
||||
write(element.kind === "constant" ? "const " : "let ");
|
||||
|
||||
write(element.name);
|
||||
if (element.optional)
|
||||
write("?");
|
||||
write(": ");
|
||||
|
||||
if (element.type && element.type.names && /^Object\b/i.test(element.type.names[0]) && element.properties) {
|
||||
writeln("{");
|
||||
++indent;
|
||||
element.properties.forEach(function(property, i) {
|
||||
writeln(JSON.stringify(property.name), ": ", getTypeOf(property), i < element.properties.length - 1 ? "," : "");
|
||||
});
|
||||
--indent;
|
||||
writeln("};");
|
||||
} else
|
||||
writeln(getTypeOf(element), ";");
|
||||
}
|
||||
}
|
||||
|
||||
// handles a function or method
|
||||
function handleFunction(element, parent, isConstructor) {
|
||||
var insideClass = true;
|
||||
if (isConstructor) {
|
||||
writeComment(element.comment);
|
||||
write("constructor");
|
||||
} else {
|
||||
begin(element);
|
||||
insideClass = isClassLike(parent);
|
||||
if (insideClass) {
|
||||
write(element.access || "public", " ");
|
||||
if (element.scope === "static")
|
||||
write("static ");
|
||||
} else
|
||||
write("function ");
|
||||
write(element.name);
|
||||
if (element.templates && element.templates.length)
|
||||
write("<", element.templates.join(", "), ">");
|
||||
}
|
||||
writeFunctionSignature(element, isConstructor, false);
|
||||
writeln(";");
|
||||
if (!insideClass)
|
||||
handleNamespace(element);
|
||||
}
|
||||
|
||||
// handles a type definition (not a real type)
|
||||
function handleTypeDef(element, parent) {
|
||||
if (isInterface(element)) {
|
||||
if (isClassLike(parent))
|
||||
queuedInterfaces.push(element);
|
||||
else {
|
||||
begin(element);
|
||||
writeInterface(element);
|
||||
}
|
||||
} else {
|
||||
writeComment(element.comment, true);
|
||||
write("type ", element.name);
|
||||
if (element.templates && element.templates.length)
|
||||
write("<", element.templates.join(", "), ">");
|
||||
write(" = ");
|
||||
if (element.tsType)
|
||||
write(element.tsType.replace(/\r?\n|\r/g, "\n"));
|
||||
else {
|
||||
var type = getTypeOf(element);
|
||||
if (element.type && element.type.names.length === 1 && element.type.names[0] === "function")
|
||||
writeFunctionSignature(element, false, true);
|
||||
else if (type === "object") {
|
||||
if (element.properties && element.properties.length)
|
||||
writeInterfaceBody(element);
|
||||
else
|
||||
write("{}");
|
||||
} else
|
||||
write(type);
|
||||
}
|
||||
writeln(";");
|
||||
}
|
||||
}
|
||||
25
express-server/node_modules/google-gax/node_modules/protobufjs/cli/node_modules/os-tmpdir/index.js
generated
vendored
Normal file
25
express-server/node_modules/google-gax/node_modules/protobufjs/cli/node_modules/os-tmpdir/index.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
'use strict';
|
||||
var isWindows = process.platform === 'win32';
|
||||
var trailingSlashRe = isWindows ? /[^:]\\$/ : /.\/$/;
|
||||
|
||||
// https://github.com/nodejs/node/blob/3e7a14381497a3b73dda68d05b5130563cdab420/lib/os.js#L25-L43
|
||||
module.exports = function () {
|
||||
var path;
|
||||
|
||||
if (isWindows) {
|
||||
path = process.env.TEMP ||
|
||||
process.env.TMP ||
|
||||
(process.env.SystemRoot || process.env.windir) + '\\temp';
|
||||
} else {
|
||||
path = process.env.TMPDIR ||
|
||||
process.env.TMP ||
|
||||
process.env.TEMP ||
|
||||
'/tmp';
|
||||
}
|
||||
|
||||
if (trailingSlashRe.test(path)) {
|
||||
path = path.slice(0, -1);
|
||||
}
|
||||
|
||||
return path;
|
||||
};
|
||||
21
express-server/node_modules/google-gax/node_modules/protobufjs/cli/node_modules/os-tmpdir/license
generated
vendored
Normal file
21
express-server/node_modules/google-gax/node_modules/protobufjs/cli/node_modules/os-tmpdir/license
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
32
express-server/node_modules/google-gax/node_modules/protobufjs/cli/node_modules/os-tmpdir/readme.md
generated
vendored
Normal file
32
express-server/node_modules/google-gax/node_modules/protobufjs/cli/node_modules/os-tmpdir/readme.md
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
# os-tmpdir [build status](https://travis-ci.org/sindresorhus/os-tmpdir)
|
||||
|
||||
> Node.js [`os.tmpdir()`](https://nodejs.org/api/os.html#os_os_tmpdir) [ponyfill](https://ponyfill.com)
|
||||
|
||||
Use this instead of `require('os').tmpdir()` to get a consistent behavior on different Node.js versions (even 0.8).
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install --save os-tmpdir
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const osTmpdir = require('os-tmpdir');
|
||||
|
||||
osTmpdir();
|
||||
//=> '/var/folders/m3/5574nnhn0yj488ccryqr7tc80000gn/T'
|
||||
```
|
||||
|
||||
|
||||
## API
|
||||
|
||||
See the [`os.tmpdir()` docs](https://nodejs.org/api/os.html#os_os_tmpdir).
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](https://sindresorhus.com)
|
||||
21
express-server/node_modules/google-gax/node_modules/protobufjs/cli/node_modules/tmp/LICENSE
generated
vendored
Normal file
21
express-server/node_modules/google-gax/node_modules/protobufjs/cli/node_modules/tmp/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 KARASZI István
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
314
express-server/node_modules/google-gax/node_modules/protobufjs/cli/node_modules/tmp/README.md
generated
vendored
Normal file
314
express-server/node_modules/google-gax/node_modules/protobufjs/cli/node_modules/tmp/README.md
generated
vendored
Normal file
@@ -0,0 +1,314 @@
|
||||
# Tmp
|
||||
|
||||
A simple temporary file and directory creator for [node.js.][1]
|
||||
|
||||
[Build Status](https://travis-ci.org/raszi/node-tmp)
|
||||
[Dependencies](https://david-dm.org/raszi/node-tmp)
|
||||
[npm version](https://badge.fury.io/js/tmp)
|
||||
[API docs](https://raszi.github.io/node-tmp/)
|
||||
[Known Vulnerabilities](https://snyk.io/test/npm/tmp)
|
||||
|
||||
## About
|
||||
|
||||
This is a [widely used library][2] to create temporary files and directories
|
||||
in a [node.js][1] environment.
|
||||
|
||||
Tmp offers both an asynchronous and a synchronous API. For all API calls, all
|
||||
the parameters are optional. There also exists a promisified version of the
|
||||
API, see (5) under references below.
|
||||
|
||||
Tmp uses crypto for determining random file names, or, when using templates,
|
||||
a six letter random identifier. In case you do not have that much
|
||||
entropy left on your system, Tmp will fall back to pseudo-random numbers.
|
||||
|
||||
You can set whether you want to remove the temporary file on process exit or
|
||||
not, and the destination directory can also be set.
|
||||
|
||||
## How to install
|
||||
|
||||
```bash
|
||||
npm install tmp
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
Please also check [API docs][4].
|
||||
|
||||
### Asynchronous file creation
|
||||
|
||||
Simple temporary file creation, the file will be closed and unlinked on process exit.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
tmp.file(function _tempFileCreated(err, path, fd, cleanupCallback) {
|
||||
if (err) throw err;
|
||||
|
||||
console.log('File: ', path);
|
||||
console.log('Filedescriptor: ', fd);
|
||||
|
||||
// If we don't need the file anymore we could manually call the cleanupCallback
|
||||
// But that is not necessary if we didn't pass the keep option because the library
|
||||
// will clean after itself.
|
||||
cleanupCallback();
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous file creation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
var tmpobj = tmp.fileSync();
|
||||
console.log('File: ', tmpobj.name);
|
||||
console.log('Filedescriptor: ', tmpobj.fd);
|
||||
|
||||
// If we don't need the file anymore we could manually call the removeCallback
|
||||
// But that is not necessary if we didn't pass the keep option because the library
|
||||
// will clean after itself.
|
||||
tmpobj.removeCallback();
|
||||
```
|
||||
|
||||
Note that this might throw an exception if either the maximum number of retries
|
||||
for creating a temporary name is exceeded, or if you do not have the permission
|
||||
to write to the directory where the temporary file should be created.
|
||||
|
||||
### Asynchronous directory creation
|
||||
|
||||
Simple temporary directory creation, it will be removed on process exit.
|
||||
|
||||
If the directory still contains items on process exit, then it won't be removed.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
tmp.dir(function _tempDirCreated(err, path, cleanupCallback) {
|
||||
if (err) throw err;
|
||||
|
||||
console.log('Dir: ', path);
|
||||
|
||||
// Manual cleanup
|
||||
cleanupCallback();
|
||||
});
|
||||
```
|
||||
|
||||
If you want to clean up the directory even when there are entries in it, then
|
||||
you can pass the `unsafeCleanup` option when creating it.
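
A quick sketch of that, building on the asynchronous directory example above:

```javascript
var tmp = require('tmp');

tmp.dir({ unsafeCleanup: true }, function _tempDirCreated(err, path, cleanupCallback) {
  if (err) throw err;

  // ... write files into `path` here ...

  // Removes the directory together with anything left inside it.
  cleanupCallback();
});
```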
|
||||
|
||||
### Synchronous directory creation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
var tmpobj = tmp.dirSync();
|
||||
console.log('Dir: ', tmpobj.name);
|
||||
// Manual cleanup
|
||||
tmpobj.removeCallback();
|
||||
```
|
||||
|
||||
Note that this might throw an exception if either the maximum number of retries
|
||||
for creating a temporary name is exceeded, or if you do not have the permission
|
||||
to write to the directory where the temporary directory should be created.
|
||||
|
||||
### Asynchronous filename generation
|
||||
|
||||
It is possible with this library to generate a unique filename in the specified
|
||||
directory.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
tmp.tmpName(function _tempNameGenerated(err, path) {
|
||||
if (err) throw err;
|
||||
|
||||
console.log('Created temporary filename: ', path);
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous filename generation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
var name = tmp.tmpNameSync();
|
||||
console.log('Created temporary filename: ', name);
|
||||
```
|
||||
|
||||
## Advanced usage
|
||||
|
||||
### Asynchronous file creation
|
||||
|
||||
Creates a file with mode `0644`, prefix will be `prefix-` and postfix will be `.txt`.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
tmp.file({ mode: 0644, prefix: 'prefix-', postfix: '.txt' }, function _tempFileCreated(err, path, fd) {
|
||||
if (err) throw err;
|
||||
|
||||
console.log('File: ', path);
|
||||
console.log('Filedescriptor: ', fd);
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous file creation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
var tmpobj = tmp.fileSync({ mode: 0644, prefix: 'prefix-', postfix: '.txt' });
|
||||
console.log('File: ', tmpobj.name);
|
||||
console.log('Filedescriptor: ', tmpobj.fd);
|
||||
```
|
||||
|
||||
### Controlling the Descriptor
|
||||
|
||||
As a side effect of creating a unique file `tmp` gets a file descriptor that is
|
||||
returned to the user as the `fd` parameter. The descriptor may be used by the
|
||||
application and is closed when the `removeCallback` is invoked.
|
||||
|
||||
In some use cases the application does not need the descriptor, needs to close it
|
||||
without removing the file, or needs to remove the file without closing the
|
||||
descriptor. Two options control how the descriptor is managed:
|
||||
|
||||
* `discardDescriptor` - if `true` causes `tmp` to close the descriptor after the file
|
||||
is created. In this case the `fd` parameter is undefined.
|
||||
* `detachDescriptor` - if `true` causes `tmp` to return the descriptor in the `fd`
|
||||
parameter, but it is the application's responsibility to close it when it is no
|
||||
longer needed.
|
||||
|
||||
```javascript
var tmp = require('tmp');

tmp.file({ discardDescriptor: true }, function _tempFileCreated(err, path, fd, cleanupCallback) {
  if (err) throw err;
  // fd will be undefined, allowing application to use fs.createReadStream(path)
  // without holding an unused descriptor open.
});
```

```javascript
var tmp = require('tmp');

tmp.file({ detachDescriptor: true }, function _tempFileCreated(err, path, fd, cleanupCallback) {
  if (err) throw err;

  cleanupCallback();
  // Application can store data through fd here; the space used will automatically
  // be reclaimed by the operating system when the descriptor is closed or program
  // terminates.
});
```

### Asynchronous directory creation

Creates a directory with mode `0750` and prefix `myTmpDir_`.

```javascript
var tmp = require('tmp');

tmp.dir({ mode: 0o750, prefix: 'myTmpDir_' }, function _tempDirCreated(err, path) {
  if (err) throw err;

  console.log('Dir: ', path);
});
```

### Synchronous directory creation

Again, a synchronous version of the above.

```javascript
var tmp = require('tmp');

var tmpobj = tmp.dirSync({ mode: 0o750, prefix: 'myTmpDir_' });
console.log('Dir: ', tmpobj.name);
```

### mkstemp like, asynchronously

Creates a new temporary directory with mode `0700` and a name like `/tmp/tmp-nk2J1u`.

```javascript
var tmp = require('tmp');

tmp.dir({ template: '/tmp/tmp-XXXXXX' }, function _tempDirCreated(err, path) {
  if (err) throw err;

  console.log('Dir: ', path);
});
```

### mkstemp like, synchronously

This will behave similarly to the asynchronous version.

```javascript
var tmp = require('tmp');

var tmpobj = tmp.dirSync({ template: '/tmp/tmp-XXXXXX' });
console.log('Dir: ', tmpobj.name);
```

### Asynchronous filename generation

The `tmpName()` function also accepts the `prefix`, `postfix`, `dir`, `template`, etc. parameters:

```javascript
var tmp = require('tmp');

tmp.tmpName({ template: '/tmp/tmp-XXXXXX' }, function _tempNameGenerated(err, path) {
  if (err) throw err;

  console.log('Created temporary filename: ', path);
});
```

### Synchronous filename generation

The `tmpNameSync()` function works similarly to `tmpName()`.

```javascript
var tmp = require('tmp');
var tmpname = tmp.tmpNameSync({ template: '/tmp/tmp-XXXXXX' });
console.log('Created temporary filename: ', tmpname);
```

## Graceful cleanup

One may want to clean up the temporary files even when an uncaught exception
occurs. To enforce this, you can call the `setGracefulCleanup()` method:

```javascript
var tmp = require('tmp');

tmp.setGracefulCleanup();
```
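
A minimal sketch of how this is typically combined with file creation; the surrounding code is illustrative:

```javascript
var tmp = require('tmp');

// register graceful cleanup once, before any temporary files are created
tmp.setGracefulCleanup();

tmp.file(function _tempFileCreated(err, path, fd, cleanupCallback) {
  if (err) throw err;

  console.log('File: ', path);
  // with graceful cleanup enabled, the file is also removed if the
  // process later dies from an uncaught exception
});
```
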
## Options

All options are optional :)

* `mode`: the file mode to create with, falls back to `0600` on file creation and `0700` on directory creation
* `prefix`: the optional prefix, falls back to `tmp-` if not provided
* `postfix`: the optional postfix, falls back to `.tmp` on file creation
* `template`: [`mkstemp`][3] like filename template, no default
* `dir`: the optional temporary directory, falls back to the system default (guessed from the environment)
* `tries`: how many times the function should try to get a unique filename before giving up, default `3`
* `keep`: signals that the temporary file or directory should not be deleted on exit, default is `false`, meaning delete
  * Please keep in mind that it is recommended in this case to call the provided `cleanupCallback` function manually, as shown in the sketch below.
* `unsafeCleanup`: recursively removes the created temporary directory, even when it's not empty. default is `false`
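
For illustration, a small sketch combining several of these options; the option values are arbitrary examples:

```javascript
var tmp = require('tmp');

tmp.dir({ prefix: 'build_', tries: 5, keep: true, unsafeCleanup: true }, function _tempDirCreated(err, path, cleanupCallback) {
  if (err) throw err;

  console.log('Dir: ', path);

  // keep: true means the directory is not removed automatically on exit,
  // so call the provided cleanupCallback yourself once it is no longer needed;
  // unsafeCleanup lets it remove the directory even if it still has contents
  cleanupCallback();
});
```
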
[1]: http://nodejs.org/
[2]: https://www.npmjs.com/browse/depended/tmp
[3]: http://www.kernel.org/doc/man-pages/online/pages/man3/mkstemp.3.html
[4]: https://raszi.github.io/node-tmp/
[5]: https://github.com/benjamingr/tmp-promise
611
express-server/node_modules/google-gax/node_modules/protobufjs/cli/node_modules/tmp/lib/tmp.js
generated
vendored
Normal file
@@ -0,0 +1,611 @@
|
||||
/*!
|
||||
* Tmp
|
||||
*
|
||||
* Copyright (c) 2011-2017 KARASZI Istvan <github@spam.raszi.hu>
|
||||
*
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
/*
|
||||
* Module dependencies.
|
||||
*/
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const crypto = require('crypto');
|
||||
const osTmpDir = require('os-tmpdir');
|
||||
const _c = process.binding('constants');
|
||||
|
||||
/*
|
||||
* The working inner variables.
|
||||
*/
|
||||
const
|
||||
/**
|
||||
* The temporary directory.
|
||||
* @type {string}
|
||||
*/
|
||||
tmpDir = osTmpDir(),
|
||||
|
||||
// the random characters to choose from
|
||||
RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz',
|
||||
|
||||
TEMPLATE_PATTERN = /XXXXXX/,
|
||||
|
||||
DEFAULT_TRIES = 3,
|
||||
|
||||
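  // open flags for new temp files: create, fail if it already exists, read/write;
  // the constants moved under _c.fs in newer Node versions, hence the fallbacks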
CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR),
|
||||
|
||||
EBADF = _c.EBADF || _c.os.errno.EBADF,
|
||||
ENOENT = _c.ENOENT || _c.os.errno.ENOENT,
|
||||
|
||||
DIR_MODE = 448 /* 0o700 */,
|
||||
FILE_MODE = 384 /* 0o600 */,
|
||||
|
||||
  // this will hold the objects that need to be removed on exit
|
||||
_removeObjects = [];
|
||||
|
||||
var
|
||||
_gracefulCleanup = false,
|
||||
_uncaughtException = false;
|
||||
|
||||
/**
|
||||
* Random name generator based on crypto.
|
||||
* Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript
|
||||
*
|
||||
* @param {number} howMany
|
||||
* @returns {string} the generated random name
|
||||
* @private
|
||||
*/
|
||||
function _randomChars(howMany) {
|
||||
var
|
||||
value = [],
|
||||
rnd = null;
|
||||
|
||||
// make sure that we do not fail because we ran out of entropy
|
||||
try {
|
||||
rnd = crypto.randomBytes(howMany);
|
||||
} catch (e) {
|
||||
rnd = crypto.pseudoRandomBytes(howMany);
|
||||
}
|
||||
|
||||
for (var i = 0; i < howMany; i++) {
|
||||
value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]);
|
||||
}
|
||||
|
||||
return value.join('');
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether the `obj` parameter is defined or not.
|
||||
*
|
||||
* @param {Object} obj
|
||||
* @returns {boolean} true if the object is undefined
|
||||
* @private
|
||||
*/
|
||||
function _isUndefined(obj) {
|
||||
return typeof obj === 'undefined';
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the function arguments.
|
||||
*
|
||||
* This function helps to have optional arguments.
|
||||
*
|
||||
* @param {(Options|Function)} options
|
||||
* @param {Function} callback
|
||||
* @returns {Array} parsed arguments
|
||||
* @private
|
||||
*/
|
||||
function _parseArguments(options, callback) {
|
||||
if (typeof options == 'function') {
|
||||
return [callback || {}, options];
|
||||
}
|
||||
|
||||
if (_isUndefined(options)) {
|
||||
return [{}, callback];
|
||||
}
|
||||
|
||||
return [options, callback];
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a new temporary name.
|
||||
*
|
||||
* @param {Object} opts
|
||||
* @returns {string} the new random name according to opts
|
||||
* @private
|
||||
*/
|
||||
function _generateTmpName(opts) {
|
||||
if (opts.name) {
|
||||
return path.join(opts.dir || tmpDir, opts.name);
|
||||
}
|
||||
|
||||
// mkstemps like template
|
||||
if (opts.template) {
|
||||
return opts.template.replace(TEMPLATE_PATTERN, _randomChars(6));
|
||||
}
|
||||
|
||||
// prefix and postfix
|
||||
const name = [
|
||||
opts.prefix || 'tmp-',
|
||||
process.pid,
|
||||
_randomChars(12),
|
||||
opts.postfix || ''
|
||||
].join('');
|
||||
|
||||
return path.join(opts.dir || tmpDir, name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a temporary file name.
|
||||
*
|
||||
* @param {(Options|tmpNameCallback)} options options or callback
|
||||
* @param {?tmpNameCallback} callback the callback function
|
||||
*/
|
||||
function tmpName(options, callback) {
|
||||
var
|
||||
args = _parseArguments(options, callback),
|
||||
opts = args[0],
|
||||
cb = args[1],
|
||||
tries = opts.name ? 1 : opts.tries || DEFAULT_TRIES;
|
||||
|
||||
if (isNaN(tries) || tries < 0)
|
||||
return cb(new Error('Invalid tries'));
|
||||
|
||||
if (opts.template && !opts.template.match(TEMPLATE_PATTERN))
|
||||
return cb(new Error('Invalid template provided'));
|
||||
|
||||
(function _getUniqueName() {
|
||||
const name = _generateTmpName(opts);
|
||||
|
||||
// check whether the path exists then retry if needed
|
||||
fs.stat(name, function (err) {
|
||||
if (!err) {
|
||||
if (tries-- > 0) return _getUniqueName();
|
||||
|
||||
return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name));
|
||||
}
|
||||
|
||||
cb(null, name);
|
||||
});
|
||||
}());
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronous version of tmpName.
|
||||
*
|
||||
* @param {Object} options
|
||||
* @returns {string} the generated random name
|
||||
* @throws {Error} if the options are invalid or could not generate a filename
|
||||
*/
|
||||
function tmpNameSync(options) {
|
||||
var
|
||||
args = _parseArguments(options),
|
||||
opts = args[0],
|
||||
tries = opts.name ? 1 : opts.tries || DEFAULT_TRIES;
|
||||
|
||||
if (isNaN(tries) || tries < 0)
|
||||
throw new Error('Invalid tries');
|
||||
|
||||
if (opts.template && !opts.template.match(TEMPLATE_PATTERN))
|
||||
throw new Error('Invalid template provided');
|
||||
|
||||
do {
|
||||
const name = _generateTmpName(opts);
|
||||
try {
|
||||
fs.statSync(name);
|
||||
} catch (e) {
|
||||
return name;
|
||||
}
|
||||
} while (tries-- > 0);
|
||||
|
||||
throw new Error('Could not get a unique tmp filename, max tries reached');
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates and opens a temporary file.
|
||||
*
|
||||
* @param {(Options|fileCallback)} options the config options or the callback function
|
||||
* @param {?fileCallback} callback
|
||||
*/
|
||||
function file(options, callback) {
|
||||
var
|
||||
args = _parseArguments(options, callback),
|
||||
opts = args[0],
|
||||
cb = args[1];
|
||||
|
||||
opts.postfix = (_isUndefined(opts.postfix)) ? '.tmp' : opts.postfix;
|
||||
|
||||
// gets a temporary filename
|
||||
tmpName(opts, function _tmpNameCreated(err, name) {
|
||||
if (err) return cb(err);
|
||||
|
||||
// create and open the file
|
||||
fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) {
|
||||
if (err) return cb(err);
|
||||
|
||||
if (opts.discardDescriptor) {
|
||||
return fs.close(fd, function _discardCallback(err) {
|
||||
if (err) {
|
||||
// Low probability, and the file exists, so this could be
|
||||
// ignored. If it isn't we certainly need to unlink the
|
||||
// file, and if that fails too its error is more
|
||||
// important.
|
||||
try {
|
||||
fs.unlinkSync(name);
|
||||
} catch (e) {
|
||||
if (!isENOENT(e)) {
|
||||
err = e;
|
||||
}
|
||||
}
|
||||
return cb(err);
|
||||
}
|
||||
cb(null, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts));
|
||||
});
|
||||
}
|
||||
if (opts.detachDescriptor) {
|
||||
return cb(null, name, fd, _prepareTmpFileRemoveCallback(name, -1, opts));
|
||||
}
|
||||
cb(null, name, fd, _prepareTmpFileRemoveCallback(name, fd, opts));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronous version of file.
|
||||
*
|
||||
* @param {Options} options
|
||||
* @returns {FileSyncObject} object consists of name, fd and removeCallback
|
||||
* @throws {Error} if cannot create a file
|
||||
*/
|
||||
function fileSync(options) {
|
||||
var
|
||||
args = _parseArguments(options),
|
||||
opts = args[0];
|
||||
|
||||
opts.postfix = opts.postfix || '.tmp';
|
||||
|
||||
const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
|
||||
const name = tmpNameSync(opts);
|
||||
var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE);
|
||||
if (opts.discardDescriptor) {
|
||||
fs.closeSync(fd);
|
||||
fd = undefined;
|
||||
}
|
||||
|
||||
return {
|
||||
name: name,
|
||||
fd: fd,
|
||||
removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts)
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes files and folders in a directory recursively.
|
||||
*
|
||||
* @param {string} root
|
||||
* @private
|
||||
*/
|
||||
function _rmdirRecursiveSync(root) {
|
||||
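  // iterative depth-first removal: a directory is pushed back once (deferred)
  // and only rmdir'ed after all of its contents have been deleted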
const dirs = [root];
|
||||
|
||||
do {
|
||||
var
|
||||
dir = dirs.pop(),
|
||||
deferred = false,
|
||||
files = fs.readdirSync(dir);
|
||||
|
||||
for (var i = 0, length = files.length; i < length; i++) {
|
||||
var
|
||||
file = path.join(dir, files[i]),
|
||||
stat = fs.lstatSync(file); // lstat so we don't recurse into symlinked directories
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
if (!deferred) {
|
||||
deferred = true;
|
||||
dirs.push(dir);
|
||||
}
|
||||
dirs.push(file);
|
||||
} else {
|
||||
fs.unlinkSync(file);
|
||||
}
|
||||
}
|
||||
|
||||
if (!deferred) {
|
||||
fs.rmdirSync(dir);
|
||||
}
|
||||
} while (dirs.length !== 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a temporary directory.
|
||||
*
|
||||
* @param {(Options|dirCallback)} options the options or the callback function
|
||||
* @param {?dirCallback} callback
|
||||
*/
|
||||
function dir(options, callback) {
|
||||
var
|
||||
args = _parseArguments(options, callback),
|
||||
opts = args[0],
|
||||
cb = args[1];
|
||||
|
||||
// gets a temporary filename
|
||||
tmpName(opts, function _tmpNameCreated(err, name) {
|
||||
if (err) return cb(err);
|
||||
|
||||
// create the directory
|
||||
fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) {
|
||||
if (err) return cb(err);
|
||||
|
||||
cb(null, name, _prepareTmpDirRemoveCallback(name, opts));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronous version of dir.
|
||||
*
|
||||
* @param {Options} options
|
||||
* @returns {DirSyncObject} object consists of name and removeCallback
|
||||
* @throws {Error} if it cannot create a directory
|
||||
*/
|
||||
function dirSync(options) {
|
||||
var
|
||||
args = _parseArguments(options),
|
||||
opts = args[0];
|
||||
|
||||
const name = tmpNameSync(opts);
|
||||
fs.mkdirSync(name, opts.mode || DIR_MODE);
|
||||
|
||||
return {
|
||||
name: name,
|
||||
removeCallback: _prepareTmpDirRemoveCallback(name, opts)
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepares the callback for removal of the temporary file.
|
||||
*
|
||||
* @param {string} name the path of the file
|
||||
* @param {number} fd file descriptor
|
||||
* @param {Object} opts
|
||||
* @returns {fileCallback}
|
||||
* @private
|
||||
*/
|
||||
function _prepareTmpFileRemoveCallback(name, fd, opts) {
|
||||
const removeCallback = _prepareRemoveCallback(function _removeCallback(fdPath) {
|
||||
try {
|
||||
if (0 <= fdPath[0]) {
|
||||
fs.closeSync(fdPath[0]);
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
// under some node/windows related circumstances, a temporary file
|
||||
    // may not have been created as expected or the file was already closed
|
||||
// by the user, in which case we will simply ignore the error
|
||||
if (!isEBADF(e) && !isENOENT(e)) {
|
||||
// reraise any unanticipated error
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
try {
|
||||
fs.unlinkSync(fdPath[1]);
|
||||
}
|
||||
catch (e) {
|
||||
if (!isENOENT(e)) {
|
||||
// reraise any unanticipated error
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}, [fd, name]);
|
||||
|
||||
if (!opts.keep) {
|
||||
_removeObjects.unshift(removeCallback);
|
||||
}
|
||||
|
||||
return removeCallback;
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepares the callback for removal of the temporary directory.
|
||||
*
|
||||
* @param {string} name
|
||||
* @param {Object} opts
|
||||
* @returns {Function} the callback
|
||||
* @private
|
||||
*/
|
||||
function _prepareTmpDirRemoveCallback(name, opts) {
|
||||
const removeFunction = opts.unsafeCleanup ? _rmdirRecursiveSync : fs.rmdirSync.bind(fs);
|
||||
const removeCallback = _prepareRemoveCallback(removeFunction, name);
|
||||
|
||||
if (!opts.keep) {
|
||||
_removeObjects.unshift(removeCallback);
|
||||
}
|
||||
|
||||
return removeCallback;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a guarded function wrapping the removeFunction call.
|
||||
*
|
||||
* @param {Function} removeFunction
|
||||
* @param {Object} arg
|
||||
* @returns {Function}
|
||||
* @private
|
||||
*/
|
||||
function _prepareRemoveCallback(removeFunction, arg) {
|
||||
var called = false;
|
||||
|
||||
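  // the returned callback deregisters itself from the exit-time cleanup list
  // and guards against running the removal more than once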
return function _cleanupCallback(next) {
|
||||
if (!called) {
|
||||
const index = _removeObjects.indexOf(_cleanupCallback);
|
||||
if (index >= 0) {
|
||||
_removeObjects.splice(index, 1);
|
||||
}
|
||||
|
||||
called = true;
|
||||
removeFunction(arg);
|
||||
}
|
||||
|
||||
if (next) next(null);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* The garbage collector.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
function _garbageCollector() {
|
||||
if (_uncaughtException && !_gracefulCleanup) {
|
||||
return;
|
||||
}
|
||||
|
||||
// the function being called removes itself from _removeObjects,
|
||||
// loop until _removeObjects is empty
|
||||
while (_removeObjects.length) {
|
||||
try {
|
||||
_removeObjects[0].call(null);
|
||||
} catch (e) {
|
||||
// already removed?
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
 * Helper for testing against EBADF to compensate for changes made in Node 7.x under Windows.
|
||||
*/
|
||||
function isEBADF(error) {
|
||||
return isExpectedError(error, -EBADF, 'EBADF');
|
||||
}
|
||||
|
||||
/**
|
||||
 * Helper for testing against ENOENT to compensate for changes made in Node 7.x under Windows.
|
||||
*/
|
||||
function isENOENT(error) {
|
||||
return isExpectedError(error, -ENOENT, 'ENOENT');
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to determine whether the expected error code matches the actual code and errno,
|
||||
* which will differ between the supported node versions.
|
||||
*
|
||||
* - Node >= 7.0:
|
||||
* error.code {String}
|
||||
* error.errno {String|Number} any numerical value will be negated
|
||||
*
|
||||
* - Node >= 6.0 < 7.0:
|
||||
* error.code {String}
|
||||
* error.errno {Number} negated
|
||||
*
|
||||
* - Node >= 4.0 < 6.0: introduces SystemError
|
||||
* error.code {String}
|
||||
* error.errno {Number} negated
|
||||
*
|
||||
* - Node >= 0.10 < 4.0:
|
||||
* error.code {Number} negated
|
||||
* error.errno n/a
|
||||
*/
|
||||
function isExpectedError(error, code, errno) {
|
||||
return error.code == code || error.code == errno;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the graceful cleanup.
|
||||
*
|
||||
* Also removes the created files and directories when an uncaught exception occurs.
|
||||
*/
|
||||
function setGracefulCleanup() {
|
||||
_gracefulCleanup = true;
|
||||
}
|
||||
|
||||
const version = process.versions.node.split('.').map(function (value) {
|
||||
return parseInt(value, 10);
|
||||
});
|
||||
|
||||
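// on Node versions older than 0.9.5, additionally hook 'uncaughtException' so that
// cleanup still runs before the error is rethrown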
if (version[0] === 0 && (version[1] < 9 || version[1] === 9 && version[2] < 5)) {
|
||||
process.addListener('uncaughtException', function _uncaughtExceptionThrown(err) {
|
||||
_uncaughtException = true;
|
||||
_garbageCollector();
|
||||
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
process.addListener('exit', function _exit(code) {
|
||||
if (code) _uncaughtException = true;
|
||||
_garbageCollector();
|
||||
});
|
||||
|
||||
/**
|
||||
* Configuration options.
|
||||
*
|
||||
* @typedef {Object} Options
|
||||
 * @property {?number} tries the number of tries before giving up on name generation
|
||||
* @property {?string} template the "mkstemp" like filename template
|
||||
* @property {?string} name fix name
|
||||
* @property {?string} dir the tmp directory to use
|
||||
* @property {?string} prefix prefix for the generated name
|
||||
* @property {?string} postfix postfix for the generated name
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} FileSyncObject
|
||||
* @property {string} name the name of the file
|
||||
 * @property {number} fd the file descriptor
|
||||
* @property {fileCallback} removeCallback the callback function to remove the file
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} DirSyncObject
|
||||
* @property {string} name the name of the directory
|
||||
* @property {fileCallback} removeCallback the callback function to remove the directory
|
||||
*/
|
||||
|
||||
/**
|
||||
* @callback tmpNameCallback
|
||||
* @param {?Error} err the error object if anything goes wrong
|
||||
* @param {string} name the temporary file name
|
||||
*/
|
||||
|
||||
/**
|
||||
* @callback fileCallback
|
||||
* @param {?Error} err the error object if anything goes wrong
|
||||
* @param {string} name the temporary file name
|
||||
* @param {number} fd the file descriptor
|
||||
* @param {cleanupCallback} fn the cleanup callback function
|
||||
*/
|
||||
|
||||
/**
|
||||
* @callback dirCallback
|
||||
* @param {?Error} err the error object if anything goes wrong
|
||||
* @param {string} name the temporary file name
|
||||
* @param {cleanupCallback} fn the cleanup callback function
|
||||
*/
|
||||
|
||||
/**
|
||||
* Removes the temporary created file or directory.
|
||||
*
|
||||
* @callback cleanupCallback
|
||||
* @param {simpleCallback} [next] function to call after entry was removed
|
||||
*/
|
||||
|
||||
/**
|
||||
* Callback function for function composition.
|
||||
* @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57}
|
||||
*
|
||||
* @callback simpleCallback
|
||||
*/
|
||||
|
||||
// exporting all the needed methods
|
||||
module.exports.tmpdir = tmpDir;
|
||||
|
||||
module.exports.dir = dir;
|
||||
module.exports.dirSync = dirSync;
|
||||
|
||||
module.exports.file = file;
|
||||
module.exports.fileSync = fileSync;
|
||||
|
||||
module.exports.tmpName = tmpName;
|
||||
module.exports.tmpNameSync = tmpNameSync;
|
||||
|
||||
module.exports.setGracefulCleanup = setGracefulCleanup;
|
||||
20
express-server/node_modules/google-gax/node_modules/protobufjs/cli/package-lock.json
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"version": "6.7.0",
|
||||
"lockfileVersion": 1,
|
||||
"requires": true,
|
||||
"dependencies": {
|
||||
"os-tmpdir": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
|
||||
"integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ="
|
||||
},
|
||||
"tmp": {
|
||||
"version": "0.0.33",
|
||||
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
|
||||
"integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
|
||||
"requires": {
|
||||
"os-tmpdir": "1.0.2"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
1
express-server/node_modules/google-gax/node_modules/protobufjs/cli/package.json
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version": "6.7.0"}
|
||||
32
express-server/node_modules/google-gax/node_modules/protobufjs/cli/package.standalone.json
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"name": "protobufjs-cli",
|
||||
"description": "Translates between file formats and generates static code as well as TypeScript definitions.",
|
||||
"version": "6.7.0",
|
||||
"author": "Daniel Wirtz <dcode+protobufjs@dcode.io>",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dcodeIO/protobuf.js.git"
|
||||
},
|
||||
"license": "BSD-3-Clause",
|
||||
"main": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"bin": {
|
||||
"pbjs": "bin/pbjs",
|
||||
"pbts": "bin/pbts"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"protobufjs": "~6.7.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"chalk": "^1.1.3",
|
||||
"escodegen": "^1.8.1",
|
||||
"espree": "^3.1.3",
|
||||
"estraverse": "^4.2.0",
|
||||
"glob": "^7.1.1",
|
||||
"jsdoc": "^3.4.2",
|
||||
"minimist": "^1.2.0",
|
||||
"semver": "^5.3.0",
|
||||
"tmp": "0.0.31",
|
||||
"uglify-js": "^2.8.15"
|
||||
}
|
||||
}
|
||||
9
express-server/node_modules/google-gax/node_modules/protobufjs/cli/pbjs.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
type pbjsCallback = (err: Error|null, output?: string) => void;
|
||||
|
||||
/**
|
||||
* Runs pbjs programmatically.
|
||||
* @param {string[]} args Command line arguments
|
||||
* @param {function(?Error, string=)} [callback] Optional completion callback
|
||||
* @returns {number|undefined} Exit code, if known
|
||||
*/
|
||||
export function main(args: string[], callback?: pbjsCallback): number|undefined;
|
||||
329
express-server/node_modules/google-gax/node_modules/protobufjs/cli/pbjs.js
generated
vendored
Normal file
@@ -0,0 +1,329 @@
|
||||
"use strict";
|
||||
var path = require("path"),
|
||||
fs = require("fs"),
|
||||
pkg = require("./package.json"),
|
||||
util = require("./util");
|
||||
|
||||
util.setup();
|
||||
|
||||
var protobuf = require(util.pathToProtobufJs),
|
||||
minimist = require("minimist"),
|
||||
chalk = require("chalk"),
|
||||
glob = require("glob");
|
||||
|
||||
var targets = util.requireAll("./targets");
|
||||
|
||||
/**
|
||||
* Runs pbjs programmatically.
|
||||
* @param {string[]} args Command line arguments
|
||||
* @param {function(?Error, string=)} [callback] Optional completion callback
|
||||
* @returns {number|undefined} Exit code, if known
|
||||
*/
|
||||
exports.main = function main(args, callback) {
|
||||
var lintDefault = "eslint-disable " + [
|
||||
"block-scoped-var",
|
||||
"id-length",
|
||||
"no-control-regex",
|
||||
"no-magic-numbers",
|
||||
"no-prototype-builtins",
|
||||
"no-redeclare",
|
||||
"no-shadow",
|
||||
"no-var",
|
||||
"sort-vars"
|
||||
].join(", ");
|
||||
var argv = minimist(args, {
|
||||
alias: {
|
||||
target: "t",
|
||||
out: "o",
|
||||
path: "p",
|
||||
wrap: "w",
|
||||
root: "r",
|
||||
lint: "l",
|
||||
// backward compatibility:
|
||||
"force-long": "strict-long",
|
||||
"force-message": "strict-message"
|
||||
},
|
||||
string: [ "target", "out", "path", "wrap", "dependency", "root", "lint" ],
|
||||
boolean: [ "create", "encode", "decode", "verify", "convert", "delimited", "beautify", "comments", "es6", "sparse", "keep-case", "force-long", "force-number", "force-enum-string", "force-message" ],
|
||||
default: {
|
||||
target: "json",
|
||||
create: true,
|
||||
encode: true,
|
||||
decode: true,
|
||||
verify: true,
|
||||
convert: true,
|
||||
delimited: true,
|
||||
beautify: true,
|
||||
comments: true,
|
||||
es6: null,
|
||||
lint: lintDefault,
|
||||
"keep-case": false,
|
||||
"force-long": false,
|
||||
"force-number": false,
|
||||
"force-enum-string": false,
|
||||
"force-message": false
|
||||
}
|
||||
});
|
||||
|
||||
var target = targets[argv.target],
|
||||
files = argv._,
|
||||
paths = typeof argv.path === "string" ? [ argv.path ] : argv.path || [];
|
||||
|
||||
// alias hyphen args in camel case
|
||||
Object.keys(argv).forEach(function(key) {
|
||||
var camelKey = key.replace(/-([a-z])/g, function($0, $1) { return $1.toUpperCase(); });
|
||||
if (camelKey !== key)
|
||||
argv[camelKey] = argv[key];
|
||||
});
|
||||
|
||||
// protobuf.js package directory contains additional, otherwise non-bundled google types
|
||||
paths.push(path.relative(process.cwd(), path.join(__dirname, "..")) || ".");
|
||||
|
||||
if (!files.length) {
|
||||
var descs = Object.keys(targets).filter(function(key) { return !targets[key].private; }).map(function(key) {
|
||||
return " " + util.pad(key, 14, true) + targets[key].description;
|
||||
});
|
||||
if (callback)
|
||||
callback(Error("usage")); // eslint-disable-line callback-return
|
||||
else
|
||||
process.stderr.write([
|
||||
"protobuf.js v" + pkg.version + " CLI for JavaScript",
|
||||
"",
|
||||
chalk.bold.white("Translates between file formats and generates static code."),
|
||||
"",
|
||||
" -t, --target Specifies the target format. Also accepts a path to require a custom target.",
|
||||
"",
|
||||
descs.join("\n"),
|
||||
"",
|
||||
" -p, --path Adds a directory to the include path.",
|
||||
"",
|
||||
" -o, --out Saves to a file instead of writing to stdout.",
|
||||
"",
|
||||
" --sparse Exports only those types referenced from a main file (experimental).",
|
||||
"",
|
||||
chalk.bold.gray(" Module targets only:"),
|
||||
"",
|
||||
" -w, --wrap Specifies the wrapper to use. Also accepts a path to require a custom wrapper.",
|
||||
"",
|
||||
" default Default wrapper supporting both CommonJS and AMD",
|
||||
" commonjs CommonJS wrapper",
|
||||
" amd AMD wrapper",
|
||||
" es6 ES6 wrapper (implies --es6)",
|
||||
" closure A closure adding to protobuf.roots where protobuf is a global",
|
||||
"",
|
||||
" --dependency Specifies which version of protobuf to require. Accepts any valid module id",
|
||||
"",
|
||||
" -r, --root Specifies an alternative protobuf.roots name.",
|
||||
"",
|
||||
" -l, --lint Linter configuration. Defaults to protobuf.js-compatible rules:",
|
||||
"",
|
||||
" " + lintDefault,
|
||||
"",
|
||||
" --es6 Enables ES6 syntax (const/let instead of var)",
|
||||
"",
|
||||
chalk.bold.gray(" Proto sources only:"),
|
||||
"",
|
||||
" --keep-case Keeps field casing instead of converting to camel case.",
|
||||
"",
|
||||
chalk.bold.gray(" Static targets only:"),
|
||||
"",
|
||||
" --no-create Does not generate create functions used for reflection compatibility.",
|
||||
" --no-encode Does not generate encode functions.",
|
||||
" --no-decode Does not generate decode functions.",
|
||||
" --no-verify Does not generate verify functions.",
|
||||
" --no-convert Does not generate convert functions like from/toObject",
|
||||
" --no-delimited Does not generate delimited encode/decode functions.",
|
||||
" --no-beautify Does not beautify generated code.",
|
||||
" --no-comments Does not output any JSDoc comments.",
|
||||
"",
|
||||
" --force-long Enfores the use of 'Long' for s-/u-/int64 and s-/fixed64 fields.",
|
||||
" --force-number Enfores the use of 'number' for s-/u-/int64 and s-/fixed64 fields.",
|
||||
" --force-message Enfores the use of message instances instead of plain objects.",
|
||||
"",
|
||||
"usage: " + chalk.bold.green("pbjs") + " [options] file1.proto file2.json ..." + chalk.gray(" (or pipe) ") + "other | " + chalk.bold.green("pbjs") + " [options] -",
|
||||
""
|
||||
].join("\n"));
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (typeof argv["strict-long"] === "boolean")
|
||||
argv["force-long"] = argv["strict-long"];
|
||||
|
||||
// Resolve glob expressions
|
||||
for (var i = 0; i < files.length;) {
|
||||
if (glob.hasMagic(files[i])) {
|
||||
var matches = glob.sync(files[i]);
|
||||
Array.prototype.splice.apply(files, [i, 1].concat(matches));
|
||||
i += matches.length;
|
||||
} else
|
||||
++i;
|
||||
}
|
||||
|
||||
// Require custom target
|
||||
if (!target)
|
||||
target = require(path.resolve(process.cwd(), argv.target));
|
||||
|
||||
var root = new protobuf.Root();
|
||||
|
||||
var mainFiles = [];
|
||||
|
||||
// Search include paths when resolving imports
|
||||
root.resolvePath = function pbjsResolvePath(origin, target) {
|
||||
var normOrigin = protobuf.util.path.normalize(origin),
|
||||
normTarget = protobuf.util.path.normalize(target);
|
||||
if (!normOrigin)
|
||||
mainFiles.push(normTarget);
|
||||
|
||||
var resolved = protobuf.util.path.resolve(normOrigin, normTarget, true);
|
||||
var idx = resolved.lastIndexOf("google/protobuf/");
|
||||
if (idx > -1) {
|
||||
var altname = resolved.substring(idx);
|
||||
if (altname in protobuf.common)
|
||||
resolved = altname;
|
||||
}
|
||||
|
||||
if (fs.existsSync(resolved))
|
||||
return resolved;
|
||||
|
||||
for (var i = 0; i < paths.length; ++i) {
|
||||
var iresolved = protobuf.util.path.resolve(paths[i] + "/", target);
|
||||
if (fs.existsSync(iresolved))
|
||||
return iresolved;
|
||||
}
|
||||
|
||||
return resolved;
|
||||
};
|
||||
|
||||
// Use es6 syntax if not explicitly specified on the command line and the es6 wrapper is used
|
||||
if (argv.wrap === "es6" || argv.es6) {
|
||||
argv.wrap = "es6";
|
||||
argv.es6 = true;
|
||||
}
|
||||
|
||||
var parseOptions = {
|
||||
"keepCase": argv["keep-case"] || false
|
||||
};
|
||||
|
||||
// Read from stdin
|
||||
if (files.length === 1 && files[0] === "-") {
|
||||
var data = [];
|
||||
process.stdin.on("data", function(chunk) {
|
||||
data.push(chunk);
|
||||
});
|
||||
process.stdin.on("end", function() {
|
||||
var source = Buffer.concat(data).toString("utf8");
|
||||
try {
|
||||
if (source.charAt(0) !== "{") {
|
||||
protobuf.parse.filename = "-";
|
||||
protobuf.parse(source, root, parseOptions);
|
||||
} else {
|
||||
var json = JSON.parse(source);
|
||||
root.setOptions(json.options).addJSON(json);
|
||||
}
|
||||
callTarget();
|
||||
} catch (err) {
|
||||
if (callback) {
|
||||
callback(err);
|
||||
return;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
});
|
||||
|
||||
// Load from disk
|
||||
} else {
|
||||
try {
|
||||
root.loadSync(files, parseOptions).resolveAll(); // sync is deterministic while async is not
|
||||
if (argv.sparse)
|
||||
sparsify(root);
|
||||
callTarget();
|
||||
} catch (err) {
|
||||
if (callback) {
|
||||
callback(err);
|
||||
return undefined;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
function markReferenced(tobj) {
|
||||
tobj.referenced = true;
|
||||
// also mark a type's fields and oneofs
|
||||
if (tobj.fieldsArray)
|
||||
tobj.fieldsArray.forEach(function(fobj) {
|
||||
fobj.referenced = true;
|
||||
});
|
||||
if (tobj.oneofsArray)
|
||||
tobj.oneofsArray.forEach(function(oobj) {
|
||||
oobj.referenced = true;
|
||||
});
|
||||
// also mark an extension field's extended type, but not its (other) fields
|
||||
if (tobj.extensionField)
|
||||
tobj.extensionField.parent.referenced = true;
|
||||
}
|
||||
|
||||
function sparsify(root) {
|
||||
|
||||
// 1. mark directly or indirectly referenced objects
|
||||
util.traverse(root, function(obj) {
|
||||
if (!obj.filename)
|
||||
return;
|
||||
if (mainFiles.indexOf(obj.filename) > -1)
|
||||
util.traverseResolved(obj, markReferenced);
|
||||
});
|
||||
|
||||
// 2. empty unreferenced objects
|
||||
util.traverse(root, function(obj) {
|
||||
var parent = obj.parent;
|
||||
if (!parent || obj.referenced) // root or referenced
|
||||
return;
|
||||
// remove unreferenced namespaces
|
||||
if (obj instanceof protobuf.Namespace) {
|
||||
var hasReferenced = false;
|
||||
util.traverse(obj, function(iobj) {
|
||||
if (iobj.referenced)
|
||||
hasReferenced = true;
|
||||
});
|
||||
if (hasReferenced) { // replace with plain namespace if a namespace subclass
|
||||
if (obj instanceof protobuf.Type || obj instanceof protobuf.Service) {
|
||||
var robj = new protobuf.Namespace(obj.name, obj.options);
|
||||
robj.nested = obj.nested;
|
||||
parent.add(robj);
|
||||
}
|
||||
} else // remove completely if nothing inside is referenced
|
||||
parent.remove(obj);
|
||||
|
||||
// remove everything else unreferenced
|
||||
} else if (!(obj instanceof protobuf.Namespace))
|
||||
parent.remove(obj);
|
||||
});
|
||||
|
||||
// 3. validate that everything is fine
|
||||
root.resolveAll();
|
||||
}
|
||||
|
||||
function callTarget() {
|
||||
target(root, argv, function targetCallback(err, output) {
|
||||
if (err) {
|
||||
if (callback)
|
||||
return callback(err);
|
||||
throw err;
|
||||
}
|
||||
try {
|
||||
if (argv.out)
|
||||
fs.writeFileSync(argv.out, output, { encoding: "utf8" });
|
||||
else if (!callback)
|
||||
process.stdout.write(output, "utf8");
|
||||
return callback
|
||||
? callback(null, output)
|
||||
: undefined;
|
||||
} catch (err) {
|
||||
if (callback)
|
||||
return callback(err);
|
||||
throw err;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return undefined;
|
||||
};
|
||||
9
express-server/node_modules/google-gax/node_modules/protobufjs/cli/pbts.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
type pbtsCallback = (err: Error|null, output?: string) => void;
|
||||
|
||||
/**
|
||||
* Runs pbts programmatically.
|
||||
* @param {string[]} args Command line arguments
|
||||
* @param {function(?Error, string=)} [callback] Optional completion callback
|
||||
* @returns {number|undefined} Exit code, if known
|
||||
*/
|
||||
export function main(args: string[], callback?: pbtsCallback): number|undefined;
|
||||
197
express-server/node_modules/google-gax/node_modules/protobufjs/cli/pbts.js
generated
vendored
Normal file
@@ -0,0 +1,197 @@
|
||||
"use strict";
|
||||
var child_process = require("child_process"),
|
||||
path = require("path"),
|
||||
fs = require("fs"),
|
||||
pkg = require("./package.json"),
|
||||
util = require("./util");
|
||||
|
||||
util.setup();
|
||||
|
||||
var minimist = require("minimist"),
|
||||
chalk = require("chalk"),
|
||||
glob = require("glob"),
|
||||
tmp = require("tmp");
|
||||
|
||||
/**
|
||||
* Runs pbts programmatically.
|
||||
* @param {string[]} args Command line arguments
|
||||
* @param {function(?Error, string=)} [callback] Optional completion callback
|
||||
* @returns {number|undefined} Exit code, if known
|
||||
*/
|
||||
exports.main = function(args, callback) {
|
||||
var argv = minimist(args, {
|
||||
alias: {
|
||||
name: "n",
|
||||
out : "o",
|
||||
main: "m",
|
||||
global: "g",
|
||||
import: "i"
|
||||
},
|
||||
string: [ "name", "out", "global", "import" ],
|
||||
boolean: [ "comments", "main" ],
|
||||
default: {
|
||||
comments: true,
|
||||
main: false
|
||||
}
|
||||
});
|
||||
|
||||
var files = argv._;
|
||||
|
||||
if (!files.length) {
|
||||
if (callback)
|
||||
callback(Error("usage")); // eslint-disable-line callback-return
|
||||
else
|
||||
process.stderr.write([
|
||||
"protobuf.js v" + pkg.version + " CLI for TypeScript",
|
||||
"",
|
||||
chalk.bold.white("Generates TypeScript definitions from annotated JavaScript files."),
|
||||
"",
|
||||
" -o, --out Saves to a file instead of writing to stdout.",
|
||||
"",
|
||||
" -g, --global Name of the global object in browser environments, if any.",
|
||||
"",
|
||||
" -i, --import Comma delimited list of imports. Local names will equal camelCase of the basename.",
|
||||
"",
|
||||
" --no-comments Does not output any JSDoc comments.",
|
||||
"",
|
||||
chalk.bold.gray(" Internal flags:"),
|
||||
"",
|
||||
" -n, --name Wraps everything in a module of the specified name.",
|
||||
"",
|
||||
" -m, --main Whether building the main library without any imports.",
|
||||
"",
|
||||
"usage: " + chalk.bold.green("pbts") + " [options] file1.js file2.js ..." + chalk.bold.gray(" (or) ") + "other | " + chalk.bold.green("pbts") + " [options] -",
|
||||
""
|
||||
].join("\n"));
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Resolve glob expressions
|
||||
for (var i = 0; i < files.length;) {
|
||||
if (glob.hasMagic(files[i])) {
|
||||
var matches = glob.sync(files[i]);
|
||||
Array.prototype.splice.apply(files, [i, 1].concat(matches));
|
||||
i += matches.length;
|
||||
} else
|
||||
++i;
|
||||
}
|
||||
|
||||
var cleanup = [];
|
||||
|
||||
// Read from stdin (to a temporary file)
|
||||
if (files.length === 1 && files[0] === "-") {
|
||||
var data = [];
|
||||
process.stdin.on("data", function(chunk) {
|
||||
data.push(chunk);
|
||||
});
|
||||
process.stdin.on("end", function() {
|
||||
files[0] = tmp.tmpNameSync() + ".js";
|
||||
fs.writeFileSync(files[0], Buffer.concat(data));
|
||||
cleanup.push(files[0]);
|
||||
callJsdoc();
|
||||
});
|
||||
|
||||
// Load from disk
|
||||
} else {
|
||||
callJsdoc();
|
||||
}
|
||||
|
||||
function callJsdoc() {
|
||||
|
||||
// There is no proper API for jsdoc, so this executes the CLI and pipes the output
|
||||
var basedir = path.join(__dirname, ".");
|
||||
var moduleName = argv.name || "null";
|
||||
var nodePath = process.execPath;
|
||||
var cmd = "\"" + nodePath + "\" \"" + require.resolve("jsdoc/jsdoc.js") + "\" -c \"" + path.join(basedir, "lib", "tsd-jsdoc.json") + "\" -q \"module=" + encodeURIComponent(moduleName) + "&comments=" + Boolean(argv.comments) + "\" " + files.map(function(file) { return "\"" + file + "\""; }).join(" ");
|
||||
var child = child_process.exec(cmd, {
|
||||
cwd: process.cwd(),
|
||||
argv0: "node",
|
||||
stdio: "pipe",
|
||||
maxBuffer: 1 << 24 // 16mb
|
||||
});
|
||||
var out = [];
|
||||
var ended = false;
|
||||
var closed = false;
|
||||
child.stdout.on("data", function(data) {
|
||||
out.push(data);
|
||||
});
|
||||
child.stdout.on("end", function() {
|
||||
if (closed) finish();
|
||||
else ended = true;
|
||||
});
|
||||
child.stderr.pipe(process.stderr);
|
||||
child.on("close", function(code) {
|
||||
// clean up temporary files, no matter what
|
||||
try { cleanup.forEach(fs.unlinkSync); } catch(e) {/**/} cleanup = [];
|
||||
|
||||
if (code) {
|
||||
out = out.join("").replace(/\s*JSDoc \d+\.\d+\.\d+ [^$]+/, "");
|
||||
process.stderr.write(out);
|
||||
var err = Error("code " + code);
|
||||
if (callback)
|
||||
return callback(err);
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (ended) return finish();
|
||||
closed = true;
|
||||
return undefined;
|
||||
});
|
||||
|
||||
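    // derives a camelCase local import name from the file's basename,
    // e.g. "my-module.js" becomes "myModule"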
function getImportName(importItem) {
|
||||
return path.basename(importItem, ".js").replace(/([-_~.+]\w)/g, function(match) {
|
||||
return match[1].toUpperCase();
|
||||
});
|
||||
}
|
||||
|
||||
function finish() {
|
||||
var output = [];
|
||||
if (argv.main)
|
||||
output.push(
|
||||
"// DO NOT EDIT! This is a generated file. Edit the JSDoc in src/*.js instead and run 'npm run types'.",
|
||||
""
|
||||
);
|
||||
if (argv.global)
|
||||
output.push(
|
||||
"export as namespace " + argv.global + ";",
|
||||
""
|
||||
);
|
||||
|
||||
if (!argv.main) {
|
||||
// Ensure we have a usable array of imports
|
||||
var importArray = typeof argv.import === "string" ? argv.import.split(",") : argv.import || [];
|
||||
|
||||
// Build an object of imports and paths
|
||||
var imports = {
|
||||
$protobuf: "protobufjs"
|
||||
};
|
||||
importArray.forEach(function(importItem) {
|
||||
imports[getImportName(importItem)] = importItem;
|
||||
});
|
||||
|
||||
// Write out the imports
|
||||
Object.keys(imports).forEach(function(key) {
|
||||
output.push("import * as " + key + " from \"" + imports[key] + "\";");
|
||||
});
|
||||
}
|
||||
|
||||
output = output.join("\n") + "\n" + out.join("");
|
||||
|
||||
try {
|
||||
if (argv.out)
|
||||
fs.writeFileSync(argv.out, output, { encoding: "utf8" });
|
||||
else if (!callback)
|
||||
process.stdout.write(output, "utf8");
|
||||
return callback
|
||||
? callback(null, output)
|
||||
: undefined;
|
||||
} catch (err) {
|
||||
if (callback)
|
||||
return callback(err);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
};
|
||||
38
express-server/node_modules/google-gax/node_modules/protobufjs/cli/targets/json-module.js
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
"use strict";
|
||||
module.exports = json_module;
|
||||
|
||||
var util = require("../util");
|
||||
|
||||
var protobuf = require("../..");
|
||||
|
||||
json_module.description = "JSON representation as a module";
|
||||
|
||||
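// unquotes JSON keys that are valid identifiers so the emitted module reads naturally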
function jsonSafeProp(json) {
|
||||
return json.replace(/^( +)"(\w+)":/mg, function($0, $1, $2) {
|
||||
return protobuf.util.safeProp($2).charAt(0) === "."
|
||||
? $1 + $2 + ":"
|
||||
: $0;
|
||||
});
|
||||
}
|
||||
|
||||
function json_module(root, options, callback) {
|
||||
try {
|
||||
var rootProp = protobuf.util.safeProp(options.root || "default");
|
||||
var output = [
|
||||
(options.es6 ? "const" : "var") + " $root = ($protobuf.roots" + rootProp + " || ($protobuf.roots" + rootProp + " = new $protobuf.Root()))\n"
|
||||
];
|
||||
if (root.options) {
|
||||
var optionsJson = jsonSafeProp(JSON.stringify(root.options, null, 2));
|
||||
output.push(".setOptions(" + optionsJson + ")\n");
|
||||
}
|
||||
var json = jsonSafeProp(JSON.stringify(root.nested, null, 2).trim());
|
||||
output.push(".addJSON(" + json + ");");
|
||||
output = util.wrap(output.join(""), protobuf.util.merge({ dependency: "protobufjs/light" }, options));
|
||||
process.nextTick(function() {
|
||||
callback(null, output);
|
||||
});
|
||||
} catch (e) {
|
||||
return callback(e);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
8
express-server/node_modules/google-gax/node_modules/protobufjs/cli/targets/json.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
"use strict";
|
||||
module.exports = json_target;
|
||||
|
||||
json_target.description = "JSON representation";
|
||||
|
||||
function json_target(root, options, callback) {
|
||||
callback(null, JSON.stringify(root, null, 2));
|
||||
}
|
||||
326
express-server/node_modules/google-gax/node_modules/protobufjs/cli/targets/proto.js
generated
vendored
Normal file
@@ -0,0 +1,326 @@
|
||||
"use strict";
|
||||
module.exports = proto_target;
|
||||
|
||||
proto_target.private = true;
|
||||
|
||||
var protobuf = require("../..");
|
||||
|
||||
var Namespace = protobuf.Namespace,
|
||||
Enum = protobuf.Enum,
|
||||
Type = protobuf.Type,
|
||||
Field = protobuf.Field,
|
||||
OneOf = protobuf.OneOf,
|
||||
Service = protobuf.Service,
|
||||
Method = protobuf.Method,
|
||||
types = protobuf.types,
|
||||
util = protobuf.util;
|
||||
|
||||
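// converts a camelCased field name to the underscored form used in .proto output;
// the first character is preserved as-is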
function underScore(str) {
|
||||
return str.substring(0,1)
|
||||
+ str.substring(1)
|
||||
.replace(/([A-Z])(?=[a-z]|$)/g, function($0, $1) { return "_" + $1.toLowerCase(); });
|
||||
}
|
||||
|
||||
var out = [];
|
||||
var indent = 0;
|
||||
var first = false;
|
||||
var syntax = 3;
|
||||
|
||||
function proto_target(root, options, callback) {
|
||||
if (options) {
|
||||
switch (options.syntax) {
|
||||
case undefined:
|
||||
case "proto3":
|
||||
case "3":
|
||||
syntax = 3;
|
||||
break;
|
||||
case "proto2":
|
||||
case "2":
|
||||
syntax = 2;
|
||||
break;
|
||||
default:
|
||||
return callback(Error("invalid syntax: " + options.syntax));
|
||||
}
|
||||
}
|
||||
indent = 0;
|
||||
first = false;
|
||||
try {
|
||||
buildRoot(root);
|
||||
return callback(null, out.join("\n"));
|
||||
} catch (err) {
|
||||
return callback(err);
|
||||
} finally {
|
||||
out = [];
|
||||
syntax = 3;
|
||||
}
|
||||
}
|
||||
|
||||
function push(line) {
|
||||
if (line === "")
|
||||
out.push("");
|
||||
else {
|
||||
var ind = "";
|
||||
for (var i = 0; i < indent; ++i)
|
||||
ind += " ";
|
||||
out.push(ind + line);
|
||||
}
|
||||
}
|
||||
|
||||
function escape(str) {
|
||||
return str.replace(/[\\"']/g, "\\$&")
|
||||
.replace(/\r/g, "\\r")
|
||||
.replace(/\n/g, "\\n")
|
||||
.replace(/\u0000/g, "\\0"); // eslint-disable-line no-control-regex
|
||||
}
|
||||
|
||||
function value(v) {
|
||||
switch (typeof v) {
|
||||
case "boolean":
|
||||
return v ? "true" : "false";
|
||||
case "number":
|
||||
return v.toString();
|
||||
default:
|
||||
return "\"" + escape(String(v)) + "\"";
|
||||
}
|
||||
}
|
||||
|
||||
function buildRoot(root) {
|
||||
root.resolveAll();
|
||||
var pkg = [];
|
||||
var ptr = root;
|
||||
var repeat = true;
|
||||
do {
|
||||
var nested = ptr.nestedArray;
|
||||
if (nested.length === 1 && nested[0] instanceof Namespace && !(nested[0] instanceof Type || nested[0] instanceof Service)) {
|
||||
ptr = nested[0];
|
||||
if (ptr !== root)
|
||||
pkg.push(ptr.name);
|
||||
} else
|
||||
repeat = false;
|
||||
} while (repeat);
|
||||
out.push("syntax = \"proto" + syntax + "\";");
|
||||
if (pkg.length)
|
||||
out.push("", "package " + pkg.join(".") + ";");
|
||||
|
||||
buildOptions(ptr);
|
||||
ptr.nestedArray.forEach(build);
|
||||
}
|
||||
|
||||
function build(object) {
|
||||
if (object instanceof Enum)
|
||||
buildEnum(object);
|
||||
else if (object instanceof Type)
|
||||
buildType(object);
|
||||
else if (object instanceof Field)
|
||||
buildField(object);
|
||||
else if (object instanceof OneOf)
|
||||
buildOneOf(object);
|
||||
else if (object instanceof Service)
|
||||
buildService(object);
|
||||
else if (object instanceof Method)
|
||||
buildMethod(object);
|
||||
else
|
||||
buildNamespace(object);
|
||||
}
|
||||
|
||||
function buildNamespace(namespace) { // just a namespace, not a type etc.
|
||||
push("");
|
||||
push("message " + namespace.name + " {");
|
||||
++indent;
|
||||
buildOptions(namespace);
|
||||
consolidateExtends(namespace.nestedArray).remaining.forEach(build);
|
||||
--indent;
|
||||
push("}");
|
||||
}
|
||||
|
||||
function buildEnum(enm) {
|
||||
push("");
|
||||
push("enum " + enm.name + " {");
|
||||
buildOptions(enm);
|
||||
++indent; first = true;
|
||||
Object.keys(enm.values).forEach(function(name) {
|
||||
var val = enm.values[name];
|
||||
if (first) {
|
||||
push("");
|
||||
first = false;
|
||||
}
|
||||
push(name + " = " + val + ";");
|
||||
});
|
||||
--indent; first = false;
|
||||
push("}");
|
||||
}
|
||||
|
||||
function buildRanges(keyword, ranges) {
|
||||
if (ranges && ranges.length) {
|
||||
var parts = [];
|
||||
ranges.forEach(function(range) {
|
||||
if (typeof range === "string")
|
||||
parts.push("\"" + escape(range) + "\"");
|
||||
else if (range[0] === range[1])
|
||||
parts.push(range[0]);
|
||||
else
|
||||
parts.push(range[0] + " to " + (range[1] === 0x1FFFFFFF ? "max" : range[1]));
|
||||
});
|
||||
push("");
|
||||
push(keyword + " " + parts.join(", ") + ";");
|
||||
}
|
||||
}
|
||||
|
||||
function buildType(type) {
|
||||
if (type.group)
|
||||
return; // built with the sister-field
|
||||
push("");
|
||||
push("message " + type.name + " {");
|
||||
++indent;
|
||||
buildOptions(type);
|
||||
type.oneofsArray.forEach(build);
|
||||
first = true;
|
||||
type.fieldsArray.forEach(build);
|
||||
consolidateExtends(type.nestedArray).remaining.forEach(build);
|
||||
buildRanges("extensions", type.extensions);
|
||||
buildRanges("reserved", type.reserved);
|
||||
--indent;
|
||||
push("}");
|
||||
}
|
||||
|
||||
function buildField(field, passExtend) {
|
||||
if (field.partOf || field.declaringField || field.extend !== undefined && !passExtend)
|
||||
return;
|
||||
if (first) {
|
||||
first = false;
|
||||
push("");
|
||||
}
|
||||
if (field.resolvedType && field.resolvedType.group) {
|
||||
buildGroup(field);
|
||||
return;
|
||||
}
|
||||
var sb = [];
|
||||
if (field.map)
|
||||
sb.push("map<" + field.keyType + ", " + field.type + ">");
|
||||
else if (field.repeated)
|
||||
sb.push("repeated", field.type);
|
||||
else if (syntax === 2 || field.parent.group)
|
||||
sb.push(field.required ? "required" : "optional", field.type);
|
||||
else
|
||||
sb.push(field.type);
|
||||
sb.push(underScore(field.name), "=", field.id);
|
||||
var opts = buildFieldOptions(field);
|
||||
if (opts)
|
||||
sb.push(opts);
|
||||
push(sb.join(" ") + ";");
|
||||
}
|
||||
|
||||
function buildGroup(field) {
|
||||
push(field.rule + " group " + field.resolvedType.name + " = " + field.id + " {");
|
||||
++indent;
|
||||
buildOptions(field.resolvedType);
|
||||
first = true;
|
||||
field.resolvedType.fieldsArray.forEach(function(field) {
|
||||
buildField(field);
|
||||
});
|
||||
--indent;
|
||||
push("}");
|
||||
}
|
||||
|
||||
function buildFieldOptions(field) {
|
||||
var keys;
|
||||
if (!field.options || !(keys = Object.keys(field.options)).length)
|
||||
return null;
|
||||
var sb = [];
|
||||
keys.forEach(function(key) {
|
||||
var val = field.options[key];
|
||||
var wireType = types.packed[field.resolvedType instanceof Enum ? "int32" : field.type];
|
||||
switch (key) {
|
||||
case "packed":
|
||||
val = Boolean(val);
|
||||
// skip when not packable or syntax default
|
||||
if (wireType === undefined || syntax === 3 === val)
|
||||
return;
|
||||
break;
|
||||
case "default":
|
||||
if (syntax === 3)
|
||||
return;
|
||||
// skip default (resolved) default values
|
||||
if (field.long && !util.longNeq(field.defaultValue, types.defaults[field.type]) || !field.long && field.defaultValue === types.defaults[field.type])
|
||||
return;
|
||||
// enum defaults specified as strings are type references and not enclosed in quotes
|
||||
if (field.resolvedType instanceof Enum)
|
||||
break;
|
||||
// otherwise fallthrough
|
||||
default:
|
||||
val = value(val);
|
||||
break;
|
||||
}
|
||||
sb.push(key + "=" + val);
|
||||
});
|
||||
return sb.length
|
||||
? "[" + sb.join(", ") + "]"
|
||||
: null;
|
||||
}
|
||||
|
||||
function consolidateExtends(nested) {
|
||||
var ext = {};
|
||||
nested = nested.filter(function(obj) {
|
||||
if (!(obj instanceof Field) || obj.extend === undefined)
|
||||
return true;
|
||||
(ext[obj.extend] || (ext[obj.extend] = [])).push(obj);
|
||||
return false;
|
||||
});
|
||||
Object.keys(ext).forEach(function(extend) {
|
||||
push("");
|
||||
push("extend " + extend + " {");
|
||||
++indent; first = true;
|
||||
ext[extend].forEach(function(field) {
|
||||
buildField(field, true);
|
||||
});
|
||||
--indent;
|
||||
push("}");
|
||||
});
|
||||
return {
|
||||
remaining: nested
|
||||
};
|
||||
}
|
||||
|
||||
function buildOneOf(oneof) {
|
||||
push("");
|
||||
push("oneof " + underScore(oneof.name) + " {");
|
||||
++indent; first = true;
|
||||
oneof.oneof.forEach(function(fieldName) {
|
||||
var field = oneof.parent.get(fieldName);
|
||||
if (first) {
|
||||
first = false;
|
||||
push("");
|
||||
}
|
||||
var opts = buildFieldOptions(field);
|
||||
push(field.type + " " + underScore(field.name) + " = " + field.id + (opts ? " " + opts : "") + ";");
|
||||
});
|
||||
--indent;
|
||||
push("}");
|
||||
}
|
||||
|
||||
function buildService(service) {
|
||||
push("service " + service.name + " {");
|
||||
++indent;
|
||||
service.methodsArray.forEach(build);
|
||||
consolidateExtends(service.nestedArray).remaining.forEach(build);
|
||||
--indent;
|
||||
push("}");
|
||||
}
|
||||
|
||||
function buildMethod(method) {
|
||||
push(method.type + " " + method.name + " (" + (method.requestStream ? "stream " : "") + method.requestType + ") returns (" + (method.responseStream ? "stream " : "") + method.responseType + ");");
|
||||
}
|
||||
|
||||
function buildOptions(object) {
|
||||
if (!object.options)
|
||||
return;
|
||||
first = true;
|
||||
Object.keys(object.options).forEach(function(key) {
|
||||
if (first) {
|
||||
first = false;
|
||||
push("");
|
||||
}
|
||||
var val = object.options[key];
|
||||
push("option " + key + " = " + JSON.stringify(val) + ";");
|
||||
});
|
||||
}
|
||||
10
express-server/node_modules/google-gax/node_modules/protobufjs/cli/targets/proto2.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
"use strict";
|
||||
module.exports = proto2_target;
|
||||
|
||||
var protobuf = require("../..");
|
||||
|
||||
proto2_target.description = "Protocol Buffers, Version 2";
|
||||
|
||||
function proto2_target(root, options, callback) {
|
||||
require("./proto")(root, protobuf.util.merge(options, { syntax: "proto2" }), callback);
|
||||
}
|
||||
10
express-server/node_modules/google-gax/node_modules/protobufjs/cli/targets/proto3.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
"use strict";
|
||||
module.exports = proto3_target;
|
||||
|
||||
var protobuf = require("../..");
|
||||
|
||||
proto3_target.description = "Protocol Buffers, Version 3";
|
||||
|
||||
function proto3_target(root, options, callback) {
|
||||
require("./proto")(root, protobuf.util.merge(options, { syntax: "proto3" }), callback);
|
||||
}
|
||||
29
express-server/node_modules/google-gax/node_modules/protobufjs/cli/targets/static-module.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
"use strict";
module.exports = static_module_target;

// - The default wrapper supports AMD, CommonJS and the global scope (as window.root), in this order.
// - You can specify a custom wrapper with the --wrap argument.
// - CommonJS modules depend on the minimal build for reduced package size with browserify.
// - AMD and global scope depend on the full library for now.

var util = require("../util");

var protobuf = require("../..");

static_module_target.description = "Static code without reflection as a module";

function static_module_target(root, options, callback) {
    require("./static")(root, options, function(err, output) {
        if (err) {
            callback(err);
            return;
        }
        try {
            output = util.wrap(output, protobuf.util.merge({ dependency: "protobufjs/minimal" }, options));
        } catch (e) {
            callback(e);
            return;
        }
        callback(null, output);
    });
}
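In other words, the static-module target reuses the static target's output and only adds the wrapping step, with protobufjs/minimal as the default dependency. A sketch of generating a CommonJS-wrapped module from a schema, assuming the same pbjs entry point as above (file names are placeholders):

var fs = require("fs");
var pbjs = require("protobufjs/cli/pbjs");

// --wrap picks a built-in wrapper (default, commonjs, amd, es6) or a custom template file.
pbjs.main(["--target", "static-module", "--wrap", "commonjs", "path/to/example.proto"], function(err, output) {
    if (err)
        throw err;
    fs.writeFileSync("example_pb.js", output); // static code wrapped around protobufjs/minimal
});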
702
express-server/node_modules/google-gax/node_modules/protobufjs/cli/targets/static.js
generated
vendored
Normal file
@@ -0,0 +1,702 @@
|
||||
"use strict";
|
||||
module.exports = static_target;
|
||||
|
||||
var protobuf = require("../.."),
|
||||
UglifyJS = require("uglify-js"),
|
||||
espree = require("espree"),
|
||||
escodegen = require("escodegen"),
|
||||
estraverse = require("estraverse");
|
||||
|
||||
var Type = protobuf.Type,
|
||||
Service = protobuf.Service,
|
||||
Enum = protobuf.Enum,
|
||||
Namespace = protobuf.Namespace,
|
||||
util = protobuf.util;
|
||||
|
||||
var out = [];
|
||||
var indent = 0;
|
||||
var config = {};
|
||||
|
||||
static_target.description = "Static code without reflection (non-functional on its own)";
|
||||
|
||||
function static_target(root, options, callback) {
|
||||
config = options;
|
||||
try {
|
||||
var aliases = [];
|
||||
if (config.decode)
|
||||
aliases.push("Reader");
|
||||
if (config.encode)
|
||||
aliases.push("Writer");
|
||||
aliases.push("util");
|
||||
if (aliases.length) {
|
||||
if (config.comments)
|
||||
push("// Common aliases");
|
||||
push((config.es6 ? "const " : "var ") + aliases.map(function(name) { return "$" + name + " = $protobuf." + name; }).join(", ") + ";");
|
||||
push("");
|
||||
}
|
||||
if (config.comments) {
|
||||
if (root.comment) {
|
||||
pushComment("@fileoverview " + root.comment);
|
||||
push("");
|
||||
}
|
||||
push("// Exported root namespace");
|
||||
}
|
||||
var rootProp = util.safeProp(config.root || "default");
|
||||
push((config.es6 ? "const" : "var") + " $root = $protobuf.roots" + rootProp + " || ($protobuf.roots" + rootProp + " = {});");
|
||||
buildNamespace(null, root);
|
||||
return callback(null, out.join("\n"));
|
||||
} catch (err) {
|
||||
return callback(err);
|
||||
} finally {
|
||||
out = [];
|
||||
indent = 0;
|
||||
config = {};
|
||||
}
|
||||
}
|
||||
|
||||
function push(line) {
|
||||
if (line === "")
|
||||
return out.push("");
|
||||
var ind = "";
|
||||
for (var i = 0; i < indent; ++i)
|
||||
ind += " ";
|
||||
return out.push(ind + line);
|
||||
}
|
||||
|
||||
function pushComment(lines) {
|
||||
if (!config.comments)
|
||||
return;
|
||||
var split = [];
|
||||
for (var i = 0; i < lines.length; ++i)
|
||||
if (lines[i] != null && lines[i].substring(0, 8) !== "@exclude")
|
||||
Array.prototype.push.apply(split, lines[i].split(/\r?\n/g));
|
||||
push("/**");
|
||||
split.forEach(function(line) {
|
||||
if (line === null)
|
||||
return;
|
||||
push(" * " + line.replace(/\*\//g, "* /"));
|
||||
});
|
||||
push(" */");
|
||||
}
|
||||
|
||||
function exportName(object, asInterface) {
|
||||
if (asInterface) {
|
||||
if (object.__interfaceName)
|
||||
return object.__interfaceName;
|
||||
} else if (object.__exportName)
|
||||
return object.__exportName;
|
||||
var parts = object.fullName.substring(1).split("."),
|
||||
i = 0;
|
||||
while (i < parts.length)
|
||||
parts[i] = escapeName(parts[i++]);
|
||||
if (asInterface)
|
||||
parts[i - 1] = "I" + parts[i - 1];
|
||||
return object[asInterface ? "__interfaceName" : "__exportName"] = parts.join(".");
|
||||
}
|
||||
|
||||
function escapeName(name) {
|
||||
if (!name)
|
||||
return "$root";
|
||||
return util.isReserved(name) ? name + "_" : name;
|
||||
}
|
||||
|
||||
function aOrAn(name) {
|
||||
return ((/^[hH](?:ou|on|ei)/.test(name) || /^[aeiouAEIOU][a-z]/.test(name)) && !/^us/i.test(name)
|
||||
? "an "
|
||||
: "a ") + name;
|
||||
}
|
||||
|
||||
function buildNamespace(ref, ns) {
|
||||
if (!ns)
|
||||
return;
|
||||
if (ns.name !== "") {
|
||||
push("");
|
||||
if (!ref && config.es6)
|
||||
push("export const " + escapeName(ns.name) + " = " + escapeName(ref) + "." + escapeName(ns.name) + " = (() => {");
|
||||
else
|
||||
push(escapeName(ref) + "." + escapeName(ns.name) + " = (function() {");
|
||||
++indent;
|
||||
}
|
||||
|
||||
if (ns instanceof Type) {
|
||||
buildType(undefined, ns);
|
||||
} else if (ns instanceof Service)
|
||||
buildService(undefined, ns);
|
||||
else if (ns.name !== "") {
|
||||
push("");
|
||||
pushComment([
|
||||
ns.comment || "Namespace " + ns.name + ".",
|
||||
ns.parent instanceof protobuf.Root ? "@exports " + escapeName(ns.name) : "@memberof " + exportName(ns.parent),
|
||||
"@namespace"
|
||||
]);
|
||||
push((config.es6 ? "const" : "var") + " " + escapeName(ns.name) + " = {};");
|
||||
}
|
||||
|
||||
ns.nestedArray.forEach(function(nested) {
|
||||
if (nested instanceof Enum)
|
||||
buildEnum(ns.name, nested);
|
||||
else if (nested instanceof Namespace)
|
||||
buildNamespace(ns.name, nested);
|
||||
});
|
||||
if (ns.name !== "") {
|
||||
push("");
|
||||
push("return " + escapeName(ns.name) + ";");
|
||||
--indent;
|
||||
push("})();");
|
||||
}
|
||||
}
|
||||
|
||||
var reduceableBlockStatements = {
|
||||
IfStatement: true,
|
||||
ForStatement: true,
|
||||
WhileStatement: true
|
||||
};
|
||||
|
||||
var shortVars = {
|
||||
"r": "reader",
|
||||
"w": "writer",
|
||||
"m": "message",
|
||||
"t": "tag",
|
||||
"l": "length",
|
||||
"c": "end", "c2": "end2",
|
||||
"k": "key",
|
||||
"ks": "keys", "ks2": "keys2",
|
||||
"e": "error",
|
||||
"f": "impl",
|
||||
"o": "options",
|
||||
"d": "object",
|
||||
"n": "long",
|
||||
"p": "properties"
|
||||
};
|
||||
|
||||
function beautifyCode(code) {
|
||||
// Add semicolons
|
||||
code = UglifyJS.minify(code, {
|
||||
compress: false,
|
||||
mangle: false,
|
||||
output: { beautify: true }
|
||||
}).code;
|
||||
// Properly beautify
|
||||
var ast = espree.parse(code);
|
||||
estraverse.replace(ast, {
|
||||
enter: function(node, parent) {
|
||||
// rename short vars
|
||||
if (node.type === "Identifier" && (parent.property !== node || parent.computed) && shortVars[node.name])
|
||||
return {
|
||||
"type": "Identifier",
|
||||
"name": shortVars[node.name]
|
||||
};
|
||||
// replace var with let if es6
|
||||
if (config.es6 && node.type === "VariableDeclaration" && node.kind === "var") {
|
||||
node.kind = "let";
|
||||
return undefined;
|
||||
}
|
||||
// remove braces around block statements with a single child
|
||||
if (node.type === "BlockStatement" && reduceableBlockStatements[parent.type] && node.body.length === 1)
|
||||
return node.body[0];
|
||||
return undefined;
|
||||
}
|
||||
});
|
||||
code = escodegen.generate(ast, {
|
||||
format: {
|
||||
newline: "\n",
|
||||
quotes: "double"
|
||||
}
|
||||
});
|
||||
// Add id, wireType comments
|
||||
if (config.comments)
|
||||
code = code.replace(/\.uint32\((\d+)\)/g, function($0, $1) {
|
||||
var id = $1 >>> 3,
|
||||
wireType = $1 & 7;
|
||||
return ".uint32(/* id " + id + ", wireType " + wireType + " =*/" + $1 + ")";
|
||||
});
|
||||
return code;
|
||||
}
|
||||
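// For illustration: with comments enabled, a generated call like w.uint32(26) becomes
// w.uint32(/* id 3, wireType 2 =*/26), since 26 >>> 3 === 3 (field id) and 26 & 7 === 2
// (length-delimited wire type).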
|
||||
var renameVars = {
|
||||
"Writer": "$Writer",
|
||||
"Reader": "$Reader",
|
||||
"util": "$util"
|
||||
};
|
||||
|
||||
function buildFunction(type, functionName, gen, scope) {
|
||||
var code = gen.toString(functionName)
|
||||
.replace(/((?!\.)types\[\d+])(\.values)/g, "$1"); // enums: use types[N] instead of reflected types[N].values
|
||||
|
||||
var ast = espree.parse(code);
|
||||
/* eslint-disable no-extra-parens */
|
||||
estraverse.replace(ast, {
|
||||
enter: function(node, parent) {
|
||||
// rename vars
|
||||
if (
|
||||
node.type === "Identifier" && renameVars[node.name]
|
||||
&& (
|
||||
(parent.type === "MemberExpression" && parent.object === node)
|
||||
|| (parent.type === "BinaryExpression" && parent.right === node)
|
||||
)
|
||||
)
|
||||
return {
|
||||
"type": "Identifier",
|
||||
"name": renameVars[node.name]
|
||||
};
|
||||
// replace this.ctor with the actual ctor
|
||||
if (
|
||||
node.type === "MemberExpression"
|
||||
&& node.object.type === "ThisExpression"
|
||||
&& node.property.type === "Identifier" && node.property.name === "ctor"
|
||||
)
|
||||
return {
|
||||
"type": "Identifier",
|
||||
"name": "$root" + type.fullName
|
||||
};
|
||||
// replace types[N] with the field's actual type
|
||||
if (
|
||||
node.type === "MemberExpression"
|
||||
&& node.object.type === "Identifier" && node.object.name === "types"
|
||||
&& node.property.type === "Literal"
|
||||
)
|
||||
return {
|
||||
"type": "Identifier",
|
||||
"name": "$root" + type.fieldsArray[node.property.value].resolvedType.fullName
|
||||
};
|
||||
return undefined;
|
||||
}
|
||||
});
|
||||
/* eslint-enable no-extra-parens */
|
||||
code = escodegen.generate(ast, {
|
||||
format: {
|
||||
newline: "\n",
|
||||
quotes: "double"
|
||||
}
|
||||
});
|
||||
|
||||
if (config.beautify)
|
||||
code = beautifyCode(code);
|
||||
|
||||
code = code.replace(/ {4}/g, "\t");
|
||||
|
||||
var hasScope = scope && Object.keys(scope).length,
|
||||
isCtor = functionName === type.name;
|
||||
|
||||
if (hasScope) // remove unused scope vars
|
||||
Object.keys(scope).forEach(function(key) {
|
||||
if (!new RegExp("\\b(" + key + ")\\b", "g").test(code))
|
||||
delete scope[key];
|
||||
});
|
||||
|
||||
var lines = code.split(/\n/g);
|
||||
if (isCtor) // constructor
|
||||
push(lines[0]);
|
||||
else if (hasScope) // enclose in an iife
|
||||
push(escapeName(type.name) + "." + escapeName(functionName) + " = (function(" + Object.keys(scope).map(escapeName).join(", ") + ") { return " + lines[0]);
|
||||
else
|
||||
push(escapeName(type.name) + "." + escapeName(functionName) + " = " + lines[0]);
|
||||
lines.slice(1, lines.length - 1).forEach(function(line) {
|
||||
var prev = indent;
|
||||
var i = 0;
|
||||
while (line.charAt(i++) === "\t")
|
||||
++indent;
|
||||
push(line.trim());
|
||||
indent = prev;
|
||||
});
|
||||
if (isCtor)
|
||||
push("}");
|
||||
else if (hasScope)
|
||||
push("};})(" + Object.keys(scope).map(function(key) { return scope[key]; }).join(", ") + ");");
|
||||
else
|
||||
push("};");
|
||||
}
|
||||
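// For illustration: when the generated function needs scope variables, the emitted code takes
// roughly this shape (names hypothetical):
//
//     SomeMessage.fromObject = (function(typeRef) { return function fromObject(d) {
//         ...
//     };})(typeRef);
//
// Constructors are pushed bare, and scope-free functions are assigned directly to the type.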
|
||||
function toJsType(field) {
|
||||
var type;
|
||||
|
||||
switch (field.type) {
|
||||
case "double":
|
||||
case "float":
|
||||
case "int32":
|
||||
case "uint32":
|
||||
case "sint32":
|
||||
case "fixed32":
|
||||
case "sfixed32":
|
||||
type = "number";
|
||||
break;
|
||||
case "int64":
|
||||
case "uint64":
|
||||
case "sint64":
|
||||
case "fixed64":
|
||||
case "sfixed64":
|
||||
type = config.forceLong ? "Long" : config.forceNumber ? "number" : "number|Long";
|
||||
break;
|
||||
case "bool":
|
||||
type = "boolean";
|
||||
break;
|
||||
case "string":
|
||||
type = "string";
|
||||
break;
|
||||
case "bytes":
|
||||
type = "Uint8Array";
|
||||
break;
|
||||
default:
|
||||
if (field.resolve().resolvedType)
|
||||
type = exportName(field.resolvedType, !(field.resolvedType instanceof protobuf.Enum || config.forceMessage));
|
||||
else
|
||||
type = "*"; // should not happen
|
||||
break;
|
||||
}
|
||||
if (field.map)
|
||||
return "Object.<string," + type + ">";
|
||||
if (field.repeated)
|
||||
return "Array.<" + type + ">";
|
||||
return type;
|
||||
}
|
||||
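// For illustration, a few mappings this switch produces for the JSDoc types:
//   int32             -> "number"
//   uint64            -> "Long", "number", or "number|Long" depending on forceLong/forceNumber
//   bytes             -> "Uint8Array"
//   repeated string   -> "Array.<string>"
//   map<string,int32> -> "Object.<string,number>"
//   message field     -> its export name, "I"-prefixed unless it resolves to an enum or forceMessage is set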
|
||||
function buildType(ref, type) {
|
||||
|
||||
if (config.comments) {
|
||||
var typeDef = [
|
||||
"Properties of " + aOrAn(type.name) + ".",
|
||||
type.parent instanceof protobuf.Root ? "@exports " + escapeName("I" + type.name) : "@memberof " + exportName(type.parent),
|
||||
"@interface " + escapeName("I" + type.name)
|
||||
];
|
||||
type.fieldsArray.forEach(function(field) {
|
||||
var prop = util.safeProp(field.name); // either .name or ["name"]
|
||||
prop = prop.substring(1, prop.charAt(0) === "[" ? prop.length - 1 : prop.length);
|
||||
var jsType = toJsType(field);
|
||||
if (field.optional)
|
||||
jsType = jsType + "|null";
|
||||
typeDef.push("@property {" + jsType + "} " + (field.optional ? "[" + prop + "]" : prop) + " " + (field.comment || type.name + " " + field.name));
|
||||
});
|
||||
push("");
|
||||
pushComment(typeDef);
|
||||
}
|
||||
|
||||
// constructor
|
||||
push("");
|
||||
pushComment([
|
||||
"Constructs a new " + type.name + ".",
|
||||
type.parent instanceof protobuf.Root ? "@exports " + escapeName(type.name) : "@memberof " + exportName(type.parent),
|
||||
"@classdesc " + (type.comment || "Represents " + aOrAn(type.name) + "."),
|
||||
config.comments ? "@implements " + escapeName("I" + type.name) : null,
|
||||
"@constructor",
|
||||
"@param {" + exportName(type, true) + "=} [" + (config.beautify ? "properties" : "p") + "] Properties to set"
|
||||
]);
|
||||
buildFunction(type, type.name, Type.generateConstructor(type));
|
||||
|
||||
// default values
|
||||
var firstField = true;
|
||||
type.fieldsArray.forEach(function(field) {
|
||||
field.resolve();
|
||||
var prop = util.safeProp(field.name);
|
||||
if (config.comments) {
|
||||
push("");
|
||||
var jsType = toJsType(field);
|
||||
if (field.optional && !field.map && !field.repeated && field.resolvedType instanceof Type)
|
||||
jsType = jsType + "|null|undefined";
|
||||
pushComment([
|
||||
field.comment || type.name + " " + field.name + ".",
|
||||
"@member {" + jsType + "} " + field.name,
|
||||
"@memberof " + exportName(type),
|
||||
"@instance"
|
||||
]);
|
||||
} else if (firstField) {
|
||||
push("");
|
||||
firstField = false;
|
||||
}
|
||||
if (field.repeated)
|
||||
push(escapeName(type.name) + ".prototype" + prop + " = $util.emptyArray;"); // overwritten in constructor
|
||||
else if (field.map)
|
||||
push(escapeName(type.name) + ".prototype" + prop + " = $util.emptyObject;"); // overwritten in constructor
|
||||
else if (field.long)
|
||||
push(escapeName(type.name) + ".prototype" + prop + " = $util.Long ? $util.Long.fromBits("
|
||||
+ JSON.stringify(field.typeDefault.low) + ","
|
||||
+ JSON.stringify(field.typeDefault.high) + ","
|
||||
+ JSON.stringify(field.typeDefault.unsigned)
|
||||
+ ") : " + field.typeDefault.toNumber(field.type.charAt(0) === "u") + ";");
|
||||
else if (field.bytes) {
|
||||
push(escapeName(type.name) + ".prototype" + prop + " = $util.newBuffer(" + JSON.stringify(Array.prototype.slice.call(field.typeDefault)) + ");");
|
||||
} else
|
||||
push(escapeName(type.name) + ".prototype" + prop + " = " + JSON.stringify(field.typeDefault) + ";");
|
||||
});
|
||||
|
||||
// virtual oneof fields
|
||||
var firstOneOf = true;
|
||||
type.oneofsArray.forEach(function(oneof) {
|
||||
if (firstOneOf) {
|
||||
firstOneOf = false;
|
||||
push("");
|
||||
if (config.comments)
|
||||
push("// OneOf field names bound to virtual getters and setters");
|
||||
push((config.es6 ? "let" : "var") + " $oneOfFields;");
|
||||
}
|
||||
oneof.resolve();
|
||||
push("");
|
||||
pushComment([
|
||||
oneof.comment || type.name + " " + oneof.name + ".",
|
||||
"@member {" + oneof.oneof.map(JSON.stringify).join("|") + "|undefined} " + escapeName(oneof.name),
|
||||
"@memberof " + exportName(type),
|
||||
"@instance"
|
||||
]);
|
||||
push("Object.defineProperty(" + escapeName(type.name) + ".prototype, " + JSON.stringify(oneof.name) +", {");
|
||||
++indent;
|
||||
push("get: $util.oneOfGetter($oneOfFields = [" + oneof.oneof.map(JSON.stringify).join(", ") + "]),");
|
||||
push("set: $util.oneOfSetter($oneOfFields)");
|
||||
--indent;
|
||||
push("});");
|
||||
});
|
||||
|
||||
if (config.create) {
|
||||
push("");
|
||||
pushComment([
|
||||
"Creates a new " + type.name + " instance using the specified properties.",
|
||||
"@function create",
|
||||
"@memberof " + exportName(type),
|
||||
"@static",
|
||||
"@param {" + exportName(type, true) + "=} [properties] Properties to set",
|
||||
"@returns {" + exportName(type) + "} " + type.name + " instance"
|
||||
]);
|
||||
push(escapeName(type.name) + ".create = function create(properties) {");
|
||||
++indent;
|
||||
push("return new " + escapeName(type.name) + "(properties);");
|
||||
--indent;
|
||||
push("};");
|
||||
}
|
||||
|
||||
if (config.encode) {
|
||||
push("");
|
||||
pushComment([
|
||||
"Encodes the specified " + type.name + " message. Does not implicitly {@link " + exportName(type) + ".verify|verify} messages.",
|
||||
"@function encode",
|
||||
"@memberof " + exportName(type),
|
||||
"@static",
|
||||
"@param {" + exportName(type, !config.forceMessage) + "} " + (config.beautify ? "message" : "m") + " " + type.name + " message or plain object to encode",
|
||||
"@param {$protobuf.Writer} [" + (config.beautify ? "writer" : "w") + "] Writer to encode to",
|
||||
"@returns {$protobuf.Writer} Writer"
|
||||
]);
|
||||
buildFunction(type, "encode", protobuf.encoder(type));
|
||||
|
||||
if (config.delimited) {
|
||||
push("");
|
||||
pushComment([
|
||||
"Encodes the specified " + type.name + " message, length delimited. Does not implicitly {@link " + exportName(type) + ".verify|verify} messages.",
|
||||
"@function encodeDelimited",
|
||||
"@memberof " + exportName(type),
|
||||
"@static",
|
||||
"@param {" + exportName(type, !config.forceMessage) + "} message " + type.name + " message or plain object to encode",
|
||||
"@param {$protobuf.Writer} [writer] Writer to encode to",
|
||||
"@returns {$protobuf.Writer} Writer"
|
||||
]);
|
||||
push(escapeName(type.name) + ".encodeDelimited = function encodeDelimited(message, writer) {");
|
||||
++indent;
|
||||
push("return this.encode(message, writer).ldelim();");
|
||||
--indent;
|
||||
push("};");
|
||||
}
|
||||
}
|
||||
|
||||
if (config.decode) {
|
||||
push("");
|
||||
pushComment([
|
||||
"Decodes " + aOrAn(type.name) + " message from the specified reader or buffer.",
|
||||
"@function decode",
|
||||
"@memberof " + exportName(type),
|
||||
"@static",
|
||||
"@param {$protobuf.Reader|Uint8Array} " + (config.beautify ? "reader" : "r") + " Reader or buffer to decode from",
|
||||
"@param {number} [" + (config.beautify ? "length" : "l") + "] Message length if known beforehand",
|
||||
"@returns {" + exportName(type) + "} " + type.name,
|
||||
"@throws {Error} If the payload is not a reader or valid buffer",
|
||||
"@throws {$protobuf.util.ProtocolError} If required fields are missing"
|
||||
]);
|
||||
buildFunction(type, "decode", protobuf.decoder(type));
|
||||
|
||||
if (config.delimited) {
|
||||
push("");
|
||||
pushComment([
|
||||
"Decodes " + aOrAn(type.name) + " message from the specified reader or buffer, length delimited.",
|
||||
"@function decodeDelimited",
|
||||
"@memberof " + exportName(type),
|
||||
"@static",
|
||||
"@param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from",
|
||||
"@returns {" + exportName(type) + "} " + type.name,
|
||||
"@throws {Error} If the payload is not a reader or valid buffer",
|
||||
"@throws {$protobuf.util.ProtocolError} If required fields are missing"
|
||||
]);
|
||||
push(escapeName(type.name) + ".decodeDelimited = function decodeDelimited(reader) {");
|
||||
++indent;
|
||||
push("if (!(reader instanceof $Reader))");
|
||||
++indent;
|
||||
push("reader = new $Reader(reader);");
|
||||
--indent;
|
||||
push("return this.decode(reader, reader.uint32());");
|
||||
--indent;
|
||||
push("};");
|
||||
}
|
||||
}
|
||||
|
||||
if (config.verify) {
|
||||
push("");
|
||||
pushComment([
|
||||
"Verifies " + aOrAn(type.name) + " message.",
|
||||
"@function verify",
|
||||
"@memberof " + exportName(type),
|
||||
"@static",
|
||||
"@param {Object.<string,*>} " + (config.beautify ? "message" : "m") + " Plain object to verify",
|
||||
"@returns {string|null} `null` if valid, otherwise the reason why it is not"
|
||||
]);
|
||||
buildFunction(type, "verify", protobuf.verifier(type));
|
||||
}
|
||||
|
||||
if (config.convert) {
|
||||
push("");
|
||||
pushComment([
|
||||
"Creates " + aOrAn(type.name) + " message from a plain object. Also converts values to their respective internal types.",
|
||||
"@function fromObject",
|
||||
"@memberof " + exportName(type),
|
||||
"@static",
|
||||
"@param {Object.<string,*>} " + (config.beautify ? "object" : "d") + " Plain object",
|
||||
"@returns {" + exportName(type) + "} " + type.name
|
||||
]);
|
||||
buildFunction(type, "fromObject", protobuf.converter.fromObject(type));
|
||||
|
||||
push("");
|
||||
pushComment([
|
||||
"Creates a plain object from " + aOrAn(type.name) + " message. Also converts values to other types if specified.",
|
||||
"@function toObject",
|
||||
"@memberof " + exportName(type),
|
||||
"@static",
|
||||
"@param {" + exportName(type) + "} " + (config.beautify ? "message" : "m") + " " + type.name,
|
||||
"@param {$protobuf.IConversionOptions} [" + (config.beautify ? "options" : "o") + "] Conversion options",
|
||||
"@returns {Object.<string,*>} Plain object"
|
||||
]);
|
||||
buildFunction(type, "toObject", protobuf.converter.toObject(type));
|
||||
|
||||
push("");
|
||||
pushComment([
|
||||
"Converts this " + type.name + " to JSON.",
|
||||
"@function toJSON",
|
||||
"@memberof " + exportName(type),
|
||||
"@instance",
|
||||
"@returns {Object.<string,*>} JSON object"
|
||||
]);
|
||||
push(escapeName(type.name) + ".prototype.toJSON = function toJSON() {");
|
||||
++indent;
|
||||
push("return this.constructor.toObject(this, $protobuf.util.toJSONOptions);");
|
||||
--indent;
|
||||
push("};");
|
||||
}
|
||||
}
|
||||
|
||||
function buildService(ref, service) {
|
||||
|
||||
push("");
|
||||
pushComment([
|
||||
"Constructs a new " + service.name + " service.",
|
||||
service.parent instanceof protobuf.Root ? "@exports " + escapeName(service.name) : "@memberof " + exportName(service.parent),
|
||||
"@classdesc " + (service.comment || "Represents " + aOrAn(service.name)),
|
||||
"@extends $protobuf.rpc.Service",
|
||||
"@constructor",
|
||||
"@param {$protobuf.RPCImpl} rpcImpl RPC implementation",
|
||||
"@param {boolean} [requestDelimited=false] Whether requests are length-delimited",
|
||||
"@param {boolean} [responseDelimited=false] Whether responses are length-delimited"
|
||||
]);
|
||||
push("function " + escapeName(service.name) + "(rpcImpl, requestDelimited, responseDelimited) {");
|
||||
++indent;
|
||||
push("$protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited);");
|
||||
--indent;
|
||||
push("}");
|
||||
push("");
|
||||
push("(" + escapeName(service.name) + ".prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = " + escapeName(service.name) + ";");
|
||||
|
||||
if (config.create) {
|
||||
push("");
|
||||
pushComment([
|
||||
"Creates new " + service.name + " service using the specified rpc implementation.",
|
||||
"@function create",
|
||||
"@memberof " + exportName(service),
|
||||
"@static",
|
||||
"@param {$protobuf.RPCImpl} rpcImpl RPC implementation",
|
||||
"@param {boolean} [requestDelimited=false] Whether requests are length-delimited",
|
||||
"@param {boolean} [responseDelimited=false] Whether responses are length-delimited",
|
||||
"@returns {" + escapeName(service.name) + "} RPC service. Useful where requests and/or responses are streamed."
|
||||
]);
|
||||
push(escapeName(service.name) + ".create = function create(rpcImpl, requestDelimited, responseDelimited) {");
|
||||
++indent;
|
||||
push("return new this(rpcImpl, requestDelimited, responseDelimited);");
|
||||
--indent;
|
||||
push("};");
|
||||
}
|
||||
|
||||
service.methodsArray.forEach(function(method) {
|
||||
method.resolve();
|
||||
var lcName = protobuf.util.lcFirst(method.name),
|
||||
cbName = escapeName(method.name + "Callback");
|
||||
push("");
|
||||
pushComment([
|
||||
"Callback as used by {@link " + exportName(service) + "#" + escapeName(lcName) + "}.",
|
||||
// This is a more specialized version of protobuf.rpc.ServiceCallback
|
||||
"@memberof " + exportName(service),
|
||||
"@typedef " + cbName,
|
||||
"@type {function}",
|
||||
"@param {Error|null} error Error, if any",
|
||||
"@param {" + exportName(method.resolvedResponseType) + "} [response] " + method.resolvedResponseType.name
|
||||
]);
|
||||
push("");
|
||||
pushComment([
|
||||
method.comment || "Calls " + method.name + ".",
|
||||
"@function " + lcName,
|
||||
"@memberof " + exportName(service),
|
||||
"@instance",
|
||||
"@param {" + exportName(method.resolvedRequestType, !config.forceMessage) + "} request " + method.resolvedRequestType.name + " message or plain object",
|
||||
"@param {" + exportName(service) + "." + cbName + "} callback Node-style callback called with the error, if any, and " + method.resolvedResponseType.name,
|
||||
"@returns {undefined}",
|
||||
"@variation 1"
|
||||
]);
|
||||
push("Object.defineProperty(" + escapeName(service.name) + ".prototype" + util.safeProp(lcName) + " = function " + escapeName(lcName) + "(request, callback) {");
|
||||
++indent;
|
||||
push("return this.rpcCall(" + escapeName(lcName) + ", $root." + exportName(method.resolvedRequestType) + ", $root." + exportName(method.resolvedResponseType) + ", request, callback);");
|
||||
--indent;
|
||||
push("}, \"name\", { value: " + JSON.stringify(method.name) + " });");
|
||||
if (config.comments)
|
||||
push("");
|
||||
pushComment([
|
||||
method.comment || "Calls " + method.name + ".",
|
||||
"@function " + lcName,
|
||||
"@memberof " + exportName(service),
|
||||
"@instance",
|
||||
"@param {" + exportName(method.resolvedRequestType, !config.forceMessage) + "} request " + method.resolvedRequestType.name + " message or plain object",
|
||||
"@returns {Promise<" + exportName(method.resolvedResponseType) + ">} Promise",
|
||||
"@variation 2"
|
||||
]);
|
||||
});
|
||||
}
|
||||
|
||||
function buildEnum(ref, enm) {
|
||||
|
||||
push("");
|
||||
var comment = [
|
||||
enm.comment || enm.name + " enum.",
|
||||
enm.parent instanceof protobuf.Root ? "@exports " + escapeName(enm.name) : "@name " + exportName(enm),
|
||||
config.forceEnumString ? "@enum {number}" : "@enum {string}",
|
||||
];
|
||||
Object.keys(enm.values).forEach(function(key) {
|
||||
var val = config.forceEnumString ? key : enm.values[key];
|
||||
comment.push((config.forceEnumString ? "@property {string} " : "@property {number} ") + key + "=" + val + " " + (enm.comments[key] || key + " value"));
|
||||
});
|
||||
pushComment(comment);
|
||||
push(escapeName(ref) + "." + escapeName(enm.name) + " = (function() {");
|
||||
++indent;
|
||||
push((config.es6 ? "const" : "var") + " valuesById = {}, values = Object.create(valuesById);");
|
||||
var aliased = [];
|
||||
Object.keys(enm.values).forEach(function(key) {
|
||||
var valueId = enm.values[key];
|
||||
var val = config.forceEnumString ? JSON.stringify(key) : valueId;
|
||||
if (aliased.indexOf(valueId) > -1)
|
||||
push("values[" + JSON.stringify(key) + "] = " + val + ";");
|
||||
else {
|
||||
push("values[valuesById[" + valueId + "] = " + JSON.stringify(key) + "] = " + val + ";");
|
||||
aliased.push(valueId);
|
||||
}
|
||||
});
|
||||
push("return values;");
|
||||
--indent;
|
||||
push("})();");
|
||||
}
|
||||
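buildEnum thus emits a self-executing block whose result maps names to ids and, via the valuesById prototype, ids back to names, while aliased ids only get the forward mapping. A sketch of the generated shape for a hypothetical top-level enum Status { UNKNOWN = 0; OK = 1; DONE = 1 (alias); } with forceEnumString disabled:

$root.Status = (function() {
    var valuesById = {}, values = Object.create(valuesById);
    values[valuesById[0] = "UNKNOWN"] = 0;
    values[valuesById[1] = "OK"] = 1;
    values["DONE"] = 1; // id 1 already has a name, so only the name -> id mapping is added
    return values;
})();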
183
express-server/node_modules/google-gax/node_modules/protobufjs/cli/util.js
generated
vendored
Normal file
@@ -0,0 +1,183 @@
|
||||
"use strict";
|
||||
var fs = require("fs"),
|
||||
path = require("path"),
|
||||
child_process = require("child_process");
|
||||
|
||||
var semver;
|
||||
|
||||
try {
|
||||
// installed as a peer dependency
|
||||
require.resolve("protobufjs");
|
||||
exports.pathToProtobufJs = "protobufjs";
|
||||
} catch (e) {
|
||||
// local development, i.e. forked from github
|
||||
exports.pathToProtobufJs = "..";
|
||||
}
|
||||
|
||||
var protobuf = require(exports.pathToProtobufJs);
|
||||
|
||||
function basenameCompare(a, b) {
|
||||
var aa = String(a).replace(/\.\w+$/, "").split(/(-?\d*\.?\d+)/g),
|
||||
bb = String(b).replace(/\.\w+$/, "").split(/(-?\d*\.?\d+)/g);
|
||||
for (var i = 0, k = Math.min(aa.length, bb.length); i < k; i++) {
|
||||
var x = parseFloat(aa[i]) || aa[i].toLowerCase(),
|
||||
y = parseFloat(bb[i]) || bb[i].toLowerCase();
|
||||
if (x < y)
|
||||
return -1;
|
||||
if (x > y)
|
||||
return 1;
|
||||
}
|
||||
return a.length < b.length ? -1 : 0;
|
||||
}
|
||||
|
||||
exports.requireAll = function requireAll(dirname) {
|
||||
dirname = path.join(__dirname, dirname);
|
||||
var files = fs.readdirSync(dirname).sort(basenameCompare),
|
||||
all = {};
|
||||
files.forEach(function(file) {
|
||||
var basename = path.basename(file, ".js"),
|
||||
extname = path.extname(file);
|
||||
if (extname === ".js")
|
||||
all[basename] = require(path.join(dirname, file));
|
||||
});
|
||||
return all;
|
||||
};
|
||||
|
||||
exports.traverse = function traverse(current, fn) {
|
||||
fn(current);
|
||||
if (current.fieldsArray)
|
||||
current.fieldsArray.forEach(function(field) {
|
||||
traverse(field, fn);
|
||||
});
|
||||
if (current.oneofsArray)
|
||||
current.oneofsArray.forEach(function(oneof) {
|
||||
traverse(oneof, fn);
|
||||
});
|
||||
if (current.methodsArray)
|
||||
current.methodsArray.forEach(function(method) {
|
||||
traverse(method, fn);
|
||||
});
|
||||
if (current.nestedArray)
|
||||
current.nestedArray.forEach(function(nested) {
|
||||
traverse(nested, fn);
|
||||
});
|
||||
};
|
||||
|
||||
exports.traverseResolved = function traverseResolved(current, fn) {
|
||||
fn(current);
|
||||
if (current.resolvedType)
|
||||
traverseResolved(current.resolvedType, fn);
|
||||
if (current.resolvedKeyType)
|
||||
traverseResolved(current.resolvedKeyType, fn);
|
||||
if (current.resolvedRequestType)
|
||||
traverseResolved(current.resolvedRequestType, fn);
|
||||
if (current.resolvedResponseType)
|
||||
traverseResolved(current.resolvedResponseType, fn);
|
||||
};
|
||||
|
||||
exports.inspect = function inspect(object, indent) {
|
||||
if (!object)
|
||||
return "";
|
||||
var chalk = require("chalk");
|
||||
var sb = [];
|
||||
if (!indent)
|
||||
indent = "";
|
||||
var ind = indent ? indent.substring(0, indent.length - 2) + "└ " : "";
|
||||
sb.push(
|
||||
ind + chalk.bold(object.toString()) + (object.visible ? " (visible)" : ""),
|
||||
indent + chalk.gray("parent: ") + object.parent
|
||||
);
|
||||
if (object instanceof protobuf.Field) {
|
||||
if (object.extend !== undefined)
|
||||
sb.push(indent + chalk.gray("extend: ") + object.extend);
|
||||
if (object.partOf)
|
||||
sb.push(indent + chalk.gray("oneof : ") + object.oneof);
|
||||
}
|
||||
sb.push("");
|
||||
if (object.fieldsArray)
|
||||
object.fieldsArray.forEach(function(field) {
|
||||
sb.push(inspect(field, indent + " "));
|
||||
});
|
||||
if (object.oneofsArray)
|
||||
object.oneofsArray.forEach(function(oneof) {
|
||||
sb.push(inspect(oneof, indent + " "));
|
||||
});
|
||||
if (object.methodsArray)
|
||||
object.methodsArray.forEach(function(service) {
|
||||
sb.push(inspect(service, indent + " "));
|
||||
});
|
||||
if (object.nestedArray)
|
||||
object.nestedArray.forEach(function(nested) {
|
||||
sb.push(inspect(nested, indent + " "));
|
||||
});
|
||||
return sb.join("\n");
|
||||
};
|
||||
|
||||
function modExists(name, version) {
|
||||
for (var i = 0; i < module.paths.length; ++i) {
|
||||
try {
|
||||
var pkg = JSON.parse(fs.readFileSync(path.join(module.paths[i], name, "package.json")));
|
||||
return semver
|
||||
? semver.satisfies(pkg.version, version)
|
||||
: parseInt(pkg.version, 10) === parseInt(version.replace(/^[\^~]/, ""), 10); // used for semver only
|
||||
} catch (e) {/**/}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function modInstall(install) {
|
||||
child_process.execSync("npm --silent install " + (typeof install === "string" ? install : install.join(" ")), {
|
||||
cwd: __dirname,
|
||||
stdio: "ignore"
|
||||
});
|
||||
}
|
||||
|
||||
exports.setup = function() {
|
||||
var pkg = require(path.join(__dirname, "..", "package.json"));
|
||||
var version = pkg.dependencies["semver"] || pkg.devDependencies["semver"];
|
||||
if (!modExists("semver", version)) {
|
||||
process.stderr.write("installing semver@" + version + "\n");
|
||||
modInstall("semver@" + version);
|
||||
}
|
||||
semver = require("semver"); // used from now on for version comparison
|
||||
var install = [];
|
||||
pkg.cliDependencies.forEach(function(name) {
|
||||
if (name === "semver")
|
||||
return;
|
||||
version = pkg.dependencies[name] || pkg.devDependencies[name];
|
||||
if (!modExists(name, version)) {
|
||||
process.stderr.write("installing " + name + "@" + version + "\n");
|
||||
install.push(name + "@" + version);
|
||||
}
|
||||
});
|
||||
require("../scripts/postinstall"); // emit postinstall warning, if any
|
||||
if (!install.length)
|
||||
return;
|
||||
modInstall(install);
|
||||
};
|
||||
|
||||
exports.wrap = function(OUTPUT, options) {
|
||||
var name = options.wrap || "default";
|
||||
var wrap;
|
||||
try {
|
||||
// try built-in wrappers first
|
||||
wrap = fs.readFileSync(path.join(__dirname, "wrappers", name + ".js")).toString("utf8");
|
||||
} catch (e) {
|
||||
// otherwise fetch the custom one
|
||||
wrap = fs.readFileSync(path.resolve(process.cwd(), name)).toString("utf8");
|
||||
}
|
||||
wrap = wrap.replace(/\$DEPENDENCY/g, JSON.stringify(options.dependency || "protobufjs"));
|
||||
wrap = wrap.replace(/( *)\$OUTPUT;/, function($0, $1) {
|
||||
return $1.length ? OUTPUT.replace(/^/mg, $1) : OUTPUT;
|
||||
});
|
||||
if (options.lint !== "")
|
||||
wrap = "/*" + options.lint + "*/\n" + wrap;
|
||||
return wrap.replace(/\r?\n/g, "\n");
|
||||
};
|
||||
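// For illustration, a custom template passed via options.wrap is a plain file built around the
// two placeholders substituted above (the surrounding module shape here is hypothetical):
//
//     var $protobuf = require($DEPENDENCY);   // becomes require("protobufjs/minimal") etc.
//     $OUTPUT;                                // replaced by the generated code, re-indented
//     module.exports = $root;
//
// $DEPENDENCY is JSON.stringify'd, and the leading whitespace in front of $OUTPUT; is
// reapplied to every line of the inserted code.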
|
||||
exports.pad = function(str, len, l) {
|
||||
while (str.length < len)
|
||||
str = l ? str + " " : " " + str;
|
||||
return str;
|
||||
};
Some files were not shown because too many files have changed in this diff.