Firebase Update

Author: Lukas Nowy
Date:   2018-12-22 23:30:39 +01:00
Parent: befb44764d
Commit: acffe619b3

11523 changed files with 1614327 additions and 930246 deletions

express-server/node_modules/@grpc/grpc-js/README.md (generated, vendored)

@@ -0,0 +1,31 @@
# Pure JavaScript gRPC Client
**Note: This is a beta-level release. Some APIs may not yet be present and there may be bugs. Please report any that you encounter.**
## Installation
Node 10 is recommended. The exact set of compatible Node versions can be found in the `engines` field of the `package.json` file.
```sh
npm install @grpc/grpc-js
```
## Features
- Unary and streaming calls
- Cancellation
- Deadlines
- TLS channel credentials
- Call credentials (for auth)
- Simple reconnection
- Channel API
This library does not directly handle `.proto` files. To use `.proto` files with this library we recommend using the `@grpc/proto-loader` package.
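
For example, a typical setup loads a `.proto` file with `@grpc/proto-loader` and hands the resulting package definition to the gRPC library. The sketch below is illustrative only: the file path and loader options are placeholders, and the `loadPackageDefinition` helper is assumed to be available from the gRPC library in use rather than documented by this README.

```js
// Illustrative sketch: paths and options are placeholders; loadPackageDefinition is assumed.
const grpc = require('@grpc/grpc-js');
const protoLoader = require('@grpc/proto-loader');

const packageDefinition = protoLoader.loadSync('path/to/service.proto', {
  keepCase: true,
  longs: String,
  enums: String,
  defaults: true,
  oneofs: true
});
const proto = grpc.loadPackageDefinition(packageDefinition);
// proto.<package>.<Service> can then be used as a client constructor.
```
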
## Some Notes on API Guarantees
The public API of this library follows semantic versioning, with some caveats:
- Some methods are prefixed with an underscore. These methods are internal and should not be considered part of the public API.
- The class `Call` is only exposed due to limitations of TypeScript. It should not be considered part of the public API.
- In general, any API that is exposed by this library but is not exposed by the `grpc` library is likely an error and should not be considered part of the public API.

@@ -0,0 +1,16 @@
import { Call } from './call-stream';
import { Http2Channel } from './channel';
import { BaseFilter, Filter, FilterFactory } from './filter';
import { Metadata } from './metadata';
export declare class CallCredentialsFilter extends BaseFilter implements Filter {
private readonly channel;
private readonly stream;
private serviceUrl;
constructor(channel: Http2Channel, stream: Call);
sendMetadata(metadata: Promise<Metadata>): Promise<Metadata>;
}
export declare class CallCredentialsFilterFactory implements FilterFactory<CallCredentialsFilter> {
private readonly channel;
constructor(channel: Http2Channel);
createFilter(callStream: Call): CallCredentialsFilter;
}

@@ -0,0 +1,53 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
const filter_1 = require("./filter");
class CallCredentialsFilter extends filter_1.BaseFilter {
constructor(channel, stream) {
super();
this.channel = channel;
this.stream = stream;
const splitPath = stream.getMethod().split('/');
let serviceName = '';
/* The standard path format is "/{serviceName}/{methodName}", so if we split
* by '/', the first item should be empty and the second should be the
* service name */
if (splitPath.length >= 2) {
serviceName = splitPath[1];
}
/* Currently, call credentials are only allowed on HTTPS connections, so we
* can assume that the scheme is "https" */
this.serviceUrl = `https://${stream.getHost()}/${serviceName}`;
}
sendMetadata(metadata) {
return __awaiter(this, void 0, void 0, function* () {
const channelCredentials = this.channel.credentials._getCallCredentials();
const streamCredentials = this.stream.getCredentials();
const credentials = channelCredentials.compose(streamCredentials);
const credsMetadata = credentials.generateMetadata({ service_url: this.serviceUrl });
const resultMetadata = yield metadata;
resultMetadata.merge(yield credsMetadata);
return resultMetadata;
});
}
}
exports.CallCredentialsFilter = CallCredentialsFilter;
class CallCredentialsFilterFactory {
constructor(channel) {
this.channel = channel;
}
createFilter(callStream) {
return new CallCredentialsFilter(this.channel, callStream);
}
}
exports.CallCredentialsFilterFactory = CallCredentialsFilterFactory;
//# sourceMappingURL=call-credentials-filter.js.map
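
As a side note, the service URL construction in the constructor above can be illustrated in isolation. This is a minimal sketch using a hypothetical standalone function (not part of the package), showing how the method path and host map to the `service_url` handed to the call credentials:

```js
// Hypothetical helper mirroring the constructor logic above, for illustration only.
function deriveServiceUrl(host, method) {
  // The standard path format is "/{serviceName}/{methodName}", so splitting on '/'
  // yields ['', serviceName, methodName].
  const splitPath = method.split('/');
  const serviceName = splitPath.length >= 2 ? splitPath[1] : '';
  // Call credentials are only allowed on HTTPS connections, hence the fixed "https" scheme.
  return `https://${host}/${serviceName}`;
}

console.log(deriveServiceUrl('example.com:443', '/helloworld.Greeter/SayHello'));
// -> https://example.com:443/helloworld.Greeter
```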

@@ -0,0 +1,32 @@
import { Metadata } from './metadata';
export declare type CallMetadataOptions = {
service_url: string;
};
export declare type CallMetadataGenerator = (options: CallMetadataOptions, cb: (err: Error | null, metadata?: Metadata) => void) => void;
/**
* A class that represents a generic method of adding authentication-related
* metadata on a per-request basis.
*/
export declare abstract class CallCredentials {
/**
* Asynchronously generates a new Metadata object.
* @param options Options used in generating the Metadata object.
*/
abstract generateMetadata(options: CallMetadataOptions): Promise<Metadata>;
/**
* Creates a new CallCredentials object from properties of both this and
* another CallCredentials object. This object's metadata generator will be
* called first.
* @param callCredentials The other CallCredentials object.
*/
abstract compose(callCredentials: CallCredentials): CallCredentials;
/**
* Creates a new CallCredentials object from a given function that generates
* Metadata objects.
* @param metadataGenerator A function that accepts a set of options, and
* generates a Metadata object based on these options, which is passed back
* to the caller via a supplied (err, metadata) callback.
*/
static createFromMetadataGenerator(metadataGenerator: CallMetadataGenerator): CallCredentials;
static createEmpty(): CallCredentials;
}
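
A brief usage sketch of the declarations above, assuming the in-package require paths used by the compiled files in this diff and a hypothetical `getToken()` helper: it builds call credentials from a metadata generator and composes two of them, so the first generator runs before the second.

```js
// Sketch only: require paths follow this diff's layout; getToken() is hypothetical.
const { CallCredentials } = require('./call-credentials');
const { Metadata } = require('./metadata');

const authCreds = CallCredentials.createFromMetadataGenerator((options, callback) => {
  const metadata = new Metadata();
  // options.service_url identifies the called service, e.g. "https://host/pkg.Service".
  metadata.add('authorization', `Bearer ${getToken(options.service_url)}`);
  callback(null, metadata);
});

const tracingCreds = CallCredentials.createFromMetadataGenerator((options, callback) => {
  const metadata = new Metadata();
  metadata.add('x-trace-id', 'abc123');
  callback(null, metadata);
});

// authCreds' generator is invoked first, then tracingCreds', and the results are merged.
const combined = authCreds.compose(tracingCreds);
```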

@@ -0,0 +1,81 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
const lodash_1 = require("lodash");
const metadata_1 = require("./metadata");
/**
* A class that represents a generic method of adding authentication-related
* metadata on a per-request basis.
*/
class CallCredentials {
/**
* Creates a new CallCredentials object from a given function that generates
* Metadata objects.
* @param metadataGenerator A function that accepts a set of options, and
* generates a Metadata object based on these options, which is passed back
* to the caller via a supplied (err, metadata) callback.
*/
static createFromMetadataGenerator(metadataGenerator) {
return new SingleCallCredentials(metadataGenerator);
}
static createEmpty() {
return new EmptyCallCredentials();
}
}
exports.CallCredentials = CallCredentials;
class ComposedCallCredentials extends CallCredentials {
constructor(creds) {
super();
this.creds = creds;
}
generateMetadata(options) {
return __awaiter(this, void 0, void 0, function* () {
const base = new metadata_1.Metadata();
const generated = yield Promise.all(lodash_1.map(this.creds, (cred) => cred.generateMetadata(options)));
for (const gen of generated) {
base.merge(gen);
}
return base;
});
}
compose(other) {
return new ComposedCallCredentials(this.creds.concat([other]));
}
}
class SingleCallCredentials extends CallCredentials {
constructor(metadataGenerator) {
super();
this.metadataGenerator = metadataGenerator;
}
generateMetadata(options) {
return new Promise((resolve, reject) => {
this.metadataGenerator(options, (err, metadata) => {
if (metadata !== undefined) {
resolve(metadata);
}
else {
reject(err);
}
});
});
}
compose(other) {
return new ComposedCallCredentials([this, other]);
}
}
class EmptyCallCredentials extends CallCredentials {
generateMetadata(options) {
return Promise.resolve(new metadata_1.Metadata());
}
compose(other) {
return other;
}
}
//# sourceMappingURL=call-credentials.js.map

@@ -0,0 +1,100 @@
/// <reference types="node" />
import * as http2 from 'http2';
import { Duplex } from 'stream';
import { CallCredentials } from './call-credentials';
import { Http2Channel } from './channel';
import { Status } from './constants';
import { EmitterAugmentation1 } from './events';
import { Filter } from './filter';
import { FilterStackFactory } from './filter-stack';
import { Metadata } from './metadata';
import { ObjectDuplex, WriteCallback } from './object-stream';
export declare type Deadline = Date | number;
export interface CallStreamOptions {
deadline: Deadline;
flags: number;
host: string;
parentCall: Call | null;
}
export declare type PartialCallStreamOptions = Partial<CallStreamOptions>;
export interface StatusObject {
code: Status;
details: string;
metadata: Metadata;
}
export declare const enum WriteFlags {
BufferHint = 1,
NoCompress = 2,
WriteThrough = 4,
}
export interface WriteObject {
message: Buffer;
flags?: number;
}
/**
* This interface represents a duplex stream associated with a single gRPC call.
*/
export declare type Call = {
cancelWithStatus(status: Status, details: string): void;
getPeer(): string;
sendMetadata(metadata: Metadata): void;
getDeadline(): Deadline;
getCredentials(): CallCredentials;
setCredentials(credentials: CallCredentials): void;
getStatus(): StatusObject | null;
getMethod(): string;
getHost(): string;
} & EmitterAugmentation1<'metadata', Metadata> & EmitterAugmentation1<'status', StatusObject> & ObjectDuplex<WriteObject, Buffer>;
export declare class Http2CallStream extends Duplex implements Call {
private readonly methodName;
private readonly channel;
private readonly options;
credentials: CallCredentials;
filterStack: Filter;
private http2Stream;
private pendingRead;
private pendingWrite;
private pendingWriteCallback;
private pendingFinalCallback;
private readState;
private readCompressFlag;
private readPartialSize;
private readSizeRemaining;
private readMessageSize;
private readPartialMessage;
private readMessageRemaining;
private isReadFilterPending;
private canPush;
private unpushedReadMessages;
private unfilteredReadMessages;
private mappedStatusCode;
private handlingHeaders;
private handlingTrailers;
private finalStatus;
constructor(methodName: string, channel: Http2Channel, options: CallStreamOptions, filterStackFactory: FilterStackFactory);
/**
* On first call, emits a 'status' event with the given StatusObject.
* Subsequent calls are no-ops.
* @param status The status of the call.
*/
private endCall(status);
private handleFilterError(error);
private handleFilteredRead(message);
private filterReceivedMessage(framedMessage);
private tryPush(messageBytes);
private handleTrailers(headers);
attachHttp2Stream(stream: http2.ClientHttp2Stream): void;
sendMetadata(metadata: Metadata): void;
private destroyHttp2Stream();
cancelWithStatus(status: Status, details: string): void;
getDeadline(): Deadline;
getCredentials(): CallCredentials;
setCredentials(credentials: CallCredentials): void;
getStatus(): StatusObject | null;
getPeer(): string;
getMethod(): string;
getHost(): string;
_read(size: number): void;
_write(chunk: WriteObject, encoding: string, cb: WriteCallback): void;
_final(cb: Function): void;
}
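
As an aside, `WriteFlags` is a const enum of bit flags, so the optional `flags` field of a `WriteObject` is a bitwise combination of its values. A small illustration follows; the literal numbers stand in for the const enum (which is inlined at compile time), and the message bytes are assumed to be already serialized, with framing applied elsewhere by filters not shown in this diff.

```js
// Illustration only: these literals mirror the WriteFlags const enum declared above.
const WriteFlags = { BufferHint: 1, NoCompress: 2, WriteThrough: 4 };

const writeObj = {
  message: Buffer.from('serialized request bytes'),    // assumed already serialized, not framed
  flags: WriteFlags.BufferHint | WriteFlags.NoCompress // === 3
};
```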

@@ -0,0 +1,402 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
const http2 = require("http2");
const stream_1 = require("stream");
const call_credentials_1 = require("./call-credentials");
const constants_1 = require("./constants");
const metadata_1 = require("./metadata");
const { HTTP2_HEADER_STATUS, HTTP2_HEADER_CONTENT_TYPE, NGHTTP2_CANCEL } = http2.constants;
var ReadState;
(function (ReadState) {
ReadState[ReadState["NO_DATA"] = 0] = "NO_DATA";
ReadState[ReadState["READING_SIZE"] = 1] = "READING_SIZE";
ReadState[ReadState["READING_MESSAGE"] = 2] = "READING_MESSAGE";
})(ReadState || (ReadState = {}));
class Http2CallStream extends stream_1.Duplex {
constructor(methodName, channel, options, filterStackFactory) {
super({ objectMode: true });
this.methodName = methodName;
this.channel = channel;
this.options = options;
this.credentials = call_credentials_1.CallCredentials.createEmpty();
this.http2Stream = null;
this.pendingRead = false;
this.pendingWrite = null;
this.pendingWriteCallback = null;
this.pendingFinalCallback = null;
this.readState = ReadState.NO_DATA;
this.readCompressFlag = Buffer.alloc(1);
this.readPartialSize = Buffer.alloc(4);
this.readSizeRemaining = 4;
this.readMessageSize = 0;
this.readPartialMessage = [];
this.readMessageRemaining = 0;
this.isReadFilterPending = false;
this.canPush = false;
this.unpushedReadMessages = [];
this.unfilteredReadMessages = [];
// Status code mapped from :status. To be used if grpc-status is not received
this.mappedStatusCode = constants_1.Status.UNKNOWN;
// Promise objects that are re-assigned to resolving promises when headers
// or trailers received. Processing headers/trailers is asynchronous, so we
// can use these objects to await their completion. This helps us establish
// order of precedence when obtaining the status of the call.
this.handlingHeaders = Promise.resolve();
this.handlingTrailers = Promise.resolve();
// This is populated (non-null) if and only if the call has ended
this.finalStatus = null;
this.filterStack = filterStackFactory.createFilter(this);
}
/**
* On first call, emits a 'status' event with the given StatusObject.
* Subsequent calls are no-ops.
* @param status The status of the call.
*/
endCall(status) {
if (this.finalStatus === null) {
this.finalStatus = status;
this.emit('status', status);
}
}
handleFilterError(error) {
this.cancelWithStatus(constants_1.Status.INTERNAL, error.message);
}
handleFilteredRead(message) {
this.isReadFilterPending = false;
if (this.canPush) {
if (!this.push(message)) {
this.canPush = false;
this.http2Stream.pause();
}
}
else {
this.unpushedReadMessages.push(message);
}
if (this.unfilteredReadMessages.length > 0) {
/* nextMessage is guaranteed not to be undefined because
unfilteredReadMessages is non-empty */
const nextMessage = this.unfilteredReadMessages.shift();
this.filterReceivedMessage(nextMessage);
}
}
filterReceivedMessage(framedMessage) {
if (framedMessage === null) {
if (this.canPush) {
this.push(null);
}
else {
this.unpushedReadMessages.push(null);
}
return;
}
this.isReadFilterPending = true;
this.filterStack.receiveMessage(Promise.resolve(framedMessage))
.then(this.handleFilteredRead.bind(this), this.handleFilterError.bind(this));
}
tryPush(messageBytes) {
if (this.isReadFilterPending) {
this.unfilteredReadMessages.push(messageBytes);
}
else {
this.filterReceivedMessage(messageBytes);
}
}
handleTrailers(headers) {
const code = this.mappedStatusCode;
const details = '';
let metadata;
try {
metadata = metadata_1.Metadata.fromHttp2Headers(headers);
}
catch (e) {
metadata = new metadata_1.Metadata();
}
const status = { code, details, metadata };
this.handlingTrailers = (() => __awaiter(this, void 0, void 0, function* () {
let finalStatus;
try {
// Attempt to assign final status.
finalStatus =
yield this.filterStack.receiveTrailers(Promise.resolve(status));
}
catch (error) {
yield this.handlingHeaders;
// This is a no-op if the call was already ended when handling headers.
this.endCall({
code: constants_1.Status.INTERNAL,
details: 'Failed to process received status',
metadata: new metadata_1.Metadata()
});
return;
}
// It's possible that headers were received but not fully handled yet.
// Give the headers handler an opportunity to end the call first,
// if an error occurred.
yield this.handlingHeaders;
// This is a no-op if the call was already ended when handling headers.
this.endCall(finalStatus);
}))();
}
attachHttp2Stream(stream) {
if (this.finalStatus !== null) {
stream.close(NGHTTP2_CANCEL);
}
else {
this.http2Stream = stream;
stream.on('response', (headers, flags) => {
switch (headers[HTTP2_HEADER_STATUS]) {
// TODO(murgatroid99): handle 100 and 101
case '400':
this.mappedStatusCode = constants_1.Status.INTERNAL;
break;
case '401':
this.mappedStatusCode = constants_1.Status.UNAUTHENTICATED;
break;
case '403':
this.mappedStatusCode = constants_1.Status.PERMISSION_DENIED;
break;
case '404':
this.mappedStatusCode = constants_1.Status.UNIMPLEMENTED;
break;
case '429':
case '502':
case '503':
case '504':
this.mappedStatusCode = constants_1.Status.UNAVAILABLE;
break;
default:
this.mappedStatusCode = constants_1.Status.UNKNOWN;
}
if (flags & http2.constants.NGHTTP2_FLAG_END_STREAM) {
this.handleTrailers(headers);
}
else {
let metadata;
try {
metadata = metadata_1.Metadata.fromHttp2Headers(headers);
}
catch (error) {
this.endCall({
code: constants_1.Status.UNKNOWN,
details: error.message,
metadata: new metadata_1.Metadata()
});
return;
}
this.handlingHeaders =
this.filterStack.receiveMetadata(Promise.resolve(metadata))
.then((finalMetadata) => {
this.emit('metadata', finalMetadata);
})
.catch((error) => {
this.destroyHttp2Stream();
this.endCall({
code: constants_1.Status.UNKNOWN,
details: error.message,
metadata: new metadata_1.Metadata()
});
});
}
});
stream.on('trailers', this.handleTrailers.bind(this));
stream.on('data', (data) => {
let readHead = 0;
let toRead;
while (readHead < data.length) {
switch (this.readState) {
case ReadState.NO_DATA:
this.readCompressFlag = data.slice(readHead, readHead + 1);
readHead += 1;
this.readState = ReadState.READING_SIZE;
this.readPartialSize.fill(0);
this.readSizeRemaining = 4;
this.readMessageSize = 0;
this.readMessageRemaining = 0;
this.readPartialMessage = [];
break;
case ReadState.READING_SIZE:
toRead = Math.min(data.length - readHead, this.readSizeRemaining);
data.copy(this.readPartialSize, 4 - this.readSizeRemaining, readHead, readHead + toRead);
this.readSizeRemaining -= toRead;
readHead += toRead;
// readSizeRemaining >=0 here
if (this.readSizeRemaining === 0) {
this.readMessageSize = this.readPartialSize.readUInt32BE(0);
this.readMessageRemaining = this.readMessageSize;
if (this.readMessageRemaining > 0) {
this.readState = ReadState.READING_MESSAGE;
}
else {
this.tryPush(Buffer.concat([this.readCompressFlag, this.readPartialSize]));
this.readState = ReadState.NO_DATA;
}
}
break;
case ReadState.READING_MESSAGE:
toRead =
Math.min(data.length - readHead, this.readMessageRemaining);
this.readPartialMessage.push(data.slice(readHead, readHead + toRead));
this.readMessageRemaining -= toRead;
readHead += toRead;
// readMessageRemaining >=0 here
if (this.readMessageRemaining === 0) {
// At this point, we have read a full message
const framedMessageBuffers = [
this.readCompressFlag, this.readPartialSize
].concat(this.readPartialMessage);
const framedMessage = Buffer.concat(framedMessageBuffers, this.readMessageSize + 5);
this.tryPush(framedMessage);
this.readState = ReadState.NO_DATA;
}
break;
default:
throw new Error('This should never happen');
}
}
});
stream.on('end', () => {
this.tryPush(null);
});
stream.on('close', (errorCode) => __awaiter(this, void 0, void 0, function* () {
let code;
let details = '';
switch (errorCode) {
case http2.constants.NGHTTP2_REFUSED_STREAM:
code = constants_1.Status.UNAVAILABLE;
break;
case http2.constants.NGHTTP2_CANCEL:
code = constants_1.Status.CANCELLED;
break;
case http2.constants.NGHTTP2_ENHANCE_YOUR_CALM:
code = constants_1.Status.RESOURCE_EXHAUSTED;
details = 'Bandwidth exhausted';
break;
case http2.constants.NGHTTP2_INADEQUATE_SECURITY:
code = constants_1.Status.PERMISSION_DENIED;
details = 'Protocol not secure enough';
break;
default:
code = constants_1.Status.INTERNAL;
}
// This guarantees that if trailers were received, the value of the
// 'grpc-status' header takes precedence for emitted status data.
yield this.handlingTrailers;
// This is a no-op if trailers were received at all.
// This is OK, because status codes emitted here correspond to more
// catastrophic issues that prevent us from receiving trailers in the
// first place.
this.endCall({ code, details, metadata: new metadata_1.Metadata() });
}));
stream.on('error', (err) => {
this.endCall({
code: constants_1.Status.INTERNAL,
details: 'Internal HTTP2 error',
metadata: new metadata_1.Metadata()
});
});
if (!this.pendingRead) {
stream.pause();
}
if (this.pendingWrite) {
if (!this.pendingWriteCallback) {
throw new Error('Invalid state in write handling code');
}
stream.write(this.pendingWrite, this.pendingWriteCallback);
}
if (this.pendingFinalCallback) {
stream.end(this.pendingFinalCallback);
}
}
}
sendMetadata(metadata) {
this.channel._startHttp2Stream(this.options.host, this.methodName, this, metadata);
}
destroyHttp2Stream() {
// The http2 stream could already have been destroyed if cancelWithStatus
// is called in response to an internal http2 error.
if (this.http2Stream !== null && !this.http2Stream.destroyed) {
/* TODO(murgatroid99): Determine if we want to send different RST_STREAM
* codes based on the status code */
this.http2Stream.close(NGHTTP2_CANCEL);
}
}
cancelWithStatus(status, details) {
this.destroyHttp2Stream();
(() => __awaiter(this, void 0, void 0, function* () {
// If trailers are currently being processed, the call should be ended
// by handleTrailers instead.
yield this.handlingTrailers;
this.endCall({ code: status, details, metadata: new metadata_1.Metadata() });
}))();
}
getDeadline() {
return this.options.deadline;
}
getCredentials() {
return this.credentials;
}
setCredentials(credentials) {
this.credentials = credentials;
}
getStatus() {
return this.finalStatus;
}
getPeer() {
throw new Error('Not yet implemented');
}
getMethod() {
return this.methodName;
}
getHost() {
return this.options.host;
}
_read(size) {
this.canPush = true;
if (this.http2Stream === null) {
this.pendingRead = true;
}
else {
while (this.unpushedReadMessages.length > 0) {
const nextMessage = this.unpushedReadMessages.shift();
this.canPush = this.push(nextMessage);
if (nextMessage === null || (!this.canPush)) {
this.canPush = false;
return;
}
}
/* Only resume reading from the http2Stream if we don't have any pending
* messages to emit, and we haven't gotten the signal to stop pushing
* messages */
this.http2Stream.resume();
}
}
_write(chunk, encoding, cb) {
this.filterStack.sendMessage(Promise.resolve(chunk)).then((message) => {
if (this.http2Stream === null) {
this.pendingWrite = message.message;
this.pendingWriteCallback = cb;
}
else {
this.http2Stream.write(message.message, cb);
}
}, this.handleFilterError.bind(this));
}
_final(cb) {
if (this.http2Stream === null) {
this.pendingFinalCallback = cb;
}
else {
this.http2Stream.end(cb);
}
}
}
exports.Http2CallStream = Http2CallStream;
//# sourceMappingURL=call-stream.js.map
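
The `data` handler above parses gRPC's length-prefixed framing: a 1-byte compression flag, a 4-byte big-endian message length, then the message body. Below is a minimal sketch of producing such a frame with a hypothetical standalone helper; in this package the framing of outgoing messages is handled by filters not shown in this diff.

```js
// Hypothetical helper for illustration: builds the 5-byte prefix the reader above expects.
function frameMessage(payload, compressed = false) {
  const prefix = Buffer.alloc(5);
  prefix.writeUInt8(compressed ? 1 : 0, 0); // compression flag
  prefix.writeUInt32BE(payload.length, 1);  // big-endian message length
  return Buffer.concat([prefix, payload], payload.length + 5);
}

const framed = frameMessage(Buffer.from('hello'));
console.log(framed.length);          // 10 (5-byte prefix + 5-byte payload)
console.log(framed.readUInt32BE(1)); // 5
```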

@@ -0,0 +1,72 @@
/// <reference types="node" />
import { EventEmitter } from 'events';
import { Duplex, Readable, Writable } from 'stream';
import { Call, StatusObject } from './call-stream';
import { EmitterAugmentation1 } from './events';
import { Metadata } from './metadata';
import { ObjectReadable, ObjectWritable } from './object-stream';
/**
* A type extending the built-in Error object with additional fields.
*/
export declare type ServiceError = StatusObject & Error;
/**
* A base type for all user-facing values returned by client-side method calls.
*/
export declare type SurfaceCall = {
cancel(): void;
getPeer(): string;
} & EmitterAugmentation1<'metadata', Metadata> & EmitterAugmentation1<'status', StatusObject> & EventEmitter;
/**
* A type representing the return value of a unary method call.
*/
export declare type ClientUnaryCall = SurfaceCall;
/**
* A type representing the return value of a server stream method call.
*/
export declare type ClientReadableStream<ResponseType> = {
deserialize: (chunk: Buffer) => ResponseType;
} & SurfaceCall & ObjectReadable<ResponseType>;
/**
* A type representing the return value of a client stream method call.
*/
export declare type ClientWritableStream<RequestType> = {
serialize: (value: RequestType) => Buffer;
} & SurfaceCall & ObjectWritable<RequestType>;
/**
* A type representing the return value of a bidirectional stream method call.
*/
export declare type ClientDuplexStream<RequestType, ResponseType> = ClientWritableStream<RequestType> & ClientReadableStream<ResponseType>;
export declare class ClientUnaryCallImpl extends EventEmitter implements ClientUnaryCall {
private readonly call;
constructor(call: Call);
cancel(): void;
getPeer(): string;
}
export declare class ClientReadableStreamImpl<ResponseType> extends Readable implements ClientReadableStream<ResponseType> {
private readonly call;
readonly deserialize: (chunk: Buffer) => ResponseType;
constructor(call: Call, deserialize: (chunk: Buffer) => ResponseType);
cancel(): void;
getPeer(): string;
_read(_size: number): void;
}
export declare class ClientWritableStreamImpl<RequestType> extends Writable implements ClientWritableStream<RequestType> {
private readonly call;
readonly serialize: (value: RequestType) => Buffer;
constructor(call: Call, serialize: (value: RequestType) => Buffer);
cancel(): void;
getPeer(): string;
_write(chunk: RequestType, encoding: string, cb: Function): void;
_final(cb: Function): void;
}
export declare class ClientDuplexStreamImpl<RequestType, ResponseType> extends Duplex implements ClientDuplexStream<RequestType, ResponseType> {
private readonly call;
readonly serialize: (value: RequestType) => Buffer;
readonly deserialize: (chunk: Buffer) => ResponseType;
constructor(call: Call, serialize: (value: RequestType) => Buffer, deserialize: (chunk: Buffer) => ResponseType);
cancel(): void;
getPeer(): string;
_read(_size: number): void;
_write(chunk: RequestType, encoding: string, cb: Function): void;
_final(cb: Function): void;
}

@@ -0,0 +1,154 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const events_1 = require("events");
const stream_1 = require("stream");
const constants_1 = require("./constants");
class ClientUnaryCallImpl extends events_1.EventEmitter {
constructor(call) {
super();
this.call = call;
call.on('metadata', (metadata) => {
this.emit('metadata', metadata);
});
call.on('status', (status) => {
this.emit('status', status);
});
}
cancel() {
this.call.cancelWithStatus(constants_1.Status.CANCELLED, 'Cancelled on client');
}
getPeer() {
return this.call.getPeer();
}
}
exports.ClientUnaryCallImpl = ClientUnaryCallImpl;
function setUpReadableStream(stream, call, deserialize) {
let statusEmitted = false;
call.on('data', (data) => {
let deserialized;
try {
deserialized = deserialize(data);
}
catch (e) {
call.cancelWithStatus(constants_1.Status.INTERNAL, 'Failed to parse server response');
return;
}
if (!stream.push(deserialized)) {
call.pause();
}
});
call.on('end', () => {
if (statusEmitted) {
stream.push(null);
}
else {
call.once('status', () => {
stream.push(null);
});
}
});
call.on('status', (status) => {
if (status.code !== constants_1.Status.OK) {
const error = Object.assign(new Error(status.details), status);
stream.emit('error', error);
}
stream.emit('status', status);
statusEmitted = true;
});
call.pause();
}
class ClientReadableStreamImpl extends stream_1.Readable {
constructor(call, deserialize) {
super({ objectMode: true });
this.call = call;
this.deserialize = deserialize;
call.on('metadata', (metadata) => {
this.emit('metadata', metadata);
});
setUpReadableStream(this, call, deserialize);
}
cancel() {
this.call.cancelWithStatus(constants_1.Status.CANCELLED, 'Cancelled on client');
}
getPeer() {
return this.call.getPeer();
}
_read(_size) {
this.call.resume();
}
}
exports.ClientReadableStreamImpl = ClientReadableStreamImpl;
function tryWrite(call, serialize, chunk, encoding, cb) {
let message;
const flags = Number(encoding);
try {
message = serialize(chunk);
}
catch (e) {
call.cancelWithStatus(constants_1.Status.INTERNAL, 'Serialization failure');
cb(e);
return;
}
const writeObj = { message };
if (!Number.isNaN(flags)) {
writeObj.flags = flags;
}
call.write(writeObj, cb);
}
class ClientWritableStreamImpl extends stream_1.Writable {
constructor(call, serialize) {
super({ objectMode: true });
this.call = call;
this.serialize = serialize;
call.on('metadata', (metadata) => {
this.emit('metadata', metadata);
});
call.on('status', (status) => {
this.emit('status', status);
});
}
cancel() {
this.call.cancelWithStatus(constants_1.Status.CANCELLED, 'Cancelled on client');
}
getPeer() {
return this.call.getPeer();
}
_write(chunk, encoding, cb) {
tryWrite(this.call, this.serialize, chunk, encoding, cb);
}
_final(cb) {
this.call.end();
cb();
}
}
exports.ClientWritableStreamImpl = ClientWritableStreamImpl;
class ClientDuplexStreamImpl extends stream_1.Duplex {
constructor(call, serialize, deserialize) {
super({ objectMode: true });
this.call = call;
this.serialize = serialize;
this.deserialize = deserialize;
call.on('metadata', (metadata) => {
this.emit('metadata', metadata);
});
setUpReadableStream(this, call, deserialize);
}
cancel() {
this.call.cancelWithStatus(constants_1.Status.CANCELLED, 'Cancelled on client');
}
getPeer() {
return this.call.getPeer();
}
_read(_size) {
this.call.resume();
}
_write(chunk, encoding, cb) {
tryWrite(this.call, this.serialize, chunk, encoding, cb);
}
_final(cb) {
this.call.end();
cb();
}
}
exports.ClientDuplexStreamImpl = ClientDuplexStreamImpl;
//# sourceMappingURL=call.js.map
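
Given `setUpReadableStream` above, a server-streaming call surfaces a non-OK status both as a `'status'` event and as an `'error'` whose object carries the status fields. A short consumption sketch, assuming `stream` is a `ClientReadableStream` returned by some client call not shown here:

```js
// Sketch only: `stream` is assumed to be an existing ClientReadableStream.
stream.on('data', (response) => {
  console.log('message:', response);
});
stream.on('status', (status) => {
  console.log('final status:', status.code, status.details);
});
stream.on('error', (err) => {
  // Emitted only when status.code !== Status.OK; err is a ServiceError, so it also
  // exposes code, details, and metadata alongside the usual Error fields.
  console.error('call failed:', err.code, err.message);
});
```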

@@ -0,0 +1,73 @@
/// <reference types="node" />
import { ConnectionOptions } from 'tls';
import { CallCredentials } from './call-credentials';
/**
* A certificate as received by the checkServerIdentity callback.
*/
export interface Certificate {
/**
* The raw certificate in DER form.
*/
raw: Buffer;
}
/**
* A callback that will receive the expected hostname and presented peer
* certificate as parameters. The callback should return an error to
* indicate that the presented certificate is considered invalid and
 * otherwise return undefined.
*/
export declare type CheckServerIdentityCallback = (hostname: string, cert: Certificate) => Error | undefined;
/**
* Additional peer verification options that can be set when creating
* SSL credentials.
*/
export interface VerifyOptions {
/**
* If set, this callback will be invoked after the usual hostname verification
* has been performed on the peer certificate.
*/
checkServerIdentity?: CheckServerIdentityCallback;
}
/**
* A class that contains credentials for communicating over a channel, as well
* as a set of per-call credentials, which are applied to every method call made
* over a channel initialized with an instance of this class.
*/
export declare abstract class ChannelCredentials {
protected callCredentials: CallCredentials;
protected constructor(callCredentials?: CallCredentials);
/**
* Returns a copy of this object with the included set of per-call credentials
* expanded to include callCredentials.
* @param callCredentials A CallCredentials object to associate with this
* instance.
*/
abstract compose(callCredentials: CallCredentials): ChannelCredentials;
/**
* Gets the set of per-call credentials associated with this instance.
*/
_getCallCredentials(): CallCredentials;
/**
* Gets a SecureContext object generated from input parameters if this
* instance was created with createSsl, or null if this instance was created
* with createInsecure.
*/
abstract _getConnectionOptions(): ConnectionOptions | null;
/**
* Indicates whether this credentials object creates a secure channel.
*/
abstract _isSecure(): boolean;
/**
* Return a new ChannelCredentials instance with a given set of credentials.
* The resulting instance can be used to construct a Channel that communicates
* over TLS.
* @param rootCerts The root certificate data.
* @param privateKey The client certificate private key, if available.
* @param certChain The client certificate key chain, if available.
*/
static createSsl(rootCerts?: Buffer | null, privateKey?: Buffer | null, certChain?: Buffer | null, verifyOptions?: VerifyOptions): ChannelCredentials;
/**
* Return a new ChannelCredentials instance with no credentials.
*/
static createInsecure(): ChannelCredentials;
}
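
A short creation sketch for the declarations above, assuming the in-package require path used by this diff and placeholder certificate files on disk:

```js
// Sketch only: file names are placeholders; the require path mirrors this diff's layout.
const fs = require('fs');
const { ChannelCredentials } = require('./channel-credentials');

// Plaintext channel credentials; per the implementation, these cannot be composed
// with call credentials.
const insecureCreds = ChannelCredentials.createInsecure();

// TLS channel credentials with a custom root CA and a client key/certificate pair.
const secureCreds = ChannelCredentials.createSsl(
    fs.readFileSync('ca.pem'),          // rootCerts
    fs.readFileSync('client-key.pem'),  // privateKey (must be paired with certChain)
    fs.readFileSync('client-cert.pem')  // certChain
);
```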

@@ -0,0 +1,96 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tls_1 = require("tls");
const call_credentials_1 = require("./call-credentials");
// tslint:disable-next-line:no-any
function verifyIsBufferOrNull(obj, friendlyName) {
if (obj && !(obj instanceof Buffer)) {
throw new TypeError(`${friendlyName}, if provided, must be a Buffer.`);
}
}
/**
* A class that contains credentials for communicating over a channel, as well
* as a set of per-call credentials, which are applied to every method call made
* over a channel initialized with an instance of this class.
*/
class ChannelCredentials {
constructor(callCredentials) {
this.callCredentials = callCredentials || call_credentials_1.CallCredentials.createEmpty();
}
/**
* Gets the set of per-call credentials associated with this instance.
*/
_getCallCredentials() {
return this.callCredentials;
}
/**
* Return a new ChannelCredentials instance with a given set of credentials.
* The resulting instance can be used to construct a Channel that communicates
* over TLS.
* @param rootCerts The root certificate data.
* @param privateKey The client certificate private key, if available.
* @param certChain The client certificate key chain, if available.
*/
static createSsl(rootCerts, privateKey, certChain, verifyOptions) {
verifyIsBufferOrNull(rootCerts, 'Root certificate');
verifyIsBufferOrNull(privateKey, 'Private key');
verifyIsBufferOrNull(certChain, 'Certificate chain');
if (privateKey && !certChain) {
throw new Error('Private key must be given with accompanying certificate chain');
}
if (!privateKey && certChain) {
throw new Error('Certificate chain must be given with accompanying private key');
}
const secureContext = tls_1.createSecureContext({
ca: rootCerts || undefined,
key: privateKey || undefined,
cert: certChain || undefined
});
const connectionOptions = { secureContext };
if (verifyOptions && verifyOptions.checkServerIdentity) {
connectionOptions.checkServerIdentity =
(host, cert) => {
return verifyOptions.checkServerIdentity(host, { raw: cert.raw });
};
}
return new SecureChannelCredentialsImpl(connectionOptions);
}
/**
* Return a new ChannelCredentials instance with no credentials.
*/
static createInsecure() {
return new InsecureChannelCredentialsImpl();
}
}
exports.ChannelCredentials = ChannelCredentials;
class InsecureChannelCredentialsImpl extends ChannelCredentials {
constructor(callCredentials) {
super(callCredentials);
}
compose(callCredentials) {
throw new Error('Cannot compose insecure credentials');
}
_getConnectionOptions() {
return null;
}
_isSecure() {
return false;
}
}
class SecureChannelCredentialsImpl extends ChannelCredentials {
constructor(connectionOptions, callCredentials) {
super(callCredentials);
this.connectionOptions = connectionOptions;
}
compose(callCredentials) {
const combinedCallCredentials = this.callCredentials.compose(callCredentials);
return new SecureChannelCredentialsImpl(this.connectionOptions, combinedCallCredentials);
}
_getConnectionOptions() {
return this.connectionOptions;
}
_isSecure() {
return true;
}
}
//# sourceMappingURL=channel-credentials.js.map
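
The `verifyOptions.checkServerIdentity` hook wired up by `createSsl` above receives the expected hostname and the peer certificate's raw DER bytes; returning an `Error` rejects the peer and returning `undefined` accepts it. A minimal sketch, with a placeholder pinned hostname:

```js
// Sketch only: the hostname check is illustrative; cert.raw is a Buffer of DER bytes.
const { ChannelCredentials } = require('./channel-credentials');

const verifyOptions = {
  checkServerIdentity: (hostname, cert) => {
    if (hostname !== 'expected.example.com') {
      return new Error(`Unexpected server hostname: ${hostname}`);
    }
    // Certificate pinning could additionally hash and compare cert.raw here.
    return undefined;
  }
};

const pinnedCreds = ChannelCredentials.createSsl(null, null, null, verifyOptions);
```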

@@ -0,0 +1,24 @@
/**
* An interface that contains options used when initializing a Channel instance.
*/
export interface ChannelOptions {
'grpc.ssl_target_name_override': string;
'grpc.primary_user_agent': string;
'grpc.secondary_user_agent': string;
'grpc.default_authority': string;
'grpc.keepalive_time_ms': number;
'grpc.keepalive_timeout_ms': number;
[key: string]: string | number;
}
/**
* This is for checking provided options at runtime. This is an object for
* easier membership checking.
*/
export declare const recognizedOptions: {
'grpc.ssl_target_name_override': boolean;
'grpc.primary_user_agent': boolean;
'grpc.secondary_user_agent': boolean;
'grpc.default_authority': boolean;
'grpc.keepalive_time_ms': boolean;
'grpc.keepalive_timeout_ms': boolean;
};
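
A sketch of a channel options object using only the keys recognized above (the values are illustrative); any other key triggers the "Unrecognized channel argument" warning in the `Http2Channel` constructor later in this diff.

```js
// Illustrative values only; the keys are those declared in ChannelOptions above.
const channelOptions = {
  'grpc.primary_user_agent': 'my-app/1.0.0',
  'grpc.default_authority': 'example.com',
  'grpc.keepalive_time_ms': 60000,
  'grpc.keepalive_timeout_ms': 10000
  // 'some.unknown_option': 1  // would be ignored with a console warning
};
```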

@@ -0,0 +1,15 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* This is for checking provided options at runtime. This is an object for
* easier membership checking.
*/
exports.recognizedOptions = {
'grpc.ssl_target_name_override': true,
'grpc.primary_user_agent': true,
'grpc.secondary_user_agent': true,
'grpc.default_authority': true,
'grpc.keepalive_time_ms': true,
'grpc.keepalive_timeout_ms': true
};
//# sourceMappingURL=channel-options.js.map

@@ -0,0 +1,92 @@
/// <reference types="node" />
import { EventEmitter } from 'events';
import { Call, Deadline, Http2CallStream } from './call-stream';
import { ChannelCredentials } from './channel-credentials';
import { ChannelOptions } from './channel-options';
import { Metadata } from './metadata';
export declare enum ConnectivityState {
CONNECTING = 0,
READY = 1,
TRANSIENT_FAILURE = 2,
IDLE = 3,
SHUTDOWN = 4,
}
/**
* An interface that represents a communication channel to a server specified
* by a given address.
*/
export interface Channel {
/**
* Close the channel. This has the same functionality as the existing
* grpc.Client.prototype.close
*/
close(): void;
/**
* Return the target that this channel connects to
*/
getTarget(): string;
/**
* Get the channel's current connectivity state. This method is here mainly
* because it is in the existing internal Channel class, and there isn't
* another good place to put it.
* @param tryToConnect If true, the channel will start connecting if it is
* idle. Otherwise, idle channels will only start connecting when a
* call starts.
*/
getConnectivityState(tryToConnect: boolean): ConnectivityState;
/**
* Watch for connectivity state changes. This is also here mainly because
* it is in the existing external Channel class.
* @param currentState The state to watch for transitions from. This should
* always be populated by calling getConnectivityState immediately
* before.
* @param deadline A deadline for waiting for a state change
* @param callback Called with no error when a state change, or with an
* error if the deadline passes without a state change.
*/
watchConnectivityState(currentState: ConnectivityState, deadline: Date | number, callback: (error?: Error) => void): void;
/**
* Create a call object. Call is an opaque type that is used by the Client
* class. This function is called by the gRPC library when starting a
* request. Implementers should return an instance of Call that is returned
* from calling createCall on an instance of the provided Channel class.
* @param method The full method string to request.
* @param deadline The call deadline
* @param host A host string override for making the request
* @param parentCall A server call to propagate some information from
* @param propagateFlags A bitwise combination of elements of grpc.propagate
* that indicates what information to propagate from parentCall.
*/
createCall(method: string, deadline: Deadline | null | undefined, host: string | null | undefined, parentCall: Call | null | undefined, propagateFlags: number | null | undefined): Call;
}
export declare class Http2Channel extends EventEmitter implements Channel {
readonly credentials: ChannelCredentials;
private readonly options;
private readonly userAgent;
private readonly target;
private readonly defaultAuthority;
private connectivityState;
private connecting;
private subChannel;
private filterStackFactory;
private subChannelConnectCallback;
private subChannelCloseCallback;
private backoffTimerId;
private currentBackoff;
private currentBackoffDeadline;
private handleStateChange(oldState, newState);
private transitionToState(oldStates, newState);
private startConnecting();
constructor(address: string, credentials: ChannelCredentials, options: Partial<ChannelOptions>);
_startHttp2Stream(authority: string, methodName: string, stream: Http2CallStream, metadata: Metadata): void;
createCall(method: string, deadline: Deadline | null | undefined, host: string | null | undefined, parentCall: Call | null | undefined, propagateFlags: number | null | undefined): Call;
/**
* Attempts to connect, returning a Promise that resolves when the connection
* is successful, or rejects if the channel is shut down.
*/
private connect();
getConnectivityState(tryToConnect: boolean): ConnectivityState;
watchConnectivityState(currentState: ConnectivityState, deadline: Date | number, callback: (error?: Error) => void): void;
getTarget(): string;
close(): void;
}
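
A small sketch of the `getConnectivityState`/`watchConnectivityState` pair declared above, assuming `channel` is an `Http2Channel` instance created elsewhere and a five-second deadline:

```js
// Sketch only: `channel` is assumed to be an existing Http2Channel instance.
const { ConnectivityState } = require('./channel');

const currentState = channel.getConnectivityState(true); // true: nudge an IDLE channel to connect
channel.watchConnectivityState(currentState, Date.now() + 5000, (error) => {
  if (error) {
    console.log('connectivity state did not change before the deadline');
  } else {
    console.log('connectivity state changed to',
        ConnectivityState[channel.getConnectivityState(false)]);
  }
});
```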

@@ -0,0 +1,297 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const events_1 = require("events");
const http2 = require("http2");
const tls_1 = require("tls");
const url = require("url");
const call_credentials_filter_1 = require("./call-credentials-filter");
const call_stream_1 = require("./call-stream");
const channel_options_1 = require("./channel-options");
const compression_filter_1 = require("./compression-filter");
const constants_1 = require("./constants");
const deadline_filter_1 = require("./deadline-filter");
const filter_stack_1 = require("./filter-stack");
const metadata_status_filter_1 = require("./metadata-status-filter");
const subchannel_1 = require("./subchannel");
const { version: clientVersion } = require('../../package');
const MIN_CONNECT_TIMEOUT_MS = 20000;
const INITIAL_BACKOFF_MS = 1000;
const BACKOFF_MULTIPLIER = 1.6;
const MAX_BACKOFF_MS = 120000;
const BACKOFF_JITTER = 0.2;
const { HTTP2_HEADER_AUTHORITY, HTTP2_HEADER_CONTENT_TYPE, HTTP2_HEADER_METHOD, HTTP2_HEADER_PATH, HTTP2_HEADER_TE, HTTP2_HEADER_USER_AGENT } = http2.constants;
var ConnectivityState;
(function (ConnectivityState) {
ConnectivityState[ConnectivityState["CONNECTING"] = 0] = "CONNECTING";
ConnectivityState[ConnectivityState["READY"] = 1] = "READY";
ConnectivityState[ConnectivityState["TRANSIENT_FAILURE"] = 2] = "TRANSIENT_FAILURE";
ConnectivityState[ConnectivityState["IDLE"] = 3] = "IDLE";
ConnectivityState[ConnectivityState["SHUTDOWN"] = 4] = "SHUTDOWN";
})(ConnectivityState = exports.ConnectivityState || (exports.ConnectivityState = {}));
function uniformRandom(min, max) {
return Math.random() * (max - min) + min;
}
class Http2Channel extends events_1.EventEmitter {
constructor(address, credentials, options) {
super();
this.credentials = credentials;
this.options = options;
this.connectivityState = ConnectivityState.IDLE;
// Helper Promise object only used in the implementation of connect().
this.connecting = null;
/* For now, we have up to one subchannel, which will exist as long as we are
 * connected or trying to connect */
this.subChannel = null;
this.subChannelConnectCallback = () => { };
this.subChannelCloseCallback = () => { };
this.currentBackoff = INITIAL_BACKOFF_MS;
for (const option in options) {
if (options.hasOwnProperty(option)) {
if (!channel_options_1.recognizedOptions.hasOwnProperty(option)) {
console.warn(`Unrecognized channel argument '${option}' will be ignored.`);
}
}
}
if (credentials._isSecure()) {
this.target = new url.URL(`https://${address}`);
}
else {
this.target = new url.URL(`http://${address}`);
}
// TODO(murgatroid99): Add more centralized handling of channel options
if (this.options['grpc.default_authority']) {
this.defaultAuthority = this.options['grpc.default_authority'];
}
else {
this.defaultAuthority = this.target.host;
}
this.filterStackFactory = new filter_stack_1.FilterStackFactory([
new call_credentials_filter_1.CallCredentialsFilterFactory(this), new deadline_filter_1.DeadlineFilterFactory(this),
new metadata_status_filter_1.MetadataStatusFilterFactory(this), new compression_filter_1.CompressionFilterFactory(this)
]);
this.currentBackoffDeadline = new Date();
/* The only purpose of these lines is to ensure that this.backoffTimerId has
* a value of type NodeJS.Timer. */
this.backoffTimerId = setTimeout(() => { }, 0);
// Build user-agent string.
this.userAgent = [
options['grpc.primary_user_agent'], `grpc-node-js/${clientVersion}`,
options['grpc.secondary_user_agent']
].filter(e => e).join(' '); // remove falsey values first
}
handleStateChange(oldState, newState) {
const now = new Date();
switch (newState) {
case ConnectivityState.CONNECTING:
if (oldState === ConnectivityState.IDLE) {
this.currentBackoff = INITIAL_BACKOFF_MS;
this.currentBackoffDeadline =
new Date(now.getTime() + INITIAL_BACKOFF_MS);
}
else if (oldState === ConnectivityState.TRANSIENT_FAILURE) {
this.currentBackoff = Math.min(this.currentBackoff * BACKOFF_MULTIPLIER, MAX_BACKOFF_MS);
const jitterMagnitude = BACKOFF_JITTER * this.currentBackoff;
this.currentBackoffDeadline = new Date(now.getTime() + this.currentBackoff +
uniformRandom(-jitterMagnitude, jitterMagnitude));
}
this.startConnecting();
break;
case ConnectivityState.READY:
this.emit('connect');
break;
case ConnectivityState.TRANSIENT_FAILURE:
this.subChannel = null;
this.backoffTimerId = setTimeout(() => {
this.transitionToState([ConnectivityState.TRANSIENT_FAILURE], ConnectivityState.CONNECTING);
}, this.currentBackoffDeadline.getTime() - now.getTime());
break;
case ConnectivityState.IDLE:
case ConnectivityState.SHUTDOWN:
if (this.subChannel) {
this.subChannel.close();
this.subChannel.removeListener('connect', this.subChannelConnectCallback);
this.subChannel.removeListener('close', this.subChannelCloseCallback);
this.subChannel = null;
this.emit('shutdown');
clearTimeout(this.backoffTimerId);
}
break;
default:
throw new Error('This should never happen');
}
}
// Transition from any of a set of oldStates to a specific newState
transitionToState(oldStates, newState) {
if (oldStates.indexOf(this.connectivityState) > -1) {
const oldState = this.connectivityState;
this.connectivityState = newState;
this.handleStateChange(oldState, newState);
this.emit('connectivityStateChanged', newState);
}
}
startConnecting() {
const connectionOptions = this.credentials._getConnectionOptions() || {};
if (connectionOptions.secureContext !== null) {
// If provided, the value of grpc.ssl_target_name_override should be used
// to override the target hostname when checking server identity.
// This option is used for testing only.
if (this.options['grpc.ssl_target_name_override']) {
const sslTargetNameOverride = this.options['grpc.ssl_target_name_override'];
connectionOptions.checkServerIdentity =
(host, cert) => {
return tls_1.checkServerIdentity(sslTargetNameOverride, cert);
};
connectionOptions.servername = sslTargetNameOverride;
}
}
const subChannel = new subchannel_1.Http2SubChannel(this.target, connectionOptions, this.userAgent, this.options);
this.subChannel = subChannel;
const now = new Date();
const connectionTimeout = Math.max(this.currentBackoffDeadline.getTime() - now.getTime(), MIN_CONNECT_TIMEOUT_MS);
const connectionTimerId = setTimeout(() => {
// This should trigger the 'close' event, which will send us back to
// TRANSIENT_FAILURE
subChannel.close();
}, connectionTimeout);
this.subChannelConnectCallback = () => {
// Connection succeeded
clearTimeout(connectionTimerId);
this.transitionToState([ConnectivityState.CONNECTING], ConnectivityState.READY);
};
subChannel.once('connect', this.subChannelConnectCallback);
this.subChannelCloseCallback = () => {
// Connection failed
clearTimeout(connectionTimerId);
/* TODO(murgatroid99): verify that this works for
 * CONNECTING->TRANSIENT_FAILURE see nodejs/node#16645 */
this.transitionToState([ConnectivityState.CONNECTING, ConnectivityState.READY], ConnectivityState.TRANSIENT_FAILURE);
};
subChannel.once('close', this.subChannelCloseCallback);
}
_startHttp2Stream(authority, methodName, stream, metadata) {
const finalMetadata = stream.filterStack.sendMetadata(Promise.resolve(metadata.clone()));
Promise.all([finalMetadata, this.connect()])
.then(([metadataValue]) => {
const headers = metadataValue.toHttp2Headers();
headers[HTTP2_HEADER_AUTHORITY] = authority;
headers[HTTP2_HEADER_USER_AGENT] = this.userAgent;
headers[HTTP2_HEADER_CONTENT_TYPE] = 'application/grpc';
headers[HTTP2_HEADER_METHOD] = 'POST';
headers[HTTP2_HEADER_PATH] = methodName;
headers[HTTP2_HEADER_TE] = 'trailers';
if (this.connectivityState === ConnectivityState.READY) {
const subChannel = this.subChannel;
subChannel.startCallStream(metadataValue, stream);
}
else {
/* In this case, we lost the connection while finalizing
* metadata. That should be very unusual */
setImmediate(() => {
this._startHttp2Stream(authority, methodName, stream, metadata);
});
}
})
.catch((error) => {
// We assume the error code isn't 0 (Status.OK)
stream.cancelWithStatus(error.code || constants_1.Status.UNKNOWN, `Getting metadata from plugin failed with error: ${error.message}`);
});
}
createCall(method, deadline, host, parentCall, propagateFlags) {
if (this.connectivityState === ConnectivityState.SHUTDOWN) {
throw new Error('Channel has been shut down');
}
const finalOptions = {
deadline: (deadline === null || deadline === undefined) ? Infinity :
deadline,
flags: propagateFlags || 0,
host: host || this.defaultAuthority,
parentCall: parentCall || null
};
const stream = new call_stream_1.Http2CallStream(method, this, finalOptions, this.filterStackFactory);
return stream;
}
/**
* Attempts to connect, returning a Promise that resolves when the connection
* is successful, or rejects if the channel is shut down.
*/
connect() {
if (this.connectivityState === ConnectivityState.READY) {
return Promise.resolve();
}
else if (this.connectivityState === ConnectivityState.SHUTDOWN) {
return Promise.reject(new Error('Channel has been shut down'));
}
else {
// In effect, this.connecting is only assigned upon the first attempt to
// transition from IDLE to CONNECTING, so this condition could have also
// been (connectivityState === IDLE).
if (!this.connecting) {
this.connecting = new Promise((resolve, reject) => {
this.transitionToState([ConnectivityState.IDLE], ConnectivityState.CONNECTING);
const onConnect = () => {
this.connecting = null;
this.removeListener('shutdown', onShutdown);
resolve();
};
const onShutdown = () => {
this.connecting = null;
this.removeListener('connect', onConnect);
reject(new Error('Channel has been shut down'));
};
this.once('connect', onConnect);
this.once('shutdown', onShutdown);
});
}
return this.connecting;
}
}
getConnectivityState(tryToConnect) {
if (tryToConnect) {
this.transitionToState([ConnectivityState.IDLE], ConnectivityState.CONNECTING);
}
return this.connectivityState;
}
watchConnectivityState(currentState, deadline, callback) {
if (this.connectivityState !== currentState) {
/* If the connectivity state is different from the provided currentState,
* we assume that a state change has successfully occurred */
setImmediate(callback);
}
else {
let deadlineMs = 0;
if (deadline instanceof Date) {
deadlineMs = deadline.getTime();
}
else {
deadlineMs = deadline;
}
let timeout = deadlineMs - Date.now();
if (timeout < 0) {
timeout = 0;
}
const timeoutId = setTimeout(() => {
this.removeListener('connectivityStateChanged', eventCb);
callback(new Error('Channel state did not change before deadline'));
}, timeout);
const eventCb = () => {
clearTimeout(timeoutId);
callback();
};
this.once('connectivityStateChanged', eventCb);
}
}
getTarget() {
return this.target.toString();
}
close() {
if (this.connectivityState === ConnectivityState.SHUTDOWN) {
throw new Error('Channel has been shut down');
}
this.transitionToState([
ConnectivityState.CONNECTING, ConnectivityState.READY,
ConnectivityState.TRANSIENT_FAILURE, ConnectivityState.IDLE
], ConnectivityState.SHUTDOWN);
}
}
exports.Http2Channel = Http2Channel;
//# sourceMappingURL=channel.js.map
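
For reference, the reconnection backoff implied by the constants above grows geometrically from 1 s by a factor of 1.6 up to a 120 s cap, with roughly ±20% jitter applied from the second attempt onward. The following is a standalone illustration of that schedule, not the channel's actual timer wiring:

```js
// Illustration only: reproduces the backoff constants above outside the channel.
const INITIAL_BACKOFF_MS = 1000;
const BACKOFF_MULTIPLIER = 1.6;
const MAX_BACKOFF_MS = 120000;
const BACKOFF_JITTER = 0.2;

function uniformRandom(min, max) {
  return Math.random() * (max - min) + min;
}

let backoff = INITIAL_BACKOFF_MS;
for (let attempt = 1; attempt <= 6; attempt++) {
  // The first attempt uses the initial backoff as-is; later attempts add +/-20% jitter.
  const jitter = attempt === 1 ?
      0 : uniformRandom(-BACKOFF_JITTER * backoff, BACKOFF_JITTER * backoff);
  console.log(`attempt ${attempt}: next connection deadline in ~${Math.round(backoff + jitter)} ms`);
  backoff = Math.min(backoff * BACKOFF_MULTIPLIER, MAX_BACKOFF_MS);
}
// Base values before jitter: 1000, 1600, 2560, 4096, 6554, 10486 ms, ...
```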

@@ -0,0 +1,49 @@
/// <reference types="node" />
import { ClientDuplexStream, ClientReadableStream, ClientUnaryCall, ClientWritableStream, ServiceError } from './call';
import { CallCredentials } from './call-credentials';
import { Call, Deadline } from './call-stream';
import { Channel } from './channel';
import { ChannelCredentials } from './channel-credentials';
import { ChannelOptions } from './channel-options';
import { Metadata } from './metadata';
export declare const kChannel: unique symbol;
export interface UnaryCallback<ResponseType> {
(err: ServiceError | null, value?: ResponseType): void;
}
export interface CallOptions {
deadline?: Deadline;
host?: string;
parent?: Call;
propagate_flags?: number;
credentials?: CallCredentials;
}
export declare type ClientOptions = Partial<ChannelOptions> & {
channelOverride?: Channel;
channelFactoryOverride?: (address: string, credentials: ChannelCredentials, options: ClientOptions) => Channel;
};
/**
* A generic gRPC client. Primarily useful as a base class for all generated
* clients.
*/
export declare class Client {
private readonly [kChannel];
constructor(address: string, credentials: ChannelCredentials, options?: ClientOptions);
close(): void;
getChannel(): Channel;
waitForReady(deadline: Deadline, callback: (error?: Error) => void): void;
private handleUnaryResponse<ResponseType>(call, deserialize, callback);
private checkOptionalUnaryResponseArguments<ResponseType>(arg1, arg2?, arg3?);
makeUnaryRequest<RequestType, ResponseType>(method: string, serialize: (value: RequestType) => Buffer, deserialize: (value: Buffer) => ResponseType, argument: RequestType, metadata: Metadata, options: CallOptions, callback: UnaryCallback<ResponseType>): ClientUnaryCall;
makeUnaryRequest<RequestType, ResponseType>(method: string, serialize: (value: RequestType) => Buffer, deserialize: (value: Buffer) => ResponseType, argument: RequestType, metadata: Metadata, callback: UnaryCallback<ResponseType>): ClientUnaryCall;
makeUnaryRequest<RequestType, ResponseType>(method: string, serialize: (value: RequestType) => Buffer, deserialize: (value: Buffer) => ResponseType, argument: RequestType, options: CallOptions, callback: UnaryCallback<ResponseType>): ClientUnaryCall;
makeUnaryRequest<RequestType, ResponseType>(method: string, serialize: (value: RequestType) => Buffer, deserialize: (value: Buffer) => ResponseType, argument: RequestType, callback: UnaryCallback<ResponseType>): ClientUnaryCall;
makeClientStreamRequest<RequestType, ResponseType>(method: string, serialize: (value: RequestType) => Buffer, deserialize: (value: Buffer) => ResponseType, metadata: Metadata, options: CallOptions, callback: UnaryCallback<ResponseType>): ClientWritableStream<RequestType>;
makeClientStreamRequest<RequestType, ResponseType>(method: string, serialize: (value: RequestType) => Buffer, deserialize: (value: Buffer) => ResponseType, metadata: Metadata, callback: UnaryCallback<ResponseType>): ClientWritableStream<RequestType>;
makeClientStreamRequest<RequestType, ResponseType>(method: string, serialize: (value: RequestType) => Buffer, deserialize: (value: Buffer) => ResponseType, options: CallOptions, callback: UnaryCallback<ResponseType>): ClientWritableStream<RequestType>;
makeClientStreamRequest<RequestType, ResponseType>(method: string, serialize: (value: RequestType) => Buffer, deserialize: (value: Buffer) => ResponseType, callback: UnaryCallback<ResponseType>): ClientWritableStream<RequestType>;
private checkMetadataAndOptions(arg1?, arg2?);
makeServerStreamRequest<RequestType, ResponseType>(method: string, serialize: (value: RequestType) => Buffer, deserialize: (value: Buffer) => ResponseType, argument: RequestType, metadata: Metadata, options?: CallOptions): ClientReadableStream<ResponseType>;
makeServerStreamRequest<RequestType, ResponseType>(method: string, serialize: (value: RequestType) => Buffer, deserialize: (value: Buffer) => ResponseType, argument: RequestType, options?: CallOptions): ClientReadableStream<ResponseType>;
makeBidiStreamRequest<RequestType, ResponseType>(method: string, serialize: (value: RequestType) => Buffer, deserialize: (value: Buffer) => ResponseType, metadata: Metadata, options?: CallOptions): ClientDuplexStream<RequestType, ResponseType>;
makeBidiStreamRequest<RequestType, ResponseType>(method: string, serialize: (value: RequestType) => Buffer, deserialize: (value: Buffer) => ResponseType, options?: CallOptions): ClientDuplexStream<RequestType, ResponseType>;
}
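
A usage sketch of the generic `Client` declared above, assuming the in-package require paths from this diff, a hypothetical `/example.Echo/Echo` method, and placeholder JSON serializers (generated clients would normally supply real protobuf codecs):

```js
// Sketch only: the method path and JSON codecs are placeholders for illustration.
const { Client } = require('./client');
const { ChannelCredentials } = require('./channel-credentials');
const { Metadata } = require('./metadata');

const client = new Client('localhost:50051', ChannelCredentials.createInsecure());

const serialize = (value) => Buffer.from(JSON.stringify(value));
const deserialize = (buffer) => JSON.parse(buffer.toString());

client.makeUnaryRequest('/example.Echo/Echo', serialize, deserialize, { text: 'hi' },
    new Metadata(), { deadline: Date.now() + 1000 }, (err, response) => {
      if (err) {
        console.error('call failed:', err.code, err.details);
      } else {
        console.log('response:', response);
      }
    });
```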

@@ -0,0 +1,186 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const call_1 = require("./call");
const channel_1 = require("./channel");
const constants_1 = require("./constants");
const metadata_1 = require("./metadata");
// This symbol must be exported (for now).
// See: https://github.com/Microsoft/TypeScript/issues/20080
exports.kChannel = Symbol();
/**
* A generic gRPC client. Primarily useful as a base class for all generated
* clients.
*/
class Client {
constructor(address, credentials, options = {}) {
if (options.channelOverride) {
this[exports.kChannel] = options.channelOverride;
}
else if (options.channelFactoryOverride) {
this[exports.kChannel] =
options.channelFactoryOverride(address, credentials, options);
}
else {
this[exports.kChannel] = new channel_1.Http2Channel(address, credentials, options);
}
}
close() {
this[exports.kChannel].close();
}
getChannel() {
return this[exports.kChannel];
}
waitForReady(deadline, callback) {
const checkState = (err) => {
if (err) {
callback(new Error('Failed to connect before the deadline'));
return;
}
let newState;
try {
newState = this[exports.kChannel].getConnectivityState(true);
}
catch (e) {
callback(new Error('The channel has been closed'));
return;
}
if (newState === channel_1.ConnectivityState.READY) {
callback();
}
else {
try {
this[exports.kChannel].watchConnectivityState(newState, deadline, checkState);
}
catch (e) {
callback(new Error('The channel has been closed'));
}
}
};
setImmediate(checkState);
}
handleUnaryResponse(call, deserialize, callback) {
let responseMessage = null;
call.on('data', (data) => {
if (responseMessage != null) {
call.cancelWithStatus(constants_1.Status.INTERNAL, 'Too many responses received');
}
try {
responseMessage = deserialize(data);
}
catch (e) {
call.cancelWithStatus(constants_1.Status.INTERNAL, 'Failed to parse server response');
}
});
call.on('end', () => {
if (responseMessage == null) {
call.cancelWithStatus(constants_1.Status.INTERNAL, 'Not enough responses received');
}
});
call.on('status', (status) => {
/* We assume that call emits status after it emits end, and that it
* accounts for any cancelWithStatus calls up until it emits status.
* Therefore, considering the above event handlers, status.code should be
* OK if and only if we have a non-null responseMessage */
if (status.code === constants_1.Status.OK) {
callback(null, responseMessage);
}
else {
const error = Object.assign(new Error(status.details), status);
callback(error);
}
});
}
checkOptionalUnaryResponseArguments(arg1, arg2, arg3) {
if (arg1 instanceof Function) {
return { metadata: new metadata_1.Metadata(), options: {}, callback: arg1 };
}
else if (arg2 instanceof Function) {
if (arg1 instanceof metadata_1.Metadata) {
return { metadata: arg1, options: {}, callback: arg2 };
}
else {
return { metadata: new metadata_1.Metadata(), options: arg1, callback: arg2 };
}
}
else {
if (!((arg1 instanceof metadata_1.Metadata) && (arg2 instanceof Object) &&
(arg3 instanceof Function))) {
throw new Error('Incorrect arguments passed');
}
return { metadata: arg1, options: arg2, callback: arg3 };
}
}
makeUnaryRequest(method, serialize, deserialize, argument, metadata, options, callback) {
({ metadata, options, callback } =
this.checkOptionalUnaryResponseArguments(metadata, options, callback));
const call = this[exports.kChannel].createCall(method, options.deadline, options.host, options.parent, options.propagate_flags);
if (options.credentials) {
call.setCredentials(options.credentials);
}
const message = serialize(argument);
const writeObj = { message };
call.sendMetadata(metadata);
call.write(writeObj);
call.end();
this.handleUnaryResponse(call, deserialize, callback);
return new call_1.ClientUnaryCallImpl(call);
}
makeClientStreamRequest(method, serialize, deserialize, metadata, options, callback) {
({ metadata, options, callback } =
this.checkOptionalUnaryResponseArguments(metadata, options, callback));
const call = this[exports.kChannel].createCall(method, options.deadline, options.host, options.parent, options.propagate_flags);
if (options.credentials) {
call.setCredentials(options.credentials);
}
call.sendMetadata(metadata);
this.handleUnaryResponse(call, deserialize, callback);
return new call_1.ClientWritableStreamImpl(call, serialize);
}
checkMetadataAndOptions(arg1, arg2) {
let metadata;
let options;
if (arg1 instanceof metadata_1.Metadata) {
metadata = arg1;
if (arg2) {
options = arg2;
}
else {
options = {};
}
}
else {
if (arg1) {
options = arg1;
}
else {
options = {};
}
metadata = new metadata_1.Metadata();
}
return { metadata, options };
}
makeServerStreamRequest(method, serialize, deserialize, argument, metadata, options) {
({ metadata, options } = this.checkMetadataAndOptions(metadata, options));
const call = this[exports.kChannel].createCall(method, options.deadline, options.host, options.parent, options.propagate_flags);
if (options.credentials) {
call.setCredentials(options.credentials);
}
const message = serialize(argument);
const writeObj = { message };
call.sendMetadata(metadata);
call.write(writeObj);
call.end();
return new call_1.ClientReadableStreamImpl(call, deserialize);
}
makeBidiStreamRequest(method, serialize, deserialize, metadata, options) {
({ metadata, options } = this.checkMetadataAndOptions(metadata, options));
const call = this[exports.kChannel].createCall(method, options.deadline, options.host, options.parent, options.propagate_flags);
if (options.credentials) {
call.setCredentials(options.credentials);
}
call.sendMetadata(metadata);
return new call_1.ClientDuplexStreamImpl(call, serialize, deserialize);
}
}
exports.Client = Client;
//# sourceMappingURL=client.js.map
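The class above can be driven directly, without generated code. A minimal sketch, assuming a hypothetical `Echo` service at `localhost:50051` and ad-hoc JSON serializers, using the four-argument `makeUnaryRequest` overload declared in the corresponding `.d.ts`:

```js
// Minimal sketch (hypothetical Echo service and JSON serializers) of driving
// the generic Client base class directly.
const grpc = require('@grpc/grpc-js');

const client = new grpc.Client('localhost:50051', grpc.credentials.createInsecure());

const serialize = (value) => Buffer.from(JSON.stringify(value));
const deserialize = (bytes) => JSON.parse(bytes.toString());

client.waitForReady(Date.now() + 5000, (err) => {
  if (err) {
    console.error('Channel never became ready:', err.message);
    return;
  }
  // Metadata and options are optional; this is the shortest overload.
  client.makeUnaryRequest('/echo.Echo/Echo', serialize, deserialize,
      { message: 'hello' }, (error, response) => {
    if (error) {
      console.error('RPC failed:', error.message);
    } else {
      console.log('Response:', response);
    }
  });
});
```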

View File

@ -0,0 +1,18 @@
/// <reference types="node" />
import { Call, WriteObject } from './call-stream';
import { Channel } from './channel';
import { BaseFilter, Filter, FilterFactory } from './filter';
import { Metadata } from './metadata';
export declare class CompressionFilter extends BaseFilter implements Filter {
private sendCompression;
private receiveCompression;
sendMetadata(metadata: Promise<Metadata>): Promise<Metadata>;
receiveMetadata(metadata: Promise<Metadata>): Promise<Metadata>;
sendMessage(message: Promise<WriteObject>): Promise<WriteObject>;
receiveMessage(message: Promise<Buffer>): Promise<Buffer>;
}
export declare class CompressionFilterFactory implements FilterFactory<CompressionFilter> {
private readonly channel;
constructor(channel: Channel);
createFilter(callStream: Call): CompressionFilter;
}

View File

@ -0,0 +1,209 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
const zlib = require("zlib");
const filter_1 = require("./filter");
class CompressionHandler {
/**
* @param message Raw uncompressed message bytes
* @param compress Indicates whether the message should be compressed
* @return Framed message, compressed if applicable
*/
writeMessage(message, compress) {
return __awaiter(this, void 0, void 0, function* () {
let messageBuffer = message;
if (compress) {
messageBuffer = yield this.compressMessage(messageBuffer);
}
const output = Buffer.allocUnsafe(messageBuffer.length + 5);
output.writeUInt8(compress ? 1 : 0, 0);
output.writeUInt32BE(messageBuffer.length, 1);
messageBuffer.copy(output, 5);
return output;
});
}
/**
* @param data Framed message, possibly compressed
* @return Uncompressed message
*/
readMessage(data) {
return __awaiter(this, void 0, void 0, function* () {
const compressed = data.readUInt8(0) === 1;
let messageBuffer = data.slice(5);
if (compressed) {
messageBuffer = yield this.decompressMessage(messageBuffer);
}
return messageBuffer;
});
}
}
class IdentityHandler extends CompressionHandler {
compressMessage(message) {
return __awaiter(this, void 0, void 0, function* () {
return message;
});
}
writeMessage(message, compress) {
return __awaiter(this, void 0, void 0, function* () {
const output = Buffer.allocUnsafe(message.length + 5);
/* With "identity" compression, messages should always be marked as
* uncompressed */
output.writeUInt8(0, 0);
output.writeUInt32BE(message.length, 1);
message.copy(output, 5);
return output;
});
}
decompressMessage(message) {
return Promise.reject(new Error('Received compressed message but "grpc-encoding" header was identity'));
}
}
class DeflateHandler extends CompressionHandler {
compressMessage(message) {
return new Promise((resolve, reject) => {
zlib.deflate(message, (err, output) => {
if (err) {
reject(err);
}
else {
resolve(output);
}
});
});
}
decompressMessage(message) {
return new Promise((resolve, reject) => {
zlib.inflate(message, (err, output) => {
if (err) {
reject(err);
}
else {
resolve(output);
}
});
});
}
}
class GzipHandler extends CompressionHandler {
compressMessage(message) {
return new Promise((resolve, reject) => {
zlib.gzip(message, (err, output) => {
if (err) {
reject(err);
}
else {
resolve(output);
}
});
});
}
decompressMessage(message) {
return new Promise((resolve, reject) => {
zlib.unzip(message, (err, output) => {
if (err) {
reject(err);
}
else {
resolve(output);
}
});
});
}
}
class UnknownHandler extends CompressionHandler {
constructor(compressionName) {
super();
this.compressionName = compressionName;
}
compressMessage(message) {
        return Promise.reject(new Error(`Received message compressed with unsupported compression method ${this.compressionName}`));
}
decompressMessage(message) {
// This should be unreachable
return Promise.reject(new Error(`Compression method not supported: ${this.compressionName}`));
}
}
function getCompressionHandler(compressionName) {
switch (compressionName) {
case 'identity':
return new IdentityHandler();
case 'deflate':
return new DeflateHandler();
case 'gzip':
return new GzipHandler();
default:
return new UnknownHandler(compressionName);
}
}
class CompressionFilter extends filter_1.BaseFilter {
constructor() {
super(...arguments);
this.sendCompression = new IdentityHandler();
this.receiveCompression = new IdentityHandler();
}
sendMetadata(metadata) {
return __awaiter(this, void 0, void 0, function* () {
const headers = yield metadata;
headers.set('grpc-encoding', 'identity');
headers.set('grpc-accept-encoding', 'identity,deflate,gzip');
return headers;
});
}
receiveMetadata(metadata) {
return __awaiter(this, void 0, void 0, function* () {
const headers = yield metadata;
const receiveEncoding = headers.get('grpc-encoding');
if (receiveEncoding.length > 0) {
const encoding = receiveEncoding[0];
if (typeof encoding === 'string') {
this.receiveCompression = getCompressionHandler(encoding);
}
}
headers.remove('grpc-encoding');
headers.remove('grpc-accept-encoding');
return headers;
});
}
sendMessage(message) {
return __awaiter(this, void 0, void 0, function* () {
/* This filter is special. The input message is the bare message bytes,
* and the output is a framed and possibly compressed message. For this
* reason, this filter should be at the bottom of the filter stack */
const resolvedMessage = yield message;
const compress = resolvedMessage.flags === undefined ?
false :
(resolvedMessage.flags & 2 /* NoCompress */) === 0;
return {
message: yield this.sendCompression.writeMessage(resolvedMessage.message, compress),
flags: resolvedMessage.flags
};
});
}
receiveMessage(message) {
return __awaiter(this, void 0, void 0, function* () {
/* This filter is also special. The input message is framed and possibly
* compressed, and the output message is deframed and uncompressed. So
* this is another reason that this filter should be at the bottom of the
* filter stack. */
return this.receiveCompression.readMessage(yield message);
});
}
}
exports.CompressionFilter = CompressionFilter;
class CompressionFilterFactory {
constructor(channel) {
this.channel = channel;
}
createFilter(callStream) {
return new CompressionFilter();
}
}
exports.CompressionFilterFactory = CompressionFilterFactory;
//# sourceMappingURL=compression-filter.js.map
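As the comments in `sendMessage` and `receiveMessage` note, this filter sits at the bottom of the stack because it is the one that frames messages. A standalone sketch of the same 5-byte frame layout (1-byte compressed flag, 4-byte big-endian length, then the payload) that `writeMessage` and `readMessage` implement:

```js
// Standalone sketch of the length-prefixed gRPC message frame used above.
function frameMessage(payload, compressed = false) {
  const output = Buffer.allocUnsafe(payload.length + 5);
  output.writeUInt8(compressed ? 1 : 0, 0);   // 1 byte: compressed flag
  output.writeUInt32BE(payload.length, 1);    // 4 bytes: payload length
  payload.copy(output, 5);                    // remaining bytes: payload
  return output;
}

function deframeMessage(frame) {
  const compressed = frame.readUInt8(0) === 1;
  const length = frame.readUInt32BE(1);
  return { compressed, payload: frame.slice(5, 5 + length) };
}

const framed = frameMessage(Buffer.from('hello'));
console.log(deframeMessage(framed)); // { compressed: false, payload: <Buffer 68 65 6c 6c 6f> }
```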

View File

@ -0,0 +1,24 @@
export declare enum Status {
OK = 0,
CANCELLED = 1,
UNKNOWN = 2,
INVALID_ARGUMENT = 3,
DEADLINE_EXCEEDED = 4,
NOT_FOUND = 5,
ALREADY_EXISTS = 6,
PERMISSION_DENIED = 7,
RESOURCE_EXHAUSTED = 8,
FAILED_PRECONDITION = 9,
ABORTED = 10,
OUT_OF_RANGE = 11,
UNIMPLEMENTED = 12,
INTERNAL = 13,
UNAVAILABLE = 14,
DATA_LOSS = 15,
UNAUTHENTICATED = 16,
}
export declare enum LogVerbosity {
DEBUG = 0,
INFO = 1,
ERROR = 2,
}

View File

@ -0,0 +1,29 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var Status;
(function (Status) {
Status[Status["OK"] = 0] = "OK";
Status[Status["CANCELLED"] = 1] = "CANCELLED";
Status[Status["UNKNOWN"] = 2] = "UNKNOWN";
Status[Status["INVALID_ARGUMENT"] = 3] = "INVALID_ARGUMENT";
Status[Status["DEADLINE_EXCEEDED"] = 4] = "DEADLINE_EXCEEDED";
Status[Status["NOT_FOUND"] = 5] = "NOT_FOUND";
Status[Status["ALREADY_EXISTS"] = 6] = "ALREADY_EXISTS";
Status[Status["PERMISSION_DENIED"] = 7] = "PERMISSION_DENIED";
Status[Status["RESOURCE_EXHAUSTED"] = 8] = "RESOURCE_EXHAUSTED";
Status[Status["FAILED_PRECONDITION"] = 9] = "FAILED_PRECONDITION";
Status[Status["ABORTED"] = 10] = "ABORTED";
Status[Status["OUT_OF_RANGE"] = 11] = "OUT_OF_RANGE";
Status[Status["UNIMPLEMENTED"] = 12] = "UNIMPLEMENTED";
Status[Status["INTERNAL"] = 13] = "INTERNAL";
Status[Status["UNAVAILABLE"] = 14] = "UNAVAILABLE";
Status[Status["DATA_LOSS"] = 15] = "DATA_LOSS";
Status[Status["UNAUTHENTICATED"] = 16] = "UNAUTHENTICATED";
})(Status = exports.Status || (exports.Status = {}));
var LogVerbosity;
(function (LogVerbosity) {
LogVerbosity[LogVerbosity["DEBUG"] = 0] = "DEBUG";
LogVerbosity[LogVerbosity["INFO"] = 1] = "INFO";
LogVerbosity[LogVerbosity["ERROR"] = 2] = "ERROR";
})(LogVerbosity = exports.LogVerbosity || (exports.LogVerbosity = {}));
//# sourceMappingURL=constants.js.map

View File

@ -0,0 +1,17 @@
import { Call } from './call-stream';
import { Http2Channel } from './channel';
import { BaseFilter, Filter, FilterFactory } from './filter';
import { Metadata } from './metadata';
export declare class DeadlineFilter extends BaseFilter implements Filter {
private readonly channel;
private readonly callStream;
private timer;
private deadline;
constructor(channel: Http2Channel, callStream: Call);
sendMetadata(metadata: Promise<Metadata>): Promise<Metadata>;
}
export declare class DeadlineFilterFactory implements FilterFactory<DeadlineFilter> {
private readonly channel;
constructor(channel: Http2Channel);
createFilter(callStream: Call): DeadlineFilter;
}

View File

@ -0,0 +1,79 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const channel_1 = require("./channel");
const constants_1 = require("./constants");
const filter_1 = require("./filter");
const units = [['m', 1], ['S', 1000], ['M', 60 * 1000], ['H', 60 * 60 * 1000]];
function getDeadline(deadline) {
const now = (new Date()).getTime();
const timeoutMs = Math.max(deadline - now, 0);
for (const [unit, factor] of units) {
const amount = timeoutMs / factor;
if (amount < 1e8) {
return String(Math.ceil(amount)) + unit;
}
}
throw new Error('Deadline is too far in the future');
}
class DeadlineFilter extends filter_1.BaseFilter {
constructor(channel, callStream) {
super();
this.channel = channel;
this.callStream = callStream;
this.timer = null;
const callDeadline = callStream.getDeadline();
if (callDeadline instanceof Date) {
this.deadline = callDeadline.getTime();
}
else {
this.deadline = callDeadline;
}
const now = (new Date()).getTime();
let timeout = this.deadline - now;
if (timeout < 0) {
timeout = 0;
}
if (this.deadline !== Infinity) {
this.timer = setTimeout(() => {
callStream.cancelWithStatus(constants_1.Status.DEADLINE_EXCEEDED, 'Deadline exceeded');
}, timeout);
callStream.on('status', () => clearTimeout(this.timer));
}
}
sendMetadata(metadata) {
if (this.deadline === Infinity) {
return metadata;
}
return new Promise((resolve, reject) => {
if (this.channel.getConnectivityState(false) ===
channel_1.ConnectivityState.READY) {
resolve(metadata);
}
else {
const handleStateChange = (newState) => {
if (newState === channel_1.ConnectivityState.READY) {
resolve(metadata);
this.channel.removeListener('connectivityStateChanged', handleStateChange);
}
};
this.channel.on('connectivityStateChanged', handleStateChange);
}
})
.then((finalMetadata) => {
const timeoutString = getDeadline(this.deadline);
finalMetadata.set('grpc-timeout', timeoutString);
return finalMetadata;
});
}
}
exports.DeadlineFilter = DeadlineFilter;
class DeadlineFilterFactory {
constructor(channel) {
this.channel = channel;
}
createFilter(callStream) {
return new DeadlineFilter(this.channel, callStream);
}
}
exports.DeadlineFilterFactory = DeadlineFilterFactory;
//# sourceMappingURL=deadline-filter.js.map
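`getDeadline` picks the smallest unit in the ladder whose value still fits in eight digits when emitting the `grpc-timeout` header. A self-contained sketch of the same conversion:

```js
// Sketch of the grpc-timeout header conversion performed by getDeadline above.
const units = [['m', 1], ['S', 1000], ['M', 60 * 1000], ['H', 60 * 60 * 1000]];

function toGrpcTimeout(deadlineMs) {
  const timeoutMs = Math.max(deadlineMs - Date.now(), 0);
  for (const [unit, factor] of units) {
    const amount = timeoutMs / factor;
    if (amount < 1e8) {
      return String(Math.ceil(amount)) + unit;
    }
  }
  throw new Error('Deadline is too far in the future');
}

console.log(toGrpcTimeout(Date.now() + 30 * 1000)); // e.g. "30000m" (30000 milliseconds)
```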

View File

@ -0,0 +1,27 @@
export interface EmitterAugmentation0<Name extends string | symbol> {
addListener(event: Name, listener: () => void): this;
emit(event: Name): boolean;
on(event: Name, listener: () => void): this;
once(event: Name, listener: () => void): this;
prependListener(event: Name, listener: () => void): this;
prependOnceListener(event: Name, listener: () => void): this;
removeListener(event: Name, listener: () => void): this;
}
export interface EmitterAugmentation1<Name extends string | symbol, Arg> {
addListener(event: Name, listener: (arg1: Arg) => void): this;
emit(event: Name, arg1: Arg): boolean;
on(event: Name, listener: (arg1: Arg) => void): this;
once(event: Name, listener: (arg1: Arg) => void): this;
prependListener(event: Name, listener: (arg1: Arg) => void): this;
prependOnceListener(event: Name, listener: (arg1: Arg) => void): this;
removeListener(event: Name, listener: (arg1: Arg) => void): this;
}
export interface EmitterAugmentation2<Name extends string | symbol, Arg1, Arg2> {
addListener(event: Name, listener: (arg1: Arg1, arg2: Arg2) => void): this;
emit(event: Name, arg1: Arg1, arg2: Arg2): boolean;
on(event: Name, listener: (arg1: Arg1, arg2: Arg2) => void): this;
once(event: Name, listener: (arg1: Arg1, arg2: Arg2) => void): this;
prependListener(event: Name, listener: (arg1: Arg1, arg2: Arg2) => void): this;
prependOnceListener(event: Name, listener: (arg1: Arg1, arg2: Arg2) => void): this;
removeListener(event: Name, listener: (arg1: Arg1, arg2: Arg2) => void): this;
}

View File

@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=events.js.map

View File

@ -0,0 +1,18 @@
/// <reference types="node" />
import { Call, StatusObject, WriteObject } from './call-stream';
import { Filter, FilterFactory } from './filter';
import { Metadata } from './metadata';
export declare class FilterStack implements Filter {
private readonly filters;
constructor(filters: Filter[]);
sendMetadata(metadata: Promise<Metadata>): any;
receiveMetadata(metadata: Promise<Metadata>): any;
sendMessage(message: Promise<WriteObject>): Promise<WriteObject>;
receiveMessage(message: Promise<Buffer>): Promise<Buffer>;
receiveTrailers(status: Promise<StatusObject>): Promise<StatusObject>;
}
export declare class FilterStackFactory implements FilterFactory<FilterStack> {
private readonly factories;
constructor(factories: Array<FilterFactory<Filter>>);
createFilter(callStream: Call): FilterStack;
}

View File

@ -0,0 +1,34 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const lodash_1 = require("lodash");
class FilterStack {
constructor(filters) {
this.filters = filters;
}
sendMetadata(metadata) {
return lodash_1.flow(lodash_1.map(this.filters, (filter) => filter.sendMetadata.bind(filter)))(metadata);
}
receiveMetadata(metadata) {
return lodash_1.flowRight(lodash_1.map(this.filters, (filter) => filter.receiveMetadata.bind(filter)))(metadata);
}
sendMessage(message) {
return lodash_1.flow(lodash_1.map(this.filters, (filter) => filter.sendMessage.bind(filter)))(message);
}
receiveMessage(message) {
return lodash_1.flowRight(lodash_1.map(this.filters, (filter) => filter.receiveMessage.bind(filter)))(message);
}
receiveTrailers(status) {
return lodash_1.flowRight(lodash_1.map(this.filters, (filter) => filter.receiveTrailers.bind(filter)))(status);
}
}
exports.FilterStack = FilterStack;
class FilterStackFactory {
constructor(factories) {
this.factories = factories;
}
createFilter(callStream) {
return new FilterStack(lodash_1.map(this.factories, (factory) => factory.createFilter(callStream)));
}
}
exports.FilterStackFactory = FilterStackFactory;
//# sourceMappingURL=filter-stack.js.map
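The stack composes filters with `flow` for outbound data and `flowRight` for inbound data, so requests pass through filters in declaration order and responses in reverse. An illustrative sketch with two toy async transforms (not real `Filter` implementations):

```js
// Toy illustration of the composition direction used by FilterStack above.
const { flow, flowRight, map } = require('lodash');

const filters = [
  { sendMessage: async (message) => `${await message}>A`,
    receiveMessage: async (message) => `A<${await message}` },
  { sendMessage: async (message) => `${await message}>B`,
    receiveMessage: async (message) => `B<${await message}` },
];

const send = flow(map(filters, (filter) => filter.sendMessage.bind(filter)));
const receive = flowRight(map(filters, (filter) => filter.receiveMessage.bind(filter)));

send(Promise.resolve('out')).then(console.log);   // "out>A>B"  (A first, then B)
receive(Promise.resolve('in')).then(console.log); // "A<B<in"   (B first, then A)
```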

View File

@ -0,0 +1,24 @@
/// <reference types="node" />
import { Call, StatusObject, WriteObject } from './call-stream';
import { Metadata } from './metadata';
/**
* Filter classes represent related per-call logic and state that is primarily
* used to modify incoming and outgoing data
*/
export interface Filter {
sendMetadata(metadata: Promise<Metadata>): Promise<Metadata>;
receiveMetadata(metadata: Promise<Metadata>): Promise<Metadata>;
sendMessage(message: Promise<WriteObject>): Promise<WriteObject>;
receiveMessage(message: Promise<Buffer>): Promise<Buffer>;
receiveTrailers(status: Promise<StatusObject>): Promise<StatusObject>;
}
export declare abstract class BaseFilter {
sendMetadata(metadata: Promise<Metadata>): Promise<Metadata>;
receiveMetadata(metadata: Promise<Metadata>): Promise<Metadata>;
sendMessage(message: Promise<WriteObject>): Promise<WriteObject>;
receiveMessage(message: Promise<Buffer>): Promise<Buffer>;
receiveTrailers(status: Promise<StatusObject>): Promise<StatusObject>;
}
export interface FilterFactory<T extends Filter> {
createFilter(callStream: Call): T;
}

View File

@ -0,0 +1,39 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
class BaseFilter {
sendMetadata(metadata) {
return __awaiter(this, void 0, void 0, function* () {
return metadata;
});
}
receiveMetadata(metadata) {
return __awaiter(this, void 0, void 0, function* () {
return metadata;
});
}
sendMessage(message) {
return __awaiter(this, void 0, void 0, function* () {
return message;
});
}
receiveMessage(message) {
return __awaiter(this, void 0, void 0, function* () {
return message;
});
}
receiveTrailers(status) {
return __awaiter(this, void 0, void 0, function* () {
return status;
});
}
}
exports.BaseFilter = BaseFilter;
//# sourceMappingURL=filter.js.map

View File

@ -0,0 +1,43 @@
/// <reference types="node" />
import { Channel } from './channel';
import { Client } from './client';
import { LogVerbosity, Status } from './constants';
import { loadPackageDefinition, makeClientConstructor } from './make-client';
import { Metadata } from './metadata';
import { StatusBuilder } from './status-builder';
export interface OAuth2Client {
getRequestMetadata: (url: string, callback: (err: Error | null, headers?: {
Authorization: string;
}) => void) => void;
}
/**** Client Credentials ****/
export declare const credentials: {
[key: string]: Function;
};
/**** Metadata ****/
export { Metadata };
/**** Constants ****/
export { LogVerbosity as logVerbosity, Status as status };
/**** Client ****/
export { Client, loadPackageDefinition, makeClientConstructor, makeClientConstructor as makeGenericClientConstructor, Channel };
/**
* Close a Client object.
* @param client The client to close.
*/
export declare const closeClient: (client: Client) => void;
export declare const waitForClientReady: (client: Client, deadline: number | Date, callback: (error?: Error | undefined) => void) => void;
/**** Unimplemented function stubs ****/
export declare const loadObject: (value: any, options: any) => never;
export declare const load: (filename: any, format: any, options: any) => never;
export declare const setLogger: (logger: Partial<Console>) => void;
export declare const setLogVerbosity: (verbosity: LogVerbosity) => void;
export declare const Server: (options: any) => never;
export declare const ServerCredentials: {
createSsl: (rootCerts: any, keyCertPairs: any, checkClientCertificate: any) => never;
createInsecure: () => never;
};
export declare const getClientChannel: (client: Client) => any;
export { StatusBuilder };
export declare const ListenerBuilder: () => never;
export declare const InterceptorBuilder: () => never;
export declare const InterceptingCall: () => never;

View File

@ -0,0 +1,121 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const semver = require("semver");
const call_credentials_1 = require("./call-credentials");
const channel_credentials_1 = require("./channel-credentials");
const client_1 = require("./client");
exports.Client = client_1.Client;
const constants_1 = require("./constants");
exports.logVerbosity = constants_1.LogVerbosity;
exports.status = constants_1.Status;
const logging = require("./logging");
const make_client_1 = require("./make-client");
exports.loadPackageDefinition = make_client_1.loadPackageDefinition;
exports.makeClientConstructor = make_client_1.makeClientConstructor;
exports.makeGenericClientConstructor = make_client_1.makeClientConstructor;
const metadata_1 = require("./metadata");
exports.Metadata = metadata_1.Metadata;
const status_builder_1 = require("./status-builder");
exports.StatusBuilder = status_builder_1.StatusBuilder;
const supportedNodeVersions = '^8.11.2 || >=9.4';
if (!semver.satisfies(process.version, supportedNodeVersions)) {
throw new Error(`@grpc/grpc-js only works on Node ${supportedNodeVersions}`);
}
function mixin(...sources) {
const result = {};
for (const source of sources) {
for (const propName of Object.getOwnPropertyNames(source)) {
const property = source[propName]; // tslint:disable-line no-any
if (typeof property === 'function') {
result[propName] = property;
}
}
}
return result;
}
/**** Client Credentials ****/
// The mixin above copies only function-valued own properties, which is what we want
exports.credentials = mixin({
/**
* Create a gRPC credential from a Google credential object.
* @param googleCredentials The authentication client to use.
* @return The resulting CallCredentials object.
*/
createFromGoogleCredential: (googleCredentials) => {
return call_credentials_1.CallCredentials.createFromMetadataGenerator((options, callback) => {
googleCredentials.getRequestMetadata(options.service_url, (err, headers) => {
if (err) {
callback(err);
return;
}
const metadata = new metadata_1.Metadata();
metadata.add('authorization', headers.Authorization);
callback(null, metadata);
});
});
},
/**
* Combine a ChannelCredentials with any number of CallCredentials into a
* single ChannelCredentials object.
* @param channelCredentials The ChannelCredentials object.
* @param callCredentials Any number of CallCredentials objects.
* @return The resulting ChannelCredentials object.
*/
combineChannelCredentials: (channelCredentials, ...callCredentials) => {
return callCredentials.reduce((acc, other) => acc.compose(other), channelCredentials);
},
/**
* Combine any number of CallCredentials into a single CallCredentials
* object.
* @param first The first CallCredentials object.
* @param additional Any number of additional CallCredentials objects.
* @return The resulting CallCredentials object.
*/
combineCallCredentials: (first, ...additional) => {
return additional.reduce((acc, other) => acc.compose(other), first);
}
}, channel_credentials_1.ChannelCredentials, call_credentials_1.CallCredentials);
/**
* Close a Client object.
* @param client The client to close.
*/
exports.closeClient = (client) => client.close();
exports.waitForClientReady = (client, deadline, callback) => client.waitForReady(deadline, callback);
/**** Unimplemented function stubs ****/
/* tslint:disable:no-any variable-name */
exports.loadObject = (value, options) => {
throw new Error('Not available in this library. Use @grpc/proto-loader and loadPackageDefinition instead');
};
exports.load = (filename, format, options) => {
throw new Error('Not available in this library. Use @grpc/proto-loader and loadPackageDefinition instead');
};
exports.setLogger = (logger) => {
logging.setLogger(logger);
};
exports.setLogVerbosity = (verbosity) => {
logging.setLoggerVerbosity(verbosity);
};
exports.Server = (options) => {
throw new Error('Not yet implemented');
};
exports.ServerCredentials = {
createSsl: (rootCerts, keyCertPairs, checkClientCertificate) => {
throw new Error('Not yet implemented');
},
createInsecure: () => {
throw new Error('Not yet implemented');
}
};
exports.getClientChannel = (client) => {
return client_1.Client.prototype.getChannel.call(client);
};
exports.ListenerBuilder = () => {
throw new Error('Not yet implemented');
};
exports.InterceptorBuilder = () => {
throw new Error('Not yet implemented');
};
exports.InterceptingCall = () => {
throw new Error('Not yet implemented');
};
//# sourceMappingURL=index.js.map
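A hedged sketch of composing credentials with the helpers exported above; the CA certificate path and bearer token are placeholders:

```js
// Sketch of combining channel and call credentials via the exported helpers.
// 'ca.pem' and the bearer token are placeholder values.
const fs = require('fs');
const grpc = require('@grpc/grpc-js');

const channelCreds = grpc.credentials.createSsl(fs.readFileSync('ca.pem'));
const callCreds = grpc.credentials.createFromMetadataGenerator((options, callback) => {
  const metadata = new grpc.Metadata();
  metadata.add('authorization', 'Bearer some-token');
  callback(null, metadata);
});

// The combined object is passed to a client constructor in place of plain
// channel credentials; call credentials then apply to every call.
const combined = grpc.credentials.combineChannelCredentials(channelCreds, callCreds);
```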

View File

@ -0,0 +1,6 @@
/// <reference types="node" />
import { LogVerbosity } from './constants';
export declare const getLogger: () => Partial<Console>;
export declare const setLogger: (logger: Partial<Console>) => void;
export declare const setLoggerVerbosity: (verbosity: LogVerbosity) => void;
export declare const log: (severity: LogVerbosity, ...args: any[]) => void;

View File

@ -0,0 +1,21 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const constants_1 = require("./constants");
let _logger = console;
let _logVerbosity = constants_1.LogVerbosity.DEBUG;
exports.getLogger = () => {
return _logger;
};
exports.setLogger = (logger) => {
_logger = logger;
};
exports.setLoggerVerbosity = (verbosity) => {
_logVerbosity = verbosity;
};
// tslint:disable-next-line no-any
exports.log = (severity, ...args) => {
if (severity >= _logVerbosity && typeof _logger.error === 'function') {
_logger.error(...args);
}
};
//# sourceMappingURL=logging.js.map
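Everything at or above the configured verbosity is routed through the logger's `error` method. A small sketch of swapping the logger and verbosity through the public exports:

```js
// Redirecting the library's internal logging through the public API.
const grpc = require('@grpc/grpc-js');

grpc.setLogVerbosity(grpc.logVerbosity.INFO);
grpc.setLogger({
  error: (...args) => console.warn('[grpc]', ...args),
});
```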

View File

@ -0,0 +1,57 @@
/// <reference types="node" />
import { ChannelCredentials } from './channel-credentials';
import { ChannelOptions } from './channel-options';
import { Client } from './client';
export interface Serialize<T> {
(value: T): Buffer;
}
export interface Deserialize<T> {
(bytes: Buffer): T;
}
export interface MethodDefinition<RequestType, ResponseType> {
path: string;
requestStream: boolean;
responseStream: boolean;
requestSerialize: Serialize<RequestType>;
responseSerialize: Serialize<ResponseType>;
requestDeserialize: Deserialize<RequestType>;
responseDeserialize: Deserialize<ResponseType>;
originalName?: string;
}
export interface ServiceDefinition {
[index: string]: MethodDefinition<object, object>;
}
export interface PackageDefinition {
[index: string]: ServiceDefinition;
}
export interface ServiceClient extends Client {
[methodName: string]: Function;
}
export interface ServiceClientConstructor {
new (address: string, credentials: ChannelCredentials, options?: Partial<ChannelOptions>): ServiceClient;
service: ServiceDefinition;
}
/**
* Creates a constructor for a client with the given methods, as specified in
* the methods argument. The resulting class will have an instance method for
* each method in the service, which is a partial application of one of the
* [Client]{@link grpc.Client} request methods, depending on `requestSerialize`
* and `responseSerialize`, with the `method`, `serialize`, and `deserialize`
* arguments predefined.
* @param methods An object mapping method names to
* method attributes
* @param serviceName The fully qualified name of the service
* @param classOptions An options object.
* @return New client constructor, which is a subclass of
* {@link grpc.Client}, and has the same arguments as that constructor.
*/
export declare function makeClientConstructor(methods: ServiceDefinition, serviceName: string, classOptions?: {}): ServiceClientConstructor;
export declare type GrpcObject = {
[index: string]: GrpcObject | ServiceClientConstructor;
};
/**
* Load a gRPC package definition as a gRPC object hierarchy.
* @param packageDef The package definition object.
* @return The resulting gRPC object.
*/
export declare function loadPackageDefinition(packageDef: PackageDefinition): GrpcObject;

View File

@ -0,0 +1,98 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const _ = require("lodash");
const client_1 = require("./client");
/**
* Map with short names for each of the requester maker functions. Used in
* makeClientConstructor
* @private
*/
const requesterFuncs = {
unary: client_1.Client.prototype.makeUnaryRequest,
server_stream: client_1.Client.prototype.makeServerStreamRequest,
client_stream: client_1.Client.prototype.makeClientStreamRequest,
bidi: client_1.Client.prototype.makeBidiStreamRequest
};
/**
* Creates a constructor for a client with the given methods, as specified in
* the methods argument. The resulting class will have an instance method for
* each method in the service, which is a partial application of one of the
* [Client]{@link grpc.Client} request methods, depending on `requestSerialize`
* and `responseSerialize`, with the `method`, `serialize`, and `deserialize`
* arguments predefined.
* @param methods An object mapping method names to
* method attributes
* @param serviceName The fully qualified name of the service
* @param classOptions An options object.
* @return New client constructor, which is a subclass of
* {@link grpc.Client}, and has the same arguments as that constructor.
*/
function makeClientConstructor(methods, serviceName, classOptions) {
if (!classOptions) {
classOptions = {};
}
class ServiceClientImpl extends client_1.Client {
}
_.each(methods, (attrs, name) => {
let methodType;
// TODO(murgatroid99): Verify that we don't need this anymore
if (_.startsWith(name, '$')) {
throw new Error('Method names cannot start with $');
}
if (attrs.requestStream) {
if (attrs.responseStream) {
methodType = 'bidi';
}
else {
methodType = 'client_stream';
}
}
else {
if (attrs.responseStream) {
methodType = 'server_stream';
}
else {
methodType = 'unary';
}
}
const serialize = attrs.requestSerialize;
const deserialize = attrs.responseDeserialize;
const methodFunc = _.partial(requesterFuncs[methodType], attrs.path, serialize, deserialize);
ServiceClientImpl.prototype[name] = methodFunc;
// Associate all provided attributes with the method
_.assign(ServiceClientImpl.prototype[name], attrs);
if (attrs.originalName) {
ServiceClientImpl.prototype[attrs.originalName] =
ServiceClientImpl.prototype[name];
}
});
ServiceClientImpl.service = methods;
return ServiceClientImpl;
}
exports.makeClientConstructor = makeClientConstructor;
/**
* Load a gRPC package definition as a gRPC object hierarchy.
* @param packageDef The package definition object.
* @return The resulting gRPC object.
*/
function loadPackageDefinition(packageDef) {
const result = {};
for (const serviceFqn in packageDef) {
if (packageDef.hasOwnProperty(serviceFqn)) {
const service = packageDef[serviceFqn];
const nameComponents = serviceFqn.split('.');
const serviceName = nameComponents[nameComponents.length - 1];
let current = result;
for (const packageName of nameComponents.slice(0, -1)) {
if (!current[packageName]) {
current[packageName] = {};
}
current = current[packageName];
}
current[serviceName] = makeClientConstructor(service, serviceName, {});
}
}
return result;
}
exports.loadPackageDefinition = loadPackageDefinition;
//# sourceMappingURL=make-client.js.map
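A sketch of building a client class by hand with `makeClientConstructor`; the `Echo` service definition and JSON serializers are hypothetical stand-ins for what generated code, or `@grpc/proto-loader` plus `loadPackageDefinition`, would normally provide:

```js
// Hand-built ServiceDefinition (hypothetical Echo service, JSON serializers).
const grpc = require('@grpc/grpc-js');

const echoService = {
  echo: {
    path: '/echo.Echo/Echo',
    requestStream: false,
    responseStream: false,
    requestSerialize: (value) => Buffer.from(JSON.stringify(value)),
    requestDeserialize: (bytes) => JSON.parse(bytes.toString()),
    responseSerialize: (value) => Buffer.from(JSON.stringify(value)),
    responseDeserialize: (bytes) => JSON.parse(bytes.toString()),
  },
};

const EchoClient = grpc.makeClientConstructor(echoService, 'Echo');
const client = new EchoClient('localhost:50051', grpc.credentials.createInsecure());

// The generated instance method is a partial application of makeUnaryRequest.
client.echo({ message: 'hi' }, (err, response) => {
  if (err) {
    console.error(err.message);
  } else {
    console.log(response);
  }
});
```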

View File

@ -0,0 +1,12 @@
import { Call } from './call-stream';
import { StatusObject } from './call-stream';
import { Channel } from './channel';
import { BaseFilter, Filter, FilterFactory } from './filter';
export declare class MetadataStatusFilter extends BaseFilter implements Filter {
receiveTrailers(status: Promise<StatusObject>): Promise<StatusObject>;
}
export declare class MetadataStatusFilterFactory implements FilterFactory<MetadataStatusFilter> {
private readonly channel;
constructor(channel: Channel);
createFilter(callStream: Call): MetadataStatusFilter;
}

View File

@ -0,0 +1,48 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
const constants_1 = require("./constants");
const filter_1 = require("./filter");
class MetadataStatusFilter extends filter_1.BaseFilter {
receiveTrailers(status) {
return __awaiter(this, void 0, void 0, function* () {
// tslint:disable-next-line:prefer-const
let { code, details, metadata } = yield status;
if (code !== constants_1.Status.UNKNOWN) {
// we already have a known status, so don't assign a new one.
return { code, details, metadata };
}
const metadataMap = metadata.getMap();
if (typeof metadataMap['grpc-status'] === 'string') {
const receivedCode = Number(metadataMap['grpc-status']);
if (receivedCode in constants_1.Status) {
code = receivedCode;
}
metadata.remove('grpc-status');
}
if (typeof metadataMap['grpc-message'] === 'string') {
details = decodeURI(metadataMap['grpc-message']);
metadata.remove('grpc-message');
}
return { code, details, metadata };
});
}
}
exports.MetadataStatusFilter = MetadataStatusFilter;
class MetadataStatusFilterFactory {
constructor(channel) {
this.channel = channel;
}
createFilter(callStream) {
return new MetadataStatusFilter();
}
}
exports.MetadataStatusFilterFactory = MetadataStatusFilterFactory;
//# sourceMappingURL=metadata-status-filter.js.map

View File

@ -0,0 +1,71 @@
/// <reference types="node" />
import * as http2 from 'http2';
export declare type MetadataValue = string | Buffer;
export interface MetadataObject {
[key: string]: MetadataValue[];
}
/**
* A class for storing metadata. Keys are normalized to lowercase ASCII.
*/
export declare class Metadata {
protected internalRepr: MetadataObject;
/**
* Sets the given value for the given key by replacing any other values
* associated with that key. Normalizes the key.
     * @param key The key whose value should be set.
* @param value The value to set. Must be a buffer if and only
* if the normalized key ends with '-bin'.
*/
set(key: string, value: MetadataValue): void;
/**
* Adds the given value for the given key by appending to a list of previous
* values associated with that key. Normalizes the key.
* @param key The key for which a new value should be appended.
* @param value The value to add. Must be a buffer if and only
* if the normalized key ends with '-bin'.
*/
add(key: string, value: MetadataValue): void;
/**
* Removes the given key and any associated values. Normalizes the key.
* @param key The key whose values should be removed.
*/
remove(key: string): void;
/**
* Gets a list of all values associated with the key. Normalizes the key.
* @param key The key whose value should be retrieved.
* @return A list of values associated with the given key.
*/
get(key: string): MetadataValue[];
/**
* Gets a plain object mapping each key to the first value associated with it.
* This reflects the most common way that people will want to see metadata.
* @return A key/value mapping of the metadata.
*/
getMap(): {
[key: string]: MetadataValue;
};
/**
* Clones the metadata object.
* @return The newly cloned object.
*/
clone(): Metadata;
/**
* Merges all key-value pairs from a given Metadata object into this one.
* If both this object and the given object have values in the same key,
* values from the other Metadata object will be appended to this object's
* values.
* @param other A Metadata object.
*/
merge(other: Metadata): void;
/**
* Creates an OutgoingHttpHeaders object that can be used with the http2 API.
*/
toHttp2Headers(): http2.OutgoingHttpHeaders;
private _getCoreRepresentation();
/**
     * Returns a new Metadata object based on fields in a given IncomingHttpHeaders
* object.
* @param headers An IncomingHttpHeaders object.
*/
static fromHttp2Headers(headers: http2.IncomingHttpHeaders): Metadata;
}

View File

@ -0,0 +1,214 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const lodash_1 = require("lodash");
const LEGAL_KEY_REGEX = /^[0-9a-z_.-]+$/;
const LEGAL_NON_BINARY_VALUE_REGEX = /^[ -~]*$/;
function cloneMetadataObject(repr) {
const result = {};
lodash_1.forOwn(repr, (value, key) => {
result[key] = value.map(v => {
if (v instanceof Buffer) {
return Buffer.from(v);
}
else {
return v;
}
});
});
return result;
}
function isLegalKey(key) {
return LEGAL_KEY_REGEX.test(key);
}
function isLegalNonBinaryValue(value) {
return LEGAL_NON_BINARY_VALUE_REGEX.test(value);
}
function isBinaryKey(key) {
return key.endsWith('-bin');
}
function normalizeKey(key) {
return key.toLowerCase();
}
function validate(key, value) {
if (!isLegalKey(key)) {
throw new Error('Metadata key "' + key + '" contains illegal characters');
}
if (value != null) {
if (isBinaryKey(key)) {
if (!(value instanceof Buffer)) {
throw new Error('keys that end with \'-bin\' must have Buffer values');
}
}
else {
if (value instanceof Buffer) {
throw new Error('keys that don\'t end with \'-bin\' must have String values');
}
if (!isLegalNonBinaryValue(value)) {
throw new Error('Metadata string value "' + value +
'" contains illegal characters');
}
}
}
}
/**
* A class for storing metadata. Keys are normalized to lowercase ASCII.
*/
class Metadata {
constructor() {
this.internalRepr = {};
}
/**
* Sets the given value for the given key by replacing any other values
* associated with that key. Normalizes the key.
     * @param key The key whose value should be set.
* @param value The value to set. Must be a buffer if and only
* if the normalized key ends with '-bin'.
*/
set(key, value) {
key = normalizeKey(key);
validate(key, value);
this.internalRepr[key] = [value];
}
/**
* Adds the given value for the given key by appending to a list of previous
* values associated with that key. Normalizes the key.
* @param key The key for which a new value should be appended.
* @param value The value to add. Must be a buffer if and only
* if the normalized key ends with '-bin'.
*/
add(key, value) {
key = normalizeKey(key);
validate(key, value);
if (!this.internalRepr[key]) {
this.internalRepr[key] = [value];
}
else {
this.internalRepr[key].push(value);
}
}
/**
* Removes the given key and any associated values. Normalizes the key.
* @param key The key whose values should be removed.
*/
remove(key) {
key = normalizeKey(key);
validate(key);
if (Object.prototype.hasOwnProperty.call(this.internalRepr, key)) {
delete this.internalRepr[key];
}
}
/**
* Gets a list of all values associated with the key. Normalizes the key.
* @param key The key whose value should be retrieved.
* @return A list of values associated with the given key.
*/
get(key) {
key = normalizeKey(key);
validate(key);
if (Object.prototype.hasOwnProperty.call(this.internalRepr, key)) {
return this.internalRepr[key];
}
else {
return [];
}
}
/**
* Gets a plain object mapping each key to the first value associated with it.
* This reflects the most common way that people will want to see metadata.
* @return A key/value mapping of the metadata.
*/
getMap() {
const result = {};
lodash_1.forOwn(this.internalRepr, (values, key) => {
if (values.length > 0) {
const v = values[0];
result[key] = v instanceof Buffer ? v.slice() : v;
}
});
return result;
}
/**
* Clones the metadata object.
* @return The newly cloned object.
*/
clone() {
const newMetadata = new Metadata();
newMetadata.internalRepr = cloneMetadataObject(this.internalRepr);
return newMetadata;
}
/**
* Merges all key-value pairs from a given Metadata object into this one.
* If both this object and the given object have values in the same key,
* values from the other Metadata object will be appended to this object's
* values.
* @param other A Metadata object.
*/
merge(other) {
lodash_1.forOwn(other.internalRepr, (values, key) => {
this.internalRepr[key] = (this.internalRepr[key] || []).concat(values);
});
}
/**
* Creates an OutgoingHttpHeaders object that can be used with the http2 API.
*/
toHttp2Headers() {
// NOTE: Node <8.9 formats http2 headers incorrectly.
const result = {};
lodash_1.forOwn(this.internalRepr, (values, key) => {
            // We assume that the user's interaction with this object goes
            // through its public API (i.e. keys and values are already validated).
result[key] = values.map((value) => {
if (value instanceof Buffer) {
return value.toString('base64');
}
else {
return value;
}
});
});
return result;
}
// For compatibility with the other Metadata implementation
_getCoreRepresentation() {
return this.internalRepr;
}
/**
     * Returns a new Metadata object based on fields in a given IncomingHttpHeaders
* object.
* @param headers An IncomingHttpHeaders object.
*/
static fromHttp2Headers(headers) {
const result = new Metadata();
lodash_1.forOwn(headers, (values, key) => {
// Reserved headers (beginning with `:`) are not valid keys.
if (key.charAt(0) === ':') {
return;
}
if (isBinaryKey(key)) {
if (Array.isArray(values)) {
values.forEach((value) => {
result.add(key, Buffer.from(value, 'base64'));
});
}
else if (values !== undefined) {
values.split(',').forEach(v => {
result.add(key, Buffer.from(v.trim(), 'base64'));
});
}
}
else {
if (Array.isArray(values)) {
values.forEach((value) => {
result.add(key, value);
});
}
else if (values !== undefined) {
values.split(',').forEach(v => result.add(key, v.trim()));
}
}
});
return result;
}
}
exports.Metadata = Metadata;
//# sourceMappingURL=metadata.js.map
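A short sketch of the rules `validate` enforces above: keys are normalized to lowercase, and only keys ending in `-bin` may (and must) carry `Buffer` values:

```js
// Key normalization and '-bin' rules enforced by the Metadata class.
const { Metadata } = require('@grpc/grpc-js');

const md = new Metadata();
md.set('X-Custom-Header', 'value1');          // stored under 'x-custom-header'
md.add('x-custom-header', 'value2');          // appended to the same key
md.set('trace-bin', Buffer.from([1, 2, 3]));  // '-bin' keys require Buffer values

console.log(md.get('x-custom-header'));       // [ 'value1', 'value2' ]
console.log(md.getMap());                     // first value per key
// md.set('trace-bin', 'oops') would throw: non-Buffer value on a '-bin' key.
```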

View File

@ -0,0 +1,37 @@
/// <reference types="node" />
import { Duplex, Readable, Writable } from 'stream';
import { EmitterAugmentation1 } from './events';
export declare type WriteCallback = (error: Error | null | undefined) => void;
export interface IntermediateObjectReadable<T> extends Readable {
read(size?: number): any & T;
}
export declare type ObjectReadable<T> = {
read(size?: number): T;
} & EmitterAugmentation1<'data', T> & IntermediateObjectReadable<T>;
export interface IntermediateObjectWritable<T> extends Writable {
_write(chunk: any & T, encoding: string, callback: Function): void;
write(chunk: any & T, cb?: WriteCallback): boolean;
write(chunk: any & T, encoding?: any, cb?: WriteCallback): boolean;
setDefaultEncoding(encoding: string): this;
end(): void;
end(chunk: any & T, cb?: Function): void;
end(chunk: any & T, encoding?: any, cb?: Function): void;
}
export interface ObjectWritable<T> extends IntermediateObjectWritable<T> {
_write(chunk: T, encoding: string, callback: Function): void;
write(chunk: T, cb?: Function): boolean;
write(chunk: T, encoding?: any, cb?: Function): boolean;
setDefaultEncoding(encoding: string): this;
end(): void;
end(chunk: T, cb?: Function): void;
end(chunk: T, encoding?: any, cb?: Function): void;
}
export declare type ObjectDuplex<T, U> = {
read(size?: number): U;
_write(chunk: T, encoding: string, callback: Function): void;
write(chunk: T, cb?: Function): boolean;
write(chunk: T, encoding?: any, cb?: Function): boolean;
end(): void;
end(chunk: T, cb?: Function): void;
end(chunk: T, encoding?: any, cb?: Function): void;
} & Duplex & ObjectWritable<T> & ObjectReadable<U>;

View File

@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=object-stream.js.map

View File

@ -0,0 +1,28 @@
import { StatusObject } from './call-stream';
import { Status } from './constants';
import { Metadata } from './metadata';
/**
* A builder for gRPC status objects.
*/
export declare class StatusBuilder {
private code;
private details;
private metadata;
constructor();
/**
* Adds a status code to the builder.
*/
withCode(code: Status): this;
/**
* Adds details to the builder.
*/
withDetails(details: string): this;
/**
* Adds metadata to the builder.
*/
withMetadata(metadata: Metadata): this;
/**
* Builds the status object.
*/
build(): Partial<StatusObject>;
}

View File

@ -0,0 +1,51 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* A builder for gRPC status objects.
*/
class StatusBuilder {
constructor() {
this.code = null;
this.details = null;
this.metadata = null;
}
/**
* Adds a status code to the builder.
*/
withCode(code) {
this.code = code;
return this;
}
/**
* Adds details to the builder.
*/
withDetails(details) {
this.details = details;
return this;
}
/**
* Adds metadata to the builder.
*/
withMetadata(metadata) {
this.metadata = metadata;
return this;
}
/**
* Builds the status object.
*/
build() {
const status = {};
if (this.code !== null) {
status.code = this.code;
}
if (this.details !== null) {
status.details = this.details;
}
if (this.metadata !== null) {
status.metadata = this.metadata;
}
return status;
}
}
exports.StatusBuilder = StatusBuilder;
//# sourceMappingURL=status-builder.js.map
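A small sketch of assembling a partial status object with the builder, using the `Status` codes exported from `constants`:

```js
// Building a partial StatusObject with the fluent StatusBuilder API.
const grpc = require('@grpc/grpc-js');

const metadata = new grpc.Metadata();
metadata.set('retry-after', '5');

const status = new grpc.StatusBuilder()
    .withCode(grpc.status.UNAVAILABLE)
    .withDetails('backend is restarting')
    .withMetadata(metadata)
    .build();

console.log(status); // { code: 14, details: 'backend is restarting', metadata: Metadata {...} }
```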

View File

@ -0,0 +1,33 @@
/// <reference types="node" />
import { EventEmitter } from 'events';
import * as http2 from 'http2';
import * as url from 'url';
import { Call, Http2CallStream } from './call-stream';
import { ChannelOptions } from './channel-options';
import { Metadata } from './metadata';
export interface SubChannel extends EventEmitter {
/**
* Attach a call stream to this subchannel's connection to start it
* @param headers The headers to start the stream with
* @param callStream The stream to start
*/
startCallStream(metadata: Metadata, callStream: Call): void;
close(): void;
}
export declare class Http2SubChannel extends EventEmitter implements SubChannel {
private session;
private refCount;
private userAgent;
private keepaliveTimeMs;
private keepaliveTimeoutMs;
private keepaliveIntervalId;
private keepaliveTimeoutId;
constructor(target: url.URL, connectionOptions: http2.SecureClientSessionOptions, userAgent: string, channelArgs: Partial<ChannelOptions>);
private ref();
private unref();
private sendPing();
private startKeepalivePings();
private stopKeepalivePings();
startCallStream(metadata: Metadata, callStream: Http2CallStream): void;
close(): void;
}

View File

@ -0,0 +1,96 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const events_1 = require("events");
const http2 = require("http2");
const { HTTP2_HEADER_AUTHORITY, HTTP2_HEADER_CONTENT_TYPE, HTTP2_HEADER_METHOD, HTTP2_HEADER_PATH, HTTP2_HEADER_TE, HTTP2_HEADER_USER_AGENT } = http2.constants;
/* setInterval and setTimeout only accept signed 32 bit integers. JS doesn't
* have a constant for the max signed 32 bit integer, so this is a simple way
* to calculate it */
const KEEPALIVE_TIME_MS = ~(1 << 31);
const KEEPALIVE_TIMEOUT_MS = 20000;
class Http2SubChannel extends events_1.EventEmitter {
constructor(target, connectionOptions, userAgent, channelArgs) {
super();
this.refCount = 0;
this.keepaliveTimeMs = KEEPALIVE_TIME_MS;
this.keepaliveTimeoutMs = KEEPALIVE_TIMEOUT_MS;
this.session = http2.connect(target, connectionOptions);
this.session.on('connect', () => {
this.emit('connect');
});
this.session.on('close', () => {
this.stopKeepalivePings();
this.emit('close');
});
this.session.on('error', () => {
this.stopKeepalivePings();
this.emit('close');
});
this.userAgent = userAgent;
if (channelArgs['grpc.keepalive_time_ms']) {
this.keepaliveTimeMs = channelArgs['grpc.keepalive_time_ms'];
}
if (channelArgs['grpc.keepalive_timeout_ms']) {
this.keepaliveTimeoutMs = channelArgs['grpc.keepalive_timeout_ms'];
}
this.keepaliveIntervalId = setTimeout(() => { }, 0);
clearTimeout(this.keepaliveIntervalId);
this.keepaliveTimeoutId = setTimeout(() => { }, 0);
clearTimeout(this.keepaliveTimeoutId);
}
ref() {
if (this.refCount === 0) {
this.session.ref();
this.startKeepalivePings();
}
this.refCount += 1;
}
unref() {
this.refCount -= 1;
if (this.refCount === 0) {
this.session.unref();
this.stopKeepalivePings();
}
}
sendPing() {
this.keepaliveTimeoutId = setTimeout(() => {
this.emit('close');
}, this.keepaliveTimeoutMs);
this.session.ping((err, duration, payload) => {
clearTimeout(this.keepaliveTimeoutId);
});
}
/* TODO(murgatroid99): refactor subchannels so that keepalives can be handled
* per subchannel */
startKeepalivePings() {
this.keepaliveIntervalId = setInterval(() => {
this.sendPing();
}, this.keepaliveTimeMs);
this.sendPing();
}
stopKeepalivePings() {
clearInterval(this.keepaliveIntervalId);
clearTimeout(this.keepaliveTimeoutId);
}
// Prerequisite: this subchannel is connected
startCallStream(metadata, callStream) {
const headers = metadata.toHttp2Headers();
headers[HTTP2_HEADER_AUTHORITY] = callStream.getHost();
headers[HTTP2_HEADER_USER_AGENT] = this.userAgent;
headers[HTTP2_HEADER_CONTENT_TYPE] = 'application/grpc';
headers[HTTP2_HEADER_METHOD] = 'POST';
headers[HTTP2_HEADER_PATH] = callStream.getMethod();
headers[HTTP2_HEADER_TE] = 'trailers';
const http2Stream = this.session.request(headers);
this.ref();
http2Stream.on('close', () => {
this.unref();
});
callStream.attachHttp2Stream(http2Stream);
}
close() {
this.session.close();
}
}
exports.Http2SubChannel = Http2SubChannel;
//# sourceMappingURL=subchannel.js.map

79
express-server/node_modules/@grpc/grpc-js/package.json generated vendored Normal file
View File

@ -0,0 +1,79 @@
{
"_from": "@grpc/grpc-js@^0.3.0",
"_id": "@grpc/grpc-js@0.3.2",
"_inBundle": false,
"_integrity": "sha512-vvcC4EZwS2kzEuwe3zmExi20YM1tqO+L/xdpzInze0WnRfhwbssDfLtMfAyAPQaL2CD8dRZLCOVLbsSdF1KVjA==",
"_location": "/@grpc/grpc-js",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "@grpc/grpc-js@^0.3.0",
"name": "@grpc/grpc-js",
"escapedName": "@grpc%2fgrpc-js",
"scope": "@grpc",
"rawSpec": "^0.3.0",
"saveSpec": null,
"fetchSpec": "^0.3.0"
},
"_requiredBy": [
"/google-gax"
],
"_resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-0.3.2.tgz",
"_shasum": "8adcf22154bfd4a0903296d6656420c2ff86e388",
"_spec": "@grpc/grpc-js@^0.3.0",
"_where": "D:\\Desktop\\Git\\Firebase\\SmartShopperFirebase\\node_modules\\google-gax",
"author": {
"name": "Google Inc."
},
"bundleDependencies": false,
"contributors": [
{
"name": "Google Inc."
}
],
"dependencies": {
"lodash": "^4.17.4",
"semver": "^5.5.0"
},
"deprecated": false,
"description": "gRPC Library for Node - pure JS implementation",
"devDependencies": {
"@types/lodash": "^4.14.108",
"@types/mocha": "^2.2.43",
"@types/node": "^10.5.4",
"clang-format": "^1.0.55",
"gts": "^0.5.1",
"typescript": "~2.7.0"
},
"engines": {
"node": "^8.11.2 || >=9.4"
},
"files": [
"build/src/*.{js,d.ts}"
],
"homepage": "https://grpc.io/",
"keywords": [],
"license": "Apache-2.0",
"main": "build/src/index.js",
"name": "@grpc/grpc-js",
"repository": {
"type": "git",
"url": "https://github.com/grpc/grpc-node/tree/master/packages/grpc-js-core"
},
"scripts": {
"build": "npm run compile",
"check": "gts check",
"clean": "gts clean",
"compile": "tsc -p .",
"fix": "gts fix",
"format": "clang-format -i -style=\"{Language: JavaScript, BasedOnStyle: Google, ColumnLimit: 80}\" src/*.ts test/*.ts",
"lint": "tslint -c node_modules/google-ts-style/tslint.json -p . -t codeFrame --type-check",
"posttest": "npm run check",
"prepare": "npm run compile",
"pretest": "npm run compile",
"test": "gulp test"
},
"types": "build/src/index.d.ts",
"version": "0.3.2"
}

View File

@ -0,0 +1,51 @@
# gRPC Protobuf Loader
A utility package for loading `.proto` files for use with gRPC, using the latest Protobuf.js package.
## Installation
```sh
npm install @grpc/proto-loader
```
## Usage
```js
const protoLoader = require('@grpc/proto-loader');
const grpcLibrary = require('grpc');
// OR
const grpcLibrary = require('@grpc/grpc-js');
protoLoader.load(protoFileName, options).then(packageDefinition => {
const packageObject = grpcLibrary.loadPackageDefinition(packageDefinition);
});
// OR
const packageDefinition = protoLoader.loadSync(protoFileName, options);
const packageObject = grpcLibrary.loadPackageDefinition(packageDefinition);
```
The options parameter is an object that can have the following optional properties:
| Field name | Valid values | Description
|------------|--------------|------------
| `keepCase` | `true` or `false` | Preserve field names. The default is to change them to camel case.
| `longs` | `String` or `Number` | The type to use to represent `long` values. Defaults to a `Long` object type.
| `enums` | `String` | The type to use to represent `enum` values. Defaults to the numeric value.
| `bytes` | `Array` or `String` | The type to use to represent `bytes` values. Defaults to `Buffer`.
| `defaults` | `true` or `false` | Set default values on output objects. Defaults to `false`.
| `arrays` | `true` or `false` | Set empty arrays for missing array values even if `defaults` is `false`. Defaults to `false`.
| `objects` | `true` or `false` | Set empty objects for missing object values even if `defaults` is `false`. Defaults to `false`.
| `oneofs` | `true` or `false` | Set virtual oneof properties to the present field's name. Defaults to `false`.
| `includeDirs` | An array of strings | A list of search paths for imported `.proto` files.
The following options object closely approximates the existing behavior of `grpc.load`:
```js
const options = {
keepCase: true,
longs: String,
enums: String,
defaults: true,
oneofs: true
}
```
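For example, a minimal sketch combining `loadSync` with the `options` object above (the `helloworld.proto` file name and the `helloworld` package used here are placeholders):
```js
const protoLoader = require('@grpc/proto-loader');
const grpcLibrary = require('@grpc/grpc-js');
// Load the definition synchronously, using the grpc.load-like options shown above.
const packageDefinition = protoLoader.loadSync('helloworld.proto', options);
// Turn the loaded definition into a gRPC package object.
const packageObject = grpcLibrary.loadPackageDefinition(packageDefinition);
// Services are then reachable under their package, e.g. packageObject.helloworld.
```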

View File

@ -0,0 +1,70 @@
/// <reference types="node" />
/**
* @license
* Copyright 2018 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import * as Protobuf from 'protobufjs';
export interface Serialize<T> {
(value: T): Buffer;
}
export interface Deserialize<T> {
(bytes: Buffer): T;
}
export interface MethodDefinition<RequestType, ResponseType> {
path: string;
requestStream: boolean;
responseStream: boolean;
requestSerialize: Serialize<RequestType>;
responseSerialize: Serialize<ResponseType>;
requestDeserialize: Deserialize<RequestType>;
responseDeserialize: Deserialize<ResponseType>;
originalName?: string;
}
export interface ServiceDefinition {
[index: string]: MethodDefinition<object, object>;
}
export interface PackageDefinition {
[index: string]: ServiceDefinition;
}
export declare type Options = Protobuf.IParseOptions & Protobuf.IConversionOptions & {
includeDirs?: string[];
};
/**
* Load a .proto file with the specified options.
* @param filename The file path to load. Can be an absolute path or relative to
* an include path.
* @param options.keepCase Preserve field names. The default is to change them
* to camel case.
* @param options.longs The type that should be used to represent `long` values.
* Valid options are `Number` and `String`. Defaults to a `Long` object type
* from a library.
* @param options.enums The type that should be used to represent `enum` values.
* The only valid option is `String`. Defaults to the numeric value.
* @param options.bytes The type that should be used to represent `bytes`
* values. Valid options are `Array` and `String`. The default is to use
* `Buffer`.
* @param options.defaults Set default values on output objects. Defaults to
* `false`.
* @param options.arrays Set empty arrays for missing array values even if
* `defaults` is `false`. Defaults to `false`.
* @param options.objects Set empty objects for missing object values even if
* `defaults` is `false`. Defaults to `false`.
* @param options.oneofs Set virtual oneof properties to the present field's
* name
* @param options.includeDirs Paths to search for imported `.proto` files.
*/
export declare function load(filename: string, options?: Options): Promise<PackageDefinition>;
export declare function loadSync(filename: string, options?: Options): PackageDefinition;

View File

@ -0,0 +1,157 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* @license
* Copyright 2018 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
var Protobuf = require("protobufjs");
var fs = require("fs");
var path = require("path");
var _ = require("lodash");
function joinName(baseName, name) {
if (baseName === '') {
return name;
}
else {
return baseName + '.' + name;
}
}
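// Walks the loaded reflection tree and collects [fully qualified service name, service]
// pairs; any reflection object that has its own `methods` property is treated as a service.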
function getAllServices(obj, parentName) {
var objName = joinName(parentName, obj.name);
if (obj.hasOwnProperty('methods')) {
return [[objName, obj]];
}
else {
return obj.nestedArray.map(function (child) {
if (child.hasOwnProperty('nested')) {
return getAllServices(child, objName);
}
else {
return [];
}
}).reduce(function (accumulator, currentValue) { return accumulator.concat(currentValue); }, []);
}
}
function createDeserializer(cls, options) {
return function deserialize(argBuf) {
return cls.toObject(cls.decode(argBuf), options);
};
}
function createSerializer(cls) {
return function serialize(arg) {
var message = cls.fromObject(arg);
return cls.encode(message).finish();
};
}
function createMethodDefinition(method, serviceName, options) {
return {
path: '/' + serviceName + '/' + method.name,
requestStream: !!method.requestStream,
responseStream: !!method.responseStream,
requestSerialize: createSerializer(method.resolvedRequestType),
requestDeserialize: createDeserializer(method.resolvedRequestType, options),
responseSerialize: createSerializer(method.resolvedResponseType),
responseDeserialize: createDeserializer(method.resolvedResponseType, options),
// TODO(murgatroid99): Find a better way to handle this
originalName: _.camelCase(method.name)
};
}
function createServiceDefinition(service, name, options) {
var def = {};
for (var _i = 0, _a = service.methodsArray; _i < _a.length; _i++) {
var method = _a[_i];
def[method.name] = createMethodDefinition(method, name, options);
}
return def;
}
function createPackageDefinition(root, options) {
var def = {};
for (var _i = 0, _a = getAllServices(root, ''); _i < _a.length; _i++) {
var _b = _a[_i], name = _b[0], service = _b[1];
def[name] = createServiceDefinition(service, name, options);
}
return def;
}
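// Overrides the root's resolvePath hook: each import is resolved by searching the
// user-supplied include directories for a readable file; the first match is returned,
// or null if no include directory contains the file.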
function addIncludePathResolver(root, includePaths) {
root.resolvePath = function (origin, target) {
for (var _i = 0, includePaths_1 = includePaths; _i < includePaths_1.length; _i++) {
var directory = includePaths_1[_i];
var fullPath = path.join(directory, target);
try {
fs.accessSync(fullPath, fs.constants.R_OK);
return fullPath;
}
catch (err) {
continue;
}
}
return null;
};
}
/**
* Load a .proto file with the specified options.
* @param filename The file path to load. Can be an absolute path or relative to
* an include path.
* @param options.keepCase Preserve field names. The default is to change them
* to camel case.
* @param options.longs The type that should be used to represent `long` values.
* Valid options are `Number` and `String`. Defaults to a `Long` object type
* from a library.
* @param options.enums The type that should be used to represent `enum` values.
* The only valid option is `String`. Defaults to the numeric value.
* @param options.bytes The type that should be used to represent `bytes`
* values. Valid options are `Array` and `String`. The default is to use
* `Buffer`.
* @param options.defaults Set default values on output objects. Defaults to
* `false`.
* @param options.arrays Set empty arrays for missing array values even if
* `defaults` is `false`. Defaults to `false`.
* @param options.objects Set empty objects for missing object values even if
* `defaults` is `false`. Defaults to `false`.
* @param options.oneofs Set virtual oneof properties to the present field's
 * name.
* @param options.includeDirs Paths to search for imported `.proto` files.
*/
function load(filename, options) {
var root = new Protobuf.Root();
options = options || {};
if (!!options.includeDirs) {
if (!(options.includeDirs instanceof Array)) {
return Promise.reject(new Error('The includeDirs option must be an array'));
}
addIncludePathResolver(root, options.includeDirs);
}
return root.load(filename, options).then(function (loadedRoot) {
loadedRoot.resolveAll();
return createPackageDefinition(root, options);
});
}
exports.load = load;
function loadSync(filename, options) {
var root = new Protobuf.Root();
options = options || {};
if (!!options.includeDirs) {
if (!(options.includeDirs instanceof Array)) {
            throw new Error('The includeDirs option must be an array');
}
addIncludePathResolver(root, options.includeDirs);
}
var loadedRoot = root.loadSync(filename, options);
loadedRoot.resolveAll();
return createPackageDefinition(root, options);
}
exports.loadSync = loadSync;
//# sourceMappingURL=index.js.map

View File

@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../protobufjs/bin/pbjs" "$@"
ret=$?
else
node "$basedir/../protobufjs/bin/pbjs" "$@"
ret=$?
fi
exit $ret

View File

@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\protobufjs\bin\pbjs" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\protobufjs\bin\pbjs" %*
)

View File

@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../protobufjs/bin/pbts" "$@"
ret=$?
else
node "$basedir/../protobufjs/bin/pbts" "$@"
ret=$?
fi
exit $ret

View File

@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\protobufjs\bin\pbts" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\protobufjs\bin\pbts" %*
)

View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) Microsoft Corporation. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,16 @@
# Installation
> `npm install --save @types/node`
# Summary
This package contains type definitions for Node.js (http://nodejs.org/).
# Details
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node/v9
Additional Details
* Last updated: Tue, 18 Dec 2018 21:38:45 GMT
* Dependencies: none
* Global values: Buffer, NodeJS, SlowBuffer, Symbol, __dirname, __filename, clearImmediate, clearInterval, clearTimeout, console, exports, global, module, process, require, setImmediate, setInterval, setTimeout
# Credits
These definitions were written by Microsoft TypeScript <https://github.com/Microsoft>, DefinitelyTyped <https://github.com/DefinitelyTyped>, Parambir Singh <https://github.com/parambirs>, Christian Vaagland Tellnes <https://github.com/tellnes>, Wilco Bakker <https://github.com/WilcoBakker>, Nicolas Voigt <https://github.com/octo-sniffle>, Chigozirim C. <https://github.com/smac89>, Flarna <https://github.com/Flarna>, Mariusz Wiktorczyk <https://github.com/mwiktorczyk>, wwwy3y3 <https://github.com/wwwy3y3>, Deividas Bakanas <https://github.com/DeividasBakanas>, Kelvin Jin <https://github.com/kjin>, Alvis HT Tang <https://github.com/alvis>, Sebastian Silbermann <https://github.com/eps1lon>, Hannes Magnusson <https://github.com/Hannes-Magnusson-CK>, Alberto Schiabel <https://github.com/jkomyno>, Klaus Meinhardt <https://github.com/ajafff>, Huw <https://github.com/hoo29>, Nicolas Even <https://github.com/n-e>, Bruno Scheufler <https://github.com/brunoscheufler>, Mohsen Azimi <https://github.com/mohsen1>, Hoàng Văn Khải <https://github.com/KSXGitHub>, Alexander T. <https://github.com/a-tarasyuk>, Lishude <https://github.com/islishude>, Andrew Makarov <https://github.com/r3nya>, Eugene Y. Q. Shen <https://github.com/eyqs>.

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -0,0 +1,152 @@
{
"_from": "@types/node@^9.4.6",
"_id": "@types/node@9.6.41",
"_inBundle": false,
"_integrity": "sha512-sPZWEbFMz6qAy9SLY7jh5cgepmsiwqUUHjvEm8lpU6kug2hmmcyuTnwhoGw/GWpI5Npue4EqvsiQQI0eWjW/ZA==",
"_location": "/@grpc/proto-loader/@types/node",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "@types/node@^9.4.6",
"name": "@types/node",
"escapedName": "@types%2fnode",
"scope": "@types",
"rawSpec": "^9.4.6",
"saveSpec": null,
"fetchSpec": "^9.4.6"
},
"_requiredBy": [
"/@grpc/proto-loader"
],
"_resolved": "https://registry.npmjs.org/@types/node/-/node-9.6.41.tgz",
"_shasum": "e57c3152eb2e7ec748c733cebd0c095b437c5d37",
"_spec": "@types/node@^9.4.6",
"_where": "D:\\Desktop\\Git\\Firebase\\SmartShopperFirebase\\node_modules\\@grpc\\proto-loader",
"bugs": {
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped/issues"
},
"bundleDependencies": false,
"contributors": [
{
"name": "Microsoft TypeScript",
"url": "https://github.com/Microsoft"
},
{
"name": "DefinitelyTyped",
"url": "https://github.com/DefinitelyTyped"
},
{
"name": "Parambir Singh",
"url": "https://github.com/parambirs"
},
{
"name": "Christian Vaagland Tellnes",
"url": "https://github.com/tellnes"
},
{
"name": "Wilco Bakker",
"url": "https://github.com/WilcoBakker"
},
{
"name": "Nicolas Voigt",
"url": "https://github.com/octo-sniffle"
},
{
"name": "Chigozirim C.",
"url": "https://github.com/smac89"
},
{
"name": "Flarna",
"url": "https://github.com/Flarna"
},
{
"name": "Mariusz Wiktorczyk",
"url": "https://github.com/mwiktorczyk"
},
{
"name": "wwwy3y3",
"url": "https://github.com/wwwy3y3"
},
{
"name": "Deividas Bakanas",
"url": "https://github.com/DeividasBakanas"
},
{
"name": "Kelvin Jin",
"url": "https://github.com/kjin"
},
{
"name": "Alvis HT Tang",
"url": "https://github.com/alvis"
},
{
"name": "Sebastian Silbermann",
"url": "https://github.com/eps1lon"
},
{
"name": "Hannes Magnusson",
"url": "https://github.com/Hannes-Magnusson-CK"
},
{
"name": "Alberto Schiabel",
"url": "https://github.com/jkomyno"
},
{
"name": "Klaus Meinhardt",
"url": "https://github.com/ajafff"
},
{
"name": "Huw",
"url": "https://github.com/hoo29"
},
{
"name": "Nicolas Even",
"url": "https://github.com/n-e"
},
{
"name": "Bruno Scheufler",
"url": "https://github.com/brunoscheufler"
},
{
"name": "Mohsen Azimi",
"url": "https://github.com/mohsen1"
},
{
"name": "Hoàng Văn Khải",
"url": "https://github.com/KSXGitHub"
},
{
"name": "Alexander T.",
"url": "https://github.com/a-tarasyuk"
},
{
"name": "Lishude",
"url": "https://github.com/islishude"
},
{
"name": "Andrew Makarov",
"url": "https://github.com/r3nya"
},
{
"name": "Eugene Y. Q. Shen",
"url": "https://github.com/eyqs"
}
],
"dependencies": {},
"deprecated": false,
"description": "TypeScript definitions for Node.js",
"homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped#readme",
"license": "MIT",
"main": "",
"name": "@types/node",
"repository": {
"type": "git",
"url": "git+https://github.com/DefinitelyTyped/DefinitelyTyped.git"
},
"scripts": {},
"typeScriptVersion": "2.0",
"types": "index",
"typesPublisherContentHash": "159c257e6a57c0dd5acf840c15ba44d9373d0ee24e030485ba68ec05a145a907",
"version": "9.6.41"
}

View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,246 @@
long.js
=======
A Long class for representing a 64 bit two's-complement integer value derived from the [Closure Library](https://github.com/google/closure-library)
for stand-alone use and extended with unsigned support.
[![Build Status](https://travis-ci.org/dcodeIO/long.js.svg)](https://travis-ci.org/dcodeIO/long.js)
Background
----------
As of [ECMA-262 5th Edition](http://ecma262-5.com/ELS5_HTML.htm#Section_8.5), "all the positive and negative integers
whose magnitude is no greater than 2<sup>53</sup> are representable in the Number type", which is "representing the
double-precision 64-bit format IEEE 754 values as specified in the IEEE Standard for Binary Floating-Point Arithmetic".
The [maximum safe integer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)
in JavaScript is 2<sup>53</sup>-1.
Example: 2<sup>64</sup>-1 is 1844674407370955**1615** but in JavaScript it evaluates to 1844674407370955**2000**.
Furthermore, bitwise operators in JavaScript "deal only with integers in the range −2<sup>31</sup> through
2<sup>31</sup>−1, inclusive, or in the range 0 through 2<sup>32</sup>−1, inclusive. These operators accept any value of
the Number type but first convert each such value to one of 2<sup>32</sup> integer values."
In some use cases, however, it is required to be able to reliably work with and perform bitwise operations on the full
64 bits. This is where long.js comes into play.
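For instance, a minimal sketch of the precision loss described above (assuming `long` is installed):
```javascript
var Long = require("long");
console.log(18446744073709551615);               // prints 18446744073709552000 (precision lost)
console.log(Long.MAX_UNSIGNED_VALUE.toString()); // prints "18446744073709551615" (exact)
```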
Usage
-----
The class is compatible with CommonJS and AMD loaders and is exposed globally as `Long` if neither is available.
```javascript
var Long = require("long");
var longVal = new Long(0xFFFFFFFF, 0x7FFFFFFF);
console.log(longVal.toString());
...
```
API
---
### Constructor
* new **Long**(low: `number`, high: `number`, unsigned?: `boolean`)<br />
Constructs a 64 bit two's-complement integer, given its low and high 32 bit values as *signed* integers. See the from* functions below for more convenient ways of constructing Longs.
### Fields
* Long#**low**: `number`<br />
The low 32 bits as a signed value.
* Long#**high**: `number`<br />
The high 32 bits as a signed value.
* Long#**unsigned**: `boolean`<br />
Whether unsigned or not.
### Constants
* Long.**ZERO**: `Long`<br />
Signed zero.
* Long.**ONE**: `Long`<br />
Signed one.
* Long.**NEG_ONE**: `Long`<br />
Signed negative one.
* Long.**UZERO**: `Long`<br />
Unsigned zero.
* Long.**UONE**: `Long`<br />
Unsigned one.
* Long.**MAX_VALUE**: `Long`<br />
Maximum signed value.
* Long.**MIN_VALUE**: `Long`<br />
Minimum signed value.
* Long.**MAX_UNSIGNED_VALUE**: `Long`<br />
Maximum unsigned value.
### Utility
* Long.**isLong**(obj: `*`): `boolean`<br />
Tests if the specified object is a Long.
* Long.**fromBits**(lowBits: `number`, highBits: `number`, unsigned?: `boolean`): `Long`<br />
Returns a Long representing the 64 bit integer that comes by concatenating the given low and high bits. Each is assumed to use 32 bits.
* Long.**fromBytes**(bytes: `number[]`, unsigned?: `boolean`, le?: `boolean`): `Long`<br />
Creates a Long from its byte representation.
* Long.**fromBytesLE**(bytes: `number[]`, unsigned?: `boolean`): `Long`<br />
Creates a Long from its little endian byte representation.
* Long.**fromBytesBE**(bytes: `number[]`, unsigned?: `boolean`): `Long`<br />
Creates a Long from its big endian byte representation.
* Long.**fromInt**(value: `number`, unsigned?: `boolean`): `Long`<br />
Returns a Long representing the given 32 bit integer value.
* Long.**fromNumber**(value: `number`, unsigned?: `boolean`): `Long`<br />
Returns a Long representing the given value, provided that it is a finite number. Otherwise, zero is returned.
* Long.**fromString**(str: `string`, unsigned?: `boolean`, radix?: `number`)<br />
Long.**fromString**(str: `string`, radix: `number`)<br />
Returns a Long representation of the given string, written using the specified radix.
* Long.**fromValue**(val: `*`, unsigned?: `boolean`): `Long`<br />
Converts the specified value to a Long using the appropriate from* function for its type.
### Methods
* Long#**add**(addend: `Long | number | string`): `Long`<br />
Returns the sum of this and the specified Long.
* Long#**and**(other: `Long | number | string`): `Long`<br />
Returns the bitwise AND of this Long and the specified.
* Long#**compare**/**comp**(other: `Long | number | string`): `number`<br />
  Compares this Long's value with the specified's. Returns `0` if they are the same, `1` if this is greater and `-1` if the given one is greater.
* Long#**divide**/**div**(divisor: `Long | number | string`): `Long`<br />
Returns this Long divided by the specified.
* Long#**equals**/**eq**(other: `Long | number | string`): `boolean`<br />
Tests if this Long's value equals the specified's.
* Long#**getHighBits**(): `number`<br />
Gets the high 32 bits as a signed integer.
* Long#**getHighBitsUnsigned**(): `number`<br />
Gets the high 32 bits as an unsigned integer.
* Long#**getLowBits**(): `number`<br />
Gets the low 32 bits as a signed integer.
* Long#**getLowBitsUnsigned**(): `number`<br />
Gets the low 32 bits as an unsigned integer.
* Long#**getNumBitsAbs**(): `number`<br />
Gets the number of bits needed to represent the absolute value of this Long.
* Long#**greaterThan**/**gt**(other: `Long | number | string`): `boolean`<br />
Tests if this Long's value is greater than the specified's.
* Long#**greaterThanOrEqual**/**gte**/**ge**(other: `Long | number | string`): `boolean`<br />
  Tests if this Long's value is greater than or equal to the specified's.
* Long#**isEven**(): `boolean`<br />
Tests if this Long's value is even.
* Long#**isNegative**(): `boolean`<br />
Tests if this Long's value is negative.
* Long#**isOdd**(): `boolean`<br />
Tests if this Long's value is odd.
* Long#**isPositive**(): `boolean`<br />
Tests if this Long's value is positive.
* Long#**isZero**/**eqz**(): `boolean`<br />
Tests if this Long's value equals zero.
* Long#**lessThan**/**lt**(other: `Long | number | string`): `boolean`<br />
Tests if this Long's value is less than the specified's.
* Long#**lessThanOrEqual**/**lte**/**le**(other: `Long | number | string`): `boolean`<br />
  Tests if this Long's value is less than or equal to the specified's.
* Long#**modulo**/**mod**/**rem**(divisor: `Long | number | string`): `Long`<br />
Returns this Long modulo the specified.
* Long#**multiply**/**mul**(multiplier: `Long | number | string`): `Long`<br />
Returns the product of this and the specified Long.
* Long#**negate**/**neg**(): `Long`<br />
Negates this Long's value.
* Long#**not**(): `Long`<br />
Returns the bitwise NOT of this Long.
* Long#**notEquals**/**neq**/**ne**(other: `Long | number | string`): `boolean`<br />
Tests if this Long's value differs from the specified's.
* Long#**or**(other: `Long | number | string`): `Long`<br />
Returns the bitwise OR of this Long and the specified.
* Long#**shiftLeft**/**shl**(numBits: `Long | number | string`): `Long`<br />
Returns this Long with bits shifted to the left by the given amount.
* Long#**shiftRight**/**shr**(numBits: `Long | number | string`): `Long`<br />
Returns this Long with bits arithmetically shifted to the right by the given amount.
* Long#**shiftRightUnsigned**/**shru**/**shr_u**(numBits: `Long | number | string`): `Long`<br />
Returns this Long with bits logically shifted to the right by the given amount.
* Long#**subtract**/**sub**(subtrahend: `Long | number | string`): `Long`<br />
Returns the difference of this and the specified Long.
* Long#**toBytes**(le?: `boolean`): `number[]`<br />
Converts this Long to its byte representation.
* Long#**toBytesLE**(): `number[]`<br />
Converts this Long to its little endian byte representation.
* Long#**toBytesBE**(): `number[]`<br />
Converts this Long to its big endian byte representation.
* Long#**toInt**(): `number`<br />
Converts the Long to a 32 bit integer, assuming it is a 32 bit integer.
* Long#**toNumber**(): `number`<br />
  Converts the Long to the nearest floating-point representation of this value (double, 53 bit mantissa).
* Long#**toSigned**(): `Long`<br />
Converts this Long to signed.
* Long#**toString**(radix?: `number`): `string`<br />
Converts the Long to a string written in the specified radix.
* Long#**toUnsigned**(): `Long`<br />
Converts this Long to unsigned.
* Long#**xor**(other: `Long | number | string`): `Long`<br />
Returns the bitwise XOR of this Long and the given one.
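A short sketch tying a few of the methods above together (the values are chosen to sit just past Number's exact integer range):
```javascript
var Long = require("long");
var a = Long.fromString("9007199254740993"); // 2^53 + 1, not exactly representable as a Number
var b = a.add(Long.ONE).multiply(2);         // arithmetic stays exact inside Long
console.log(b.toString());                   // "18014398509481988"
```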
Building
--------
To build an UMD bundle to `dist/long.js`, run:
```
$> npm install
$> npm run build
```
Running the [tests](./tests):
```
$> npm test
```

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
module.exports = require("./src/long");

View File

@ -0,0 +1,63 @@
{
"_from": "long@^4.0.0",
"_id": "long@4.0.0",
"_inBundle": false,
"_integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==",
"_location": "/@grpc/proto-loader/long",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "long@^4.0.0",
"name": "long",
"escapedName": "long",
"rawSpec": "^4.0.0",
"saveSpec": null,
"fetchSpec": "^4.0.0"
},
"_requiredBy": [
"/@grpc/proto-loader/protobufjs"
],
"_resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
"_shasum": "9a7b71cfb7d361a194ea555241c92f7468d5bf28",
"_spec": "long@^4.0.0",
"_where": "D:\\Desktop\\Git\\Firebase\\SmartShopperFirebase\\node_modules\\@grpc\\proto-loader\\node_modules\\protobufjs",
"author": {
"name": "Daniel Wirtz",
"email": "dcode@dcode.io"
},
"bugs": {
"url": "https://github.com/dcodeIO/long.js/issues"
},
"bundleDependencies": false,
"dependencies": {},
"deprecated": false,
"description": "A Long class for representing a 64-bit two's-complement integer value.",
"devDependencies": {
"webpack": "^3.10.0"
},
"files": [
"index.js",
"LICENSE",
"README.md",
"src/long.js",
"dist/long.js",
"dist/long.js.map"
],
"homepage": "https://github.com/dcodeIO/long.js#readme",
"keywords": [
"math"
],
"license": "Apache-2.0",
"main": "src/long.js",
"name": "long",
"repository": {
"type": "git",
"url": "git+https://github.com/dcodeIO/long.js.git"
},
"scripts": {
"build": "webpack",
"test": "node tests"
},
"version": "4.0.0"
}

File diff suppressed because it is too large

View File

@ -0,0 +1,935 @@
# [6.8.8](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.8)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3001425b0d896d14188307cd0cc84ce195ad9e04) Persist recent index.d.ts changes in JSDoc<br />
# [6.8.7](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.7)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e8449c4bf1269a2cc423708db6f0b47a383d33f0) Fix package browser field descriptor ([#1046](https://github.com/dcodeIO/protobuf.js/issues/1046))<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/996b3fa0c598ecc73302bfc39208c44830f07b1a) Fix static codegen issues with uglifyjs3<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a06317139b92fdd8c6b3b188fb7b9704dc8ccbf1) Fix lint issues / pbts on windows<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a927a6646e8fdddebcb3e13bc8b28b041b3ee40a) Fix empty 'bytes' field decoding, now using Buffer where applicable ([#1020](https://github.com/dcodeIO/protobuf.js/issues/1020))<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f13a81fb41fbef2ce9dcee13f23b7276c83fbcfd) Fix circular dependency of Namespace and Enum ([#994](https://github.com/dcodeIO/protobuf.js/issues/994))<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c05c58fad61c16e5ce20ca19758e4782cdd5d2e3) Ignore optional commas in aggregate options ([#999](https://github.com/dcodeIO/protobuf.js/issues/999))<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/36fc964b8db1e4372c76b1baf9f03857cd875b07) Make Message<T> have a default type param ([#1086](https://github.com/dcodeIO/protobuf.js/issues/1086))<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/996b3fa0c598ecc73302bfc39208c44830f07b1a) Explicitly define service method names when generating static code, see [#857](https://github.com/dcodeIO/protobuf.js/issues/857)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/07c5d59e1da8c5533a39007ba332928206281408) Also handle services in ext/descriptor ([#1001](https://github.com/dcodeIO/protobuf.js/issues/1001))<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2c5ef95818a310243f88ffba0331cd47ee603c0a) Extend list of ignored ESLint rules for pbjs, fixes [#1085](https://github.com/dcodeIO/protobuf.js/issues/1085)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8576b49ad3e55b8beae2a8f044c51040484eef12) Fix declared return type of pbjs/pbts callback ([#1025](https://github.com/dcodeIO/protobuf.js/issues/1025))<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9fceaa69667895e609a3ed78eb2efa7a0ecfb890) Added an option to pbts to allow custom imports ([#1038](https://github.com/dcodeIO/protobuf.js/issues/1038))<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/65d113b0079fa2570837f3cf95268ce24714a248) Get node executable path from process.execPath ([#1018](https://github.com/dcodeIO/protobuf.js/issues/1018))<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b611875cfbc1f98d8973a2e86f1506de84f00049) Slim down CI testing and remove some not ultimately necessary dependencies with audit issues<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/812b38ddabb35e154f9ff94f32ad8ce2a70310f1) Move global handling to util, see [#995](https://github.com/dcodeIO/protobuf.js/issues/995)<br />
# [6.8.6](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.6)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2ee1028d631a328e152d7e09f2a0e0c5c83dc2aa) Fix typeRefRe being vulnerable to ReDoS<br />
# [6.8.5](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.6)
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/462132f222d8febb8211d839635aad5b82dc6315) Preserve comments when serializing/deserializing with toJSON and fromJSON. ([#983](https://github.com/dcodeIO/protobuf.js/issues/983))<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d29c0caa715a14214fc755b3cf10ac119cdaf199) Add more details to some frequent error messages ([#962](https://github.com/dcodeIO/protobuf.js/issues/962))<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8400f87ad8ed2b47e659bc8bb6c3cf2467802425) Add IParseOptions#alternateCommentMode ([#968](https://github.com/dcodeIO/protobuf.js/issues/968))<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d6e3b9e218896ec1910e02448b5ee87e4d96ede6) Added field_mask to built-in common wrappers ([#982](https://github.com/dcodeIO/protobuf.js/issues/982))<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/635fef013fbb3523536d92c690ffd7d84829db35) Remove code climate config in order to use 'in-app' config instead<br />
# [6.8.4](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.4)
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/69440c023e6962c644715a0c95363ddf19db648f) Update jsdoc dependency (pinned vulnerable marked)<br />
# [6.8.3](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.3)
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cc991a058b0636f3454166c76de7b664cf23a8f4) Use correct safeProp in json-module target, see [#956](https://github.com/dcodeIO/protobuf.js/issues/956)<br />
# [6.8.2](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.2)
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6fc6481d790648e9e2169a961ad31a732398c911) Include dist files in npm package, see [#955](https://github.com/dcodeIO/protobuf.js/issues/955)<br />
# [6.8.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.1)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/db2dd49f6aab6ecd606eee334b95cc0969e483c2) Prevent invalid JSDoc names when generating service methods, see [#870](https://github.com/dcodeIO/protobuf.js/issues/870)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/62297998d681357ada70fb370b99bac5573e5054) Prevent parse errors when generating service method names, see [#870](https://github.com/dcodeIO/protobuf.js/issues/870)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/478f332e0fc1d0c318a70b1514b1d59c8c200c37) Support parsing nested option-values with or without ':' ([#951](https://github.com/dcodeIO/protobuf.js/issues/951), fixes [#946](https://github.com/dcodeIO/protobuf.js/issues/946))<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/83477ca8e0e1f814ac79a642ea656f047563613a) Add support for reserved keyword in enums ([#950](https://github.com/dcodeIO/protobuf.js/issues/950), fixes [#949](https://github.com/dcodeIO/protobuf.js/issues/949))<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c482a5b76fd57769eae4308793e3ff8725264664) Unified safe property escapes and added a test for [#834](https://github.com/dcodeIO/protobuf.js/issues/834)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1724581c36ecc4fc166ea14a9dd57af5e093a467) Fix codegen if type name starts with "Object"<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/adecd544c5fcbeba28d502645f895024e3552970) Fixed dependency for json-module to use "light".<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2a8dd74fca70d4e6fb41328a7cee81d1d50ad7ad) Basic support for URL prefixes in google.protobuf.Any types.<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/be78a3d9bc8d9618950c77f9e261b422670042ce) fixed 'error is not defined linter warning when using static/static-module and es6<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c712447b309ae81134c7afd60f8dfa5ecd3be230) Fixed wrong type_url for any type (no leading '.' allowed).<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/145bda25ee1de2c0678ce7b8a093669ec2526b1d) Fixed fromObject() for google.protobuf.Any types.<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7dec43d9d847481ad93fca498fd970b3a4a14b11) Handle case where 'extendee' is undefined in ext/descriptor<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/20a26271423319085d321878edc5166a5449e68a) Sanitize CR-only line endings (coming from jsdoc?)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/19d2af12b5db5a0f668f50b0cae3ee0f8a7affc2) Make sure enum typings become generated ([#884](https://github.com/dcodeIO/protobuf.js/issues/884) didn't solve this)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a2c72c08b0265b112d367fa3d33407ff0de955b9) Remove exclude and include patterns from jsdoc config<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9afb8a2ff27c1e0a999d7331f3f65f568f5cced5) Skip defaults when generating proto3<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/952c7d1b478cc7c6de82475a17a1387992e8651f) Wait for both the 'end' and 'close' event to happen before finishing in pbts, see [#863](https://github.com/dcodeIO/protobuf.js/issues/863)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ed7e2e71f5cde27c4128f4f2e3f4782cc51fbec7) Accept null for optional fields in generated static code<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/27cc66a539251216ef10aea04652d58113949df9) Annotate TS classes with @implements<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/05e7e0636727008c72549459b8594fa0442d346f) Annotate virtual oneofs as string literal unions<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/685adb0e7ef0f50e4b93a105013547884957cc98) Also check for reserved ids and names in enums<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/843d0d5b927968025ca11babff28495dd3bb2863) Also support 'reserved' in enum descriptors<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a8376b57fb0a858adff9dc8a1d1b5372eff9d85c) Include just relevant files in npm package, fixes [#781](https://github.com/dcodeIO/protobuf.js/issues/781)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bda1bc6917c681516f6be8be8f0e84ba1262c4ce) Fix travis build<br />
# [6.8.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.0)
## Breaking
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ff858003f525db542cbb270777b6fab3a230c9bb) Replaced Buffer and Long types with interfaces and removed stubs<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22f907c49adbbdf09b72bde5299271dbe0ee9cbe) Removed Message#toObject in favor of having just the static version (unnecessary static code otherwise)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c97b61811248df002f1fb93557b982bc0aa27309) Everything uses interfaces now instead of typedefs (SomethingProperties is now ISomething)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b9f179064f3ddf683f13e0d4e17840301be64010) ReflectionObject#toJSON properly omits explicit undefined values<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a6f98b5e74f9e9142f9be3ba0683caeaff916c4) Initial implementation of TypeScript decorators<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a6f98b5e74f9e9142f9be3ba0683caeaff916c4) Refactored protobuf.Class away<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a6f98b5e74f9e9142f9be3ba0683caeaff916c4) TypeScript definitions now have (a lot of) generics<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a6f98b5e74f9e9142f9be3ba0683caeaff916c4) Removed deprecated features<br />
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c306d19d806eb697913ffa2b8613f650127a4c50) Added 'undefined' besides 'null' as a valid value of an optional field, fixes [#826](https://github.com/dcodeIO/protobuf.js/issues/826)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c5518c3bac0da9c2045e6f1baf0dee915afb4221) Fixed an issue with codegen typings, see [#819](https://github.com/dcodeIO/protobuf.js/issues/819)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/66d149e92ff1baddfdfd4b6a88ca9bcea6fc6195) Ported utf8 chunking mechanism to base64 as well, fixes [#800](https://github.com/dcodeIO/protobuf.js/issues/800)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e1f9d9856c98a0f0eb1aa8bdf4ac0df467bee8b9) Also be more verbose when defining properties for ES6, fixes [#820](https://github.com/dcodeIO/protobuf.js/issues/820)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cf36097305ab02047be5014eabeccc3154e18bde) Generate more verbose JSDoc comments for ES6 support, fixes [#820](https://github.com/dcodeIO/protobuf.js/issues/820)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f2959795330966f13cb65bbb6034c88a01fc0bcc) Emit a maximum of one error var when generating verifiers, fixes [#786](https://github.com/dcodeIO/protobuf.js/issues/786)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3b848a10b39c1897ca1ea3b5149ef72ae43fcd11) Fixed missing semicolon after 'extensions' and 'reserved' when generating proto files, fixes [#810](https://github.com/dcodeIO/protobuf.js/issues/810)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/eb1b40497e14a09facbc370676f486bed1376f52) Call npm with '--no-bin-links' when installing CLI deps, fixes [#823](https://github.com/dcodeIO/protobuf.js/issues/823)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/429de19d851477f1df2804d5bc0be30228cd0924) Fix Reader argument conversion in static module<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/03194c203d6ff61ae825e66f8a29ca204fa503b9) Use JSDoc, they said, it documents code, they said. Fixes [#770](https://github.com/dcodeIO/protobuf.js/issues/770)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ec6a133ff541c638517e00f47b772990207c8640) parser should not confuse previous trailing line comments with comments for the next declaration, see [#762](https://github.com/dcodeIO/protobuf.js/issues/762)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0589ace4dc9e5c565ff996cf6e6bf94e63f43c4e) Types should not clear constructor with cache (fixes decorators)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/056ecc3834a3b323aaaa676957efcbe3f52365a0) Namespace#lookup should also check in nested namespaces (wtf)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ed34b093839652db2ff7b84db87857fc57d96038) Reader#bytes should also support plain arrays<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/514afcfa890aa598e93254576c4fd6062e0eff3b) Fix markdown for pipe in code in table<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/17c2797592bc4effd9aaae3ba9777c9550bb75ac) Upgrade to codegen 2<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/57d7d35ddbb9e3a28c396b4ef1ae3b150eeb8035) ext/descriptor enables interoperability between reflection and descriptor.proto (experimental), see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3939667ef1f37b025bd7f9476015890496d50e00) Added 'json' conversion option for proto3 JSON mapping compatibility of NaN and Infinity + additional documentation of util.toJSONOptions, see [#351](https://github.com/dcodeIO/protobuf.js/issues/351)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4eac28c7d3acefb0af7b82c62cf8d19bf3e7d37b) Use protobuf/minimal when pbjs target is static-module<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3a959453fe63706c38ebbacda208e1f25f27dc99) Added closure wrapper<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/13bf9c2635e6a1a2711670fc8e28ae9d7b8d1c8f) Various improvements to statically generated JSDoc, also fixes [#772](https://github.com/dcodeIO/protobuf.js/issues/772)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ffdc93c7cf7c8a716316b00864ea7c510e05b0c8) Check incompatible properties for namespaces only in tsd-jsdoc<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fb3f9c70436d4f81bcd0bf62b71af4d253390e4f) Additional tsd-jsdoc handling of properties inside of namespaces and TS specific API exposure<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2dcae25c99e2ed8afd01e27d21b106633b8c31b9) Several improvements to tsd-jsdoc emitted comments<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ff858003f525db542cbb270777b6fab3a230c9bb) Further TypeScript definition improvements<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22f907c49adbbdf09b72bde5299271dbe0ee9cbe) Relieved tsd files from unnecessary comments<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22f907c49adbbdf09b72bde5299271dbe0ee9cbe) Generate TS namespaces for vars and functions with properties<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b355115e619c6595ac9d91897cfe628ef0e46054) Prefer @tstype over @type when generating typedefs (tsd-jsdoc)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/23f4b990375efcac2c144592cf4ca558722dcf2d) Replaced nullable types with explicit type|null for better tooling compatibility, also fixes [#766](https://github.com/dcodeIO/protobuf.js/issues/766) and fixes 767<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6493f52013c92a34b8305a25068ec7b8c4c29d54) Added more info to ext/descriptor README, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ef92da3768d8746dbfe72e77232f78b879fc811d) Additional notes on ext/descriptor<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b646cf7499791a41b75eef2de1a80fb558d4159e) Updated CHANGELOG so everyone knows what's going on (and soon, breaking)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/35a663757efe188bea552aef017837bc6c6a481a) Additional docs on TS/decorators usage<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9726be0888a9461721447677e9dece16a682b9f6) Updated dist files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9726be0888a9461721447677e9dece16a682b9f6) Added package-lock.json<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/114f7ea9fa3813003afc3ebb453b2dd2262808e1) Minor formatting<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8a6e464954b472fdbb4d46d9270fe3b4b3c7272d) Generate files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/42f8a97630bcb30d197b0f1d6cbdd96879d27f96) Remove the no-constructor arg<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6446247cd7edbb77f03dc42c557f568811286a39) Remove the ctor option.<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2059ee0f6f951575d5c5d2dc5eb06b6fa34e27aa) Add support to generate types for JSON object.<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7445da0f8cb2e450eff17723f25f366daaf3bbbb) aspromise performance pass<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3f8b74ba6726567eaf68c4d447c120f75eac042f) codegen 2 performance pass, [#653](https://github.com/dcodeIO/protobuf.js/issues/653) might benefit<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d44a7eec2fd393e5cb24196fb5818c8c278a0f34) Fixed minimal library including reflection functionality<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a18e6db9f02696c66032bce7ef4c0eb0568a8048) Minor compression ratio tuning<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b49a4edd38395e209bedac2e0bfb7b9d5c4e980b) Fixed failing test case + coverage<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8f7111cacd236501b7e26791b9747b1974a2d9eb) Improved fromObject wrapper for google.protobuf.Any.<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0e471a2516bde3cd3c27b2691afa0dcfbb01f042) Fixed failing tokenize test case<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5867f076d8510fa97e3bd6642bbe61960f7fd196) Removed debug build, made it an extension<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22f907c49adbbdf09b72bde5299271dbe0ee9cbe) Regenerated dist files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5bc3541d2da19e2857dc884f743d37c27e8e21f2) Even more documentation and typings for ext/descriptor<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/773e6347b57e4a5236b1ef0bb8d361e4b233caf7) ext/descriptor docs<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/773e6347b57e4a5236b1ef0bb8d361e4b233caf7) Decorators coverage<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9a23ded94729ceeea2f87cb7e8460eaaaf1c8269) ext/descriptor support for various standard options, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2d8ce6ec0abd261f9b261a44a0a258fdf57ecec3) ext/descriptor passes descriptor.proto test with no differences, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3a20968c6d676312e4f2a510f7e079e0e0819daf) Properly remove unnecessary (packed) options from JSON descriptors<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2a30df8bd5f20d91143a38c2232dafc3a6f3a7bd) Use typedefs in ext/descriptor (like everywhere else), see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1fc911cef01e081c04fb82ead685f49dde1403bb) Fixed obvious issues with ext/descriptor, does not throw anymore when throwing descriptor.proto itself at it, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6c37dbd14f39dad687f2f89f1558a875f7dcc882) Added still missing root traversal to ext/descriptor, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7ab136daa5eb2769b616b6b7522e45a4e33a59f6) Initial map fields support for ext/descriptor, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/708552bb84508364b6e6fdf73906aa69e83854e1) Added infrastructure for TypeScript support of extensions<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/23f26defa793b371c16b5f920fbacb3fb66bdf22) TypeScript generics improvements<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e49bef863c0fb10257ec1001a3c5561755f2ec6b) More ext/descriptor progress, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6b94336c1e6eec0f2eb1bd5dca73a7a8e71a2153) Just export the relevant namespace in ext/descriptor<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fbb99489ed0c095174feff8f53431d30fb6c34a0) Initial descriptor.proto extension for reflection interoperability, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/48e66d975bf7b4e6bdbb68ec24386c98b16c54c5) Moved custom wrappers to their own module instead, which also makes the API easier to use manually, see [#677](https://github.com/dcodeIO/protobuf.js/issues/677)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0c6e639d08fdf9be12677bf678563ea631bafb2c) Added infrastructure for custom wrapping/unwrapping of special types, see [#677](https://github.com/dcodeIO/protobuf.js/issues/677)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0425b584f49841d87a8249fef30c78cc31c1c742) More decorator progress (MapField.d, optional Type.d)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a6f98b5e74f9e9142f9be3ba0683caeaff916c4) tsd-jsdoc now has limited generics support<br />
# [6.7.3](https://github.com/dcodeIO/protobuf.js/releases/tag/6.7.3)
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/57f1da64945f2dc5537c6eaa53e08e8fdd477b67) long, @types/long and @types/node are just dependencies, see [#753](https://github.com/dcodeIO/protobuf.js/issues/753)<br />
# [6.7.2](https://github.com/dcodeIO/protobuf.js/releases/tag/6.7.2)
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a7621be0a56585defc72d863f4e891e476905692) Split up NamespaceDescriptor to make nested plain namespaces a thing, see [#749](https://github.com/dcodeIO/protobuf.js/issues/749)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e980e72ae3d4697ef0426c8a51608d31f516a2c4) More README<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1f76749d0b9a780c7b6cb56be304f7327d74ebdb) Replaced 'runtime message' with 'message instance' for clarity<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e6b6dedb550edbd0e54e212799e42aae2f1a87f1) Rephrased the Usage section around the concept of valid messages<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0d8100ba87be768ebdec834ca2759693e0bf4325) Added toolset diagram to README<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3405ae8d1ea775c96c30d1ef5cde666c9c7341b3) Touched benchmark output metrics once more<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e36b228f4bb8b1cd835bf31f8605b759a7f1f501) Fixed failing browser test<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7b3bdb562ee7d30c1a557d7b7851d55de3091da4) Output more human friendly metrics from benchmark<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/59e447889057c4575f383630942fd308a35c12e6) Stripped down static bench code to what's necessary<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f88dad098282ece65f5d6e224ca38305a8431829) Revamped benchmark, now also covers Google's JS implementation<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/45356be81ba7796faee0d4d8ad324abdd9f301fb) Updated dependencies and dist files<br />
# [6.7.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.7.1)
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3d23eed6f7c79007969672f06c1a9ccd691e2411) Made .verify behave more like .encode, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bed514290c105c3b606f760f2abba80510721c77) With null/undefined eliminated by constructors and .create, document message fields as non-optional where applicable (ideally used with TS & strictNullChecks), see [#743](https://github.com/dcodeIO/protobuf.js/issues/743)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/007b2329842679ddf994df7ec0f9c70e73ee3caf) Renamed --strict-long/message to --force-long/message with backward compatible aliases, see [#741](https://github.com/dcodeIO/protobuf.js/issues/741)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6aae71f75e82ffd899869b0c952daf98991421b8) Keep $Properties with --strict-message but require actual instances within, see [#741](https://github.com/dcodeIO/protobuf.js/issues/741)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c812cef0eff26998f14c9d58d4486464ad7b2bbc) Added --strict-message option to pbjs to strictly reference message instances instead of $Properties, see [#741](https://github.com/dcodeIO/protobuf.js/issues/741)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/412407de9afb7ec3a999c4c9a3a1f388f971fce7) Restructured README<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1c4d9d7f024bfa096ddc24aabbdf39211ed8637a) Added more information on typings usage, see [#744](https://github.com/dcodeIO/protobuf.js/issues/744)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/602065e16862751c515c2f3391ee8b880e8140b1) Clarified typescript example in README, see [#744](https://github.com/dcodeIO/protobuf.js/issues/744)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/79d0ba2cc71a156910a9d937683af164df694f08) Clarified that the service API targets clients consuming a service, see [#742](https://github.com/dcodeIO/protobuf.js/issues/742)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a66f76452ba050088efd1aaebf3c503a55e6287c) Omit copying of undefined or null in constructors and .create, see [#743](https://github.com/dcodeIO/protobuf.js/issues/743)<br />
# [6.7.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.7.0)
## Breaking
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c1bbf10e445c3495b23a354f9cbee951b4b20f0) Namespace#lookupEnum should actually look up the reflected enum and not just its values<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/44a8d3af5da578c2e6bbe0a1b948d469bbe27ca1) Decoder now throws if required fields are missing, see [#695](https://github.com/dcodeIO/protobuf.js/issues/695) / [#696](https://github.com/dcodeIO/protobuf.js/issues/696)<br />
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d1e3122e326480fdd44e96afd76ee72e9744b246) Added functionality to filter for multiple types at once in lookup(), used by lookupTypeOrEnum(), fixes [#740](https://github.com/dcodeIO/protobuf.js/issues/740)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8aa21268aa5e0f568cb39e99a83b99ccb4084381) Ensure that fields have been resolved when looking up js types in static target, see [#731](https://github.com/dcodeIO/protobuf.js/issues/731)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f755d36829b9f1effd7960fab3a86a141aeb9fea) Properly copy fields array before sorting in toObject, fixes [#729](https://github.com/dcodeIO/protobuf.js/issues/729)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a06691f5b87f7e90fed0115b78ce6febc4479206) Actually emit TS compatible enums in static target if not aliases, see [#720](https://github.com/dcodeIO/protobuf.js/issues/720)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b01bb58dec92ebf6950846d9b8d8e3df5442b15d) Hardened tokenize/parse, esp. comment parsing, see [#713](https://github.com/dcodeIO/protobuf.js/issues/713)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bc76ad732fc0689cb0a2aeeb91b06ec5331d7972) Exclude any fields part of some oneof when populating defaults in toObject, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/68cdb5f11fdbb950623be089f98e1356cb7b1ea3) Most of the parser is not case insensitive, see [#705](https://github.com/dcodeIO/protobuf.js/issues/705)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3e930b907a834a7da759478b8d3f52fef1da22d8) Retain options argument in Root#load when used with promises, see [#684](https://github.com/dcodeIO/protobuf.js/issues/684)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3c14ef42b3c8f2fef2d96d65d6e288211f86c9ef) Created a micromodule from (currently still bundled) float support<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7ecae9e9f2e1324ef72bf5073463e01deff50cd6) util.isset(obj, prop) can be used to test if a message property is considered to be set, see [#728](https://github.com/dcodeIO/protobuf.js/issues/728)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c04d4a5ab8f91899bd3e1b17fe4407370ef8abb7) Implemented stubs for long.js / node buffers to be used where either one isn't wanted, see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b9574ad02521a31ebd509cdaa269e7807da78d7c) Simplified reusing / replacing internal constructors<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1f97b7af05b49ef69bd6e9d54906d1b7583f42c4) Constructors/.create always initialize proper mutable objects/arrays, see [#700](https://github.com/dcodeIO/protobuf.js/issues/700)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/adb4bb001a894dd8d00bcfe03457497eb994f6ba) Verifiers return an error if multiple fields part of the same oneof are set, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fe93d436b430d01b563318bff591e0dd408c06a4) Added `oneofs: true` to ConversionOptions, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710) (a usage sketch follows below)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/228c882410d47a26576f839b15f1601e8aa7914d) Optional fields handle null just like undefined regardless of type, see [#709](https://github.com/dcodeIO/protobuf.js/issues/709)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/da6af8138afa5343a47c12a8beedb99889c0dd51) Encoders no longer examine virtual oneof properties but encode whatever is present, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ac26a7aa60359a37dbddaad139c0134b592b3325) pbjs now generates multiple exports when using ES6 syntax, see [#686](https://github.com/dcodeIO/protobuf.js/issues/686)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c1ca65dc6987384af6f9fac2fbd7700fcf5765b2) Sequentially serialize fields ordered by id, as per the spec.<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/26d9fadb21a85ca0b5609156c26453ae875e4933) decode throws specific ProtocolError with a reference to the so far decoded message if required fields are missing + example<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2b5577b238a452ae86aa395fb2ad3a3f45d755dc) Reader.create asserts that `buffer` is a valid buffer, see [#695](https://github.com/dcodeIO/protobuf.js/issues/695)<br />
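For context, a minimal usage sketch (not part of the release notes) of the `oneofs: true` conversion option mentioned above; the file, package and field names are made up for illustration:

```js
const protobuf = require("protobufjs");

// Hypothetical .proto file and type/field names, for illustration only.
protobuf.load("awesome.proto", (err, root) => {
  if (err) throw err;
  const AwesomeMessage = root.lookupType("awesomepackage.AwesomeMessage");
  const message = AwesomeMessage.create({ awesomeField: "hello" });
  // ConversionOptions: `oneofs: true` adds virtual oneof members set to the name
  // of the present field; `defaults: true` also populates default values.
  const object = AwesomeMessage.toObject(message, { oneofs: true, defaults: true });
  console.log(object);
});
```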
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6f74d30f059e33a4678f28e7a50dc4878c54bed2) Exclude JSDoc on typedefs from generated d.ts files because typescript@next, see [#737](https://github.com/dcodeIO/protobuf.js/issues/737)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2ebb1b781812e77de914cd260e7ab69612ffd99e) Prepare static code with estraverse instead of regular expressions, see [#732](https://github.com/dcodeIO/protobuf.js/issues/732)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/29ce6cae0cacc0f1d87ca47e64be6a81325aaa55) Moved tsd-jsdoc to future cli package, see [#716](https://github.com/dcodeIO/protobuf.js/issues/716)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8de21e1a947ddb50a167147dd63ad29d37b6a891) $Properties are just a type that's satisfied, not implemented, by classes, see [#723](https://github.com/dcodeIO/protobuf.js/issues/723)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4bfe0c239b9c337f8fa64ea64f6a71baf5639b84) More progress on decoupling the CLI<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8a60174932d15198883ac3f07000ab4e7179a695) Fixed computed array indexes not being renamed in static code, see [#726](https://github.com/dcodeIO/protobuf.js/issues/726)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8d9981588d17709791846de63f1f3bfd09433b03) Check upfront if key-var is required in static decoders with maps, see [#726](https://github.com/dcodeIO/protobuf.js/issues/726)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/16adff0c7b67c69a2133b6aac375365c5f2bdbf7) Fixed handling of stdout if callback is specified, see [#724](https://github.com/dcodeIO/protobuf.js/issues/724)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6423a419fe45e648593833bf535ba1736b31ef63) Preparations for moving the CLI to its own package, see [#716](https://github.com/dcodeIO/protobuf.js/issues/716)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/afefa3de09620f50346bdcfa04d52952824c3c8d) Properly implement $Properties interface in JSDoc, see [#723](https://github.com/dcodeIO/protobuf.js/issues/723)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a1f23e09fb5635275bb7646dfafc70caef74c6b8) Recursively use $Properties inside of $Properties in static code, see [#717](https://github.com/dcodeIO/protobuf.js/issues/717)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c3f0a2124c661bb9ba35f92c21a98a4405d30b47) Added --strict-long option to pbjs to always emit 'Long' instead of 'number|Long' (only relevant with long.js), see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0bc4a14501f84f93afd6ce2933ad00749c82f4df) Statically emitted long type is 'Long' now instead of '$protobuf.Long', see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a75625d176b7478e0e506f05e2cee5e3d7a0d89a) Decoupled message properties as an interface in static code for TS intellisense support, see [#717](https://github.com/dcodeIO/protobuf.js/issues/717)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/23f14a61e8c2f68b06d1bb4ed20b938764c78860) Static code statically resolves types[..], see [#715](https://github.com/dcodeIO/protobuf.js/issues/715)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ef71e77726b6bf5978b948d598c18bf8b237ade4) Added type definitions for all possible JSON descriptors<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4bfe0c239b9c337f8fa64ea64f6a71baf5639b84) Explained the JSON structure in README and moved CLI specific information to the CLI package<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3ba3ad762f7486b4806ad1c45764e92a81ca24dd) Added information on how to use the stubs to README, see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a5dbba41341bf44876cd4226f08044f88148f37d) Added 'What is a valid message' section to README<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6f8f2c1fdf92e6f81363d77bc059820b2376fe32) Added a hint on using .create to initial example<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3ad28ec920e0fe8d0223db28804a7b3f8a6880c2) Even more usage for README<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5a1f861a0f6b582faae7a4cc5c6ca7e4418086da) Additional information on general usage (README)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/320dea5a1d1387c72759e10a17afd77dc48c3de0) Restructured README to Installation, Usage and Examples sections<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1c9055dd69f7696d2582942b307a1ac8ac0f5533) Added a longish section on the correct use of the toolset to README<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/99667c8e1ff0fd3dac83ce8c0cff5d0b1e347310) Added a few additional notes on core methods to README, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2130bc97e44567e766ea8efacb365383c909dbd4) Extended traverse-types example, see [#693](https://github.com/dcodeIO/protobuf.js/issues/693)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/13e4aa3ff274ab42f1302e16fd59d074c5587b5b) Better explain how .verify, .encode and .decode are connected<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7502dd2dfdaea111e5c1a902c524ad0a51ff9bd4) Documented that Type#encode respectively Message.encode do not implicitly .verify, see [#696](https://github.com/dcodeIO/protobuf.js/issues/696) [ci-skip]<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e7e123aa0b6c05eb4156a761739e37c008a3cbc1) Documented throwing behavior of Reader.create and Message.decode<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0fcde32306da77f02cb1ea81ed18a32cee01f17b) Added error handling notes to README, see [#696](https://github.com/dcodeIO/protobuf.js/issues/696)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fef924e5f708f14dac5713aedc484535d36bfb47) Use @protobufjs/float<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fef924e5f708f14dac5713aedc484535d36bfb47) Rebuilt dist files for 6.7.0<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ca0dce2d7f34cd45e4c1cc753a97c58e05b3b9d2) Updated deps, ts fixes and regenerated dist files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2c2d4002d6776f3edde608bd813c37d798d87e6b) Manually merged gentests improvements, fixes [#733](https://github.com/dcodeIO/protobuf.js/issues/733)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e4a6b6f81fa492a63b12f0da0c381612deff1973) Make sure that util.Long is overridden by AMD loaders only if present, see [#730](https://github.com/dcodeIO/protobuf.js/issues/730)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fff1eb297a728ed6d334c591e7d796636859aa9a) Coverage for util.isset and service as a namespace<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8401a47d030214a54b5ee30426ebc7a9d9c3773d) Shortened !== undefined && !== null to equivalent != null in static code<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e1dd1bc2667de73bb65d876162131be2a4d9fef4) With stubs in place, 'number|Long' return values can be just 'Long' instead, see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/404ba8e03a63f708a70a72f0208e0ca9826fe20b) Just alias as the actual ideal type when using stubs, see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/270cc94c7c4b8ad84d19498672bfc854b55130c9) General cleanup + regenerated dist/test files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/017161ce97ceef3b2d0ce648651a4636f187d78b) Simplified camel case regex, see [#714](https://github.com/dcodeIO/protobuf.js/issues/714)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d410fd20f35d2a35eb314783b17b6570a40a99e8) Regenerated dist files and changelog for 6.7.0<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/88ca8f0d1eb334646ca2625c78e63fdd57221408) Retain alias order in static code for what it's worth, see [#712](https://github.com/dcodeIO/protobuf.js/issues/712)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2a74fbf551e934b3212273e6a28ad65ac4436faf) Everything can be block- or line-style when parsing, see [#713](https://github.com/dcodeIO/protobuf.js/issues/713)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/47bb95a31784b935b9ced52aa773b9d66236105e) Determine necessary aliases depending on config, see [#712](https://github.com/dcodeIO/protobuf.js/issues/712)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/588ffd9b129869de0abcef1d69bfa18f2f25d8e1) Use more precise types for message-like plain objects<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/37b39c8d1a5307eea09aa24d7fd9233a8df5b7b6) Regenerated dist files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c94813f9a5f1eb114d7c6112f7e87cb116fe9da) Regenerated relevant files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d7493efe1a86a60f6cdcf7976523e69523d3f7a3) Moved field comparer to util<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fe917652f88df17d4dbaae1cd74f470385342be2) Updated tests to use new simplified encoder logic<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b69173b4e7b514c40bb4a85b54ca5465492a235b) Updated path to tsd-jsdoc template used by pbts, see [#707](https://github.com/dcodeIO/protobuf.js/issues/707)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5041fad9defdb0bc8131560e92f3b454d8e45273) Additional restructuring for moving configuration files out of the root folder<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c0b7c9fa6309d345c4ce8e06fd86f27528f4ea66) Added codegen support for constructor functions, see [#700](https://github.com/dcodeIO/protobuf.js/issues/700)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4573f9aabd7e8f883e530f4d0b055e5ec9b75219) Attempted to fix broken custom error test<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4b49f500fce156b164c757d8f17be2338f767c82) Trying out a more aggressive approach for custom error subclasses<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/95cd64ee514dc60d10daac5180726ff39594e8e8) Moved a few things out of the root folder<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/db1030ed257f9699a0bcf3bad0bbe8acccf5d766) Coverage for encoder compat. / protocolerror<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/948a4caf5092453fa091ac7a594ccd1cc5b503d2) Updated dist and generated test files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3ead13e83ecdc8715fbab916f7ccaf3fbfdf59ed) Added tslint<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/364e7d457ed4c11328e609f600a57b7bc4888554) Exclude dist/ from codeclimate checks<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6e81fcb05f25386e3997399e6596e9d9414f0286) Also lint cli utilities<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e7e123aa0b6c05eb4156a761739e37c008a3cbc1) Cache any regexp instance (perf)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d89c45f8af0293fb34e6f12b37ceca49083e1faa) Use code climate badges<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e70fbe3492c37f009dbaccf910c1e0f81e8f0f44) Updated travis to pipe to codeclimate, coverage<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a7ab1036906bb7638193a9e991cb62c86108880a) More precise linter configuration<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/58688c178257051ceb2dfea8a63eb6be7dcf1cf1) Added codeclimate<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4b21e00adc6fae42e6a88deaeb0b7c077c6ca50e) Moved cli deps placeholder creation to post install script<br />
# [6.6.5](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.5)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/478ee51194878f24be8607e42e5259952607bd44) sfixed64 is not zig-zag encoded, see [#692](https://github.com/dcodeIO/protobuf.js/issues/692)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a944538c89492abbed147915acea611f11c03a2) Added a placeholder to cli deps node_modules folder to make sure node can load from it<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/83142e420eb1167b2162063a092ae8d89c9dd4b2) Restructured a few failing tests<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/367d55523a3ae88f21d47aa96447ec3e943d4620) Traversal example + minimalistic documentation<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8eeffcbcd027c929e2a76accad588c61dfa2e37c) Added a custom getters/setters example for gRPC<br />
# [6.6.4](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.4)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/88eb7a603a21643d5012a374c7d246f4c27620f3) Made sure that LongBits ctor is always called with unsigned 32 bits + static codegen compat., fixes [#690](https://github.com/dcodeIO/protobuf.js/issues/690)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/50e82fa7759be035a67c7818a1e3ebe0d6f453b6) Properly handle multiple ../.. in path.normalize, see [#688](https://github.com/dcodeIO/protobuf.js/issues/688)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7c3506b3f0c5a08a887e97313828af0c21effc61) Post-merge, also tackles [#683](https://github.com/dcodeIO/protobuf.js/issues/683) (packed option for repeated enum values)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7f3f4600bcae6f2e4dadd5cdb055886193a539b7) Verify accepts non-null objects only, see [#685](https://github.com/dcodeIO/protobuf.js/issues/685)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d65c22936183d04014d6a8eb880ae0ec33aeba6d) allow_alias enum option was not being honored. This case is now handled and a test case was added<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2ddb76b6e93174787a68f68fb28d26b8ece7cc56) Added an experimental --sparse option to limit pbjs output to actually referenced types within main files<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/33d14c97600ed954193301aecbf8492076dd0179) Added explicit hint on Uint8Array to initial example, see [#670](https://github.com/dcodeIO/protobuf.js/issues/670)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cbd4c622912688b47658fea00fd53603049b5104) Ranges and names support for reserved fields, see [#676](https://github.com/dcodeIO/protobuf.js/issues/676)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/487f8922d879955ba22f89b036f897b9753b0355) Updated dependencies / rebuilt dist files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/37536e5fa7a15fbc851040e09beb465bc22d9cf3) Use ?: instead of |undefined in .d.ts files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f8b415a2fc2d1b1eff19333600a010bcaaebf890) Mark optional fields as possibly being undefined<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2ddb76b6e93174787a68f68fb28d26b8ece7cc56) Added a few more common google types from google/api, see [#433](https://github.com/dcodeIO/protobuf.js/issues/433)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d246024f4c7d13ca970c91a757e2f47432a619df) Minor optimizations to dependencies, build process and tsd<br />
# [6.6.3](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.3)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0be01a14915e3e510038808fedbc67192a182d9b) Support node 4.2.0 to 4.4.7 buffers + travis case, see [#665](https://github.com/dcodeIO/protobuf.js/issues/665)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a0920b2c32e7963741693f5a773b89f4b262688) Added ES6 syntax flag to pbjs, see [#667](https://github.com/dcodeIO/protobuf.js/issues/667)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c365242bdc28a47f5c6ab91bae34c277d1044eb3) Reference Buffer for BufferReader/Writer, see [#668](https://github.com/dcodeIO/protobuf.js/issues/668)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/43976072d13bb760a0689b54cc35bdea6817ca0d) Slightly shortened README<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e64cf65b09047755899ec2330ca0fc2f4d7932c2) Additional notes on the distinction of different use cases / distributions, see [#666](https://github.com/dcodeIO/protobuf.js/issues/666)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/83758c99275c2bbd30f63ea1661284578f5c9d91) Extended README with additional information on JSON format<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fdc3102689e8a3e8345eee5ead07ba3c9c3fe80c) Added extended usage instructions for TypeScript and custom classes to README, see [#666](https://github.com/dcodeIO/protobuf.js/issues/666)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3701488cca6bc56ce6b7ad93c7b80e16de2571a7) Updated dist files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/579068a45e285c7d2c69b359716dd6870352f46f) Updated test cases to use new buffer util<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0be01a14915e3e510038808fedbc67192a182d9b) Added fetch test cases + some test cleanup<br />
# [6.6.2](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.2)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3aea1bf3d4920dc01603fda25b86e6436ae45ec2) Properly replace short vars when beautifying static code, see [#663](https://github.com/dcodeIO/protobuf.js/issues/663)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b6cf228a82152f72f21b1b307983126395313470) Use custom prelude in order to exclude any module loader code from source (for webpack), see [#658](https://github.com/dcodeIO/protobuf.js/issues/658)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2b12fb7db9d4eaa3b76b7198539946e97db684c4) Make sure to check optional inner messages for null when encoding, see [#658](https://github.com/dcodeIO/protobuf.js/issues/658)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/276a594771329da8334984771cb536de7322d5b4) Initial attempt on a backwards compatible fetch implementation with binary support, see [#661](https://github.com/dcodeIO/protobuf.js/issues/661)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2d81864fa5c4dac75913456d582e0bea9cf0dd80) Root#resolvePath skips files when returning null, see [#368](https://github.com/dcodeIO/protobuf.js/issues/368)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/aab3ec1a757aff0f11402c3fb943c003f092c1af) Changes callback on failed response decode in rpc service to pass actual error instead of 'error' string<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9044178c052299670108f10621d6e9b3d56e8a40) Travis should exit with the respective error when running sauce tests<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/73721f12072d77263e72a3b27cd5cf9409db9f8b) Moved checks whether a test case is applicable to parent case<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3fcd88c3f9b1a084b06cab2d5881cb5bb895869d) Added eventemitter tests and updated micromodule dependencies (so far)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2db4305ca67d003d57aa14eb23f25eb6c3672034) Added lib/path tests and updated a few dependencies<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2b12fb7db9d4eaa3b76b7198539946e97db684c4) Moved micro modules to lib so they can have their own tests etc.<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b6dfa9f0a4c899b5c217d60d1c2bb835e06b2122) Updated travis<br />
# [6.6.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.1)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/039ac77b062ee6ebf4ec84a5e6c6ece221e63401) Properly set up reflection when using light build<br />
# [6.6.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.0) ([release](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.0))
## Breaking
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cdfe6bfba27fa1a1d0e61887597ad4bb16d7e5ed) Inlined / refactored away .testJSON, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a483a529ef9345ed217a23394a136db0d9f7771) Refactored util.extend away<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/27b16351f3286468e539c2ab382de4b52667cf5e) Reflected and statically generated services use common utility, now work exactly the same<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/dca26badfb843a597f81e98738e2fda3f66c7341) fromObject now throws for entirely bogus values (repeated, map and inner message fields), fixes [#601](https://github.com/dcodeIO/protobuf.js/issues/601)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4bff9c356ef5c10b4aa34d1921a3b513e03dbb3d) Cleaned up library distributions, now is full / light / minimal with proper browserify support for each<br />
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/301f7762ef724229cd1df51e496eed8cfd2f10eb) Do not randomly remove slashes from comments, fixes [#656](https://github.com/dcodeIO/protobuf.js/issues/656)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ef7be352baaec26bdcdce01a71fbee47bbdeec15) Properly parse nested textformat options, also tackles [#655](https://github.com/dcodeIO/protobuf.js/issues/655)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b4f4f48f1949876ae92808b0a5ca5f2b29cc011c) Relieved the requirement to call .resolveAll() on roots in order to populate static code-compatible properties, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/56c8ec4196d461383c3e1f271da02553d877ae81) Added a (highly experimental) debug build as a starting point for [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c5d291f9bab045385c5938ba0f6cdf50a315461f) Full build depends on light build depends on minimal build, shares all relevant code<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/735da4315a98a6960f3b5089115e308548b91c07) Also reuse specified root in pbjs for JSON modules, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3a056244d3acf339722d56549469a8df018e682e) Reuse specified root name in pbjs to be able to split definitions over multiple files more easily, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/28ddf756ab83cc890761ef2bd84a0788d2ad040d) Improved pbjs/pbts examples, better covers reflection with definitions for static modules<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6f0b44aea6cf72d23042810f05a7cede85239eb3) Fixed centered formatting on npm<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/dd96dcdacb8eae94942f7016b8dc37a2569fe420) Various other minor improvements / assertions refactored away, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3317a76fb56b9b31bb07ad672d6bdda94b79b6c3) Fixed some common reflection deopt sites, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a483a529ef9345ed217a23394a136db0d9f7771) Reflection performance pass, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a483a529ef9345ed217a23394a136db0d9f7771) Added TS definitions to alternative builds' index files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a483a529ef9345ed217a23394a136db0d9f7771) Removed unnecessary prototype aliases, improves gzip ratio<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/641625fd64aca55b1163845e6787b58054ac36ec) Unified behaviour of and docs on Class constructor / Class.create<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7299929b37267af2100237d4f8b4ed8610b9f7e1) Statically generated services actually inherit from rpc.Service<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f4cf75e4e4192910b52dd5864a32ee138bd4e508) Do not try to run sauce tests for PRs<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/33da148e2b750ce06591c1c66ce4c46ccecc3c8f) Added utility to enable/disable debugging extensions to experimental debug build<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fdb1a729ae5f8ab762c51699bc4bb721102ef0c8) Fixed node 0.12 tests<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6bc5bb4a7649d6b91a5944a9ae20178d004c8856) Fixed coverage<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6f0b44aea6cf72d23042810f05a7cede85239eb3) Added a test case for [#652](https://github.com/dcodeIO/protobuf.js/issues/652)<br />
# [6.5.3](https://github.com/dcodeIO/protobuf.js/releases/tag/6.5.3)
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/799d0303bf289bb720f2b27af59e44c3197f3fb7) In fromObject, check if object is already a runtime message, see [#652](https://github.com/dcodeIO/protobuf.js/issues/652)<br />
# [6.5.2](https://github.com/dcodeIO/protobuf.js/releases/tag/6.5.2)
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8cff92fe3b7ddb1930371edb4937cd0db9216e52) Added coverage reporting<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cbaaae99b4e39a859664df0e6d20f0491169f489) Added a version scheme warning to all CLI tools so that this doesn't have to be stated overly explicitly in the README<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6877b3399f1a4c33568221bffb4e298b01b14439) Coverage progress, 100%<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/711a9eb55cb796ec1e51af7d56ef2ebbd5903063) Coverage progress<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e7526283ee4dd82231235afefbfad6af54ba8970) Attempted to fix badges once and for all<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5aa296c901c2b460ee3be4530ede394e2a45e0ea) Coverage progress<br />
# [6.5.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.5.1)
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9719fd2fa8fd97899c54712a238091e8fd1c57b2) Reuse module paths when looking up cli dependencies, see [#648](https://github.com/dcodeIO/protobuf.js/issues/648)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6302655d1304cf662f556be5d9fe7a016fcedc3c) Check actual module directories to determine if cli dependencies are present and bootstrap semver, see [#648](https://github.com/dcodeIO/protobuf.js/issues/648)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/dfc7c4323bf98fb26ddcfcfbb6896a6d6e8450a4) Added a note on semver-incompatibility, see [#649](https://github.com/dcodeIO/protobuf.js/issues/649)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/49053ffa0ea8a4ba5ae048706dba1ab6f3bc803b) Coverage progress<br />
# [6.5.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.5.0) ([release](https://github.com/dcodeIO/protobuf.js/releases/tag/6.5.0))
## Breaking
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3946e0fefea415f52a16ea7a74109ff40eee9643) Initial upgrade of converters to real generated functions, see [#620](https://github.com/dcodeIO/protobuf.js/issues/620)<br />
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/08cda241a3e095f3123f8a991bfd80aa3eae9400) An enum default value given as a string is looked up via typeDefault, not defaultValue (which is an array if the field is repeated)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c7e14b1d684aaba2080195cc83900288c5019bbc) Use common utility for virtual oneof getters and setters in both reflection and static code, see [#644](https://github.com/dcodeIO/protobuf.js/issues/644)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/508984b7ff9529906be282375d36fdbada66b8e6) Properly use Type.toObject/Message.toObject within converters, see [#641](https://github.com/dcodeIO/protobuf.js/issues/641)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5bca18f2d32e8687986e23edade7c2aeb6b6bac1) Generate null/undefined assertion in fromObject if actually NOT an enum, see [#620](https://github.com/dcodeIO/protobuf.js/issues/620)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/508984b7ff9529906be282375d36fdbada66b8e6) Replace ALL occurrences of types[%d].values in static code, see [#641](https://github.com/dcodeIO/protobuf.js/issues/641)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9b090bb1673aeb9b8f1d7162316fce4d7a3348f0) Switched to own property-aware encoders for compatibility, see [#639](https://github.com/dcodeIO/protobuf.js/issues/639)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/340d6aa82ac17c4a761c681fa71d5a0955032c8b) Now also parses comments, sets them on reflected objects and re-uses them when generating static code, see [#640](https://github.com/dcodeIO/protobuf.js/issues/640)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3cb82628159db4d2aa721b63619b16aadc5f1981) Further improved generated static code style<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cda5c5452fa0797f1e4c375471aef96f844711f1) Removed scoping iifes from generated static code<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/def7b45fb9b5e01028cfa3bf2ecd8272575feb4d) Removed even more clutter from generated static code<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/dbd19fd9d3a57d033aad1d7173f7f66db8f8db3e) Removed various clutter from generated static code<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1cc8a2460c7e161c9bc58fa441ec88e752df409c) Made sure that static target's replacement regexes don't match fields<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d4272dbf5d0b2577af8efb74a94d246e2e0d728e) Also accept (trailing) triple-slash comments for compatibility with protoc-gen-doc, see [#640](https://github.com/dcodeIO/protobuf.js/issues/640)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0a3862b75fa60ef732e0cd36d623f025acc2fb45) Use semver to validate that CLI dependencies actually satisfy the required version, see [#637](https://github.com/dcodeIO/protobuf.js/issues/637)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9e360ea6a74d41307483e51f18769df7f5b047b9) Added a hint on documenting .proto files for static code<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d2a97bb818474645cf7ce1832952b2c3c739b234) Documented internally used codegen partials for what it's worth<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/079388ca65dfd581d74188a6ae49cfa01b103809) Updated converter documentation<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/168e448dba723d98be05c55dd24769dfe3f43d35) Bundler provides useful stuff to uglify and a global var without extra bloat<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/32e0529387ef97182ad0b9ae135fd8b883ed66b4) Cleaned and categorized tests, coverage progress<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3325e86930a3cb70358c689cb3016c1be991628f) Properly removed builtins from bundle<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2c94b641fc5700c8781ac0b9fe796debac8d6893) Call hasOwnProperty builtin as late as possible decreasing the probability of having to call it at all (perf)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/818bcacde267be70a75e689f480a3caad6f80cf7) Slightly hardened codegen sprintf<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/818bcacde267be70a75e689f480a3caad6f80cf7) Significantly improved uint32 write performance<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b5daa272407cb31945fd38c34bbef7c9edd1db1c) Cleaned up test case data and removed unused files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c280a4a18c6d81c3468177b2ea58ae3bc4f25e73) Removed now useless trailing comment checks, see [#640](https://github.com/dcodeIO/protobuf.js/issues/640)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/44167db494c49d9e4b561a66ad9ce2d8ed865a21) Ensured that pbjs' beautify does not break regular expressions in generated verify functions<br />
# [6.4.6](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.6)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e11012ce047e8b231ba7d8cc896b8e3a88bcb902) Case-sensitively test for legacy group definitions, fixes [#638](https://github.com/dcodeIO/protobuf.js/issues/638)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7e57f4cdd284f886b936511b213a6468e4ddcdce) Properly parse text format options + simple test case, fixes [#636](https://github.com/dcodeIO/protobuf.js/issues/636)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fe4d97bbc4d33ce94352dde62ddcd44ead02d7ad) Added SVG logo, see [#629](https://github.com/dcodeIO/protobuf.js/issues/629)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/57990f7ed8ad5c512c28ad040908cee23bbf2aa8) Also refactored Service and Type to inherit from NamespaceBase, see [#635](https://github.com/dcodeIO/protobuf.js/issues/635)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fe4d97bbc4d33ce94352dde62ddcd44ead02d7ad) Moved TS-compatible Namespace features to a virtual NamespaceBase class, compiles with strictNullChecks by default now, see [#635](https://github.com/dcodeIO/protobuf.js/issues/635)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fe4d97bbc4d33ce94352dde62ddcd44ead02d7ad) Minor codegen enhancements<br />
# [6.4.5](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.5)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1154ce0867306e810cf62a5b41bdb0b765aa8ff3) Properly handle empty/noop Writer#ldelim, fixes [#625](https://github.com/dcodeIO/protobuf.js/issues/625)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f303049f92c53970619375653be46fbb4e3b7d78) Properly annotate map fields in pbjs, fixes [#624](https://github.com/dcodeIO/protobuf.js/issues/624)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4b786282a906387e071a5a28e4842a46df588c7d) Made sure that Writer#bytes is always able to handle plain arrays<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1e6a8d10f291a16631376dd85d5dd385937e6a55) Slightly restructured utility to better support static code default values<br />
# [6.4.4](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.4)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/26d68e36e438b590589e5beaec418c63b8f939cf) Dynamically resolve jsdoc when running pbts, fixes [#622](https://github.com/dcodeIO/protobuf.js/issues/622)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/69c04d7d374e70337352cec9b445301cd7fe60d6) Explain 6.4.2 vs 6.4.3 in changelog<br />
# [6.4.3](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.4)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c2c39fc7cec5634ecd1fbaebbe199bf097269097) Fixed invalid definition of Field#packed property, also introduced decoder.compat mode (packed fields, on by default)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/11fb1a66ae31af675d0d9ce0240cd8e920ae75e7) Always decode packed/non-packed based on wire format only, see [#602](https://github.com/dcodeIO/protobuf.js/issues/602)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c9a61e574f5a2b06f6b15b14c0c0ff56f8381d1f) Use full library for JSON modules and runtime dependency for static modules, fixes [#621](https://github.com/dcodeIO/protobuf.js/issues/621)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e88d13ca7ee971451b57d056f747215f37dfd3d7) Additional workarounds for on demand CLI dependencies, see [#618](https://github.com/dcodeIO/protobuf.js/issues/618)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/44f6357557ab3d881310024342bcc1e0d336a20c) Revised automatic setup of cli dependencies, see [#618](https://github.com/dcodeIO/protobuf.js/issues/618)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e027a3c7855368837e477ce074ac65f191bf774a) Removed Android 4.0 test (no longer supported by sauce)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8ba3c5efd182bc80fc36f9d5fe5e2b615b358236) Removed some unused utility, slightly more efficient codegen, additional comments<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f22a34a071753bca416732ec4d01892263f543fb) Updated tests for new package.json layout<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f22a34a071753bca416732ec4d01892263f543fb) Added break/continue label support to codegen<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f2ffa0731aea7c431c59e452e0f74247d815a352) Updated dependencies, rebuilt dist files and changed logo to use an absolute url<br />
6.4.2 had been accidentally published as 6.4.3.
# [6.4.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.1)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9035d4872e32d6402c8e4d8c915d4f24d5192ea9) Added more default value checks to converter, fixes [#616](https://github.com/dcodeIO/protobuf.js/issues/616)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/62eef58aa3b002115ebded0fa58acc770cd4e4f4) Respect long defaults in converters<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e3170a160079a3a7a99997a2661cdf654cb69e24) Convert inner messages and undefined/null values more thoroughly, fixes [#615](https://github.com/dcodeIO/protobuf.js/issues/615)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b52089efcb9827537012bebe83d1a15738e214f4) Always use first defined enum value as field default, fixes [#613](https://github.com/dcodeIO/protobuf.js/issues/613)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/64f95f9fa1bbe42717d261aeec5c16d1a7aedcfb) Install correct 'tmp' dependency when running pbts without dev dependencies installed, fixes [#612](https://github.com/dcodeIO/protobuf.js/issues/612)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cba46c389ed56737184e5bc2bcce07243d52e5ce) Generate named constructors for runtime messages, see [#588](https://github.com/dcodeIO/protobuf.js/issues/588)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ee20b81f9451c56dc106177bbf9758840b99d0f8) pbjs/pbts no longer generate any volatile headers, see [#614](https://github.com/dcodeIO/protobuf.js/issues/614)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ec9d517d0b87ebe489f02097c2fc8005fae38904) Attempted to make broken shields less annoying<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5cd4c2f2a94bc3c0f2c580040bce28dd42eaccec) Updated README<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0643f93f5c0d96ed0ece5b47f54993ac3a827f1b) Some cleanup and added a logo<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/169638382de9efe35a1079c5f2045c33b858059a) Use $protobuf.Long<br />
# [6.4.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.0) ([release](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.0))
## Breaking
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a017bf8a2dbdb7f9e7ce4c026bb6845174feb3b1) Dropped IE8 support<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/39bc1031bb502f8b677b3736dd283736ea4d92c1) Removed now unused util.longNeq which was used by early static code<br />
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5915ff972482e7db2a73629244ab8a93685b2e55) Do not swallow errors in loadSync, also accept negative enum values in Enum#add, fixes [#609](https://github.com/dcodeIO/protobuf.js/issues/609)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fde56c0de69b480343931264a01a1ead1e3156ec) Improved bytes field support, also fixes [#606](https://github.com/dcodeIO/protobuf.js/issues/606)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0c03f327115d57c4cd5eea3a9a1fad672ed6bd44) Fall back to browser Reader when passing a Uint8Array under node, fixes [#605](https://github.com/dcodeIO/protobuf.js/issues/605)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7eb3d456370d7d66b0856e32b2d2602abf598516) Respect optional properties when writing interfaces in tsd-jsdoc, fixes [#598](https://github.com/dcodeIO/protobuf.js/issues/598)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bcadffecb3a8b98fbbd34b45bae0e6af58f9c810) Instead of protobuf.parse.keepCase, fall back to protobuf.parse.defaults holding all possible defaults, see [#608](https://github.com/dcodeIO/protobuf.js/issues/608)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a4d6a2af0d57a2e0cccf31e3462c8b2465239f8b) Added global ParseOptions#keepCase fallback as protobuf.parse.keepCase, see [#608](https://github.com/dcodeIO/protobuf.js/issues/608) (a usage sketch follows below)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a017bf8a2dbdb7f9e7ce4c026bb6845174feb3b1) Converters use code generation and support custom implementations<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/28ce07d9812f5e1743afef95a94532d2c9488a84) Be more verbose when throwing invalid wire type errors, see [#602](https://github.com/dcodeIO/protobuf.js/issues/602)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/40074bb69c3ca4fcefe09d4cfe01f3a86844a7e8) Added an asJSON-option to always populate array fields, even if defaults=false, see [#597](https://github.com/dcodeIO/protobuf.js/issues/597)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a7d23240a278aac0bf01767b6096d692c09ae1ce) Attempt to improve TypeScript support by using explicit exports<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cec253fb9a177ac810ec96f4f87186506091fa37) Copy-pasted typescript definitions to micro modules, see [#599](https://github.com/dcodeIO/protobuf.js/issues/599)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1f18453c7bfcce65c258fa98a3e3d4577d2e550f) Emit an error on resolveAll() if any extension fields cannot be resolved, see [#595](https://github.com/dcodeIO/protobuf.js/issues/595) + test case<br />
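As a rough illustration (not part of the release notes) of the ParseOptions#keepCase behavior referenced above, using a made-up inline definition:

```js
const protobuf = require("protobufjs");

// keepCase keeps original field names instead of converting them to camelCase.
const source = 'syntax = "proto3"; message Example { string foo_bar = 1; }';
const { root } = protobuf.parse(source, { keepCase: true });
console.log("foo_bar" in root.lookupType("Example").fields); // true
```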
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/804739dbb75359b0034db0097fe82081e3870a53) Removed 'not recommended' label for --keep-case, see [#608](https://github.com/dcodeIO/protobuf.js/issues/608)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9681854526f1813a6ef08becf130ef2fbc28b638) Added customizable linter configuration to pbjs<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9681854526f1813a6ef08becf130ef2fbc28b638) Added stdin support to pbjs and pbts<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/407223b5ceca3304bc65cb48888abfdc917d5800) Static code no longer uses IE8 support utility<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a017bf8a2dbdb7f9e7ce4c026bb6845174feb3b1) Generated static code now supports asJSON/from<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3c775535517b8385a1d3c1bf056f3da3b4266f8c) Added support for TypeScript enums to pbts<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0cda72a55a1f2567a5d981dc5d924e55b8070513) Added a few helpful comments to static code<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/24b293c297feff8bda5ee7a2f8f3f83d77c156d0) Slightly beautify statically generated code<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/65637ffce20099df97ffbcdce50faccc8e97c366) Do not wrap main definition as a module and export directly instead<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/65637ffce20099df97ffbcdce50faccc8e97c366) Generate prettier definitions with --no-comments<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/20d8a2dd93d3bbb6990594286f992e703fc4e334) Added variable arguments support to tsd-jsdoc<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8493dbd9a923693e943f710918937d83ae3c4572) Reference dependency imports as a module to prevent name collisions, see [#596](https://github.com/dcodeIO/protobuf.js/issues/596)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/39a2ea361c50d7f4aaa0408a0d55bb13823b906c) Removed now unnecessary comment lines in generated static code<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a4e41b55471d83a8bf265c6641c3c6e0eee82e31) Added notes on CSP-restricted environments to README, see [#593](https://github.com/dcodeIO/protobuf.js/issues/593)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1a3effdad171ded0608e8da021ba8f9dd017f2ff) Added test case for asJSON with arrays=true, see [#597](https://github.com/dcodeIO/protobuf.js/issues/597)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/751a90f509b68a5f410d1f1844ccff2fc1fc056a) Added a tape adapter to assert message equality across browsers<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fde56c0de69b480343931264a01a1ead1e3156ec) Refactored some internal utility away<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/805291086f6212d1f108b3d8f36325cf1739c0bd) Reverted previous attempt on [#597](https://github.com/dcodeIO/protobuf.js/issues/597)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c5160217ea95996375460c5403dfe37b913d392e) Minor tsd-jsdoc refactor<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/961dd03061fc2c43ab3bf22b3f9f5165504c1002) Removed unused sandbox files<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f625eb8b0762f8f5d35bcd5fc445e52b92d8e77d) Updated package.json of micro modules to reference types, see [#599](https://github.com/dcodeIO/protobuf.js/issues/599)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/46ec8209b21cf9ff09ae8674e2a5bbc49fd4991b) Reference dependencies as imports in generated typescript definitions, see [#596](https://github.com/dcodeIO/protobuf.js/issues/596)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3bab132b871798c7c50c60a4c14c2effdffa372e) Allow null values on optional long fields, see [#590](https://github.com/dcodeIO/protobuf.js/issues/590)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/31da56c177f1e11ffe0072ad5f58a55e3f8008fd) Various jsdoc improvements and a workaround for d.ts generation, see [#592](https://github.com/dcodeIO/protobuf.js/issues/592)<br />
# [6.3.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.3.1)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/95ed6e9e8268711db24f44f0d7e58dd278ddac4c) Empty inner messages are always present on the wire + test case + removed now unused Writer#ldelim parameter, see [#585](https://github.com/dcodeIO/protobuf.js/issues/585)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e8a4d5373b1a00cc6eafa5b201b91d0e250cc00b) Expose tsd-jsdoc's comments option to pbts as --no-comments, see [#587](https://github.com/dcodeIO/protobuf.js/issues/587)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6fe099259b5985d873ba5bec88c049d7491a11cc) Increase child process max buffer when running jsdoc from pbts, see [#587](https://github.com/dcodeIO/protobuf.js/issues/587)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3d84ecdb4788d71b5d3928e74db78e8e54695f0a) pbjs now generates more convenient dot-notation property accessors<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1e0ebc064e4f2566cebf525d526d0b701447bd6a) And fixed IE8 again (should probably just drop IE8 for good)<br />
# [6.3.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.3.0)
## Breaking
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a97956b1322b6ee62d4fc9af885658cd5855e521) Moved camelCase/underScore away from util to where actually used<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c144e7386529b53235a4a5bdd8383bdb322f2825) Renamed asJSON option keys (enum to enums, long to longs) because enum is a reserved keyword<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5b9ade428dca2df6a13277522f2916e22092a98b) Moved JSON/Message conversion to its own source file and added Message/Type.from + test case, see [#575](https://github.com/dcodeIO/protobuf.js/issues/575)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0b0de2458a1ade1ccd4ceb789697be13290f856b) Relicensed the library and its components to BSD-3-Clause to match the official implementation (again)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22a64c641d4897965035cc80e92667bd243f182f) Dropped support for browser buffer entirely (is an Uint8Array anyway), ensures performance and makes things simpler<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22a64c641d4897965035cc80e92667bd243f182f) Removed dead parts of the Reader API<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/964f65a9dd94ae0a18b8be3d9a9c1b0b1fdf6424) Refactored BufferReader/Writer to their own files and removed unnecessary operations (node always has FloatXXArray and browser buffer uses ieee anyway)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bfac0ea9afa3dbaf5caf79ddf0600c3c7772a538) Stripped out fallback encoder/decoder/verifier completely (even IE8 supports codegen), significantly reduces bundle size, can use static codegen elsewhere<br />
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c3023a2f51fc74547f6c6e53cf75feed60f3a25c) Actually concatenate mixed custom options when parsing<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0d66b839df0acec2aea0566d2c0bbcec46c3cd1d) Fixed a couple of issues with alternative browser builds<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/33706cdc201bc863774c4af6ac2c38ad96a276e6) Properly set long defaults on prototypes<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0ea2740f0774b4c5c349b9c303f3fb2c2743c37b) Fixed reference error in minimal runtime, see [#580](https://github.com/dcodeIO/protobuf.js/issues/580)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/741b6d8fde84d9574676a729a29a428d99f0a0a0) Non-repeated empty messages are always present on the wire, see [#581](https://github.com/dcodeIO/protobuf.js/issues/581)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7fac9d6a39bf42d316c1676082a2d0804bc55934) Properly check Buffer.prototype.set with node v4<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3ad8108eab57e2b061ee6f1fddf964abe3f4cbc7) Prevent NRE and properly annotate verify signature in tsd-jsdoc, fixed [#572](https://github.com/dcodeIO/protobuf.js/issues/572)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6c2415d599847cbdadc17dee3cdf369fc9facade) Fix directly using Buffer instead of util.Buffer<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/19e906c2a15acc6178b3bba6b19c2f021e681176) Added filter type to Namespace#lookup, fixes [#569](https://github.com/dcodeIO/protobuf.js/issues/569)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c9a66bf393d9d6927f35a9c18abf5d1c31db912) Fixed parsing enum inner options, see [#565](https://github.com/dcodeIO/protobuf.js/issues/565)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ea7ba8b83890084d61012cb5386dc11dadfb3908) Fixed release links in README files<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/442471363f99e67fa97044f234a47b3c9b929dfa) Added a noparse build for completeness<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bfee1cc3624d0fa21f9553c2f6ce2fcf7fcc09b7) Now compresses .gz files using zopfli to make them useful beyond being just a reference<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/aed134aa1cd7edd801de77c736cf5efe6fa61cb0) Updated non-bundled google types folder with missing descriptors and added wrappers to core<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0b0de2458a1ade1ccd4ceb789697be13290f856b) Replaced the ieee754 implementation for old browsers with a faster, use-case specific one + simple test case<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/99ad9cc08721b834a197d4bbb67fa152d7ad79aa) Added .create to statically generated types and uppercase nested elements to reflection namespaces, see [#576](https://github.com/dcodeIO/protobuf.js/issues/576)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/99ad9cc08721b834a197d4bbb67fa152d7ad79aa) Also added Namespace#getEnum for completeness, see [#576](https://github.com/dcodeIO/protobuf.js/issues/576)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ef43acff547c0cd84cfb7a892fe94504a586e491) Added Namespace#getEnum and changed #lookupEnum to the same behavior, see [#576](https://github.com/dcodeIO/protobuf.js/issues/576)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1fcfdfe21c1b321d975a8a96d133a452c9a9c0d8) Added a heap of coverage comments for usually unused code paths to open things up<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c234de7f0573ee30ed1ecb15aa82b74c0f994876) Added codegen test to determine if any ancient browsers don't actually support it<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fed2000e7e461efdb1c3a1a1aeefa8b255a7c20b) Added legacy groups support to pbjs, see [#568](https://github.com/dcodeIO/protobuf.js/issues/568)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/974a1321da3614832aa0a5b2e7c923f66e4ba8ae) Initial support for legacy groups + test case, see [#568](https://github.com/dcodeIO/protobuf.js/issues/568)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c9a66bf393d9d6927f35a9c18abf5d1c31db912) Added asJSON bytes as Buffer, see [#566](https://github.com/dcodeIO/protobuf.js/issues/566)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c60cd397e902ae6851c017f2c298520b8336cbee) Annotated callback types in pbjs-generated services, see [#582](https://github.com/dcodeIO/protobuf.js/issues/582)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3e7e4fc59e6d2d6c862410b4b427fbedccdb237b) Removed type/ns alias comment in static target to not confuse jsdoc unnecessarily<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/99ad9cc08721b834a197d4bbb67fa152d7ad79aa) Made pbjs use loadSync for deterministic outputs, see [#573](https://github.com/dcodeIO/protobuf.js/issues/573)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4d1f5facfcaaf5f2ab6a70b12443ff1b66e7b94e) Updated documentation on runtime and noparse builds<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c59647a7542cbc4292248787e5f32bb99a9b8d46) Fixed an issue with the changelog generator skipping some commits<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/24f2c03af9f13f5404259866fdc8fed33bfaae25) Added notes on how to use pbjs and pbts programmatically<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3544576116146b209246d71c7f7a9ed687950b26) Manually sorted old changelog entries<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d5812571f335bae68f924aa1098519683a9f3e44) Initial changelog generator, see [#574](https://github.com/dcodeIO/protobuf.js/issues/574)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ab3e236a967a032a98267a648f84d129fdb4d4a6) Added static/JSON module interchangeability to README<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7939a4bd8baca5f7e07530fc93f27911a6d91c6f) Updated README and bundler according to dynamic require calls<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/93e04f1db4a9ef3accff8d071c75be3d74c0cd4a) Added basic services test case<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b5a068f5b79b6f00c4b05d9ac458878650ffa09a) Just polyfill Buffer.from / .allocUnsafe for good<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4375a485789e14f7bf24bece819001154a03dca2) Added a test case to find out if all the fallbacks are just for IE8<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/deb2e82ed7eda41d065a09d120e91c0f7ecf1e6a) Commented out float assertions in float test including explanation<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d3ebd5745b024033fbc2410ecad4d4e02abd67db) Expose array implementation used with (older) browsers on util for tests<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b1b6a813c93da4c7459755186aa02ef2f3765c94) Updated test cases<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/99dc5faa7b39fdad8ebc102de4463f8deb7f48ff) Added assumptions to float test case<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/948ca2e3c5c62fedcd918d75539c261abf1a7347) Updated travis config to use C++11<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c59647a7542cbc4292248787e5f32bb99a9b8d46) Updated / added additional LICENSE files where appropriate<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/333f0221814be976874862dc83d0b216e07d4012) Integrated changelog into build process, now also has 'npm run make' for everything, see [#574](https://github.com/dcodeIO/protobuf.js/issues/574)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ab3e236a967a032a98267a648f84d129fdb4d4a6) Minor optimizations through providing type-hints<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ab3e236a967a032a98267a648f84d129fdb4d4a6) Reverted shortened switch statements in verifier<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ab3e236a967a032a98267a648f84d129fdb4d4a6) Enums can't be map key types<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8ef6975b0bd372b79e9b638f43940424824e7176) Use custom require (now a micromodule) for all optional modules, see [#571](https://github.com/dcodeIO/protobuf.js/issues/571)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e226f001e4e4633d64c52be4abc1915d7b7bd515) Support usage when size = 0<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/19e906c2a15acc6178b3bba6b19c2f021e681176) Reverted aliases frequently used in codegen for better gzip ratio<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/47b51ec95a540681cbed0bac1b2f02fc4cf0b73d) Shrank the bundle size a bit<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f8451f0058fdf7a1fac15ffc529e4e899c6b343c) Can finally run with --trace-deopt again without crashes<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c9a66bf393d9d6927f35a9c18abf5d1c31db912) Other minor optimizations<br />
# [6.2.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.2.1)
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1a6fdc9a11fb08506d09351f8e853384c2b8be25) Added ParseOptions to protobuf.parse and --keep-case for .proto sources to pbjs, see [#564](https://github.com/dcodeIO/protobuf.js/issues/564)<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fc383d0721d83f66b2d941f0d9361621839327e9) Better TypeScript definition support for @property-annotated objects<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4531d75cddee9a99adcac814d52613116ba789f3) Can't just inline longNeq but can be simplified<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8f25377cf99036794ba13b160a5060f312d1a7e7) Array abuse and varint optimization<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/90b201209a03e8022ada0ab9182f338fa0813651) Updated dependencies<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f1110b0993ec86e0a4aee1735bd75b901952cb36) Other minor improvements to short ifs<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c079c900e2d61c63d5508eafacbd00163d377482) Reader/Writer example<br />
# [6.2.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.2.0)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9b7b92a4c7f8caa460d687778dc0628a74cdde37) Fixed reserved names re, also ensure valid service method names, see [#559](https://github.com/dcodeIO/protobuf.js/issues/559)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a83425049c9a78c5607bc35e8089e08ce78a741e) Fix d.ts whitespace on empty lines, added tsd-jsdoc LICENSE<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5f9bede280aa998afb7898e8d2718b4a229e8e6f) Fix asJSON defaults option, make it work for repeated fields.<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b0aef62191b65cbb305ece84a6652d76f98da259) Inlined any Reader/Writer#tag calls, also fixes [#556](https://github.com/dcodeIO/protobuf.js/issues/556)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4d091d41caad9e63cd64003a08210b78878e01dd) Fix building default dist files with explicit runtime=false<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/096dfb686f88db38ed2d8111ed7aac36f8ba658a) Apply asJSON recursively<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/19c269f1dce1b35fa190f264896d0865a54a4fff) Ensure working reflection class names with minified builds<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c769504e0ffa6cbe0b6f8cdc14f1231bed7ee34) Lazily resolve (some) cyclic dependencies, see [#560](https://github.com/dcodeIO/protobuf.js/issues/560)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/da07d8bbbede4175cc45ca46d883210c1082e295) Added protobuf.roots to minimal runtime, see [#554](https://github.com/dcodeIO/protobuf.js/issues/554)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8f407a18607334185afcc85ee98dc1478322bd01) Repo now includes a restructured version of tsd-jsdoc with our changes incorporated for issues/prs, see [#550](https://github.com/dcodeIO/protobuf.js/issues/550)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1b5e4250415c6169eadb405561242f847d75044b) Updated pbjs arguments<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4750e3111b9fdb107d0fc811e99904fbcdbb6de1) Pipe tsd-jsdoc output (requires dcodeIO/tsd-jsdoc/master) and respect cwd, see [#550](https://github.com/dcodeIO/protobuf.js/issues/550)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/75f4b6cb6325a3fc7cd8fed3de5dbe0b6b29c748) tsd-jsdoc progress<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/766171e4c8b6650ea9c6bc3e76c9c96973c2f546) README<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c33835cb1fe1872d823e94b0fff024dc624323e8) Added GH issue template<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6f9ffb6307476d48f45dc4f936744b82982d386b) Path micromodule, dependencies<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0b9b1d8505743995c5328daab1f1e124debc63bd) Test case for [#556](https://github.com/dcodeIO/protobuf.js/issues/556)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/74b2c5c5d33a46c3751ebeadc9d934d4ccb8286c) Raw alloc benchmark<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fb74223b7273530d8baa53437ee96c65a387436d) Other minor optimizations<br />
# [6.1.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.1.1)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/baea920fa6bf5746e0a7888cdbb089cd5d94fc90) Properly encode/decode map kv pairs as repeated messages (codegen and fallback), see [#547](https://github.com/dcodeIO/protobuf.js/issues/547)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/28a1d26f28daf855c949614ef485237c6bf316e5) Make genVerifyKey actually generate conditions for 32bit values and bool, fixes [#546](https://github.com/dcodeIO/protobuf.js/issues/546)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3e9d8ea9a5cbb2e029b5c892714edd6926d2e5a7) Fix to generation of verify methods for bytes<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e7893675ccdf18f0fdaea8f9a054a6b5402b060e) Take special care of oneofs when encoding (i.e. when explicitly set to defaults), see [#542](https://github.com/dcodeIO/protobuf.js/issues/542)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/52cd8b5a891ec8e11611127c8cfa6b3a91ff78e3) Added Message#asJSON option for bytes conversion<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/01365ba9116ca1649b682635bb29814657c4133c) Added Namespace#lookupType and Namespace#lookupService (throw instead of returning null), see [#544](https://github.com/dcodeIO/protobuf.js/issues/544)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a54fbc918ef6bd627113f05049ff704e07bf33b4) Provide prebuilt browser versions of the static runtime<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3783af7ca9187a1d9b1bb278ca69e0188c7e4c66) Initial pbts CLI for generating TypeScript definitions, see [#550](https://github.com/dcodeIO/protobuf.js/issues/550)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b8bce03405196b1779727f246229fd9217b4303d) Refactored json/static-module targets to use common wrappers<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/691231fbc453a243f48a97bfb86794ab5718ef49) Refactor cli to support multiple built-in wrappers, added named roots instead of always using global.root and added additionally necessary eslint comments, see [#540](https://github.com/dcodeIO/protobuf.js/issues/540)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e3e77d0c7dc973d3a5948a49d123bdaf8a048030) Annotate namespaces generated by static target, see [#550](https://github.com/dcodeIO/protobuf.js/issues/550)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/aff21a71e6bd949647b1b7721ea4e1fe16bcd933) static target: Basic support for oneof fields, see [#542](https://github.com/dcodeIO/protobuf.js/issues/542)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b6b00aa7b0cd35e0e8f3c16b322788e9942668d4) Fix to reflection documentation<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ed86f3acbeb6145be5f24dcd05efb287b539e61b) README on minimal runtime / available downloads<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d31590b82d8bafe6657bf877d403f01a034ab4ba) Notes on descriptors vs static modules<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ce41d0ef21cee2d918bdc5c3b542d3b7638b6ead) A lot of minor optimizations to performance and gzip ratio<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ecbb4a52fbab445e63bf23b91539e853efaefa47) Minimized base64 tables<br />
# [6.1.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.1.0)
## Breaking
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a46cc4934b7e888ae80e06fd7fdf91e5bc7f54f5) Removed as-function overload for Reader/Writer, profiler stub, optimized version of Reader#int32<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7983ee0ba15dc5c1daad82a067616865051848c9) Refactored Prototype and inherits away, is now Class and Message for more intuitive documentation and type refs<br />
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c3c70fe3a47fd4f7c85dc80e1af7d9403fe349cd) Fixed failing test case on node < 6<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/66be5983321dd06460382d045eb87ed72a186776) Fixed serialization order of sfixed64, fixes [#536](https://github.com/dcodeIO/protobuf.js/issues/536)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7def340833f9f1cc41f4835bd0d62e203b54d9eb) Fixed serialization order of fixed64, fallback to parseInt with no long lib, see [#534](https://github.com/dcodeIO/protobuf.js/issues/534)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/98a58d40ca7ee7afb1f76c5804e82619104644f6) Actually allow undefined as service method type, fixes [#528](https://github.com/dcodeIO/protobuf.js/issues/528)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/38d867fc50a4d7eb1ca07525c9e4c71b8782443e) Do not skip optional delimiter after aggregate options, fixes [#520](https://github.com/dcodeIO/protobuf.js/issues/520)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/67449db7c7416cbc59ad230c168cf6e6b6dba0c5) Verify empty base64 encoded strings for bytes fields, see [#535](https://github.com/dcodeIO/protobuf.js/issues/535)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ef0fcb6d525c5aab13a39b4f393adf03f751c8c9) Fixed misspelling: 'role' should be 'rule'<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/55db92e21a26c04f524aeecb2316968c000e744d) decodeDelimited always forks if writer is specified, see [#531](https://github.com/dcodeIO/protobuf.js/issues/531)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ebae1e18152617f11ac07827828f5740d4f2eb7e) Mimic spec-compliant behaviour in oneof getVirtual, see [#523](https://github.com/dcodeIO/protobuf.js/issues/523)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a0398f5880c434ff88fd8d420ba07cc29c5d39d3) Initial base64 string support for bytes fields, see [#535](https://github.com/dcodeIO/protobuf.js/issues/535)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a6c00c3e1def5d35c7fcaa1bbb6ce4e0fe67544) Initial type-checking verifier, see [#526](https://github.com/dcodeIO/protobuf.js/issues/526), added to bench out of competition<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3aa984e063cd73e4687102b4abd8adc16582dbc4) Initial loadSync (node only), see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f1370ff5b0db2ebb73b975a3d7c7bd5b901cbfac) Initial RPC service implementation, see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/090d8eaf10704a811a73e1becd52f2307cbcad48) added 'defaults' option to Prototype#asJSON, see [#521](https://github.com/dcodeIO/protobuf.js/issues/521)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7c28483d65cde148e61fe9993f1716960b39e049) Use Uint8Array pool in browsers, just like node does with buffers<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4157a0ec2e54c4d19794cb16edddcd8d4fbd3e76) Also validate map fields, see [#526](https://github.com/dcodeIO/protobuf.js/issues/526) (this really needs some tests)<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0ce099bf4f4666fd00403a2839e6da628b8328a9) Added json-module target to pbjs, renamed static to static-module, see [#522](https://github.com/dcodeIO/protobuf.js/issues/522)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1d99442fe65fcaa2f9e33cc0186ef1336057e0cf) updated internals and static target to use immutable objects on prototypes<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e6eaa91b9fe021b3356d4d7e42033a877bc45871) Added a couple of alternative signatures, protobuf.load returns promise or undefined, aliased Reader/Writer-as-function signature with Reader/Writer.create for typed dialects, see [#518](https://github.com/dcodeIO/protobuf.js/issues/518)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9df6a3d4a654c3e122f97d9a594574c7bbb412da) Added variations for Root#load, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/193e65c006a8df8e9b72e0f23ace14a94952ee36) Added benchmark and profile related information to README<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/228a2027de35238feb867cb0485c78c755c4d17d) Added service example to README, see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1a8c720714bf867f1f0195b4690faefa4f65e66a) README on tests<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/014fb668dcf853874c67e3e0aeb7b488a149d35c) Update README/dist to reflect recent changes<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/11d844c010c5a22eff9d5824714fb67feca77b26) Minimal documentation for micromodules<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/47608dd8595b0df2b30dd18fef4b8207f73ed56a) Document all the callbacks, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3891ab07bbe20cf84701605aa62453a6dbdb6af2) Documented streaming-rpc example a bit<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5606cb1bc41bc90cb069de676650729186b38640) Removed the need for triple-slash references in .d.ts by providing a minimal Long interface, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527), see [#530](https://github.com/dcodeIO/protobuf.js/issues/530)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/adf3cc3d340f8b2a596c892c64457b15e42a771b) Transition to micromodules<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f3a9589b74af6a1bf175f2b1994badf703d7abc4) Refactored argument order of utf8 for plausibility<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/14c207ed6e05a61e756fa4192efb2fa219734dd6) Restructured reusable micromodules<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b510ba258986271f07007aebc5dcfea7cfd90cf4) Can't use Uint8Array#set on node < 6 buffers<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/78952a50ceee8e196b4f156eb01f7f693b5b8aac) Test case for [#531](https://github.com/dcodeIO/protobuf.js/issues/531)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/954577c6b421f7d7f4905bcc32f57e4ebaf548da) Safer signaling for synchronous load, see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9ea3766ff1b8fb7ccad028f44efe27d3b019eeb7) Proper end of stream signaling to rpcImpl, see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e4faf7fac9b34d4776f3c15dfef8d2ae54104567) Moved event emitter to util, also accepts listener context, see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9bdec62793ce77c954774cc19106bde4132f24fc) Probably the worst form of hiding require programmatically, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4462d8b05d3aba37c865cf53e09b3199cf051a92) Attempt to hide require('fs') from webpack, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7c3bf8d32cbf831b251730b3876c35c901926300) Trying out jsdoc variations, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bb4059467287fefda8f966de575fd0f8f9690bd3) by the way, why not include the json->proto functionality into "util"?<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f1008e6ee53ee50358e19c10df8608e950be4be3) Update proto.js<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fc9014822d9cdeae8c6e454ccb66ee28f579826c) Automatic profile generation and processing<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2a2f6dcab5beaaa98e55a005b3d02643c45504d6) Generalized buffer pool and moved it to util<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/53a16bf3ada4a60cc09757712e0046f3f2d9d094) Make shields visible on npm, yey<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9004b9d0c5135a7f6df208ea658258bf2f9e6fc9) More shields, I love shields, and maybe a workaround for travis timing out when sauce takes forever<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/060a7916a2715a9e4cd4d05d7c331bec33e60b7e) Trying SauceLabs with higher concurrency<br />
# [6.0.2](https://github.com/dcodeIO/protobuf.js/releases/tag/6.0.2)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/23d664384900eb65e44910def45f04be996fbba1) Fix packable float/double see [#513](https://github.com/dcodeIO/protobuf.js/issues/513)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/54283d39c4c955b6a84f7f53d4940eec39e4df5e) Handle oneofs in prototype ctor, add non-ES5 fallbacks, test case<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0ae66752362899b8407918a759b09938e82436e1) Be nice to AMD, allow reconfiguration of Reader/Writer interface<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/00f3574ef4ee8b237600e41839bf0066719c4469) Initial static codegen target for reference<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/81e36a7c14d89b487dfe7cfb2f8380fcdf0df392) pbjs static target services support<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4885b8239eb74c72e665787ea0ece3336e493d7f) pbjs static target progress, uses customizable wrapper template<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ad5abe7bac7885ba4f68df7eeb800d2e3b81750b) Static pbjs target progress, now generates usable CommonJS code, see [#512](https://github.com/dcodeIO/protobuf.js/issues/512)<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d9634d218849fb49ff5dfb4597bbb2c2d43bbf08) TypeScript example<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fce8276193a5a9fabad5e5fbeb2ccd4f0f3294a9) Adjectives, notes on browserify<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/23d664384900eb65e44910def45f04be996fbba1) Refactor runtime util into separate file, reader/writer uses runtime util<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f91c432a498bebc0adecef1562061b392611f51a) Also optimize reader with what we have learned<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d83f799519fe69808c88e83d9ad66c645d15e963) More (shameless) writer over-optimization<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8a2dbc610a06fe3a1a2695a3ab032d073b77760d) Trading package size for float speed<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/95c5538cfaf1daf6b4990f6aa7599779aaacf99f) Skip defining getters and setters on IE8 entirely, automate defining fallbacks<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/09865d069303e795e475c82afe2b2267abaa59ea) Unified proto/reflection/classes/static encoding API to always return a writer<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/98d6ae186a48416e4ff3030987caed285f40a4f7) plain js utf8 is faster for short strings<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/79fbbf48b8e4dc9c41dcbdef2b73c5f2608b0318) improve TypeScript support. add simple test script.<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/96fa07adec8b0ae05e07c2c40383267f25f2fc92) Use long.js dependency in tests, reference types instead of paths in .d.ts see [#503](https://github.com/dcodeIO/protobuf.js/issues/503)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5785dee15d07fbcd14025a96686707173bd649a0) Restructured encoder / decoder to better support static code gen<br />
# [6.0.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.0.1)
## Fixed
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/799c1c1a84b255d1831cc84c3d24e61b36fa2530) Add support for long strings, fixes [#509](https://github.com/dcodeIO/protobuf.js/issues/509)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6e5fdb67cb34f90932e95a51370e1652acc55b4c) expose zero on LongBits, fixes [#508](https://github.com/dcodeIO/protobuf.js/issues/508)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/aa922c07490f185c5f97cf28ebbd65200fc5e377) Fixed issues with Root.fromJSON/#addJSON, search global for Long<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/51fe45656b530efbba6dad92f92db2300aa18761) Properly exclude browserify's annoying _process, again, fixes [#502](https://github.com/dcodeIO/protobuf.js/issues/502)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3c16e462a28c36abbc8a176eab9ac2e10ba68597) Remember loaded files earlier to prevent race conditions, fixes [#501](https://github.com/dcodeIO/protobuf.js/issues/501)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4012a00a0578185d92fb6e7d3babd059fee6d6ab) Allow negative enum ids even if super inefficient (encodes as 10 bytes), fixes [#499](https://github.com/dcodeIO/protobuf.js/issues/499), fixes [#500](https://github.com/dcodeIO/protobuf.js/issues/500)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/96dd8f1729ad72e29dbe08dd01bc0ba08446dbe6) set resolvedResponseType on resolve(), fixes [#497](https://github.com/dcodeIO/protobuf.js/issues/497)<br />
## New
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d3ae961765e193ec11227d96d699463de346423f) Initial take on runtime services, see [#507](https://github.com/dcodeIO/protobuf.js/issues/507)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/90cd46b3576ddb2d0a6fc6ae55da512db4be3acc) Include dist/ in npm package for frontend use<br />
## CLI
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4affa1b7c0544229fb5f0d3948df6d832f6feadb) pbjs proto target field options, language-level compliance with jspb test.proto<br />
## Docs
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a06e95222d741c47a51bcec85cd20317de7c0b0) always use Uint8Array in docs for tsd, see [#503](https://github.com/dcodeIO/protobuf.js/issues/503)<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/637698316e095fc35f62a304daaca22654974966) Notes on dist files<br />
## Other
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/29ff3f10e367d6a2ae15fb4254f4073541559c65) Update eslint env<br />
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/943be1749c7d37945c11d1ebffbed9112c528d9f) Browser field in package.json isn't required<br />

View File

@ -0,0 +1,39 @@
This license applies to all parts of protobuf.js except those files
either explicitly including or referencing a different license or
located in a directory containing a different LICENSE file.
---
Copyright (c) 2016, Daniel Wirtz All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of its author, nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---
Code generated by the command line utilities is owned by the owner
of the input file used when generating it. This code is not
standalone and requires a support library to be linked with it. This
support library is itself covered by the above license.

View File

@ -0,0 +1,879 @@
<h1><p align="center"><img alt="protobuf.js" src="https://github.com/dcodeIO/protobuf.js/raw/master/pbjs.png" width="120" height="104" /></p></h1>
<p align="center"><a href="https://npmjs.org/package/protobufjs"><img src="https://img.shields.io/npm/v/protobufjs.svg" alt=""></a> <a href="https://travis-ci.org/dcodeIO/protobuf.js"><img src="https://travis-ci.org/dcodeIO/protobuf.js.svg?branch=master" alt=""></a> <a href="https://npmjs.org/package/protobufjs"><img src="https://img.shields.io/npm/dm/protobufjs.svg" alt=""></a> <a href="https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=dcode%40dcode.io&item_name=Open%20Source%20Software%20Donation&item_number=dcodeIO%2Fprotobuf.js"><img alt="donate ❤" src="https://img.shields.io/badge/donate-❤-ff2244.svg"></a></p>
**Protocol Buffers** are a language-neutral, platform-neutral, extensible way of serializing structured data for use in communications protocols, data storage, and more, originally designed at Google ([see](https://developers.google.com/protocol-buffers/)).
**protobuf.js** is a pure JavaScript implementation with [TypeScript](https://www.typescriptlang.org) support for [node.js](https://nodejs.org) and the browser. It's easy to use, blazingly fast and works out of the box with [.proto](https://developers.google.com/protocol-buffers/docs/proto) files!
Contents
--------
* [Installation](#installation)<br />
How to include protobuf.js in your project.
* [Usage](#usage)<br />
A brief introduction to using the toolset.
* [Valid Message](#valid-message)
* [Toolset](#toolset)<br />
* [Examples](#examples)<br />
A few examples to get you started.
* [Using .proto files](#using-proto-files)
* [Using JSON descriptors](#using-json-descriptors)
* [Using reflection only](#using-reflection-only)
* [Using custom classes](#using-custom-classes)
* [Using services](#using-services)
* [Usage with TypeScript](#usage-with-typescript)<br />
* [Command line](#command-line)<br />
How to use the command line utility.
* [pbjs for JavaScript](#pbjs-for-javascript)
* [pbts for TypeScript](#pbts-for-typescript)
* [Reflection vs. static code](#reflection-vs-static-code)
* [Command line API](#command-line-api)<br />
* [Additional documentation](#additional-documentation)<br />
A list of available documentation resources.
* [Performance](#performance)<br />
A few internals and a benchmark on performance.
* [Compatibility](#compatibility)<br />
Notes on compatibility regarding browsers and optional libraries.
* [Building](#building)<br />
How to build the library and its components yourself.
Installation
---------------
### node.js
```
$> npm install protobufjs [--save --save-prefix=~]
```
```js
var protobuf = require("protobufjs");
```
**Note** that this library's versioning scheme is not semver-compatible for historical reasons. For guaranteed backward compatibility, always depend on `~6.A.B` instead of `^6.A.B` (hence the `--save-prefix` above).
### Browsers
Development:
```
<script src="//cdn.rawgit.com/dcodeIO/protobuf.js/6.X.X/dist/protobuf.js"></script>
```
Production:
```
<script src="//cdn.rawgit.com/dcodeIO/protobuf.js/6.X.X/dist/protobuf.min.js"></script>
```
**Remember** to replace the version tag with the exact [release](https://github.com/dcodeIO/protobuf.js/tags) your project depends upon.
The library supports CommonJS and AMD loaders and also exports globally as `protobuf`.
### Distributions
Where bundle size is a factor, there are additional stripped-down versions of the [full library][dist-full] (~19kb gzipped) available that exclude certain functionality:
* When working with JSON descriptors (i.e. generated by [pbjs](#pbjs-for-javascript)) and/or reflection only, see the [light library][dist-light] (~16kb gzipped) that excludes the parser. CommonJS entry point is:
```js
var protobuf = require("protobufjs/light");
```
* When working with statically generated code only, see the [minimal library][dist-minimal] (~6.5kb gzipped) that also excludes reflection. CommonJS entry point is:
```js
var protobuf = require("protobufjs/minimal");
```
[dist-full]: https://github.com/dcodeIO/protobuf.js/tree/master/dist
[dist-light]: https://github.com/dcodeIO/protobuf.js/tree/master/dist/light
[dist-minimal]: https://github.com/dcodeIO/protobuf.js/tree/master/dist/minimal
Usage
-----
Because JavaScript is a dynamically typed language, protobuf.js introduces the concept of a **valid message** in order to provide the best possible [performance](#performance) (and, as a side product, proper typings):
### Valid message
> A valid message is an object (1) not missing any required fields and (2) exclusively composed of JS types understood by the wire format writer.
There are two possible types of valid messages and the encoder is able to work with both of these for convenience:
* **Message instances** (explicit instances of message classes with default values on their prototype) always (have to) satisfy the requirements of a valid message by design and
* **Plain JavaScript objects** that just so happen to be composed in a way satisfying the requirements of a valid message as well.
In a nutshell, the wire format writer understands the following types:
| Field type | Expected JS type (create, encode) | Conversion (fromObject)
|------------|-----------------------------------|------------------------
| s-/u-/int32<br />s-/fixed32 | `number` (32 bit integer) | <code>value &#124; 0</code> if signed<br />`value >>> 0` if unsigned
| s-/u-/int64<br />s-/fixed64 | `Long`-like (optimal)<br />`number` (53 bit integer) | `Long.fromValue(value)` with long.js<br />`parseInt(value, 10)` otherwise
| float<br />double | `number` | `Number(value)`
| bool | `boolean` | `Boolean(value)`
| string | `string` | `String(value)`
| bytes | `Uint8Array` (optimal)<br />`Buffer` (optimal under node)<br />`Array.<number>` (8 bit integers) | `base64.decode(value)` if a `string`<br />`Object` with non-zero `.length` is assumed to be buffer-like
| enum | `number` (32 bit integer) | Looks up the numeric id if a `string`
| message | Valid message | `Message.fromObject(value)`
* Explicit `undefined` and `null` are considered as not set if the field is optional.
* Repeated fields are `Array.<T>`.
* Map fields are `Object.<string,T>` with the key being the string representation of the respective value or an 8 characters long binary hash string for `Long`-likes.
* Types marked as *optimal* provide the best performance because no conversion step (i.e. number to low and high bits or base64 string to buffer) is required.
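For instance, assuming a type with an int64 field `id`, a bytes field `data` and a string field `name` (the field names are purely illustrative), a plain object like the following already satisfies these requirements and is a valid message:
```js
var payload = {
    id: 42,                                    // int64: a 53 bit integer works without long.js
    data: new Uint8Array([0xde, 0xad, 0xbe]),  // bytes: Uint8Array is optimal (Buffer under node)
    name: "example"                            // string
};
```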
### Toolset
With that in mind and again for performance reasons, each message class provides a distinct set of methods with each method doing just one thing. This avoids unnecessary assertions / redundant operations where performance is a concern but also forces a user to perform verification (of plain JavaScript objects that *might* just so happen to be a valid message) explicitly where necessary - for example when dealing with user input.
**Note** that `Message` below refers to any message class.
* **Message.verify**(message: `Object`): `null|string`<br />
verifies that a **plain JavaScript object** satisfies the requirements of a valid message and thus can be encoded without issues. Instead of throwing, it returns the error message as a string, if any.
```js
var payload = "invalid (not an object)";
var err = AwesomeMessage.verify(payload);
if (err)
throw Error(err);
```
* **Message.encode**(message: `Message|Object` [, writer: `Writer`]): `Writer`<br />
encodes a **message instance** or valid **plain JavaScript object**. This method does not implicitly verify the message and it's up to the user to make sure that the payload is a valid message.
```js
var buffer = AwesomeMessage.encode(message).finish();
```
* **Message.encodeDelimited**(message: `Message|Object` [, writer: `Writer`]): `Writer`<br />
works like `Message.encode` but additionally prepends the length of the message as a varint.
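  For example, mirroring the `encode` call above:
  ```js
  var delimitedBuffer = AwesomeMessage.encodeDelimited(message).finish();
  ```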
* **Message.decode**(reader: `Reader|Uint8Array`): `Message`<br />
decodes a buffer to a **message instance**. If required fields are missing, it throws a `util.ProtocolError` with an `instance` property set to the so far decoded message. If the wire format is invalid, it throws an `Error`.
```js
try {
var decodedMessage = AwesomeMessage.decode(buffer);
} catch (e) {
if (e instanceof protobuf.util.ProtocolError) {
// e.instance holds the so far decoded message with missing required fields
} else {
// wire format is invalid
}
}
```
* **Message.decodeDelimited**(reader: `Reader|Uint8Array`): `Message`<br />
works like `Message.decode` but additionally reads the length of the message prepended as a varint.
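  For example, assuming the buffer holds a length-delimited message (i.e. one produced by `encodeDelimited`):
  ```js
  var decodedMessage = AwesomeMessage.decodeDelimited(delimitedBuffer);
  ```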
* **Message.create**(properties: `Object`): `Message`<br />
creates a new **message instance** from a set of properties that satisfy the requirements of a valid message. Where applicable, it is recommended to prefer `Message.create` over `Message.fromObject` because it doesn't perform possibly redundant conversion.
```js
var message = AwesomeMessage.create({ awesomeField: "AwesomeString" });
```
* **Message.fromObject**(object: `Object`): `Message`<br />
converts any non-valid **plain JavaScript object** to a **message instance** using the conversion steps outlined within the table above.
```js
var message = AwesomeMessage.fromObject({ awesomeField: 42 });
// converts awesomeField to a string
```
* **Message.toObject**(message: `Message` [, options: `ConversionOptions`]): `Object`<br />
converts a **message instance** to an arbitrary **plain JavaScript object** for interoperability with other libraries or storage. The resulting plain JavaScript object *might* still satisfy the requirements of a valid message depending on the actual conversion options specified, but most of the time it does not.
```js
var object = AwesomeMessage.toObject(message, {
enums: String, // enums as string names
longs: String, // longs as strings (requires long.js)
bytes: String, // bytes as base64 encoded strings
defaults: true, // includes default values
arrays: true, // populates empty arrays (repeated fields) even if defaults=false
objects: true, // populates empty objects (map fields) even if defaults=false
oneofs: true // includes virtual oneof fields set to the present field's name
});
```
For reference, the following diagram aims to display relationships between the different methods and the concept of a valid message:
<p align="center"><img alt="Toolset Diagram" src="http://dcode.io/protobuf.js/toolset.svg" /></p>
> In other words: `verify` indicates that calling `create` or `encode` directly on the plain object will [result in a valid message respectively] succeed. `fromObject`, on the other hand, does conversion from a broader range of plain objects to create valid messages. ([ref](https://github.com/dcodeIO/protobuf.js/issues/748#issuecomment-291925749))
Examples
--------
### Using .proto files
It is possible to load existing .proto files using the full library, which parses and compiles the definitions to ready to use (reflection-based) message classes:
```protobuf
// awesome.proto
package awesomepackage;
syntax = "proto3";
message AwesomeMessage {
string awesome_field = 1; // becomes awesomeField
}
```
```js
protobuf.load("awesome.proto", function(err, root) {
if (err)
throw err;
// Obtain a message type
var AwesomeMessage = root.lookupType("awesomepackage.AwesomeMessage");
// Exemplary payload
var payload = { awesomeField: "AwesomeString" };
// Verify the payload if necessary (i.e. when possibly incomplete or invalid)
var errMsg = AwesomeMessage.verify(payload);
if (errMsg)
throw Error(errMsg);
// Create a new message
var message = AwesomeMessage.create(payload); // or use .fromObject if conversion is necessary
// Encode a message to an Uint8Array (browser) or Buffer (node)
var buffer = AwesomeMessage.encode(message).finish();
// ... do something with buffer
// Decode an Uint8Array (browser) or Buffer (node) to a message
var message = AwesomeMessage.decode(buffer);
// ... do something with message
// If the application uses length-delimited buffers, there is also encodeDelimited and decodeDelimited.
// Maybe convert the message back to a plain object
var object = AwesomeMessage.toObject(message, {
longs: String,
enums: String,
bytes: String,
// see ConversionOptions
});
});
```
Additionally, promise syntax can be used by omitting the callback, if preferred:
```js
protobuf.load("awesome.proto")
.then(function(root) {
...
});
```
### Using JSON descriptors
The library utilizes JSON descriptors that are equivalent to a .proto definition. For example, the following is identical to the .proto definition seen above:
```json
// awesome.json
{
"nested": {
"AwesomeMessage": {
"fields": {
"awesomeField": {
"type": "string",
"id": 1
}
}
}
}
}
```
JSON descriptors closely resemble the internal reflection structure:
| Type (T) | Extends | Type-specific properties
|--------------------|--------------------|-------------------------
| *ReflectionObject* | | options
| *Namespace* | *ReflectionObject* | nested
| Root | *Namespace* | **nested**
| Type | *Namespace* | **fields**
| Enum | *ReflectionObject* | **values**
| Field | *ReflectionObject* | rule, **type**, **id**
| MapField | Field | **keyType**
| OneOf | *ReflectionObject* | **oneof** (array of field names)
| Service | *Namespace* | **methods**
| Method | *ReflectionObject* | type, **requestType**, **responseType**, requestStream, responseStream
* **Bold properties** are required. *Italic types* are abstract.
* `T.fromJSON(name, json)` creates the respective reflection object from a JSON descriptor
* `T#toJSON()` creates a JSON descriptor from the respective reflection object (its name is used as the key within the parent)
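As a brief sketch of these two, using the `awesomeField` descriptor fragment from the example above:
```js
// Create a reflected type from a JSON descriptor fragment ...
var AwesomeMessage = protobuf.Type.fromJSON("AwesomeMessage", {
    fields: {
        awesomeField: { type: "string", id: 1 }
    }
});
// ... and convert it back (the type's name is used as the key within its parent)
var descriptor = AwesomeMessage.toJSON();
```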
Exclusively using JSON descriptors instead of .proto files enables the use of just the light library (the parser isn't required in this case).
A JSON descriptor can either be loaded the usual way:
```js
protobuf.load("awesome.json", function(err, root) {
if (err) throw err;
// Continue at "Obtain a message type" above
});
```
Or it can be loaded inline:
```js
var jsonDescriptor = require("./awesome.json"); // exemplary for node
var root = protobuf.Root.fromJSON(jsonDescriptor);
// Continue at "Obtain a message type" above
```
### Using reflection only
Both the full and the light library include full reflection support. One could, for example, define the .proto definitions seen in the examples above using just reflection:
```js
...
var Root = protobuf.Root,
Type = protobuf.Type,
Field = protobuf.Field;
var AwesomeMessage = new Type("AwesomeMessage").add(new Field("awesomeField", 1, "string"));
var root = new Root().define("awesomepackage").add(AwesomeMessage);
// Continue at "Create a new message" above
...
```
Detailed information on the reflection structure is available within the [API documentation](#additional-documentation).
### Using custom classes
Message classes can also be extended with custom functionality and it is also possible to register a custom constructor with a reflected message type:
```js
...
// Define a custom constructor
function AwesomeMessage(properties) {
// custom initialization code
...
}
// Register the custom constructor with its reflected type (*)
root.lookupType("awesomepackage.AwesomeMessage").ctor = AwesomeMessage;
// Define custom functionality
AwesomeMessage.customStaticMethod = function() { ... };
AwesomeMessage.prototype.customInstanceMethod = function() { ... };
// Continue at "Create a new message" above
```
(*) Besides referencing its reflected type through `AwesomeMessage.$type` and `AwesomeMessage#$type`, the respective custom class is automatically populated with:
* `AwesomeMessage.create`
* `AwesomeMessage.encode` and `AwesomeMessage.encodeDelimited`
* `AwesomeMessage.decode` and `AwesomeMessage.decodeDelimited`
* `AwesomeMessage.verify`
* `AwesomeMessage.fromObject`, `AwesomeMessage.toObject`, `AwesomeMessage#toObject` and `AwesomeMessage#toJSON`
Afterwards, decoded messages of this type are `instanceof AwesomeMessage`.
Alternatively, it is also possible to reuse and extend the internal constructor if custom initialization code is not required:
```js
...
// Reuse the internal constructor
var AwesomeMessage = root.lookupType("awesomepackage.AwesomeMessage").ctor;
// Define custom functionality
AwesomeMessage.customStaticMethod = function() { ... };
AwesomeMessage.prototype.customInstanceMethod = function() { ... };
// Continue at "Create a new message" above
```
### Using services
The library also supports consuming services but it doesn't make any assumptions about the actual transport channel. Instead, a user must provide a suitable RPC implementation, which is an asynchronous function that takes the reflected service method, the binary request and a node-style callback as its parameters:
```js
function rpcImpl(method, requestData, callback) {
// perform the request using an HTTP request or a WebSocket for example
var responseData = ...;
// and call the callback with the binary response afterwards:
callback(null, responseData);
}
```
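For illustration only, such an implementation could perform a plain HTTP POST of the binary request in a browser environment. The endpoint path below is an assumption, not something the library prescribes:
```js
// A minimal sketch of an rpcImpl using XMLHttpRequest (hypothetical endpoint)
function rpcImpl(method, requestData, callback) {
    var xhr = new XMLHttpRequest();
    xhr.open("POST", "/rpc/" + method.name, true);
    xhr.responseType = "arraybuffer";
    xhr.onload = function() {
        if (xhr.status === 200)
            callback(null, new Uint8Array(xhr.response));
        else
            callback(new Error("HTTP " + xhr.status));
    };
    xhr.onerror = function() {
        callback(new Error("network error"));
    };
    xhr.send(requestData);
}
```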
Example:
```protobuf
// greeter.proto
syntax = "proto3";
service Greeter {
rpc SayHello (HelloRequest) returns (HelloReply) {}
}
message HelloRequest {
string name = 1;
}
message HelloReply {
string message = 1;
}
```
```js
...
var Greeter = root.lookup("Greeter");
var greeter = Greeter.create(/* see above */ rpcImpl, /* request delimited? */ false, /* response delimited? */ false);
greeter.sayHello({ name: 'you' }, function(err, response) {
console.log('Greeting:', response.message);
});
```
Services also support promises:
```js
greeter.sayHello({ name: 'you' })
.then(function(response) {
console.log('Greeting:', response.message);
});
```
There is also an [example for streaming RPC](https://github.com/dcodeIO/protobuf.js/blob/master/examples/streaming-rpc.js).
Note that the service API is meant for clients. Implementing a server-side endpoint almost always requires transport-specific code (e.g. HTTP or WebSockets), with the only common denominator being that it decodes and encodes messages; a rough sketch of that part follows.
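The sketch below uses node's built-in `http` module purely for illustration and assumes the `root` loaded from the greeter example above; the framing (one request per HTTP POST) is an assumption, not part of the library:
```js
var http = require("http");
var HelloRequest = root.lookupType("HelloRequest");
var HelloReply = root.lookupType("HelloReply");
http.createServer(function(req, res) {
    var chunks = [];
    req.on("data", function(chunk) { chunks.push(chunk); });
    req.on("end", function() {
        // decode the binary request, build a reply and encode it again
        var request = HelloRequest.decode(Buffer.concat(chunks));
        var reply = HelloReply.create({ message: "Hello " + request.name });
        res.end(Buffer.from(HelloReply.encode(reply).finish()));
    });
}).listen(8080);
```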
### Usage with TypeScript
The library ships with its own [type definitions](https://github.com/dcodeIO/protobuf.js/blob/master/index.d.ts) and modern editors like [Visual Studio Code](https://code.visualstudio.com/) will automatically detect and use them for code completion.
The npm package depends on [@types/node](https://www.npmjs.com/package/@types/node) because of `Buffer` and [@types/long](https://www.npmjs.com/package/@types/long) because of `Long`. If you are not building for node and/or not using long.js, it should be safe to exclude them manually.
#### Using the JS API
The API shown above works pretty much the same with TypeScript. However, because everything is typed, accessing fields on instances of dynamically generated message classes requires either using bracket-notation (i.e. `message["awesomeField"]`) or explicit casts. Alternatively, it is possible to use a [typings file generated for its static counterpart](#pbts-for-typescript).
```ts
import { load } from "protobufjs"; // respectively "./node_modules/protobufjs"
load("awesome.proto", function(err, root) {
if (err)
throw err;
// example code
const AwesomeMessage = root.lookupType("awesomepackage.AwesomeMessage");
let message = AwesomeMessage.create({ awesomeField: "hello" });
console.log(`message = ${JSON.stringify(message)}`);
let buffer = AwesomeMessage.encode(message).finish();
console.log(`buffer = ${Array.prototype.toString.call(buffer)}`);
let decoded = AwesomeMessage.decode(buffer);
console.log(`decoded = ${JSON.stringify(decoded)}`);
});
```
#### Using generated static code
If you generated static code to `bundle.js` using the CLI and its type definitions to `bundle.d.ts`, then you can just do:
```ts
import { AwesomeMessage } from "./bundle.js";
// example code
let message = AwesomeMessage.create({ awesomeField: "hello" });
let buffer = AwesomeMessage.encode(message).finish();
let decoded = AwesomeMessage.decode(buffer);
```
#### Using decorators
The library also includes an early implementation of [decorators](https://www.typescriptlang.org/docs/handbook/decorators.html).
**Note** that decorators are an experimental feature in TypeScript and that declaration order is important depending on the JS target. For example, `@Field.d(2, AwesomeArrayMessage)` requires that `AwesomeArrayMessage` has been defined earlier when targeting `ES5`.
```ts
import { Message, Type, Field, OneOf } from "protobufjs/light"; // respectively "./node_modules/protobufjs/light.js"
export class AwesomeSubMessage extends Message<AwesomeSubMessage> {
@Field.d(1, "string")
public awesomeString: string;
}
export enum AwesomeEnum {
ONE = 1,
TWO = 2
}
@Type.d("SuperAwesomeMessage")
export class AwesomeMessage extends Message<AwesomeMessage> {
@Field.d(1, "string", "optional", "awesome default string")
public awesomeField: string;
@Field.d(2, AwesomeSubMessage)
public awesomeSubMessage: AwesomeSubMessage;
@Field.d(3, AwesomeEnum, "optional", AwesomeEnum.ONE)
public awesomeEnum: AwesomeEnum;
@OneOf.d("awesomeSubMessage", "awesomeEnum")
public which: string;
}
// example code
let message = new AwesomeMessage({ awesomeField: "hello" });
let buffer = AwesomeMessage.encode(message).finish();
let decoded = AwesomeMessage.decode(buffer);
```
Supported decorators are:
* **Type.d(typeName?: `string`)** &nbsp; *(optional)*<br />
annotates a class as a protobuf message type. If `typeName` is not specified, the constructor's runtime function name is used for the reflected type.
* **Field.d&lt;T>(fieldId: `number`, fieldType: `string | Constructor<T>`, fieldRule?: `"optional" | "required" | "repeated"`, defaultValue?: `T`)**<br />
annotates a property as a protobuf field with the specified id and protobuf type.
* **MapField.d&lt;T extends { [key: string]: any }>(fieldId: `number`, fieldKeyType: `string`, fieldValueType: `string | Constructor<{}>`)**<br />
annotates a property as a protobuf map field with the specified id, protobuf key and value type.
* **OneOf.d&lt;T extends string>(...fieldNames: `string[]`)**<br />
annotates a property as a protobuf oneof covering the specified fields.
Other notes:
* Decorated types reside in `protobuf.roots["decorated"]` using a flat structure, so duplicate names are not allowed.
* Enums are copied to a reflected enum with a generic name on decorator evaluation because referenced enum objects have no runtime name the decorator could use.
* Default values must be specified as arguments to the decorator instead of using a property initializer for proper prototype behavior.
* Property names on decorated classes must not be renamed at compile time (e.g. by a minifier) because decorators just receive the original field name as a string.
**ProTip!** Not as pretty, but you can [use decorators in plain JavaScript](https://github.com/dcodeIO/protobuf.js/blob/master/examples/js-decorators.js) as well.
Command line
------------
**Note** that moving the CLI to [its own package](./cli) is a work in progress. At the moment, it's still part of the main package.
The command line interface (CLI) can be used to translate between file formats and to generate static code as well as TypeScript definitions.
### pbjs for JavaScript
```
Translates between file formats and generates static code.
-t, --target Specifies the target format. Also accepts a path to require a custom target.
json JSON representation
json-module JSON representation as a module
proto2 Protocol Buffers, Version 2
proto3 Protocol Buffers, Version 3
static Static code without reflection (non-functional on its own)
static-module Static code without reflection as a module
-p, --path Adds a directory to the include path.
-o, --out Saves to a file instead of writing to stdout.
--sparse Exports only those types referenced from a main file (experimental).
Module targets only:
-w, --wrap Specifies the wrapper to use. Also accepts a path to require a custom wrapper.
default Default wrapper supporting both CommonJS and AMD
commonjs CommonJS wrapper
amd AMD wrapper
es6 ES6 wrapper (implies --es6)
closure A closure adding to protobuf.roots where protobuf is a global
-r, --root Specifies an alternative protobuf.roots name.
-l, --lint Linter configuration. Defaults to protobuf.js-compatible rules:
eslint-disable block-scoped-var, no-redeclare, no-control-regex, no-prototype-builtins
--es6 Enables ES6 syntax (const/let instead of var)
Proto sources only:
--keep-case Keeps field casing instead of converting to camel case.
Static targets only:
--no-create Does not generate create functions used for reflection compatibility.
--no-encode Does not generate encode functions.
--no-decode Does not generate decode functions.
--no-verify Does not generate verify functions.
--no-convert Does not generate convert functions like from/toObject
--no-delimited Does not generate delimited encode/decode functions.
--no-beautify Does not beautify generated code.
--no-comments Does not output any JSDoc comments.
--force-long Enforces the use of 'Long' for s-/u-/int64 and s-/fixed64 fields.
--force-message Enforces the use of message instances instead of plain objects.
usage: pbjs [options] file1.proto file2.json ... (or pipe) other | pbjs [options] -
```
For production environments it is recommended to bundle all your .proto files to a single .json file, which minimizes the number of network requests and avoids any parser overhead (hint: works with just the **light** library):
```
$> pbjs -t json file1.proto file2.proto > bundle.json
```
Now, either include this file in your final bundle:
```js
var root = protobuf.Root.fromJSON(require("./bundle.json"));
```
or load it the usual way:
```js
protobuf.load("bundle.json", function(err, root) {
...
});
```
Generated static code, on the other hand, works with just the **minimal** library. For example
```
$> pbjs -t static-module -w commonjs -o compiled.js file1.proto file2.proto
```
will generate static code for definitions within `file1.proto` and `file2.proto` to a CommonJS module `compiled.js`.
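A sketch of consuming such a module (the package and message names are carried over from the earlier examples and are assumptions here):
```js
var root = require("./compiled.js");
var AwesomeMessage = root.awesomepackage.AwesomeMessage;
var buffer = AwesomeMessage.encode({ awesomeField: "hello" }).finish();
var decoded = AwesomeMessage.decode(buffer);
```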
**ProTip!** Documenting your .proto files with `/** ... */`-blocks or (trailing) `/// ...` lines translates to generated static code.
### pbts for TypeScript
```
Generates TypeScript definitions from annotated JavaScript files.
-o, --out Saves to a file instead of writing to stdout.
-g, --global Name of the global object in browser environments, if any.
--no-comments Does not output any JSDoc comments.
Internal flags:
-n, --name Wraps everything in a module of the specified name.
-m, --main Whether building the main library without any imports.
usage: pbts [options] file1.js file2.js ... (or) other | pbts [options] -
```
Picking up on the example above, the following not only generates static code to a CommonJS module `compiled.js` but also its respective TypeScript definitions to `compiled.d.ts`:
```
$> pbjs -t static-module -w commonjs -o compiled.js file1.proto file2.proto
$> pbts -o compiled.d.ts compiled.js
```
Additionally, TypeScript definitions of static modules are compatible with their reflection-based counterparts (i.e. as exported by JSON modules), as long as the following conditions are met:
1. Instead of using `new SomeMessage(...)`, always use `SomeMessage.create(...)` because reflection objects do not provide a constructor.
2. Types, services and enums must start with an uppercase letter to become available as properties of the reflected types as well (i.e. to be able to use `MyMessage.MyEnum` instead of `root.lookup("MyMessage.MyEnum")`).
For example, the following generates a JSON module `bundle.js` and a `bundle.d.ts`, but no static code:
```
$> pbjs -t json-module -w commonjs -o bundle.js file1.proto file2.proto
$> pbjs -t static-module file1.proto file2.proto | pbts -o bundle.d.ts -
```
### Reflection vs. static code
While using .proto files directly requires the full library, and pure reflection/JSON requires only the light library, pretty much all code except the relatively short descriptors is shared.
Static code, on the other hand, requires just the minimal library, but generates additional source code without any reflection features. This also implies that there is a break-even point: once the amount of generated code exceeds the size of the full (or light) library, statically generated code becomes larger than descriptor-based code.
There is no significant difference performance-wise as the code generated statically is pretty much the same as generated at runtime and both are largely interchangeable as seen in the previous section.
| Source | Library | Advantages | Tradeoffs
|--------|---------|------------|-----------
| .proto | full | Easily editable<br />Interoperability with other libraries<br />No compile step | Some parsing and possibly network overhead
| JSON | light | Easily editable<br />No parsing overhead<br />Single bundle (no network overhead) | protobuf.js specific<br />Has a compile step
| static | minimal | Works where `eval` access is restricted<br />Fully documented<br />Small footprint for small protos | Can be hard to edit<br />No reflection<br />Has a compile step
### Command line API
Both utilities can be used programmatically by providing command line arguments and a callback to their respective `main` functions:
```js
var pbjs = require("protobufjs/cli/pbjs"); // or require("protobufjs/cli").pbjs / .pbts
pbjs.main([ "--target", "json-module", "path/to/myproto.proto" ], function(err, output) {
if (err)
throw err;
// do something with output
});
```
Additional documentation
------------------------
#### Protocol Buffers
* [Google's Developer Guide](https://developers.google.com/protocol-buffers/docs/overview)
#### protobuf.js
* [API Documentation](http://dcode.io/protobuf.js)
* [CHANGELOG](https://github.com/dcodeIO/protobuf.js/blob/master/CHANGELOG.md)
* [Frequently asked questions](https://github.com/dcodeIO/protobuf.js/wiki) on our wiki
#### Community
* [Questions and answers](http://stackoverflow.com/search?tab=newest&q=protobuf.js) on StackOverflow
Performance
-----------
The package includes a benchmark that compares protobuf.js performance to native JSON (as far as this is possible) and [Google's JS implementation](https://github.com/google/protobuf/tree/master/js). On an i7-2600K running node 6.9.1 it yields:
```
benchmarking encoding performance ...
protobuf.js (reflect) x 541,707 ops/sec ±1.13% (87 runs sampled)
protobuf.js (static) x 548,134 ops/sec ±1.38% (89 runs sampled)
JSON (string) x 318,076 ops/sec ±0.63% (93 runs sampled)
JSON (buffer) x 179,165 ops/sec ±2.26% (91 runs sampled)
google-protobuf x 74,406 ops/sec ±0.85% (86 runs sampled)
protobuf.js (static) was fastest
protobuf.js (reflect) was 0.9% ops/sec slower (factor 1.0)
JSON (string) was 41.5% ops/sec slower (factor 1.7)
JSON (buffer) was 67.6% ops/sec slower (factor 3.1)
google-protobuf was 86.4% ops/sec slower (factor 7.3)
benchmarking decoding performance ...
protobuf.js (reflect) x 1,383,981 ops/sec ±0.88% (93 runs sampled)
protobuf.js (static) x 1,378,925 ops/sec ±0.81% (93 runs sampled)
JSON (string) x 302,444 ops/sec ±0.81% (93 runs sampled)
JSON (buffer) x 264,882 ops/sec ±0.81% (93 runs sampled)
google-protobuf x 179,180 ops/sec ±0.64% (94 runs sampled)
protobuf.js (reflect) was fastest
protobuf.js (static) was 0.3% ops/sec slower (factor 1.0)
JSON (string) was 78.1% ops/sec slower (factor 4.6)
JSON (buffer) was 80.8% ops/sec slower (factor 5.2)
google-protobuf was 87.0% ops/sec slower (factor 7.7)
benchmarking combined performance ...
protobuf.js (reflect) x 275,900 ops/sec ±0.78% (90 runs sampled)
protobuf.js (static) x 290,096 ops/sec ±0.96% (90 runs sampled)
JSON (string) x 129,381 ops/sec ±0.77% (90 runs sampled)
JSON (buffer) x 91,051 ops/sec ±0.94% (90 runs sampled)
google-protobuf x 42,050 ops/sec ±0.85% (91 runs sampled)
protobuf.js (static) was fastest
protobuf.js (reflect) was 4.7% ops/sec slower (factor 1.0)
JSON (string) was 55.3% ops/sec slower (factor 2.2)
JSON (buffer) was 68.6% ops/sec slower (factor 3.2)
google-protobuf was 85.5% ops/sec slower (factor 6.9)
```
These results are achieved by
* generating type-specific encoders, decoders, verifiers and converters at runtime
* configuring the reader/writer interface according to the environment
* using node-specific functionality where beneficial and, of course
* avoiding unnecessary operations through splitting up [the toolset](#toolset).
You can also run [the benchmark](https://github.com/dcodeIO/protobuf.js/blob/master/bench/index.js) ...
```
$> npm run bench
```
and [the profiler](https://github.com/dcodeIO/protobuf.js/blob/master/bench/prof.js) yourself (the latter requires a recent version of node):
```
$> npm run prof <encode|decode|encode-browser|decode-browser> [iterations=10000000]
```
Note that as of this writing, the benchmark suite performs significantly slower on node 7.2.0 compared to 6.9.1 because moths.
Compatibility
-------------
* Works in all modern and not-so-modern browsers except IE8.
* Because the internals of this package do not rely on `google/protobuf/descriptor.proto`, options are parsed and presented literally.
* If typed arrays are not supported by the environment, plain arrays will be used instead.
* Support for pre-ES5 environments (except IE8) can be achieved by [using a polyfill](https://github.com/dcodeIO/protobuf.js/blob/master/scripts/polyfill.js).
* Support for [Content Security Policy](https://w3c.github.io/webappsec-csp/)-restricted environments (like Chrome extensions without [unsafe-eval](https://developer.chrome.com/extensions/contentSecurityPolicy#relaxing-eval)) can be achieved by generating and using static code instead.
* If a proper way to work with 64 bit values (uint64, int64 etc.) is required, just install [long.js](https://github.com/dcodeIO/long.js) alongside this library. All 64 bit numbers will then be returned as a `Long` instance instead of a possibly unsafe JavaScript number ([see](https://github.com/dcodeIO/long.js)).
* For descriptor.proto interoperability, see [ext/descriptor](https://github.com/dcodeIO/protobuf.js/tree/master/ext/descriptor)
Building
--------
To build the library or its components yourself, clone it from GitHub and install the development dependencies:
```
$> git clone https://github.com/dcodeIO/protobuf.js.git
$> cd protobuf.js
$> npm install
```
Building the respective development and production versions with their respective source maps to `dist/`:
```
$> npm run build
```
Building the documentation to `docs/`:
```
$> npm run docs
```
Building the TypeScript definition to `index.d.ts`:
```
$> npm run types
```
### Browserify integration
By default, protobuf.js integrates into any browserify build-process without requiring any optional modules. Hence:
* If int64 support is required, explicitly require the `long` module somewhere in your project as it will be excluded otherwise. This assumes that a global `require` function is present that protobuf.js can call to obtain the long module.
If there is no global `require` function present after bundling, it's also possible to assign the long module programmatically:
```js
var Long = ...;
protobuf.util.Long = Long;
protobuf.configure();
```
* If you have any special requirements, there is [the bundler](https://github.com/dcodeIO/protobuf.js/blob/master/scripts/bundle.js) for reference.
**License:** [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause)

View File

@ -0,0 +1,6 @@
#!/usr/bin/env node
var path = require("path"),
cli = require(path.join(__dirname, "..", "cli", "pbjs.js"));
var ret = cli.main(process.argv.slice(2));
if (typeof ret === 'number')
process.exit(ret);

View File

@ -0,0 +1,6 @@
#!/usr/bin/env node
var path = require("path"),
cli = require(path.join(__dirname, "..", "cli", "pbts.js"));
var ret = cli.main(process.argv.slice(2));
if (typeof ret === 'number')
process.exit(ret);

View File

@ -0,0 +1,33 @@
Copyright (c) 2016, Daniel Wirtz All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of its author, nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---
Code generated by the command line utilities is owned by the owner
of the input file used when generating it. This code is not
standalone and requires a support library to be linked with it. This
support library is itself covered by the above license.

View File

@ -0,0 +1,11 @@
protobufjs-cli
==============
[![npm](https://img.shields.io/npm/v/protobufjscli.svg)](https://www.npmjs.com/package/protobufjs-cli)
Command line interface (CLI) for [protobuf.js](https://github.com/dcodeIO/protobuf.js). Translates between file formats and generates static code as well as TypeScript definitions.
* [CLI Documentation](https://github.com/dcodeIO/protobuf.js#command-line)
**Note** that moving the CLI to its own package is a work in progress. At the moment, it's still part of the main package.
**License:** [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause)

View File

@ -0,0 +1,6 @@
#!/usr/bin/env node
var path = require("path"),
cli = require(path.join(__dirname, "..", "pbjs.js"));
var ret = cli.main(process.argv.slice(2));
if (typeof ret === 'number')
process.exit(ret);

View File

@ -0,0 +1,6 @@
#!/usr/bin/env node
var path = require("path"),
cli = require(path.join(__dirname, "..", "pbts.js"));
var ret = cli.main(process.argv.slice(2));
if (typeof ret === 'number')
process.exit(ret);

View File

@ -0,0 +1,3 @@
import * as pbjs from "./pbjs.js";
import * as pbts from "./pbts.js";
export { pbjs, pbts };

View File

@ -0,0 +1,3 @@
"use strict";
exports.pbjs = require("./pbjs");
exports.pbts = require("./pbts");

View File

@ -0,0 +1,18 @@
{
"tags": {
"allowUnknownTags": false
},
"plugins": [
"./tsd-jsdoc/plugin"
],
"opts": {
"encoding" : "utf8",
"recurse" : true,
"lenient" : true,
"template" : "./tsd-jsdoc",
"private" : false,
"comments" : true,
"destination" : false
}
}

View File

@ -0,0 +1,21 @@
The MIT License
Copyright (c) 2016 Chad Engler
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -0,0 +1,23 @@
protobuf.js fork of tsd-jsdoc
=============================
This is a modified version of [tsd-jsdoc](https://github.com/englercj/tsd-jsdoc) v1.0.1 for use with protobuf.js, parked here so we can process issues and pull requests. The ultimate goal is to switch back to a recent version of tsd-jsdoc once it meets our needs.
Options
-------
* **module: `string`**<br />
Wraps everything in a module of the specified name.
* **private: `boolean`**<br />
Includes private members when set to `true`.
* **comments: `boolean`**<br />
Skips comments when explicitly set to `false`.
* **destination: `string|boolean`**<br />
Saves to the specified destination file or to console when set to `false`.
Setting options on the command line
-----------------------------------
Providing `-q, --query <queryString>` on the command line will set new options or override existing ones. Example: `-q module=protobufjs`

View File

@ -0,0 +1,21 @@
"use strict";
exports.defineTags = function(dictionary) {
dictionary.defineTag("template", {
mustHaveValue: true,
canHaveType: false,
canHaveName: false,
onTagged: function(doclet, tag) {
(doclet.templates || (doclet.templates = [])).push(tag.text);
}
});
dictionary.defineTag("tstype", {
mustHaveValue: true,
canHaveType: false,
canHaveName: false,
onTagged: function(doclet, tag) {
doclet.tsType = tag.text;
}
});
};

View File

@ -0,0 +1,693 @@
"use strict";
var fs = require("fs");
// output stream
var out = null;
// documentation data
var data = null;
// already handled objects, by name
var seen = {};
// indentation level
var indent = 0;
// whether indent has been written for the current line yet
var indentWritten = false;
// provided options
var options = {};
// queued interfaces
var queuedInterfaces = [];
// whether writing the first line
var firstLine = true;
// JSDoc hook
exports.publish = function publish(taffy, opts) {
options = opts || {};
// query overrides options
if (options.query)
Object.keys(options.query).forEach(function(key) {
if (key !== "query")
switch (options[key] = options.query[key]) {
case "true":
options[key] = true;
break;
case "false":
options[key] = false;
break;
case "null":
options[key] = null;
break;
}
});
// remove undocumented
taffy({ undocumented: true }).remove();
taffy({ ignore: true }).remove();
taffy({ inherited: true }).remove();
// remove private
if (!options.private)
taffy({ access: "private" }).remove();
// setup output
out = options.destination
? fs.createWriteStream(options.destination)
: process.stdout;
try {
// setup environment
data = taffy().get();
indent = 0;
indentWritten = false;
firstLine = true;
// wrap everything in a module if configured
if (options.module) {
writeln("export = ", options.module, ";");
writeln();
writeln("declare namespace ", options.module, " {");
writeln();
++indent;
}
// handle all
getChildrenOf(undefined).forEach(function(child) {
handleElement(child, null);
});
// process queued
while (queuedInterfaces.length) {
var element = queuedInterfaces.shift();
begin(element);
writeInterface(element);
writeln(";");
}
// end wrap
if (options.module) {
--indent;
writeln("}");
}
// close file output
if (out !== process.stdout)
out.end();
} finally {
// gc environment objects
out = data = null;
seen = options = {};
queuedInterfaces = [];
}
};
//
// Utility
//
// writes one or multiple strings
function write() {
var s = Array.prototype.slice.call(arguments).join("");
if (!indentWritten) {
for (var i = 0; i < indent; ++i)
s = " " + s;
indentWritten = true;
}
out.write(s);
firstLine = false;
}
// writes zero or multiple strings, followed by a new line
function writeln() {
var s = Array.prototype.slice.call(arguments).join("");
if (s.length)
write(s, "\n");
else if (!firstLine)
out.write("\n");
indentWritten = false;
}
var keepTags = [
"param",
"returns",
"throws",
"see"
];
// parses a comment into text and tags
function parseComment(comment) {
var lines = comment.replace(/^ *\/\*\* *|^ *\*\/| *\*\/ *$|^ *\* */mg, "").trim().split(/\r?\n|\r/g); // property.description has just "\r" ?!
var desc;
var text = [];
var tags = null;
for (var i = 0; i < lines.length; ++i) {
var match = /^@(\w+)\b/.exec(lines[i]);
if (match) {
if (!tags) {
tags = [];
desc = text;
}
text = [];
tags.push({ name: match[1], text: text });
lines[i] = lines[i].substring(match[1].length + 1).trim();
}
if (lines[i].length || text.length)
text.push(lines[i]);
}
return {
text: desc || text,
tags: tags || []
};
}
// writes a comment
function writeComment(comment, otherwiseNewline) {
if (!comment || options.comments === false) {
if (otherwiseNewline)
writeln();
return;
}
if (typeof comment !== "object")
comment = parseComment(comment);
comment.tags = comment.tags.filter(function(tag) {
return keepTags.indexOf(tag.name) > -1 && (tag.name !== "returns" || tag.text[0] !== "{undefined}");
});
writeln();
if (!comment.tags.length && comment.text.length < 2) {
writeln("/** " + comment.text[0] + " */");
return;
}
writeln("/**");
comment.text.forEach(function(line) {
if (line.length)
writeln(" * ", line);
else
writeln(" *");
});
comment.tags.forEach(function(tag) {
var started = false;
if (tag.text.length) {
tag.text.forEach(function(line, i) {
if (i > 0)
write(" * ");
else if (tag.name !== "throws")
line = line.replace(/^\{[^\s]*} ?/, "");
if (!line.length)
return;
if (!started) {
write(" * @", tag.name, " ");
started = true;
}
writeln(line);
});
}
});
writeln(" */");
}
// recursively replaces all occurrences of re's match
function replaceRecursive(name, re, fn) {
var found;
function replacer() {
found = true;
return fn.apply(null, arguments);
}
do {
found = false;
name = name.replace(re, replacer);
} while (found);
return name;
}
// tests if an element is considered to be a class or class-like
function isClassLike(element) {
return isClass(element) || isInterface(element);
}
// tests if an element is considered to be a class
function isClass(element) {
return element && element.kind === "class";
}
// tests if an element is considered to be an interface
function isInterface(element) {
return element && (element.kind === "interface" || element.kind === "mixin");
}
// tests if an element is considered to be a namespace
function isNamespace(element) {
return element && (element.kind === "namespace" || element.kind === "module");
}
// gets all children of the specified parent
function getChildrenOf(parent) {
var memberof = parent ? parent.longname : undefined;
return data.filter(function(element) {
return element.memberof === memberof;
});
}
// gets the literal type of an element
function getTypeOf(element) {
if (element.tsType)
return element.tsType.replace(/\r?\n|\r/g, "\n");
var name = "any";
var type = element.type;
if (type && type.names && type.names.length) {
if (type.names.length === 1)
name = element.type.names[0].trim();
else
name = "(" + element.type.names.join("|") + ")";
} else
return name;
// Replace catchalls with any
name = name.replace(/\*|\bmixed\b/g, "any");
// Ensure upper case Object for map expressions below
name = name.replace(/\bobject\b/g, "Object");
// Correct Something.<Something> to Something<Something>
name = replaceRecursive(name, /\b(?!Object|Array)([\w$]+)\.<([^>]*)>/gi, function($0, $1, $2) {
return $1 + "<" + $2 + ">";
});
// Replace Array.<string> with string[]
name = replaceRecursive(name, /\bArray\.?<([^>]*)>/gi, function($0, $1) {
return $1 + "[]";
});
// Replace Object.<string,number> with { [k: string]: number }
name = replaceRecursive(name, /\bObject\.?<([^,]*), *([^>]*)>/gi, function($0, $1, $2) {
return "{ [k: " + $1 + "]: " + $2 + " }";
});
// Replace functions (there are no signatures) with Function
name = name.replace(/\bfunction(?:\(\))?\b/g, "Function");
// Convert plain Object back to just object
name = name.replace(/\b(Object\b(?!\.))/g, function($0, $1) {
return $1.toLowerCase();
});
return name;
}
// begins writing the definition of the specified element
function begin(element, is_interface) {
if (!seen[element.longname]) {
if (isClass(element)) {
var comment = parseComment(element.comment);
var classdesc = comment.tags.find(function(tag) { return tag.name === "classdesc"; });
if (classdesc) {
comment.text = classdesc.text;
comment.tags = [];
}
writeComment(comment, true);
} else
writeComment(element.comment, is_interface || isClassLike(element) || isNamespace(element) || element.isEnum || element.scope === "global");
seen[element.longname] = element;
} else
writeln();
if (element.scope !== "global" || options.module)
return;
write("export ");
}
// writes the function signature describing element
function writeFunctionSignature(element, isConstructor, isTypeDef) {
write("(");
var params = {};
// this type
if (element.this)
params["this"] = {
type: element.this.replace(/^{|}$/g, ""),
optional: false
};
// parameter types
if (element.params)
element.params.forEach(function(param) {
var path = param.name.split(/\./g);
if (path.length === 1)
params[param.name] = {
type: getTypeOf(param),
variable: param.variable === true,
optional: param.optional === true,
defaultValue: param.defaultvalue // Not used yet (TODO)
};
else // Property syntax (TODO)
params[path[0]].type = "{ [k: string]: any }";
});
var paramNames = Object.keys(params);
paramNames.forEach(function(name, i) {
var param = params[name];
var type = param.type;
if (param.variable) {
name = "..." + name;
type = param.type.charAt(0) === "(" ? "any[]" : param.type + "[]";
}
write(name, !param.variable && param.optional ? "?: " : ": ", type);
if (i < paramNames.length - 1)
write(", ");
});
write(")");
// return type
if (!isConstructor) {
write(isTypeDef ? " => " : ": ");
var typeName;
if (element.returns && element.returns.length && (typeName = getTypeOf(element.returns[0])) !== "undefined")
write(typeName);
else
write("void");
}
}
// writes (a typedef as) an interface
function writeInterface(element) {
write("interface ", element.name);
writeInterfaceBody(element);
writeln();
}
function writeInterfaceBody(element) {
writeln("{");
++indent;
if (element.tsType)
writeln(element.tsType.replace(/\r?\n|\r/g, "\n"));
else if (element.properties && element.properties.length)
element.properties.forEach(writeProperty);
--indent;
write("}");
}
function writeProperty(property, declare) {
writeComment(property.description);
if (declare)
write("let ");
write(property.name);
if (property.optional)
write("?");
writeln(": ", getTypeOf(property), ";");
}
//
// Handlers
//
// handles a single element of any understood type
function handleElement(element, parent) {
if (element.scope === "inner")
return false;
if (element.optional !== true && element.type && element.type.names && element.type.names.length) {
for (var i = 0; i < element.type.names.length; i++) {
if (element.type.names[i].toLowerCase() === "undefined") {
// This element is actually optional. Set optional to true and
// remove the 'undefined' type
element.optional = true;
element.type.names.splice(i, 1);
i--;
}
}
}
if (seen[element.longname])
return true;
if (isClassLike(element))
handleClass(element, parent);
else switch (element.kind) {
case "module":
case "namespace":
handleNamespace(element, parent);
break;
case "constant":
case "member":
handleMember(element, parent);
break;
case "function":
handleFunction(element, parent);
break;
case "typedef":
handleTypeDef(element, parent);
break;
case "package":
break;
}
seen[element.longname] = element;
return true;
}
// handles (just) a namespace
function handleNamespace(element/*, parent*/) {
var children = getChildrenOf(element);
if (!children.length)
return;
var first = true;
if (element.properties)
element.properties.forEach(function(property) {
if (!/^[$\w]+$/.test(property.name)) // incompatible in namespace
return;
if (first) {
begin(element);
writeln("namespace ", element.name, " {");
++indent;
first = false;
}
writeProperty(property, true);
});
children.forEach(function(child) {
if (child.scope === "inner" || seen[child.longname])
return;
if (first) {
begin(element);
writeln("namespace ", element.name, " {");
++indent;
first = false;
}
handleElement(child, element);
});
if (!first) {
--indent;
writeln("}");
}
}
// a filter function to remove any module references
function notAModuleReference(ref) {
return ref.indexOf("module:") === -1;
}
// handles a class or class-like
function handleClass(element, parent) {
var is_interface = isInterface(element);
begin(element, is_interface);
if (is_interface)
write("interface ");
else {
if (element.virtual)
write("abstract ");
write("class ");
}
write(element.name);
if (element.templates && element.templates.length)
write("<", element.templates.join(", "), ">");
write(" ");
// extended classes
if (element.augments) {
var augments = element.augments.filter(notAModuleReference);
if (augments.length)
write("extends ", augments[0], " ");
}
// implemented interfaces
var impls = [];
if (element.implements)
Array.prototype.push.apply(impls, element.implements);
if (element.mixes)
Array.prototype.push.apply(impls, element.mixes);
impls = impls.filter(notAModuleReference);
if (impls.length)
write("implements ", impls.join(", "), " ");
writeln("{");
++indent;
if (element.tsType)
writeln(element.tsType.replace(/\r?\n|\r/g, "\n"));
// constructor
if (!is_interface && !element.virtual)
handleFunction(element, parent, true);
// properties
if (is_interface && element.properties)
element.properties.forEach(function(property) {
writeProperty(property);
});
// class-compatible members
var incompatible = [];
getChildrenOf(element).forEach(function(child) {
if (isClassLike(child) || child.kind === "module" || child.kind === "typedef" || child.isEnum) {
incompatible.push(child);
return;
}
handleElement(child, element);
});
--indent;
writeln("}");
// class-incompatible members
if (incompatible.length) {
writeln();
if (element.scope === "global" && !options.module)
write("export ");
writeln("namespace ", element.name, " {");
++indent;
incompatible.forEach(function(child) {
handleElement(child, element);
});
--indent;
writeln("}");
}
}
// handles a namespace or class member
function handleMember(element, parent) {
begin(element);
if (element.isEnum) {
var stringEnum = false;
element.properties.forEach(function(property) {
if (isNaN(property.defaultvalue)) {
stringEnum = true;
}
});
if (stringEnum) {
writeln("type ", element.name, " =");
++indent;
element.properties.forEach(function(property, i) {
write(i === 0 ? "" : "| ", JSON.stringify(property.defaultvalue));
});
--indent;
writeln(";");
} else {
writeln("enum ", element.name, " {");
++indent;
element.properties.forEach(function(property, i) {
write(property.name);
if (property.defaultvalue !== undefined)
write(" = ", JSON.stringify(property.defaultvalue));
if (i < element.properties.length - 1)
writeln(",");
else
writeln();
});
--indent;
writeln("}");
}
} else {
var inClass = isClassLike(parent);
if (inClass) {
write(element.access || "public", " ");
if (element.scope === "static")
write("static ");
if (element.readonly)
write("readonly ");
} else
write(element.kind === "constant" ? "const " : "let ");
write(element.name);
if (element.optional)
write("?");
write(": ");
if (element.type && element.type.names && /^Object\b/i.test(element.type.names[0]) && element.properties) {
writeln("{");
++indent;
element.properties.forEach(function(property, i) {
writeln(JSON.stringify(property.name), ": ", getTypeOf(property), i < element.properties.length - 1 ? "," : "");
});
--indent;
writeln("};");
} else
writeln(getTypeOf(element), ";");
}
}
// handles a function or method
function handleFunction(element, parent, isConstructor) {
var insideClass = true;
if (isConstructor) {
writeComment(element.comment);
write("constructor");
} else {
begin(element);
insideClass = isClassLike(parent);
if (insideClass) {
write(element.access || "public", " ");
if (element.scope === "static")
write("static ");
} else
write("function ");
write(element.name);
if (element.templates && element.templates.length)
write("<", element.templates.join(", "), ">");
}
writeFunctionSignature(element, isConstructor, false);
writeln(";");
if (!insideClass)
handleNamespace(element);
}
// handles a type definition (not a real type)
function handleTypeDef(element, parent) {
if (isInterface(element)) {
if (isClassLike(parent))
queuedInterfaces.push(element);
else {
begin(element);
writeInterface(element);
}
} else {
writeComment(element.comment, true);
write("type ", element.name);
if (element.templates && element.templates.length)
write("<", element.templates.join(", "), ">");
write(" = ");
if (element.tsType)
write(element.tsType.replace(/\r?\n|\r/g, "\n"));
else {
var type = getTypeOf(element);
if (element.type && element.type.names.length === 1 && element.type.names[0] === "function")
writeFunctionSignature(element, false, true);
else if (type === "object") {
if (element.properties && element.properties.length)
writeInterfaceBody(element);
else
write("{}");
} else
write(type);
}
writeln(";");
}
}

View File

@ -0,0 +1,25 @@
'use strict';
var isWindows = process.platform === 'win32';
var trailingSlashRe = isWindows ? /[^:]\\$/ : /.\/$/;
// https://github.com/nodejs/node/blob/3e7a14381497a3b73dda68d05b5130563cdab420/lib/os.js#L25-L43
module.exports = function () {
var path;
if (isWindows) {
path = process.env.TEMP ||
process.env.TMP ||
(process.env.SystemRoot || process.env.windir) + '\\temp';
} else {
path = process.env.TMPDIR ||
process.env.TMP ||
process.env.TEMP ||
'/tmp';
}
if (trailingSlashRe.test(path)) {
path = path.slice(0, -1);
}
return path;
};

View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -0,0 +1,32 @@
# os-tmpdir [![Build Status](https://travis-ci.org/sindresorhus/os-tmpdir.svg?branch=master)](https://travis-ci.org/sindresorhus/os-tmpdir)
> Node.js [`os.tmpdir()`](https://nodejs.org/api/os.html#os_os_tmpdir) [ponyfill](https://ponyfill.com)
Use this instead of `require('os').tmpdir()` to get a consistent behavior on different Node.js versions (even 0.8).
## Install
```
$ npm install --save os-tmpdir
```
## Usage
```js
const osTmpdir = require('os-tmpdir');
osTmpdir();
//=> '/var/folders/m3/5574nnhn0yj488ccryqr7tc80000gn/T'
```
## API
See the [`os.tmpdir()` docs](https://nodejs.org/api/os.html#os_os_tmpdir).
## License
MIT © [Sindre Sorhus](https://sindresorhus.com)

View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 KARASZI István
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,314 @@
# Tmp
A simple temporary file and directory creator for [node.js.][1]
[![Build Status](https://travis-ci.org/raszi/node-tmp.svg?branch=master)](https://travis-ci.org/raszi/node-tmp)
[![Dependencies](https://david-dm.org/raszi/node-tmp.svg)](https://david-dm.org/raszi/node-tmp)
[![npm version](https://badge.fury.io/js/tmp.svg)](https://badge.fury.io/js/tmp)
[![API documented](https://img.shields.io/badge/API-documented-brightgreen.svg)](https://raszi.github.io/node-tmp/)
[![Known Vulnerabilities](https://snyk.io/test/npm/tmp/badge.svg)](https://snyk.io/test/npm/tmp)
## About
This is a [widely used library][2] to create temporary files and directories
in a [node.js][1] environment.
Tmp offers both an asynchronous and a synchronous API. For all API calls, all
the parameters are optional. There also exists a promisified version of the
API, see (5) under references below.
Tmp uses crypto for determining random file names, or, when using templates,
a six-letter random identifier. In case you do not have that much entropy left
on your system, Tmp will fall back to pseudo-random numbers.
You can set whether you want to remove the temporary file on process exit or
not, and the destination directory can also be set.
## How to install
```bash
npm install tmp
```
## Usage
Please also check [API docs][4].
### Asynchronous file creation
Simple temporary file creation, the file will be closed and unlinked on process exit.
```javascript
var tmp = require('tmp');
tmp.file(function _tempFileCreated(err, path, fd, cleanupCallback) {
if (err) throw err;
console.log('File: ', path);
console.log('Filedescriptor: ', fd);
// If we don't need the file anymore we could manually call the cleanupCallback
// But that is not necessary if we didn't pass the keep option because the library
// will clean after itself.
cleanupCallback();
});
```
### Synchronous file creation
A synchronous version of the above.
```javascript
var tmp = require('tmp');
var tmpobj = tmp.fileSync();
console.log('File: ', tmpobj.name);
console.log('Filedescriptor: ', tmpobj.fd);
// If we don't need the file anymore we could manually call the removeCallback
// But that is not necessary if we didn't pass the keep option because the library
// will clean after itself.
tmpobj.removeCallback();
```
Note that this might throw an exception if the maximum number of retries for
creating a temporary name is exceeded, or if you do not have permission to
write to the directory where the temporary file should be created.
### Asynchronous directory creation
Simple temporary directory creation, it will be removed on process exit.
If the directory still contains items on process exit, then it won't be removed.
```javascript
var tmp = require('tmp');
tmp.dir(function _tempDirCreated(err, path, cleanupCallback) {
if (err) throw err;
console.log('Dir: ', path);
// Manual cleanup
cleanupCallback();
});
```
If you want to cleanup the directory even when there are entries in it, then
you can pass the `unsafeCleanup` option when creating it.
### Synchronous directory creation
A synchronous version of the above.
```javascript
var tmp = require('tmp');
var tmpobj = tmp.dirSync();
console.log('Dir: ', tmpobj.name);
// Manual cleanup
tmpobj.removeCallback();
```
Note that this might throw an exception if the maximum number of retries for
creating a temporary name is exceeded, or if you do not have permission to
write to the directory where the temporary directory should be created.
### Asynchronous filename generation
It is possible with this library to generate a unique filename in the specified
directory.
```javascript
var tmp = require('tmp');
tmp.tmpName(function _tempNameGenerated(err, path) {
if (err) throw err;
console.log('Created temporary filename: ', path);
});
```
### Synchronous filename generation
A synchronous version of the above.
```javascript
var tmp = require('tmp');
var name = tmp.tmpNameSync();
console.log('Created temporary filename: ', name);
```
## Advanced usage
### Asynchronous file creation
Creates a file with mode `0644`, prefix will be `prefix-` and postfix will be `.txt`.
```javascript
var tmp = require('tmp');
tmp.file({ mode: 0644, prefix: 'prefix-', postfix: '.txt' }, function _tempFileCreated(err, path, fd) {
if (err) throw err;
console.log('File: ', path);
console.log('Filedescriptor: ', fd);
});
```
### Synchronous file creation
A synchronous version of the above.
```javascript
var tmp = require('tmp');
var tmpobj = tmp.fileSync({ mode: 0644, prefix: 'prefix-', postfix: '.txt' });
console.log('File: ', tmpobj.name);
console.log('Filedescriptor: ', tmpobj.fd);
```
### Controlling the Descriptor
As a side effect of creating a unique file `tmp` gets a file descriptor that is
returned to the user as the `fd` parameter. The descriptor may be used by the
application and is closed when the `removeCallback` is invoked.
In some use cases the application does not need the descriptor, needs to close it
without removing the file, or needs to remove the file without closing the
descriptor. Two options control how the descriptor is managed:
* `discardDescriptor` - if `true` causes `tmp` to close the descriptor after the file
is created. In this case the `fd` parameter is undefined.
* `detachDescriptor` - if `true` causes `tmp` to return the descriptor in the `fd`
parameter, but it is the application's responsibility to close it when it is no
longer needed.
```javascript
var tmp = require('tmp');
tmp.file({ discardDescriptor: true }, function _tempFileCreated(err, path, fd, cleanupCallback) {
if (err) throw err;
// fd will be undefined, allowing application to use fs.createReadStream(path)
// without holding an unused descriptor open.
});
```
```javascript
var tmp = require('tmp');
tmp.file({ detachDescriptor: true }, function _tempFileCreated(err, path, fd, cleanupCallback) {
if (err) throw err;
cleanupCallback();
// Application can store data through fd here; the space used will automatically
// be reclaimed by the operating system when the descriptor is closed or program
// terminates.
});
```
### Asynchronous directory creation
Creates a directory with mode `0750`, prefix will be `myTmpDir_`.
```javascript
var tmp = require('tmp');
tmp.dir({ mode: 0750, prefix: 'myTmpDir_' }, function _tempDirCreated(err, path) {
if (err) throw err;
console.log('Dir: ', path);
});
```
### Synchronous directory creation
Again, a synchronous version of the above.
```javascript
var tmp = require('tmp');
var tmpobj = tmp.dirSync({ mode: 0750, prefix: 'myTmpDir_' });
console.log('Dir: ', tmpobj.name);
```
### mkstemp like, asynchronously
Creates a new temporary directory with mode `0700` and filename like `/tmp/tmp-nk2J1u`.
```javascript
var tmp = require('tmp');
tmp.dir({ template: '/tmp/tmp-XXXXXX' }, function _tempDirCreated(err, path) {
if (err) throw err;
console.log('Dir: ', path);
});
```
### mkstemp like, synchronously
This will behave similarly to the asynchronous version.
```javascript
var tmp = require('tmp');
var tmpobj = tmp.dirSync({ template: '/tmp/tmp-XXXXXX' });
console.log('Dir: ', tmpobj.name);
```
### Asynchronous filename generation
The `tmpName()` function accepts the `prefix`, `postfix`, `dir`, etc. parameters also:
```javascript
var tmp = require('tmp');
tmp.tmpName({ template: '/tmp/tmp-XXXXXX' }, function _tempNameGenerated(err, path) {
if (err) throw err;
console.log('Created temporary filename: ', path);
});
```
### Synchronous filename generation
The `tmpNameSync()` function works similarly to `tmpName()`.
```javascript
var tmp = require('tmp');
var tmpname = tmp.tmpNameSync({ template: '/tmp/tmp-XXXXXX' });
console.log('Created temporary filename: ', tmpname);
```
## Graceful cleanup
One may want to cleanup the temporary files even when an uncaught exception
occurs. To enforce this, you can call the `setGracefulCleanup()` method:
```javascript
var tmp = require('tmp');
tmp.setGracefulCleanup();
```
## Options
All options are optional :)
* `mode`: the file mode to create with, falls back to `0600` on file creation and `0700` on directory creation
* `prefix`: the optional prefix, falls back to `tmp-` if not provided
* `postfix`: the optional postfix, falls back to `.tmp` on file creation
* `template`: [`mkstemp`][3]-like filename template, no default
* `dir`: the optional temporary directory, falls back to the system default (guessed from the environment)
* `tries`: how many times the function should try to get a unique filename before giving up, default `3`
* `keep`: signals that the temporary file or directory should not be deleted on exit, default is `false`, meaning delete on exit
  * Please keep in mind that it is recommended in this case to call the provided `cleanupCallback` function manually.
* `unsafeCleanup`: recursively removes the created temporary directory, even when it's not empty. Default is `false`.
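A combined sketch using several of these options (the `prefix`/`postfix` values are just examples):
```javascript
var tmp = require('tmp');
tmp.file({ mode: 0600, prefix: 'report-', postfix: '.json', keep: true, tries: 5 },
  function _tempFileCreated(err, path, fd, cleanupCallback) {
    if (err) throw err;
    console.log('File: ', path);
    // keep: true disables removal on exit, so clean up explicitly when done
    cleanupCallback();
  });
```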
[1]: http://nodejs.org/
[2]: https://www.npmjs.com/browse/depended/tmp
[3]: http://www.kernel.org/doc/man-pages/online/pages/man3/mkstemp.3.html
[4]: https://raszi.github.io/node-tmp/
[5]: https://github.com/benjamingr/tmp-promise

View File

@ -0,0 +1,611 @@
/*!
* Tmp
*
* Copyright (c) 2011-2017 KARASZI Istvan <github@spam.raszi.hu>
*
* MIT Licensed
*/
/*
* Module dependencies.
*/
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
const osTmpDir = require('os-tmpdir');
const _c = process.binding('constants');
/*
* The working inner variables.
*/
const
/**
* The temporary directory.
* @type {string}
*/
tmpDir = osTmpDir(),
// the random characters to choose from
RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz',
TEMPLATE_PATTERN = /XXXXXX/,
DEFAULT_TRIES = 3,
CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR),
EBADF = _c.EBADF || _c.os.errno.EBADF,
ENOENT = _c.ENOENT || _c.os.errno.ENOENT,
DIR_MODE = 448 /* 0o700 */,
FILE_MODE = 384 /* 0o600 */,
// this will hold the objects need to be removed on exit
_removeObjects = [];
var
_gracefulCleanup = false,
_uncaughtException = false;
/**
* Random name generator based on crypto.
* Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript
*
* @param {number} howMany
* @returns {string} the generated random name
* @private
*/
function _randomChars(howMany) {
var
value = [],
rnd = null;
// make sure that we do not fail because we ran out of entropy
try {
rnd = crypto.randomBytes(howMany);
} catch (e) {
rnd = crypto.pseudoRandomBytes(howMany);
}
for (var i = 0; i < howMany; i++) {
value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]);
}
return value.join('');
}
/**
* Checks whether the `obj` parameter is defined or not.
*
* @param {Object} obj
* @returns {boolean} true if the object is undefined
* @private
*/
function _isUndefined(obj) {
return typeof obj === 'undefined';
}
/**
* Parses the function arguments.
*
* This function helps to have optional arguments.
*
* @param {(Options|Function)} options
* @param {Function} callback
* @returns {Array} parsed arguments
* @private
*/
function _parseArguments(options, callback) {
if (typeof options == 'function') {
return [callback || {}, options];
}
if (_isUndefined(options)) {
return [{}, callback];
}
return [options, callback];
}
/**
* Generates a new temporary name.
*
* @param {Object} opts
* @returns {string} the new random name according to opts
* @private
*/
function _generateTmpName(opts) {
if (opts.name) {
return path.join(opts.dir || tmpDir, opts.name);
}
// mkstemps like template
if (opts.template) {
return opts.template.replace(TEMPLATE_PATTERN, _randomChars(6));
}
// prefix and postfix
const name = [
opts.prefix || 'tmp-',
process.pid,
_randomChars(12),
opts.postfix || ''
].join('');
return path.join(opts.dir || tmpDir, name);
}
/**
* Gets a temporary file name.
*
* @param {(Options|tmpNameCallback)} options options or callback
* @param {?tmpNameCallback} callback the callback function
*/
function tmpName(options, callback) {
var
args = _parseArguments(options, callback),
opts = args[0],
cb = args[1],
tries = opts.name ? 1 : opts.tries || DEFAULT_TRIES;
if (isNaN(tries) || tries < 0)
return cb(new Error('Invalid tries'));
if (opts.template && !opts.template.match(TEMPLATE_PATTERN))
return cb(new Error('Invalid template provided'));
(function _getUniqueName() {
const name = _generateTmpName(opts);
// check whether the path exists then retry if needed
fs.stat(name, function (err) {
if (!err) {
if (tries-- > 0) return _getUniqueName();
return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name));
}
cb(null, name);
});
}());
}
/**
* Synchronous version of tmpName.
*
* @param {Object} options
* @returns {string} the generated random name
* @throws {Error} if the options are invalid or could not generate a filename
*/
function tmpNameSync(options) {
var
args = _parseArguments(options),
opts = args[0],
tries = opts.name ? 1 : opts.tries || DEFAULT_TRIES;
if (isNaN(tries) || tries < 0)
throw new Error('Invalid tries');
if (opts.template && !opts.template.match(TEMPLATE_PATTERN))
throw new Error('Invalid template provided');
do {
const name = _generateTmpName(opts);
try {
fs.statSync(name);
} catch (e) {
return name;
}
} while (tries-- > 0);
throw new Error('Could not get a unique tmp filename, max tries reached');
}
/**
* Creates and opens a temporary file.
*
* @param {(Options|fileCallback)} options the config options or the callback function
* @param {?fileCallback} callback
*/
function file(options, callback) {
var
args = _parseArguments(options, callback),
opts = args[0],
cb = args[1];
opts.postfix = (_isUndefined(opts.postfix)) ? '.tmp' : opts.postfix;
// gets a temporary filename
tmpName(opts, function _tmpNameCreated(err, name) {
if (err) return cb(err);
// create and open the file
fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) {
if (err) return cb(err);
if (opts.discardDescriptor) {
return fs.close(fd, function _discardCallback(err) {
if (err) {
// Closing the descriptor failed. That is unlikely, and the file
// exists, so the error could be ignored. Either way we still need
// to unlink the file, and if that fails too, its error is the
// more important one.
try {
fs.unlinkSync(name);
} catch (e) {
if (!isENOENT(e)) {
err = e;
}
}
return cb(err);
}
cb(null, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts));
});
}
if (opts.detachDescriptor) {
return cb(null, name, fd, _prepareTmpFileRemoveCallback(name, -1, opts));
}
cb(null, name, fd, _prepareTmpFileRemoveCallback(name, fd, opts));
});
});
}
/**
* Synchronous version of file.
*
* @param {Options} options
* @returns {FileSyncObject} object consisting of name, fd and removeCallback
* @throws {Error} if cannot create a file
*/
function fileSync(options) {
var
args = _parseArguments(options),
opts = args[0];
opts.postfix = opts.postfix || '.tmp';
const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
const name = tmpNameSync(opts);
var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE);
if (opts.discardDescriptor) {
fs.closeSync(fd);
fd = undefined;
}
return {
name: name,
fd: fd,
removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts)
};
}
/**
* Removes files and folders in a directory recursively.
*
* @param {string} root
* @private
*/
function _rmdirRecursiveSync(root) {
const dirs = [root];
do {
var
dir = dirs.pop(),
deferred = false,
files = fs.readdirSync(dir);
for (var i = 0, length = files.length; i < length; i++) {
var
file = path.join(dir, files[i]),
stat = fs.lstatSync(file); // lstat so we don't recurse into symlinked directories
if (stat.isDirectory()) {
if (!deferred) {
deferred = true;
dirs.push(dir);
}
dirs.push(file);
} else {
fs.unlinkSync(file);
}
}
if (!deferred) {
fs.rmdirSync(dir);
}
} while (dirs.length !== 0);
}
/**
* Creates a temporary directory.
*
* @param {(Options|dirCallback)} options the options or the callback function
* @param {?dirCallback} callback
*/
function dir(options, callback) {
var
args = _parseArguments(options, callback),
opts = args[0],
cb = args[1];
// gets a temporary filename
tmpName(opts, function _tmpNameCreated(err, name) {
if (err) return cb(err);
// create the directory
fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) {
if (err) return cb(err);
cb(null, name, _prepareTmpDirRemoveCallback(name, opts));
});
});
}
/**
* Synchronous version of dir.
*
* @param {Options} options
* @returns {DirSyncObject} object consisting of name and removeCallback
* @throws {Error} if it cannot create a directory
*/
function dirSync(options) {
var
args = _parseArguments(options),
opts = args[0];
const name = tmpNameSync(opts);
fs.mkdirSync(name, opts.mode || DIR_MODE);
return {
name: name,
removeCallback: _prepareTmpDirRemoveCallback(name, opts)
};
}
/**
* Prepares the callback for removal of the temporary file.
*
* @param {string} name the path of the file
* @param {number} fd file descriptor
* @param {Object} opts
* @returns {fileCallback}
* @private
*/
function _prepareTmpFileRemoveCallback(name, fd, opts) {
const removeCallback = _prepareRemoveCallback(function _removeCallback(fdPath) {
try {
if (0 <= fdPath[0]) {
fs.closeSync(fdPath[0]);
}
}
catch (e) {
// under some node/windows related circumstances, a temporary file
// may not have been created as expected, or the file may already have
// been closed by the user, in which case we simply ignore the error
if (!isEBADF(e) && !isENOENT(e)) {
// reraise any unanticipated error
throw e;
}
}
try {
fs.unlinkSync(fdPath[1]);
}
catch (e) {
if (!isENOENT(e)) {
// reraise any unanticipated error
throw e;
}
}
}, [fd, name]);
if (!opts.keep) {
_removeObjects.unshift(removeCallback);
}
return removeCallback;
}
/**
* Prepares the callback for removal of the temporary directory.
*
* @param {string} name
* @param {Object} opts
* @returns {Function} the callback
* @private
*/
function _prepareTmpDirRemoveCallback(name, opts) {
const removeFunction = opts.unsafeCleanup ? _rmdirRecursiveSync : fs.rmdirSync.bind(fs);
const removeCallback = _prepareRemoveCallback(removeFunction, name);
if (!opts.keep) {
_removeObjects.unshift(removeCallback);
}
return removeCallback;
}
/**
* Creates a guarded function wrapping the removeFunction call.
*
* @param {Function} removeFunction
* @param {Object} arg
* @returns {Function}
* @private
*/
function _prepareRemoveCallback(removeFunction, arg) {
var called = false;
return function _cleanupCallback(next) {
if (!called) {
const index = _removeObjects.indexOf(_cleanupCallback);
if (index >= 0) {
_removeObjects.splice(index, 1);
}
called = true;
removeFunction(arg);
}
if (next) next(null);
};
}
/**
* The garbage collector.
*
* @private
*/
function _garbageCollector() {
if (_uncaughtException && !_gracefulCleanup) {
return;
}
// the function being called removes itself from _removeObjects,
// loop until _removeObjects is empty
while (_removeObjects.length) {
try {
_removeObjects[0].call(null);
} catch (e) {
// already removed?
}
}
}
/**
* Helper for testing against EBADF to compensate for changes made in Node 7.x under Windows.
*/
function isEBADF(error) {
return isExpectedError(error, -EBADF, 'EBADF');
}
/**
* Helper for testing against ENOENT to compensate for changes made in Node 7.x under Windows.
*/
function isENOENT(error) {
return isExpectedError(error, -ENOENT, 'ENOENT');
}
/**
* Helper to determine whether the expected error code matches the actual code and errno,
* which will differ between the supported node versions.
*
* - Node >= 7.0:
* error.code {String}
* error.errno {String|Number} any numerical value will be negated
*
* - Node >= 6.0 < 7.0:
* error.code {String}
* error.errno {Number} negated
*
* - Node >= 4.0 < 6.0: introduces SystemError
* error.code {String}
* error.errno {Number} negated
*
* - Node >= 0.10 < 4.0:
* error.code {Number} negated
* error.errno n/a
*/
function isExpectedError(error, code, errno) {
return error.code == code || error.code == errno;
}
/**
* Sets the graceful cleanup.
*
* Also removes the created files and directories when an uncaught exception occurs.
*/
function setGracefulCleanup() {
_gracefulCleanup = true;
}
const version = process.versions.node.split('.').map(function (value) {
return parseInt(value, 10);
});
if (version[0] === 0 && (version[1] < 9 || version[1] === 9 && version[2] < 5)) {
process.addListener('uncaughtException', function _uncaughtExceptionThrown(err) {
_uncaughtException = true;
_garbageCollector();
throw err;
});
}
process.addListener('exit', function _exit(code) {
if (code) _uncaughtException = true;
_garbageCollector();
});
/**
* Configuration options.
*
* @typedef {Object} Options
* @property {?number} tries the number of tries before giving up on name generation
* @property {?string} template the "mkstemp" like filename template
* @property {?string} name a fixed name to use instead of a generated one
* @property {?string} dir the tmp directory to use
* @property {?string} prefix prefix for the generated name
* @property {?string} postfix postfix for the generated name
*/
/**
* @typedef {Object} FileSyncObject
* @property {string} name the name of the file
* @property {number} fd the file descriptor (undefined if the descriptor was discarded)
* @property {cleanupCallback} removeCallback the callback function to remove the file
*/
/**
* @typedef {Object} DirSyncObject
* @property {string} name the name of the directory
* @property {cleanupCallback} removeCallback the callback function to remove the directory
*/
/**
* @callback tmpNameCallback
* @param {?Error} err the error object if anything goes wrong
* @param {string} name the temporary file name
*/
/**
* @callback fileCallback
* @param {?Error} err the error object if anything goes wrong
* @param {string} name the temporary file name
* @param {number} fd the file descriptor
* @param {cleanupCallback} fn the cleanup callback function
*/
/**
* @callback dirCallback
* @param {?Error} err the error object if anything goes wrong
* @param {string} name the temporary directory name
* @param {cleanupCallback} fn the cleanup callback function
*/
/**
* Removes the temporary created file or directory.
*
* @callback cleanupCallback
* @param {simpleCallback} [next] function to call after entry was removed
*/
/**
* Callback function for function composition.
* @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57}
*
* @callback simpleCallback
*/
// exporting all the needed methods
module.exports.tmpdir = tmpDir;
module.exports.dir = dir;
module.exports.dirSync = dirSync;
module.exports.file = file;
module.exports.fileSync = fileSync;
module.exports.tmpName = tmpName;
module.exports.tmpNameSync = tmpNameSync;
module.exports.setGracefulCleanup = setGracefulCleanup;
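A short, non-authoritative sketch tying these exports together: `file()` hands back a name, a descriptor and a cleanup callback per the typedefs above, and `setGracefulCleanup()` arms the exit-time garbage collector.

```js
const tmp = require('tmp');

// Clean up registered entries even when an uncaught exception ends the process.
tmp.setGracefulCleanup();

tmp.file({ postfix: '.log', discardDescriptor: true }, function (err, name, fd, removeCallback) {
  if (err) throw err;
  // fd is undefined here: discardDescriptor closes the descriptor right away.
  console.log('temporary file:', name);
  removeCallback(); // unlinks the file and deregisters it from the exit handler
});
```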

View File

@ -0,0 +1,20 @@
{
"version": "6.7.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"os-tmpdir": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
"integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ="
},
"tmp": {
"version": "0.0.33",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
"integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
"requires": {
"os-tmpdir": "1.0.2"
}
}
}
}

View File

@ -0,0 +1 @@
{"version": "6.7.0"}

View File

@ -0,0 +1,32 @@
{
"name": "protobufjs-cli",
"description": "Translates between file formats and generates static code as well as TypeScript definitions.",
"version": "6.7.0",
"author": "Daniel Wirtz <dcode+protobufjs@dcode.io>",
"repository": {
"type": "git",
"url": "https://github.com/dcodeIO/protobuf.js.git"
},
"license": "BSD-3-Clause",
"main": "index.js",
"types": "index.d.ts",
"bin": {
"pbjs": "bin/pbjs",
"pbts": "bin/pbts"
},
"peerDependencies": {
"protobufjs": "~6.7.0"
},
"dependencies": {
"chalk": "^1.1.3",
"escodegen": "^1.8.1",
"espree": "^3.1.3",
"estraverse": "^4.2.0",
"glob": "^7.1.1",
"jsdoc": "^3.4.2",
"minimist": "^1.2.0",
"semver": "^5.3.0",
"tmp": "0.0.31",
"uglify-js": "^2.8.15"
}
}

View File

@ -0,0 +1,9 @@
type pbjsCallback = (err: Error|null, output?: string) => void;
/**
* Runs pbjs programmatically.
* @param {string[]} args Command line arguments
* @param {function(?Error, string=)} [callback] Optional completion callback
* @returns {number|undefined} Exit code, if known
*/
export function main(args: string[], callback?: pbjsCallback): number|undefined;
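Since this declaration is the entire programmatic surface, a hedged sketch of driving `pbjs` from code follows; the require path is an assumption about how this vendored CLI directory resolves, and the `.proto` path is illustrative.

```js
// Assumed require path; adjust to wherever this cli package is installed.
var pbjs = require("protobufjs/cli/pbjs");

pbjs.main(["--target", "json", "path/to/example.proto"], function (err, output) {
  if (err) throw err;
  // With a callback supplied, the JSON descriptor is returned here
  // instead of being written to stdout.
  console.log(output);
});
```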

View File

@ -0,0 +1,329 @@
"use strict";
var path = require("path"),
fs = require("fs"),
pkg = require("./package.json"),
util = require("./util");
util.setup();
var protobuf = require(util.pathToProtobufJs),
minimist = require("minimist"),
chalk = require("chalk"),
glob = require("glob");
var targets = util.requireAll("./targets");
/**
* Runs pbjs programmatically.
* @param {string[]} args Command line arguments
* @param {function(?Error, string=)} [callback] Optional completion callback
* @returns {number|undefined} Exit code, if known
*/
exports.main = function main(args, callback) {
var lintDefault = "eslint-disable " + [
"block-scoped-var",
"id-length",
"no-control-regex",
"no-magic-numbers",
"no-prototype-builtins",
"no-redeclare",
"no-shadow",
"no-var",
"sort-vars"
].join(", ");
var argv = minimist(args, {
alias: {
target: "t",
out: "o",
path: "p",
wrap: "w",
root: "r",
lint: "l",
// backward compatibility:
"force-long": "strict-long",
"force-message": "strict-message"
},
string: [ "target", "out", "path", "wrap", "dependency", "root", "lint" ],
boolean: [ "create", "encode", "decode", "verify", "convert", "delimited", "beautify", "comments", "es6", "sparse", "keep-case", "force-long", "force-number", "force-enum-string", "force-message" ],
default: {
target: "json",
create: true,
encode: true,
decode: true,
verify: true,
convert: true,
delimited: true,
beautify: true,
comments: true,
es6: null,
lint: lintDefault,
"keep-case": false,
"force-long": false,
"force-number": false,
"force-enum-string": false,
"force-message": false
}
});
var target = targets[argv.target],
files = argv._,
paths = typeof argv.path === "string" ? [ argv.path ] : argv.path || [];
// alias hyphen args in camel case
Object.keys(argv).forEach(function(key) {
var camelKey = key.replace(/-([a-z])/g, function($0, $1) { return $1.toUpperCase(); });
if (camelKey !== key)
argv[camelKey] = argv[key];
});
// protobuf.js package directory contains additional, otherwise non-bundled google types
paths.push(path.relative(process.cwd(), path.join(__dirname, "..")) || ".");
if (!files.length) {
var descs = Object.keys(targets).filter(function(key) { return !targets[key].private; }).map(function(key) {
return " " + util.pad(key, 14, true) + targets[key].description;
});
if (callback)
callback(Error("usage")); // eslint-disable-line callback-return
else
process.stderr.write([
"protobuf.js v" + pkg.version + " CLI for JavaScript",
"",
chalk.bold.white("Translates between file formats and generates static code."),
"",
" -t, --target Specifies the target format. Also accepts a path to require a custom target.",
"",
descs.join("\n"),
"",
" -p, --path Adds a directory to the include path.",
"",
" -o, --out Saves to a file instead of writing to stdout.",
"",
" --sparse Exports only those types referenced from a main file (experimental).",
"",
chalk.bold.gray(" Module targets only:"),
"",
" -w, --wrap Specifies the wrapper to use. Also accepts a path to require a custom wrapper.",
"",
" default Default wrapper supporting both CommonJS and AMD",
" commonjs CommonJS wrapper",
" amd AMD wrapper",
" es6 ES6 wrapper (implies --es6)",
" closure A closure adding to protobuf.roots where protobuf is a global",
"",
" --dependency Specifies which version of protobuf to require. Accepts any valid module id",
"",
" -r, --root Specifies an alternative protobuf.roots name.",
"",
" -l, --lint Linter configuration. Defaults to protobuf.js-compatible rules:",
"",
" " + lintDefault,
"",
" --es6 Enables ES6 syntax (const/let instead of var)",
"",
chalk.bold.gray(" Proto sources only:"),
"",
" --keep-case Keeps field casing instead of converting to camel case.",
"",
chalk.bold.gray(" Static targets only:"),
"",
" --no-create Does not generate create functions used for reflection compatibility.",
" --no-encode Does not generate encode functions.",
" --no-decode Does not generate decode functions.",
" --no-verify Does not generate verify functions.",
" --no-convert Does not generate convert functions like from/toObject",
" --no-delimited Does not generate delimited encode/decode functions.",
" --no-beautify Does not beautify generated code.",
" --no-comments Does not output any JSDoc comments.",
"",
" --force-long Enfores the use of 'Long' for s-/u-/int64 and s-/fixed64 fields.",
" --force-number Enfores the use of 'number' for s-/u-/int64 and s-/fixed64 fields.",
" --force-message Enfores the use of message instances instead of plain objects.",
"",
"usage: " + chalk.bold.green("pbjs") + " [options] file1.proto file2.json ..." + chalk.gray(" (or pipe) ") + "other | " + chalk.bold.green("pbjs") + " [options] -",
""
].join("\n"));
return 1;
}
if (typeof argv["strict-long"] === "boolean")
argv["force-long"] = argv["strict-long"];
// Resolve glob expressions
for (var i = 0; i < files.length;) {
if (glob.hasMagic(files[i])) {
var matches = glob.sync(files[i]);
Array.prototype.splice.apply(files, [i, 1].concat(matches));
i += matches.length;
} else
++i;
}
// Require custom target
if (!target)
target = require(path.resolve(process.cwd(), argv.target));
var root = new protobuf.Root();
var mainFiles = [];
// Search include paths when resolving imports
root.resolvePath = function pbjsResolvePath(origin, target) {
var normOrigin = protobuf.util.path.normalize(origin),
normTarget = protobuf.util.path.normalize(target);
if (!normOrigin)
mainFiles.push(normTarget);
var resolved = protobuf.util.path.resolve(normOrigin, normTarget, true);
var idx = resolved.lastIndexOf("google/protobuf/");
if (idx > -1) {
var altname = resolved.substring(idx);
if (altname in protobuf.common)
resolved = altname;
}
if (fs.existsSync(resolved))
return resolved;
for (var i = 0; i < paths.length; ++i) {
var iresolved = protobuf.util.path.resolve(paths[i] + "/", target);
if (fs.existsSync(iresolved))
return iresolved;
}
return resolved;
};
// The es6 wrapper implies --es6, and --es6 in turn selects the es6 wrapper
if (argv.wrap === "es6" || argv.es6) {
argv.wrap = "es6";
argv.es6 = true;
}
var parseOptions = {
"keepCase": argv["keep-case"] || false
};
// Read from stdin
if (files.length === 1 && files[0] === "-") {
var data = [];
process.stdin.on("data", function(chunk) {
data.push(chunk);
});
process.stdin.on("end", function() {
var source = Buffer.concat(data).toString("utf8");
try {
if (source.charAt(0) !== "{") {
protobuf.parse.filename = "-";
protobuf.parse(source, root, parseOptions);
} else {
var json = JSON.parse(source);
root.setOptions(json.options).addJSON(json);
}
callTarget();
} catch (err) {
if (callback) {
callback(err);
return;
}
throw err;
}
});
// Load from disk
} else {
try {
root.loadSync(files, parseOptions).resolveAll(); // sync is deterministic while async is not
if (argv.sparse)
sparsify(root);
callTarget();
} catch (err) {
if (callback) {
callback(err);
return undefined;
}
throw err;
}
}
function markReferenced(tobj) {
tobj.referenced = true;
// also mark a type's fields and oneofs
if (tobj.fieldsArray)
tobj.fieldsArray.forEach(function(fobj) {
fobj.referenced = true;
});
if (tobj.oneofsArray)
tobj.oneofsArray.forEach(function(oobj) {
oobj.referenced = true;
});
// also mark an extension field's extended type, but not its (other) fields
if (tobj.extensionField)
tobj.extensionField.parent.referenced = true;
}
function sparsify(root) {
// 1. mark directly or indirectly referenced objects
util.traverse(root, function(obj) {
if (!obj.filename)
return;
if (mainFiles.indexOf(obj.filename) > -1)
util.traverseResolved(obj, markReferenced);
});
// 2. empty unreferenced objects
util.traverse(root, function(obj) {
var parent = obj.parent;
if (!parent || obj.referenced) // root or referenced
return;
// remove unreferenced namespaces
if (obj instanceof protobuf.Namespace) {
var hasReferenced = false;
util.traverse(obj, function(iobj) {
if (iobj.referenced)
hasReferenced = true;
});
if (hasReferenced) { // replace with plain namespace if a namespace subclass
if (obj instanceof protobuf.Type || obj instanceof protobuf.Service) {
var robj = new protobuf.Namespace(obj.name, obj.options);
robj.nested = obj.nested;
parent.add(robj);
}
} else // remove completely if nothing inside is referenced
parent.remove(obj);
// remove everything else unreferenced
} else if (!(obj instanceof protobuf.Namespace))
parent.remove(obj);
});
// 3. validate that everything is fine
root.resolveAll();
}
function callTarget() {
target(root, argv, function targetCallback(err, output) {
if (err) {
if (callback)
return callback(err);
throw err;
}
try {
if (argv.out)
fs.writeFileSync(argv.out, output, { encoding: "utf8" });
else if (!callback)
process.stdout.write(output, "utf8");
return callback
? callback(null, output)
: undefined;
} catch (err) {
if (callback)
return callback(err);
throw err;
}
});
}
return undefined;
};

View File

@ -0,0 +1,9 @@
type pbtsCallback = (err: Error|null, output?: string) => void;
/**
* Runs pbts programmatically.
* @param {string[]} args Command line arguments
* @param {function(?Error, string=)} [callback] Optional completion callback
* @returns {number|undefined} Exit code, if known
*/
export function main(args: string[], callback?: pbtsCallback): number|undefined;
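The `pbts` entry point follows the same contract; a sketch under the same assumptions about the require path, with illustrative file names (note that the implementation shells out to `jsdoc`, so that dependency must be resolvable).

```js
// Assumed require path and illustrative file names.
var pbts = require("protobufjs/cli/pbts");

pbts.main(["--out", "bundle.d.ts", "bundle.js"], function (err) {
  if (err) throw err;
  console.log("wrote bundle.d.ts");
});
```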

View File

@ -0,0 +1,197 @@
"use strict";
var child_process = require("child_process"),
path = require("path"),
fs = require("fs"),
pkg = require("./package.json"),
util = require("./util");
util.setup();
var minimist = require("minimist"),
chalk = require("chalk"),
glob = require("glob"),
tmp = require("tmp");
/**
* Runs pbts programmatically.
* @param {string[]} args Command line arguments
* @param {function(?Error, string=)} [callback] Optional completion callback
* @returns {number|undefined} Exit code, if known
*/
exports.main = function(args, callback) {
var argv = minimist(args, {
alias: {
name: "n",
out : "o",
main: "m",
global: "g",
import: "i"
},
string: [ "name", "out", "global", "import" ],
boolean: [ "comments", "main" ],
default: {
comments: true,
main: false
}
});
var files = argv._;
if (!files.length) {
if (callback)
callback(Error("usage")); // eslint-disable-line callback-return
else
process.stderr.write([
"protobuf.js v" + pkg.version + " CLI for TypeScript",
"",
chalk.bold.white("Generates TypeScript definitions from annotated JavaScript files."),
"",
" -o, --out Saves to a file instead of writing to stdout.",
"",
" -g, --global Name of the global object in browser environments, if any.",
"",
" -i, --import Comma delimited list of imports. Local names will equal camelCase of the basename.",
"",
" --no-comments Does not output any JSDoc comments.",
"",
chalk.bold.gray(" Internal flags:"),
"",
" -n, --name Wraps everything in a module of the specified name.",
"",
" -m, --main Whether building the main library without any imports.",
"",
"usage: " + chalk.bold.green("pbts") + " [options] file1.js file2.js ..." + chalk.bold.gray(" (or) ") + "other | " + chalk.bold.green("pbts") + " [options] -",
""
].join("\n"));
return 1;
}
// Resolve glob expressions
for (var i = 0; i < files.length;) {
if (glob.hasMagic(files[i])) {
var matches = glob.sync(files[i]);
Array.prototype.splice.apply(files, [i, 1].concat(matches));
i += matches.length;
} else
++i;
}
var cleanup = [];
// Read from stdin (to a temporary file)
if (files.length === 1 && files[0] === "-") {
var data = [];
process.stdin.on("data", function(chunk) {
data.push(chunk);
});
process.stdin.on("end", function() {
files[0] = tmp.tmpNameSync() + ".js";
fs.writeFileSync(files[0], Buffer.concat(data));
cleanup.push(files[0]);
callJsdoc();
});
// Load from disk
} else {
callJsdoc();
}
function callJsdoc() {
// There is no proper API for jsdoc, so this executes the CLI and pipes the output
var basedir = path.join(__dirname, ".");
var moduleName = argv.name || "null";
var nodePath = process.execPath;
var cmd = "\"" + nodePath + "\" \"" + require.resolve("jsdoc/jsdoc.js") + "\" -c \"" + path.join(basedir, "lib", "tsd-jsdoc.json") + "\" -q \"module=" + encodeURIComponent(moduleName) + "&comments=" + Boolean(argv.comments) + "\" " + files.map(function(file) { return "\"" + file + "\""; }).join(" ");
var child = child_process.exec(cmd, {
cwd: process.cwd(),
argv0: "node",
stdio: "pipe",
maxBuffer: 1 << 24 // 16mb
});
var out = [];
var ended = false;
var closed = false;
child.stdout.on("data", function(data) {
out.push(data);
});
child.stdout.on("end", function() {
if (closed) finish();
else ended = true;
});
child.stderr.pipe(process.stderr);
child.on("close", function(code) {
// clean up temporary files, no matter what
try { cleanup.forEach(fs.unlinkSync); } catch(e) {/**/} cleanup = [];
if (code) {
out = out.join("").replace(/\s*JSDoc \d+\.\d+\.\d+ [^$]+/, "");
process.stderr.write(out);
var err = Error("code " + code);
if (callback)
return callback(err);
throw err;
}
if (ended) return finish();
closed = true;
return undefined;
});
function getImportName(importItem) {
return path.basename(importItem, ".js").replace(/([-_~.+]\w)/g, function(match) {
return match[1].toUpperCase();
});
}
function finish() {
var output = [];
if (argv.main)
output.push(
"// DO NOT EDIT! This is a generated file. Edit the JSDoc in src/*.js instead and run 'npm run types'.",
""
);
if (argv.global)
output.push(
"export as namespace " + argv.global + ";",
""
);
if (!argv.main) {
// Ensure we have a usable array of imports
var importArray = typeof argv.import === "string" ? argv.import.split(",") : argv.import || [];
// Build an object of imports and paths
var imports = {
$protobuf: "protobufjs"
};
importArray.forEach(function(importItem) {
imports[getImportName(importItem)] = importItem;
});
// Write out the imports
Object.keys(imports).forEach(function(key) {
output.push("import * as " + key + " from \"" + imports[key] + "\";");
});
}
output = output.join("\n") + "\n" + out.join("");
try {
if (argv.out)
fs.writeFileSync(argv.out, output, { encoding: "utf8" });
else if (!callback)
process.stdout.write(output, "utf8");
return callback
? callback(null, output)
: undefined;
} catch (err) {
if (callback)
return callback(err);
throw err;
}
}
}
return undefined;
};

View File

@ -0,0 +1,38 @@
"use strict";
module.exports = json_module;
var util = require("../util");
var protobuf = require("../..");
json_module.description = "JSON representation as a module";
function jsonSafeProp(json) {
return json.replace(/^( +)"(\w+)":/mg, function($0, $1, $2) {
return protobuf.util.safeProp($2).charAt(0) === "."
? $1 + $2 + ":"
: $0;
});
}
function json_module(root, options, callback) {
try {
var rootProp = protobuf.util.safeProp(options.root || "default");
var output = [
(options.es6 ? "const" : "var") + " $root = ($protobuf.roots" + rootProp + " || ($protobuf.roots" + rootProp + " = new $protobuf.Root()))\n"
];
if (root.options) {
var optionsJson = jsonSafeProp(JSON.stringify(root.options, null, 2));
output.push(".setOptions(" + optionsJson + ")\n");
}
var json = jsonSafeProp(JSON.stringify(root.nested, null, 2).trim());
output.push(".addJSON(" + json + ");");
output = util.wrap(output.join(""), protobuf.util.merge({ dependency: "protobufjs/light" }, options));
process.nextTick(function() {
callback(null, output);
});
} catch (e) {
return callback(e);
}
return undefined;
}

View File

@ -0,0 +1,8 @@
"use strict";
module.exports = json_target;
json_target.description = "JSON representation";
function json_target(root, options, callback) {
callback(null, JSON.stringify(root, null, 2));
}
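Because `pbjs` also accepts a path to a custom target (see the `require(path.resolve(...))` fallback in the CLI above), this minimal target doubles as a template. A non-authoritative sketch of a user-supplied target following the same `(root, options, callback)` contract:

```js
"use strict";
// Hypothetical custom target; save as my-target.js and invoke with
//   pbjs -t ./my-target.js file.proto
module.exports = list_types_target;
list_types_target.description = "Lists fully qualified type names";
function list_types_target(root, options, callback) {
    root.resolveAll();
    var names = [];
    (function walk(ns) {
        ns.nestedArray.forEach(function (nested) {
            names.push(nested.fullName);
            if (nested.nestedArray) // recurse into namespaces, types and services
                walk(nested);
        });
    })(root);
    callback(null, names.join("\n"));
}
```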

View File

@ -0,0 +1,326 @@
"use strict";
module.exports = proto_target;
proto_target.private = true;
var protobuf = require("../..");
var Namespace = protobuf.Namespace,
Enum = protobuf.Enum,
Type = protobuf.Type,
Field = protobuf.Field,
OneOf = protobuf.OneOf,
Service = protobuf.Service,
Method = protobuf.Method,
types = protobuf.types,
util = protobuf.util;
function underScore(str) {
return str.substring(0,1)
+ str.substring(1)
.replace(/([A-Z])(?=[a-z]|$)/g, function($0, $1) { return "_" + $1.toLowerCase(); });
}
var out = [];
var indent = 0;
var first = false;
var syntax = 3;
function proto_target(root, options, callback) {
if (options) {
switch (options.syntax) {
case undefined:
case "proto3":
case "3":
syntax = 3;
break;
case "proto2":
case "2":
syntax = 2;
break;
default:
return callback(Error("invalid syntax: " + options.syntax));
}
}
indent = 0;
first = false;
try {
buildRoot(root);
return callback(null, out.join("\n"));
} catch (err) {
return callback(err);
} finally {
out = [];
syntax = 3;
}
}
function push(line) {
if (line === "")
out.push("");
else {
var ind = "";
for (var i = 0; i < indent; ++i)
ind += " ";
out.push(ind + line);
}
}
function escape(str) {
return str.replace(/[\\"']/g, "\\$&")
.replace(/\r/g, "\\r")
.replace(/\n/g, "\\n")
.replace(/\u0000/g, "\\0"); // eslint-disable-line no-control-regex
}
function value(v) {
switch (typeof v) {
case "boolean":
return v ? "true" : "false";
case "number":
return v.toString();
default:
return "\"" + escape(String(v)) + "\"";
}
}
function buildRoot(root) {
root.resolveAll();
var pkg = [];
var ptr = root;
var repeat = true;
do {
var nested = ptr.nestedArray;
if (nested.length === 1 && nested[0] instanceof Namespace && !(nested[0] instanceof Type || nested[0] instanceof Service)) {
ptr = nested[0];
if (ptr !== root)
pkg.push(ptr.name);
} else
repeat = false;
} while (repeat);
out.push("syntax = \"proto" + syntax + "\";");
if (pkg.length)
out.push("", "package " + pkg.join(".") + ";");
buildOptions(ptr);
ptr.nestedArray.forEach(build);
}
function build(object) {
if (object instanceof Enum)
buildEnum(object);
else if (object instanceof Type)
buildType(object);
else if (object instanceof Field)
buildField(object);
else if (object instanceof OneOf)
buildOneOf(object);
else if (object instanceof Service)
buildService(object);
else if (object instanceof Method)
buildMethod(object);
else
buildNamespace(object);
}
function buildNamespace(namespace) { // just a namespace, not a type etc.
push("");
push("message " + namespace.name + " {");
++indent;
buildOptions(namespace);
consolidateExtends(namespace.nestedArray).remaining.forEach(build);
--indent;
push("}");
}
function buildEnum(enm) {
push("");
push("enum " + enm.name + " {");
buildOptions(enm);
++indent; first = true;
Object.keys(enm.values).forEach(function(name) {
var val = enm.values[name];
if (first) {
push("");
first = false;
}
push(name + " = " + val + ";");
});
--indent; first = false;
push("}");
}
function buildRanges(keyword, ranges) {
if (ranges && ranges.length) {
var parts = [];
ranges.forEach(function(range) {
if (typeof range === "string")
parts.push("\"" + escape(range) + "\"");
else if (range[0] === range[1])
parts.push(range[0]);
else
parts.push(range[0] + " to " + (range[1] === 0x1FFFFFFF ? "max" : range[1]));
});
push("");
push(keyword + " " + parts.join(", ") + ";");
}
}
function buildType(type) {
if (type.group)
return; // built with the sister-field
push("");
push("message " + type.name + " {");
++indent;
buildOptions(type);
type.oneofsArray.forEach(build);
first = true;
type.fieldsArray.forEach(build);
consolidateExtends(type.nestedArray).remaining.forEach(build);
buildRanges("extensions", type.extensions);
buildRanges("reserved", type.reserved);
--indent;
push("}");
}
function buildField(field, passExtend) {
if (field.partOf || field.declaringField || field.extend !== undefined && !passExtend)
return;
if (first) {
first = false;
push("");
}
if (field.resolvedType && field.resolvedType.group) {
buildGroup(field);
return;
}
var sb = [];
if (field.map)
sb.push("map<" + field.keyType + ", " + field.type + ">");
else if (field.repeated)
sb.push("repeated", field.type);
else if (syntax === 2 || field.parent.group)
sb.push(field.required ? "required" : "optional", field.type);
else
sb.push(field.type);
sb.push(underScore(field.name), "=", field.id);
var opts = buildFieldOptions(field);
if (opts)
sb.push(opts);
push(sb.join(" ") + ";");
}
function buildGroup(field) {
push(field.rule + " group " + field.resolvedType.name + " = " + field.id + " {");
++indent;
buildOptions(field.resolvedType);
first = true;
field.resolvedType.fieldsArray.forEach(function(field) {
buildField(field);
});
--indent;
push("}");
}
function buildFieldOptions(field) {
var keys;
if (!field.options || !(keys = Object.keys(field.options)).length)
return null;
var sb = [];
keys.forEach(function(key) {
var val = field.options[key];
var wireType = types.packed[field.resolvedType instanceof Enum ? "int32" : field.type];
switch (key) {
case "packed":
val = Boolean(val);
// skip when not packable or syntax default
if (wireType === undefined || syntax === 3 === val)
return;
break;
case "default":
if (syntax === 3)
return;
// skip default (resolved) default values
if (field.long && !util.longNeq(field.defaultValue, types.defaults[field.type]) || !field.long && field.defaultValue === types.defaults[field.type])
return;
// enum defaults specified as strings are type references and not enclosed in quotes
if (field.resolvedType instanceof Enum)
break;
// otherwise fallthrough
default:
val = value(val);
break;
}
sb.push(key + "=" + val);
});
return sb.length
? "[" + sb.join(", ") + "]"
: null;
}
function consolidateExtends(nested) {
var ext = {};
nested = nested.filter(function(obj) {
if (!(obj instanceof Field) || obj.extend === undefined)
return true;
(ext[obj.extend] || (ext[obj.extend] = [])).push(obj);
return false;
});
Object.keys(ext).forEach(function(extend) {
push("");
push("extend " + extend + " {");
++indent; first = true;
ext[extend].forEach(function(field) {
buildField(field, true);
});
--indent;
push("}");
});
return {
remaining: nested
};
}
function buildOneOf(oneof) {
push("");
push("oneof " + underScore(oneof.name) + " {");
++indent; first = true;
oneof.oneof.forEach(function(fieldName) {
var field = oneof.parent.get(fieldName);
if (first) {
first = false;
push("");
}
var opts = buildFieldOptions(field);
push(field.type + " " + underScore(field.name) + " = " + field.id + (opts ? " " + opts : "") + ";");
});
--indent;
push("}");
}
function buildService(service) {
push("service " + service.name + " {");
++indent;
service.methodsArray.forEach(build);
consolidateExtends(service.nestedArray).remaining.forEach(build);
--indent;
push("}");
}
function buildMethod(method) {
push(method.type + " " + method.name + " (" + (method.requestStream ? "stream " : "") + method.requestType + ") returns (" + (method.responseStream ? "stream " : "") + method.responseType + ");");
}
function buildOptions(object) {
if (!object.options)
return;
first = true;
Object.keys(object.options).forEach(function(key) {
if (first) {
first = false;
push("");
}
var val = object.options[key];
push("option " + key + " = " + JSON.stringify(val) + ";");
});
}

View File

@ -0,0 +1,10 @@
"use strict";
module.exports = proto2_target;
var protobuf = require("../..");
proto2_target.description = "Protocol Buffers, Version 2";
function proto2_target(root, options, callback) {
require("./proto")(root, protobuf.util.merge(options, { syntax: "proto2" }), callback);
}

View File

@ -0,0 +1,10 @@
"use strict";
module.exports = proto3_target;
var protobuf = require("../..");
proto3_target.description = "Protocol Buffers, Version 3";
function proto3_target(root, options, callback) {
require("./proto")(root, protobuf.util.merge(options, { syntax: "proto3" }), callback);
}

View File

@ -0,0 +1,29 @@
"use strict";
module.exports = static_module_target;
// - The default wrapper supports AMD, CommonJS and the global scope (as window.root), in this order.
// - You can specify a custom wrapper with the --wrap argument.
// - CommonJS modules depend on the minimal build for reduced package size with browserify.
// - AMD and global scope depend on the full library for now.
var util = require("../util");
var protobuf = require("../..");
static_module_target.description = "Static code without reflection as a module";
function static_module_target(root, options, callback) {
require("./static")(root, options, function(err, output) {
if (err) {
callback(err);
return;
}
try {
output = util.wrap(output, protobuf.util.merge({ dependency: "protobufjs/minimal" }, options));
} catch (e) {
callback(e);
return;
}
callback(null, output);
});
}
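Tying the wrapper comments above back to the CLI, a hedged invocation sketch; the target key is assumed to be "static-module" (the file's conventional name) and the require path and file names are illustrative.

```js
var pbjs = require("protobufjs/cli/pbjs"); // assumed require path, as before

pbjs.main([
    "--target", "static-module",
    "--wrap", "commonjs", // CommonJS wrapper -> depends on protobufjs/minimal
    "--out", "compiled.js",
    "path/to/example.proto"
], function (err) {
    if (err) throw err;
    console.log("wrote compiled.js");
});
```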

Some files were not shown because too many files have changed in this diff.