Firebase Update
27
express-server/node_modules/@google-cloud/firestore/CHANGELOG.md
generated
vendored
Normal file
@@ -0,0 +1,27 @@
# Changelog

[npm history][1]

[1]: https://www.npmjs.com/package/@google-cloud/firestore?activeTab=versions

## v0.17.0

### Implementation Changes
- Regenerate library with synth.py customizations ([#345](https://github.com/googleapis/nodejs-firestore/pull/345))
  - contains some documentation and internal timeout changes
- Converting backoff.js to TypeScript ([#328](https://github.com/googleapis/nodejs-firestore/pull/328))
- Making .dotChanges a method ([#324](https://github.com/googleapis/nodejs-firestore/pull/324))

### Dependencies
- chore(deps): update dependency nyc to v13 ([#329](https://github.com/googleapis/nodejs-firestore/pull/329))
- fix(deps): update dependency google-gax to ^0.19.0 ([#325](https://github.com/googleapis/nodejs-firestore/pull/325))

### Documentation
- Fix DocumentReference.get() docs ([#332](https://github.com/googleapis/nodejs-firestore/pull/332))

### Internal / Testing Changes
- Retry npm install in CI ([#341](https://github.com/googleapis/nodejs-firestore/pull/341))
- make synth.py generate library to ./dev ([#337](https://github.com/googleapis/nodejs-firestore/pull/337))
- Revert "Re-generate library using /synth.py ([#331](https://github.com/googleapis/nodejs-firestore/pull/331))" ([#334](https://github.com/googleapis/nodejs-firestore/pull/334))
- Re-generate library using /synth.py ([#331](https://github.com/googleapis/nodejs-firestore/pull/331))
202
express-server/node_modules/@google-cloud/firestore/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,202 @@

Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
141
express-server/node_modules/@google-cloud/firestore/README.md
generated
vendored
Normal file
@@ -0,0 +1,141 @@
<img src="https://avatars2.githubusercontent.com/u/2810941?v=3&s=96" alt="Google Cloud Platform logo" title="Google Cloud Platform" align="right" height="96" width="96"/>

# [Google Cloud Firestore: Node.js Server SDK](https://github.com/googleapis/nodejs-firestore)

[](https://cloud.google.com/terms/launch-stages)
[](https://circleci.com/gh/googleapis/nodejs-firestore)
[](https://ci.appveyor.com/project/googleapis/nodejs-firestore)
[](https://codecov.io/gh/googleapis/nodejs-firestore)

This is the Node.js Server SDK for
[Google Cloud Firestore](https://firebase.google.com/docs/firestore/). Google
Cloud Firestore is a NoSQL document database built for automatic scaling, high
performance, and ease of application development.

This Cloud Firestore Server SDK uses Google's [Cloud Identity and Access
Management](https://cloud.google.com/firestore/docs/security/iam) for
authentication and should only be used **in trusted environments**. Your Cloud
Identity credentials allow you to bypass all access restrictions and provide read
and write access to all data in your Cloud Firestore project.

The Cloud Firestore Server SDKs are designed to manage the full set of data in
your Cloud Firestore project and work best with reliable network connectivity.
Data operations performed via these SDKs directly access the Cloud Firestore
backend, and all document reads and writes are optimized for high throughput.

Applications that use Google's Server SDKs should not be deployed in end-user
environments, such as on phones or on publicly hosted websites. If you are
developing a Web or Node.js application that accesses Cloud Firestore on behalf
of end users, use the [`firebase`](https://www.npmjs.com/package/firebase)
Client SDK.

**Table of contents:**

* [Quickstart](#quickstart)
  * [Before you begin](#before-you-begin)
  * [Installing the client library](#installing-the-client-library)
  * [Using the client library](#using-the-client-library)
* [Versioning](#versioning)
* [Contributing](#contributing)
* [License](#license)

## Quickstart

Read more about the client libraries for Cloud APIs, including the older
Google APIs Client Libraries, in [Client Libraries Explained][explained].

[explained]: https://cloud.google.com/apis/docs/client-libraries-explained

* [Cloud Firestore Node.js Client API Reference][client-docs]
* [github.com/googleapis/nodejs-firestore](https://github.com/googleapis/nodejs-firestore)
* [Cloud Firestore Documentation][product-docs]

### Before you begin

1. Select or create a Cloud Platform project.

    [Go to the projects page][projects]

1. Enable the Google Cloud Firestore API.

    [Enable the API][enable_api]

1. [Set up authentication with a service account][auth] so you can access the
   API from your local workstation.

[projects]: https://console.cloud.google.com/project
[enable_api]: https://console.cloud.google.com/flows/enableapi?apiid=firestore.googleapis.com
[auth]: https://cloud.google.com/docs/authentication/getting-started

### Installing the client library

    npm install --save @google-cloud/firestore

### Using the client library

```javascript
const Firestore = require('@google-cloud/firestore');

const firestore = new Firestore({
  projectId: 'YOUR_PROJECT_ID',
  keyFilename: '/path/to/keyfile.json',
});

const document = firestore.doc('posts/intro-to-firestore');

// Enter new data into the document.
document.set({
  title: 'Welcome to Firestore',
  body: 'Hello World',
}).then(() => {
  // Document created successfully.
});

// Update an existing document.
document.update({
  body: 'My first Firestore app',
}).then(() => {
  // Document updated successfully.
});

// Read the document.
document.get().then(doc => {
  // Document read successfully.
});

// Delete the document.
document.delete().then(() => {
  // Document deleted successfully.
});
```
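For local development you can also rely on Application Default Credentials rather than passing `keyFilename` explicitly, and most applications query collections rather than single documents. A minimal editorial sketch (not part of the vendored README): it assumes the `GOOGLE_APPLICATION_CREDENTIALS` environment variable points at the service-account key from the authentication step above, and the `posts` collection and `published` field are hypothetical.

```javascript
// Assumes GOOGLE_APPLICATION_CREDENTIALS points at a service-account key,
// so no projectId or keyFilename needs to be passed explicitly.
const Firestore = require('@google-cloud/firestore');
const firestore = new Firestore();

// Query a (hypothetical) 'posts' collection instead of a single document.
firestore.collection('posts')
  .where('published', '==', true)
  .limit(10)
  .get()
  .then(snapshot => {
    snapshot.forEach(doc => console.log(doc.id, doc.data()));
  })
  .catch(err => console.error('Query failed', err));
```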
The [Cloud Firestore Node.js Client API Reference][client-docs] documentation
also contains samples.

## Versioning

This library follows [Semantic Versioning](http://semver.org/).

This library is considered to be in **beta**. This means it is expected to be
mostly stable while we work toward a general availability release; however,
complete stability is not guaranteed. We will address issues and requests
against beta libraries with a high priority.

More Information: [Google Cloud Platform Launch Stages][launch_stages]

[launch_stages]: https://cloud.google.com/terms/launch-stages

## Contributing

Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-firestore/blob/master/.github/CONTRIBUTING.md).

## License

Apache Version 2.0

See [LICENSE](https://github.com/googleapis/nodejs-firestore/blob/master/LICENSE)

[client-docs]: https://cloud.google.com/nodejs/docs/reference/firestore/latest/
[product-docs]: https://firebase.google.com/docs/firestore/
[shell_img]: //gstatic.com/cloudssh/images/open-btn.png
3726
express-server/node_modules/@google-cloud/firestore/build/protos/firestore_proto_api.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
6483
express-server/node_modules/@google-cloud/firestore/build/protos/firestore_proto_api.js
generated
vendored
Normal file
File diff suppressed because it is too large
31
express-server/node_modules/@google-cloud/firestore/build/protos/google/api/annotations.proto
generated
vendored
Normal file
@@ -0,0 +1,31 @@
// Copyright (c) 2015, Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.api;

import "google/api/http.proto";
import "google/protobuf/descriptor.proto";

option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations";
option java_multiple_files = true;
option java_outer_classname = "AnnotationsProto";
option java_package = "com.google.api";
option objc_class_prefix = "GAPI";

extend google.protobuf.MethodOptions {
  // See `HttpRule`.
  HttpRule http = 72295728;
}
370
express-server/node_modules/@google-cloud/firestore/build/protos/google/api/http.proto
generated
vendored
Normal file
@@ -0,0 +1,370 @@
// Copyright 2018 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

syntax = "proto3";

package google.api;

option cc_enable_arenas = true;
option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations";
option java_multiple_files = true;
option java_outer_classname = "HttpProto";
option java_package = "com.google.api";
option objc_class_prefix = "GAPI";


// Defines the HTTP configuration for an API service. It contains a list of
// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method
// to one or more HTTP REST API methods.
message Http {
  // A list of HTTP configuration rules that apply to individual API methods.
  //
  // **NOTE:** All service configuration rules follow "last one wins" order.
  repeated HttpRule rules = 1;

  // When set to true, URL path parameters will be fully URI-decoded except in
  // cases of single segment matches in reserved expansion, where "%2F" will be
  // left encoded.
  //
  // The default behavior is to not decode RFC 6570 reserved characters in multi
  // segment matches.
  bool fully_decode_reserved_expansion = 2;
}

// # gRPC Transcoding
//
// gRPC Transcoding is a feature for mapping between a gRPC method and one or
// more HTTP REST endpoints. It allows developers to build a single API service
// that supports both gRPC APIs and REST APIs. Many systems, including [Google
// APIs](https://github.com/googleapis/googleapis),
// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC
// Gateway](https://github.com/grpc-ecosystem/grpc-gateway),
// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature
// and use it for large scale production services.
//
// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies
// how different portions of the gRPC request message are mapped to the URL
// path, URL query parameters, and HTTP request body. It also controls how the
// gRPC response message is mapped to the HTTP response body. `HttpRule` is
// typically specified as an `google.api.http` annotation on the gRPC method.
//
// Each mapping specifies a URL path template and an HTTP method. The path
// template may refer to one or more fields in the gRPC request message, as long
// as each field is a non-repeated field with a primitive (non-message) type.
// The path template controls how fields of the request message are mapped to
// the URL path.
//
// Example:
//
//     service Messaging {
//       rpc GetMessage(GetMessageRequest) returns (Message) {
//         option (google.api.http) = {
//             get: "/v1/{name=messages/*}"
//         };
//       }
//     }
//     message GetMessageRequest {
//       string name = 1; // Mapped to URL path.
//     }
//     message Message {
//       string text = 1; // The resource content.
//     }
//
// This enables an HTTP REST to gRPC mapping as below:
//
//     HTTP | gRPC
//     -----|-----
//     `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")`
//
// Any fields in the request message which are not bound by the path template
// automatically become HTTP query parameters if there is no HTTP request body.
// For example:
//
//     service Messaging {
//       rpc GetMessage(GetMessageRequest) returns (Message) {
//         option (google.api.http) = {
//             get:"/v1/messages/{message_id}"
//         };
//       }
//     }
//     message GetMessageRequest {
//       message SubMessage {
//         string subfield = 1;
//       }
//       string message_id = 1; // Mapped to URL path.
//       int64 revision = 2;    // Mapped to URL query parameter `revision`.
//       SubMessage sub = 3;    // Mapped to URL query parameter `sub.subfield`.
//     }
//
// This enables an HTTP JSON to RPC mapping as below:
//
//     HTTP | gRPC
//     -----|-----
//     `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))`
//
// Note that fields which are mapped to URL query parameters must have a
// primitive type or a repeated primitive type or a non-repeated message type.
// In the case of a repeated type, the parameter can be repeated in the URL
// as `...?param=A&param=B`. In the case of a message type, each field of the
// message is mapped to a separate parameter, such as
// `...?foo.a=A&foo.b=B&foo.c=C`.
//
// For HTTP methods that allow a request body, the `body` field
// specifies the mapping. Consider a REST update method on the
// message resource collection:
//
//     service Messaging {
//       rpc UpdateMessage(UpdateMessageRequest) returns (Message) {
//         option (google.api.http) = {
//           patch: "/v1/messages/{message_id}"
//           body: "message"
//         };
//       }
//     }
//     message UpdateMessageRequest {
//       string message_id = 1; // mapped to the URL
//       Message message = 2;   // mapped to the body
//     }
//
// The following HTTP JSON to RPC mapping is enabled, where the
// representation of the JSON in the request body is determined by
// protos JSON encoding:
//
//     HTTP | gRPC
//     -----|-----
//     `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })`
//
// The special name `*` can be used in the body mapping to define that
// every field not bound by the path template should be mapped to the
// request body. This enables the following alternative definition of
// the update method:
//
//     service Messaging {
//       rpc UpdateMessage(Message) returns (Message) {
//         option (google.api.http) = {
//           patch: "/v1/messages/{message_id}"
//           body: "*"
//         };
//       }
//     }
//     message Message {
//       string message_id = 1;
//       string text = 2;
//     }
//
//
// The following HTTP JSON to RPC mapping is enabled:
//
//     HTTP | gRPC
//     -----|-----
//     `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")`
//
// Note that when using `*` in the body mapping, it is not possible to
// have HTTP parameters, as all fields not bound by the path end in
// the body. This makes this option more rarely used in practice when
// defining REST APIs. The common usage of `*` is in custom methods
// which don't use the URL at all for transferring data.
//
// It is possible to define multiple HTTP methods for one RPC by using
// the `additional_bindings` option. Example:
//
//     service Messaging {
//       rpc GetMessage(GetMessageRequest) returns (Message) {
//         option (google.api.http) = {
//           get: "/v1/messages/{message_id}"
//           additional_bindings {
//             get: "/v1/users/{user_id}/messages/{message_id}"
//           }
//         };
//       }
//     }
//     message GetMessageRequest {
//       string message_id = 1;
//       string user_id = 2;
//     }
//
// This enables the following two alternative HTTP JSON to RPC mappings:
//
//     HTTP | gRPC
//     -----|-----
//     `GET /v1/messages/123456` | `GetMessage(message_id: "123456")`
//     `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")`
//
// ## Rules for HTTP mapping
//
// 1. Leaf request fields (recursive expansion of nested messages in the request
//    message) are classified into three categories:
//    - Fields referred by the path template. They are passed via the URL path.
//    - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP
//      request body.
//    - All other fields are passed via the URL query parameters, and the
//      parameter name is the field path in the request message. A repeated
//      field can be represented as multiple query parameters under the same
//      name.
// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields
//    are passed via URL path and HTTP request body.
// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all
//    fields are passed via URL path and URL query parameters.
//
// ### Path template syntax
//
//     Template = "/" Segments [ Verb ] ;
//     Segments = Segment { "/" Segment } ;
//     Segment = "*" | "**" | LITERAL | Variable ;
//     Variable = "{" FieldPath [ "=" Segments ] "}" ;
//     FieldPath = IDENT { "." IDENT } ;
//     Verb = ":" LITERAL ;
//
// The syntax `*` matches a single URL path segment. The syntax `**` matches
// zero or more URL path segments, which must be the last part of the URL path
// except the `Verb`.
//
// The syntax `Variable` matches part of the URL path as specified by its
// template. A variable template must not contain other variables. If a variable
// matches a single path segment, its template may be omitted, e.g. `{var}`
// is equivalent to `{var=*}`.
//
// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL`
// contains any reserved character, such characters should be percent-encoded
// before the matching.
//
// If a variable contains exactly one path segment, such as `"{var}"` or
// `"{var=*}"`, when such a variable is expanded into a URL path on the client
// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The
// server side does the reverse decoding. Such variables show up in the
// [Discovery Document](https://developers.google.com/discovery/v1/reference/apis)
// as `{var}`.
//
// If a variable contains multiple path segments, such as `"{var=foo/*}"`
// or `"{var=**}"`, when such a variable is expanded into a URL path on the
// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded.
// The server side does the reverse decoding, except "%2F" and "%2f" are left
// unchanged. Such variables show up in the
// [Discovery Document](https://developers.google.com/discovery/v1/reference/apis)
// as `{+var}`.
//
// ## Using gRPC API Service Configuration
//
// gRPC API Service Configuration (service config) is a configuration language
// for configuring a gRPC service to become a user-facing product. The
// service config is simply the YAML representation of the `google.api.Service`
// proto message.
//
// As an alternative to annotating your proto file, you can configure gRPC
// transcoding in your service config YAML files. You do this by specifying a
// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same
// effect as the proto annotation. This can be particularly useful if you
// have a proto that is reused in multiple services. Note that any transcoding
// specified in the service config will override any matching transcoding
// configuration in the proto.
//
// Example:
//
//     http:
//       rules:
//         # Selects a gRPC method and applies HttpRule to it.
//         - selector: example.v1.Messaging.GetMessage
//           get: /v1/messages/{message_id}/{sub.subfield}
//
// ## Special notes
//
// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the
// proto to JSON conversion must follow the [proto3
// specification](https://developers.google.com/protocol-buffers/docs/proto3#json).
//
// While the single segment variable follows the semantics of
// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String
// Expansion, the multi segment variable **does not** follow RFC 6570 Section
// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion
// does not expand special characters like `?` and `#`, which would lead
// to invalid URLs. As a result, gRPC Transcoding uses a custom encoding
// for multi segment variables.
//
// The path variables **must not** refer to any repeated or mapped field,
// because client libraries are not capable of handling such variable expansion.
//
// The path variables **must not** capture the leading "/" character. The reason
// is that the most common use case "{var}" does not capture the leading "/"
// character. For consistency, all path variables must share the same behavior.
//
// Repeated message fields must not be mapped to URL query parameters, because
// no client library can support such complicated mapping.
//
// If an API needs to use a JSON array for request or response body, it can map
// the request or response body to a repeated field. However, some gRPC
// Transcoding implementations may not support this feature.
message HttpRule {
  // Selects a method to which this rule applies.
  //
  // Refer to [selector][google.api.DocumentationRule.selector] for syntax details.
  string selector = 1;

  // Determines the URL pattern matched by this rule. This pattern can be
  // used with any of the {get|put|post|delete|patch} methods. A custom method
  // can be defined using the 'custom' field.
  oneof pattern {
    // Maps to HTTP GET. Used for listing and getting information about
    // resources.
    string get = 2;

    // Maps to HTTP PUT. Used for replacing a resource.
    string put = 3;

    // Maps to HTTP POST. Used for creating a resource or performing an action.
    string post = 4;

    // Maps to HTTP DELETE. Used for deleting a resource.
    string delete = 5;

    // Maps to HTTP PATCH. Used for updating a resource.
    string patch = 6;

    // The custom pattern is used for specifying an HTTP method that is not
    // included in the `pattern` field, such as HEAD, or "*" to leave the
    // HTTP method unspecified for this rule. The wild-card rule is useful
    // for services that provide content to Web (HTML) clients.
    CustomHttpPattern custom = 8;
  }

  // The name of the request field whose value is mapped to the HTTP request
  // body, or `*` for mapping all request fields not captured by the path
  // pattern to the HTTP body, or omitted for not having any HTTP request body.
  //
  // NOTE: the referred field must be present at the top-level of the request
  // message type.
  string body = 7;

  // Optional. The name of the response field whose value is mapped to the HTTP
  // response body. When omitted, the entire response message will be used
  // as the HTTP response body.
  //
  // NOTE: The referred field must be present at the top-level of the response
  // message type.
  string response_body = 12;

  // Additional HTTP bindings for the selector. Nested bindings must
  // not contain an `additional_bindings` field themselves (that is,
  // the nesting may only be one level deep).
  repeated HttpRule additional_bindings = 11;
}

// A custom pattern is used for defining a custom HTTP verb.
message CustomHttpPattern {
  // The name of this custom HTTP verb.
  string kind = 1;

  // The path matched by this custom verb.
  string path = 2;
}
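To make the mapping rules above concrete, here is a small illustrative sketch. It is not part of the vendored file and is not how google-gax actually calls Firestore (the client speaks gRPC); it only shows how a path template plus the leftover top-level scalar request fields map to a REST URL, per the "Rules for HTTP mapping" section.

```javascript
// Illustrative only: substitute {field} / {field=pattern} path variables and
// turn the remaining scalar request fields into URL query parameters.
// Assumes top-level scalar fields and no request body.
function transcode(template, request) {
  const used = new Set();
  // Substitute each {field} or {field=pattern} variable with the field value.
  const path = template.replace(/\{(\w+)(?:=[^}]*)?\}/g, (_, field) => {
    used.add(field);
    return request[field];
  });
  // Remaining fields become URL query parameters.
  const query = Object.keys(request)
    .filter(key => !used.has(key))
    .map(key => `${key}=${encodeURIComponent(request[key])}`)
    .join('&');
  return query ? `${path}?${query}` : path;
}

// Mirrors the GetMessage example from the comments above:
console.log(transcode('/v1/messages/{message_id}', {message_id: '123456', revision: 2}));
// -> /v1/messages/123456?revision=2
```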
BIN
express-server/node_modules/@google-cloud/firestore/build/protos/google/firestore/v1beta1/.DS_Store
generated
vendored
Normal file
Binary file not shown.
83
express-server/node_modules/@google-cloud/firestore/build/protos/google/firestore/v1beta1/common.proto
generated
vendored
Normal file
@@ -0,0 +1,83 @@
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.firestore.v1beta1;

import "google/api/annotations.proto";
import "google/protobuf/timestamp.proto";

option csharp_namespace = "Google.Cloud.Firestore.V1Beta1";
option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore";
option java_multiple_files = true;
option java_outer_classname = "CommonProto";
option java_package = "com.google.firestore.v1beta1";
option objc_class_prefix = "GCFS";
option php_namespace = "Google\\Cloud\\Firestore\\V1beta1";


// A set of field paths on a document.
// Used to restrict a get or update operation on a document to a subset of its
// fields.
// This is different from standard field masks, as this is always scoped to a
// [Document][google.firestore.v1beta1.Document], and takes into account the dynamic nature of [Value][google.firestore.v1beta1.Value].
message DocumentMask {
  // The list of field paths in the mask. See [Document.fields][google.firestore.v1beta1.Document.fields] for a field
  // path syntax reference.
  repeated string field_paths = 1;
}

// A precondition on a document, used for conditional operations.
message Precondition {
  // The type of precondition.
  oneof condition_type {
    // When set to `true`, the target document must exist.
    // When set to `false`, the target document must not exist.
    bool exists = 1;

    // When set, the target document must exist and have been last updated at
    // that time.
    google.protobuf.Timestamp update_time = 2;
  }
}

// Options for creating a new transaction.
message TransactionOptions {
  // Options for a transaction that can be used to read and write documents.
  message ReadWrite {
    // An optional transaction to retry.
    bytes retry_transaction = 1;
  }

  // Options for a transaction that can only be used to read documents.
  message ReadOnly {
    // The consistency mode for this transaction. If not set, defaults to strong
    // consistency.
    oneof consistency_selector {
      // Reads documents at the given time.
      // This may not be older than 60 seconds.
      google.protobuf.Timestamp read_time = 2;
    }
  }

  // The mode of the transaction.
  oneof mode {
    // The transaction can only be used for read operations.
    ReadOnly read_only = 2;

    // The transaction can be used for both read and write operations.
    ReadWrite read_write = 3;
  }
}
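The `Precondition` message above is what the Node.js client sends when you pass an update precondition. A brief editorial sketch, not taken from the vendored files: it assumes the `DocumentReference.update(data, precondition)` overload that accepts `{lastUpdateTime: ...}`, and the document path is hypothetical.

```javascript
const Firestore = require('@google-cloud/firestore');
const firestore = new Firestore();

// Hypothetical document; only update it if it has not changed since we read it.
const docRef = firestore.doc('posts/intro-to-firestore');

docRef.get().then(snapshot => {
  // `lastUpdateTime` becomes a Precondition with `update_time` set, so the
  // write fails if the document was modified after this snapshot was taken.
  return docRef.update({body: 'Edited body'}, {lastUpdateTime: snapshot.updateTime});
}).catch(err => console.error('Precondition failed or update error:', err));
```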
150
express-server/node_modules/@google-cloud/firestore/build/protos/google/firestore/v1beta1/document.proto
generated
vendored
Normal file
@@ -0,0 +1,150 @@
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.firestore.v1beta1;

import "google/api/annotations.proto";
import "google/protobuf/struct.proto";
import "google/protobuf/timestamp.proto";
import "google/type/latlng.proto";

option csharp_namespace = "Google.Cloud.Firestore.V1Beta1";
option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore";
option java_multiple_files = true;
option java_outer_classname = "DocumentProto";
option java_package = "com.google.firestore.v1beta1";
option objc_class_prefix = "GCFS";
option php_namespace = "Google\\Cloud\\Firestore\\V1beta1";


// A Firestore document.
//
// Must not exceed 1 MiB - 4 bytes.
message Document {
  // The resource name of the document, for example
  // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
  string name = 1;

  // The document's fields.
  //
  // The map keys represent field names.
  //
  // A simple field name contains only characters `a` to `z`, `A` to `Z`,
  // `0` to `9`, or `_`, and must not start with `0` to `9`. For example,
  // `foo_bar_17`.
  //
  // Field names matching the regular expression `__.*__` are reserved. Reserved
  // field names are forbidden except in certain documented contexts. The map
  // keys, represented as UTF-8, must not exceed 1,500 bytes and cannot be
  // empty.
  //
  // Field paths may be used in other contexts to refer to structured fields
  // defined here. For `map_value`, the field path is represented by the simple
  // or quoted field names of the containing fields, delimited by `.`. For
  // example, the structured field
  // `"foo" : { map_value: { "x&y" : { string_value: "hello" }}}` would be
  // represented by the field path `foo.x&y`.
  //
  // Within a field path, a quoted field name starts and ends with `` ` `` and
  // may contain any character. Some characters, including `` ` ``, must be
  // escaped using a `\`. For example, `` `x&y` `` represents `x&y` and
  // `` `bak\`tik` `` represents `` bak`tik ``.
  map<string, Value> fields = 2;

  // Output only. The time at which the document was created.
  //
  // This value increases monotonically when a document is deleted then
  // recreated. It can also be compared to values from other documents and
  // the `read_time` of a query.
  google.protobuf.Timestamp create_time = 3;

  // Output only. The time at which the document was last changed.
  //
  // This value is initially set to the `create_time` then increases
  // monotonically with each change to the document. It can also be
  // compared to values from other documents and the `read_time` of a query.
  google.protobuf.Timestamp update_time = 4;
}

// A message that can hold any of the supported value types.
message Value {
  // Must have a value set.
  oneof value_type {
    // A null value.
    google.protobuf.NullValue null_value = 11;

    // A boolean value.
    bool boolean_value = 1;

    // An integer value.
    int64 integer_value = 2;

    // A double value.
    double double_value = 3;

    // A timestamp value.
    //
    // Precise only to microseconds. When stored, any additional precision is
    // rounded down.
    google.protobuf.Timestamp timestamp_value = 10;

    // A string value.
    //
    // The string, represented as UTF-8, must not exceed 1 MiB - 89 bytes.
    // Only the first 1,500 bytes of the UTF-8 representation are considered by
    // queries.
    string string_value = 17;

    // A bytes value.
    //
    // Must not exceed 1 MiB - 89 bytes.
    // Only the first 1,500 bytes are considered by queries.
    bytes bytes_value = 18;

    // A reference to a document. For example:
    // `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
    string reference_value = 5;

    // A geo point value representing a point on the surface of Earth.
    google.type.LatLng geo_point_value = 8;

    // An array value.
    //
    // Cannot directly contain another array value, though it can contain a
    // map which contains another array.
    ArrayValue array_value = 9;

    // A map value.
    MapValue map_value = 6;
  }
}

// An array value.
message ArrayValue {
  // Values in the array.
  repeated Value values = 1;
}

// A map value.
message MapValue {
  // The map's fields.
  //
  // The map keys represent field names. Field names matching the regular
  // expression `__.*__` are reserved. Reserved field names are forbidden except
  // in certain documented contexts. The map keys, represented as UTF-8, must
  // not exceed 1,500 bytes and cannot be empty.
  map<string, Value> fields = 1;
}
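For orientation, here is how several of the `Value` types above are produced from the Node.js client. This is an editorial sketch, not part of the vendored files: the document path and field names are hypothetical, and it assumes `GeoPoint` is exported as a static property of the `Firestore` class as in this library's documentation.

```javascript
const Firestore = require('@google-cloud/firestore');
const firestore = new Firestore();

// Hypothetical document whose fields exercise several Value types.
firestore.doc('cities/SF').set({
  name: 'San Francisco',            // string_value
  population: 884363,               // integer_value
  density: 7272.2,                  // double_value
  capital: false,                   // boolean_value
  motto: null,                      // null_value
  founded: new Date('1850-04-15'),  // timestamp_value (JS Dates are stored as timestamps)
  location: new Firestore.GeoPoint(37.7749, -122.4194), // geo_point_value (assumed export)
  tags: ['bay', 'tech'],            // array_value (arrays may not directly nest arrays)
  stats: {landAreaKm2: 121.4},      // map_value
}).then(() => console.log('Document written'));
```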
760
express-server/node_modules/@google-cloud/firestore/build/protos/google/firestore/v1beta1/firestore.proto
generated
vendored
Normal file
@@ -0,0 +1,760 @@
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.firestore.v1beta1;

import "google/api/annotations.proto";
import "google/firestore/v1beta1/common.proto";
import "google/firestore/v1beta1/document.proto";
import "google/firestore/v1beta1/query.proto";
import "google/firestore/v1beta1/write.proto";
import "google/protobuf/empty.proto";
import "google/protobuf/timestamp.proto";
import "google/rpc/status.proto";

option csharp_namespace = "Google.Cloud.Firestore.V1Beta1";
option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore";
option java_multiple_files = true;
option java_outer_classname = "FirestoreProto";
option java_package = "com.google.firestore.v1beta1";
option objc_class_prefix = "GCFS";
option php_namespace = "Google\\Cloud\\Firestore\\V1beta1";
// Specification of the Firestore API.

// The Cloud Firestore service.
//
// This service exposes several types of comparable timestamps:
//
// * `create_time` - The time at which a document was created. Changes only
//   when a document is deleted, then re-created. Increases in a strict
//   monotonic fashion.
// * `update_time` - The time at which a document was last updated. Changes
//   every time a document is modified. Does not change when a write results
//   in no modifications. Increases in a strict monotonic fashion.
// * `read_time` - The time at which a particular state was observed. Used
//   to denote a consistent snapshot of the database or the time at which a
//   Document was observed to not exist.
// * `commit_time` - The time at which the writes in a transaction were
//   committed. Any read with an equal or greater `read_time` is guaranteed
//   to see the effects of the transaction.
service Firestore {
  // Gets a single document.
  rpc GetDocument(GetDocumentRequest) returns (Document) {
    option (google.api.http) = {
      get: "/v1beta1/{name=projects/*/databases/*/documents/*/**}"
    };
  }

  // Lists documents.
  rpc ListDocuments(ListDocumentsRequest) returns (ListDocumentsResponse) {
    option (google.api.http) = {
      get: "/v1beta1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}"
    };
  }

  // Creates a new document.
  rpc CreateDocument(CreateDocumentRequest) returns (Document) {
    option (google.api.http) = {
      post: "/v1beta1/{parent=projects/*/databases/*/documents/**}/{collection_id}"
      body: "document"
    };
  }

  // Updates or inserts a document.
  rpc UpdateDocument(UpdateDocumentRequest) returns (Document) {
    option (google.api.http) = {
      patch: "/v1beta1/{document.name=projects/*/databases/*/documents/*/**}"
      body: "document"
    };
  }

  // Deletes a document.
  rpc DeleteDocument(DeleteDocumentRequest) returns (google.protobuf.Empty) {
    option (google.api.http) = {
      delete: "/v1beta1/{name=projects/*/databases/*/documents/*/**}"
    };
  }

  // Gets multiple documents.
  //
  // Documents returned by this method are not guaranteed to be returned in the
  // same order that they were requested.
  rpc BatchGetDocuments(BatchGetDocumentsRequest) returns (stream BatchGetDocumentsResponse) {
    option (google.api.http) = {
      post: "/v1beta1/{database=projects/*/databases/*}/documents:batchGet"
      body: "*"
    };
  }

  // Starts a new transaction.
  rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) {
    option (google.api.http) = {
      post: "/v1beta1/{database=projects/*/databases/*}/documents:beginTransaction"
      body: "*"
    };
  }

  // Commits a transaction, while optionally updating documents.
  rpc Commit(CommitRequest) returns (CommitResponse) {
    option (google.api.http) = {
      post: "/v1beta1/{database=projects/*/databases/*}/documents:commit"
      body: "*"
    };
  }

  // Rolls back a transaction.
  rpc Rollback(RollbackRequest) returns (google.protobuf.Empty) {
    option (google.api.http) = {
      post: "/v1beta1/{database=projects/*/databases/*}/documents:rollback"
      body: "*"
    };
  }

  // Runs a query.
  rpc RunQuery(RunQueryRequest) returns (stream RunQueryResponse) {
    option (google.api.http) = {
      post: "/v1beta1/{parent=projects/*/databases/*/documents}:runQuery"
      body: "*"
      additional_bindings {
        post: "/v1beta1/{parent=projects/*/databases/*/documents/*/**}:runQuery"
        body: "*"
      }
    };
  }

  // Streams batches of document updates and deletes, in order.
  rpc Write(stream WriteRequest) returns (stream WriteResponse) {
    option (google.api.http) = {
      post: "/v1beta1/{database=projects/*/databases/*}/documents:write"
      body: "*"
    };
  }

  // Listens to changes.
  rpc Listen(stream ListenRequest) returns (stream ListenResponse) {
    option (google.api.http) = {
      post: "/v1beta1/{database=projects/*/databases/*}/documents:listen"
      body: "*"
    };
  }

  // Lists all the collection IDs underneath a document.
  rpc ListCollectionIds(ListCollectionIdsRequest) returns (ListCollectionIdsResponse) {
    option (google.api.http) = {
      post: "/v1beta1/{parent=projects/*/databases/*/documents}:listCollectionIds"
      body: "*"
      additional_bindings {
        post: "/v1beta1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds"
        body: "*"
      }
    };
  }
}
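The BeginTransaction/Commit/Rollback RPCs above are what the Node.js client drives when you use `runTransaction`. A brief editorial sketch, not part of the vendored file; the `counters/visits` document and `count` field are hypothetical.

```javascript
const Firestore = require('@google-cloud/firestore');
const firestore = new Firestore();

// Read-modify-write inside a transaction: the client begins a transaction,
// retries the callback on contention, and commits (or rolls back) for us.
const counterRef = firestore.doc('counters/visits');

firestore.runTransaction(transaction => {
  return transaction.get(counterRef).then(snapshot => {
    const count = (snapshot.exists ? snapshot.get('count') : 0) + 1;
    transaction.set(counterRef, {count});
    return count;
  });
}).then(count => console.log('New count:', count));
```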
// The request for [Firestore.GetDocument][google.firestore.v1beta1.Firestore.GetDocument].
|
||||
message GetDocumentRequest {
|
||||
// The resource name of the Document to get. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
string name = 1;
|
||||
|
||||
// The fields to return. If not set, returns all fields.
|
||||
//
|
||||
// If the document has a field that is not present in this mask, that field
|
||||
// will not be returned in the response.
|
||||
DocumentMask mask = 2;
|
||||
|
||||
// The consistency mode for this transaction.
|
||||
// If not set, defaults to strong consistency.
|
||||
oneof consistency_selector {
|
||||
// Reads the document in a transaction.
|
||||
bytes transaction = 3;
|
||||
|
||||
// Reads the version of the document at the given time.
|
||||
// This may not be older than 60 seconds.
|
||||
google.protobuf.Timestamp read_time = 5;
|
||||
}
|
||||
}
|
||||
|
||||
// The request for [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments].
|
||||
message ListDocumentsRequest {
|
||||
// The parent resource name. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}/documents` or
|
||||
// `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
// For example:
|
||||
// `projects/my-project/databases/my-database/documents` or
|
||||
// `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
|
||||
string parent = 1;
|
||||
|
||||
// The collection ID, relative to `parent`, to list. For example: `chatrooms`
|
||||
// or `messages`.
|
||||
string collection_id = 2;
|
||||
|
||||
// The maximum number of documents to return.
|
||||
int32 page_size = 3;
|
||||
|
||||
// The `next_page_token` value returned from a previous List request, if any.
|
||||
string page_token = 4;
|
||||
|
||||
// The order to sort results by. For example: `priority desc, name`.
|
||||
string order_by = 6;
|
||||
|
||||
// The fields to return. If not set, returns all fields.
|
||||
//
|
||||
// If a document has a field that is not present in this mask, that field
|
||||
// will not be returned in the response.
|
||||
DocumentMask mask = 7;
|
||||
|
||||
// The consistency mode for this transaction.
|
||||
// If not set, defaults to strong consistency.
|
||||
oneof consistency_selector {
|
||||
// Reads documents in a transaction.
|
||||
bytes transaction = 8;
|
||||
|
||||
// Reads documents as they were at the given time.
|
||||
// This may not be older than 60 seconds.
|
||||
google.protobuf.Timestamp read_time = 10;
|
||||
}
|
||||
|
||||
// If the list should show missing documents. A missing document is a
|
||||
// document that does not exist but has sub-documents. These documents will
|
||||
// be returned with a key but will not have fields, [Document.create_time][google.firestore.v1beta1.Document.create_time],
|
||||
// or [Document.update_time][google.firestore.v1beta1.Document.update_time] set.
|
||||
//
|
||||
// Requests with `show_missing` may not specify `where` or
|
||||
// `order_by`.
|
||||
bool show_missing = 12;
|
||||
}
|
||||
|
||||
// The response for [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments].
|
||||
message ListDocumentsResponse {
|
||||
// The Documents found.
|
||||
repeated Document documents = 1;
|
||||
|
||||
// The next page token.
|
||||
string next_page_token = 2;
|
||||
}
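
// Illustrative sketch, assuming a @google-cloud/firestore client version that
// exposes CollectionReference.listDocuments() and an async context; `db` is a
// placeholder. Listing document references also surfaces "missing" parent
// documents, matching `show_missing` above.
//
//     const refs = await db.collection('chatrooms').listDocuments();
//     refs.forEach((ref) => console.log(ref.path));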
|
||||
|
||||
// The request for [Firestore.CreateDocument][google.firestore.v1beta1.Firestore.CreateDocument].
|
||||
message CreateDocumentRequest {
|
||||
// The parent resource. For example:
|
||||
// `projects/{project_id}/databases/{database_id}/documents` or
|
||||
// `projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}`
|
||||
string parent = 1;
|
||||
|
||||
// The collection ID, relative to `parent`, in which to create the document. For example: `chatrooms`.
|
||||
string collection_id = 2;
|
||||
|
||||
// The client-assigned document ID to use for this document.
|
||||
//
|
||||
// Optional. If not specified, an ID will be assigned by the service.
|
||||
string document_id = 3;
|
||||
|
||||
// The document to create. `name` must not be set.
|
||||
Document document = 4;
|
||||
|
||||
// The fields to return. If not set, returns all fields.
|
||||
//
|
||||
// If the document has a field that is not present in this mask, that field
|
||||
// will not be returned in the response.
|
||||
DocumentMask mask = 5;
|
||||
}
|
||||
|
||||
// The request for [Firestore.UpdateDocument][google.firestore.v1beta1.Firestore.UpdateDocument].
|
||||
message UpdateDocumentRequest {
|
||||
// The updated document.
|
||||
// Creates the document if it does not already exist.
|
||||
Document document = 1;
|
||||
|
||||
// The fields to update.
|
||||
// None of the field paths in the mask may contain a reserved name.
|
||||
//
|
||||
// If the document exists on the server and has fields not referenced in the
|
||||
// mask, they are left unchanged.
|
||||
// Fields referenced in the mask, but not present in the input document, are
|
||||
// deleted from the document on the server.
|
||||
DocumentMask update_mask = 2;
|
||||
|
||||
// The fields to return. If not set, returns all fields.
|
||||
//
|
||||
// If the document has a field that is not present in this mask, that field
|
||||
// will not be returned in the response.
|
||||
DocumentMask mask = 3;
|
||||
|
||||
// An optional precondition on the document.
|
||||
// The request will fail if this is set and not met by the target document.
|
||||
Precondition current_document = 4;
|
||||
}
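
// Illustrative sketch, assuming the @google-cloud/firestore Node.js client and
// an async context; `db` and the values are placeholders. update() derives the
// update mask from the provided field paths, and the optional last-update-time
// precondition maps to `current_document`.
//
//     const ref = db.doc('chatrooms/my-chatroom');
//     const snap = await ref.get();
//     await ref.update({topic: 'firestore'}, {lastUpdateTime: snap.updateTime});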
|
||||
|
||||
// The request for [Firestore.DeleteDocument][google.firestore.v1beta1.Firestore.DeleteDocument].
|
||||
message DeleteDocumentRequest {
|
||||
// The resource name of the Document to delete. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
string name = 1;
|
||||
|
||||
// An optional precondition on the document.
|
||||
// The request will fail if this is set and not met by the target document.
|
||||
Precondition current_document = 2;
|
||||
}
|
||||
|
||||
// The request for [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments].
|
||||
message BatchGetDocumentsRequest {
|
||||
// The database name. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}`.
|
||||
string database = 1;
|
||||
|
||||
// The names of the documents to retrieve. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
// The request will fail if any of the documents is not a child resource of
// the given `database`. Duplicate names will be elided.
|
||||
repeated string documents = 2;
|
||||
|
||||
// The fields to return. If not set, returns all fields.
|
||||
//
|
||||
// If a document has a field that is not present in this mask, that field will
|
||||
// not be returned in the response.
|
||||
DocumentMask mask = 3;
|
||||
|
||||
// The consistency mode for this transaction.
|
||||
// If not set, defaults to strong consistency.
|
||||
oneof consistency_selector {
|
||||
// Reads documents in a transaction.
|
||||
bytes transaction = 4;
|
||||
|
||||
// Starts a new transaction and reads the documents.
|
||||
// Defaults to a read-only transaction.
|
||||
// The new transaction ID will be returned as the first response in the
|
||||
// stream.
|
||||
TransactionOptions new_transaction = 5;
|
||||
|
||||
// Reads documents as they were at the given time.
|
||||
// This may not be older than 60 seconds.
|
||||
google.protobuf.Timestamp read_time = 7;
|
||||
}
|
||||
}
|
||||
|
||||
// The streamed response for [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments].
|
||||
message BatchGetDocumentsResponse {
|
||||
// A single result.
|
||||
// This can be empty if the server is just returning a transaction.
|
||||
oneof result {
|
||||
// A document that was requested.
|
||||
Document found = 1;
|
||||
|
||||
// A document name that was requested but does not exist. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
string missing = 2;
|
||||
}
|
||||
|
||||
// The transaction that was started as part of this request.
|
||||
// Will only be set in the first response, and only if
|
||||
// [BatchGetDocumentsRequest.new_transaction][google.firestore.v1beta1.BatchGetDocumentsRequest.new_transaction] was set in the request.
|
||||
bytes transaction = 3;
|
||||
|
||||
// The time at which the document was read.
|
||||
// This may be monotonically increasing; in this case, the previous documents
// in the result stream are guaranteed not to have changed between their
// `read_time` and this one.
|
||||
google.protobuf.Timestamp read_time = 4;
|
||||
}
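
// Illustrative sketch, assuming the @google-cloud/firestore Node.js client and
// an async context; `db` and the paths are placeholders. getAll() issues a
// single BatchGetDocuments stream; documents that do not exist come back as
// snapshots with `exists === false`.
//
//     const [a, b] = await db.getAll(db.doc('chatrooms/a'), db.doc('chatrooms/b'));
//     console.log(a.exists, b.exists);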
|
||||
|
||||
// The request for [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction].
|
||||
message BeginTransactionRequest {
|
||||
// The database name. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}`.
|
||||
string database = 1;
|
||||
|
||||
// The options for the transaction.
|
||||
// Defaults to a read-write transaction.
|
||||
TransactionOptions options = 2;
|
||||
}
|
||||
|
||||
// The response for [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction].
|
||||
message BeginTransactionResponse {
|
||||
// The transaction that was started.
|
||||
bytes transaction = 1;
|
||||
}
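
// Illustrative sketch, assuming the @google-cloud/firestore Node.js client and
// an async context; `db` is a placeholder. runTransaction() wraps
// BeginTransaction, the transactional reads, Commit, and retry/Rollback on
// contention.
//
//     await db.runTransaction(async (t) => {
//       const ref = db.doc('chatrooms/my-chatroom');
//       const snap = await t.get(ref);
//       t.update(ref, {visits: (snap.get('visits') || 0) + 1});
//     });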
|
||||
|
||||
// The request for [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit].
|
||||
message CommitRequest {
|
||||
// The database name. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}`.
|
||||
string database = 1;
|
||||
|
||||
// The writes to apply.
|
||||
//
|
||||
// Always executed atomically and in order.
|
||||
repeated Write writes = 2;
|
||||
|
||||
// If set, applies all writes in this transaction, and commits it.
|
||||
bytes transaction = 3;
|
||||
}
|
||||
|
||||
// The response for [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit].
|
||||
message CommitResponse {
|
||||
// The result of applying the writes.
|
||||
//
|
||||
// The i-th write result corresponds to the i-th write in the
// request.
|
||||
repeated WriteResult write_results = 1;
|
||||
|
||||
// The time at which the commit occurred.
|
||||
google.protobuf.Timestamp commit_time = 2;
|
||||
}
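
// Illustrative sketch, assuming the @google-cloud/firestore Node.js client and
// an async context; `db` and the paths are placeholders. A WriteBatch is
// applied atomically via a single Commit RPC, and the returned write results
// line up with the queued writes.
//
//     const batch = db.batch();
//     batch.set(db.doc('chatrooms/a'), {topic: 'intro'});
//     batch.delete(db.doc('chatrooms/b'));
//     const writeResults = await batch.commit();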
|
||||
|
||||
// The request for [Firestore.Rollback][google.firestore.v1beta1.Firestore.Rollback].
|
||||
message RollbackRequest {
|
||||
// The database name. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}`.
|
||||
string database = 1;
|
||||
|
||||
// The transaction to roll back.
|
||||
bytes transaction = 2;
|
||||
}
|
||||
|
||||
// The request for [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery].
|
||||
message RunQueryRequest {
|
||||
// The parent resource name. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}/documents` or
|
||||
// `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
// For example:
|
||||
// `projects/my-project/databases/my-database/documents` or
|
||||
// `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
|
||||
string parent = 1;
|
||||
|
||||
// The query to run.
|
||||
oneof query_type {
|
||||
// A structured query.
|
||||
StructuredQuery structured_query = 2;
|
||||
}
|
||||
|
||||
// The consistency mode for this transaction.
|
||||
// If not set, defaults to strong consistency.
|
||||
oneof consistency_selector {
|
||||
// Reads documents in a transaction.
|
||||
bytes transaction = 5;
|
||||
|
||||
// Starts a new transaction and reads the documents.
|
||||
// Defaults to a read-only transaction.
|
||||
// The new transaction ID will be returned as the first response in the
|
||||
// stream.
|
||||
TransactionOptions new_transaction = 6;
|
||||
|
||||
// Reads documents as they were at the given time.
|
||||
// This may not be older than 60 seconds.
|
||||
google.protobuf.Timestamp read_time = 7;
|
||||
}
|
||||
}
|
||||
|
||||
// The response for [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery].
|
||||
message RunQueryResponse {
|
||||
// The transaction that was started as part of this request.
|
||||
// Can only be set in the first response, and only if
|
||||
// [RunQueryRequest.new_transaction][google.firestore.v1beta1.RunQueryRequest.new_transaction] was set in the request.
|
||||
// If set, no other fields will be set in this response.
|
||||
bytes transaction = 2;
|
||||
|
||||
// A query result.
|
||||
// Not set when reporting partial progress.
|
||||
Document document = 1;
|
||||
|
||||
// The time at which the document was read. This may be monotonically
|
||||
// increasing; in this case, the previous documents in the result stream are
|
||||
// guaranteed not to have changed between their `read_time` and this one.
|
||||
//
|
||||
// If the query returns no results, a response with `read_time` and no
|
||||
// `document` will be sent, and this represents the time at which the query
|
||||
// was run.
|
||||
google.protobuf.Timestamp read_time = 3;
|
||||
|
||||
// The number of results that have been skipped due to an offset between
|
||||
// the last response and the current response.
|
||||
int32 skipped_results = 4;
|
||||
}
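
// Illustrative sketch, assuming the @google-cloud/firestore Node.js client and
// an async context; `db` is a placeholder. A query is streamed back one
// document per RunQueryResponse; the client can buffer it into a QuerySnapshot
// or expose it as a Node stream.
//
//     const query = db.collection('chatrooms').where('open', '==', true).limit(10);
//     const snapshot = await query.get();                      // buffered
//     query.stream().on('data', (doc) => console.log(doc.id)); // streamed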
|
||||
|
||||
// The request for [Firestore.Write][google.firestore.v1beta1.Firestore.Write].
|
||||
//
|
||||
// The first request creates a stream, or resumes an existing one from a token.
|
||||
//
|
||||
// When creating a new stream, the server replies with a response containing
|
||||
// only an ID and a token, to use in the next request.
|
||||
//
|
||||
// When resuming a stream, the server first streams any responses later than the
|
||||
// given token, then a response containing only an up-to-date token, to use in
|
||||
// the next request.
|
||||
message WriteRequest {
|
||||
// The database name. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}`.
|
||||
// This is only required in the first message.
|
||||
string database = 1;
|
||||
|
||||
// The ID of the write stream to resume.
|
||||
// This may only be set in the first message. When left empty, a new write
|
||||
// stream will be created.
|
||||
string stream_id = 2;
|
||||
|
||||
// The writes to apply.
|
||||
//
|
||||
// Always executed atomically and in order.
|
||||
// This must be empty on the first request.
|
||||
// This may be empty on the last request.
|
||||
// This must not be empty on all other requests.
|
||||
repeated Write writes = 3;
|
||||
|
||||
// A stream token that was previously sent by the server.
|
||||
//
|
||||
// The client should set this field to the token from the most recent
|
||||
// [WriteResponse][google.firestore.v1beta1.WriteResponse] it has received. This acknowledges that the client has
|
||||
// received responses up to this token. After sending this token, earlier
|
||||
// tokens may not be used anymore.
|
||||
//
|
||||
// The server may close the stream if there are too many unacknowledged
|
||||
// responses.
|
||||
//
|
||||
// Leave this field unset when creating a new stream. To resume a stream at
|
||||
// a specific point, set this field and the `stream_id` field.
|
||||
|
||||
bytes stream_token = 4;
|
||||
|
||||
// Labels associated with this write request.
|
||||
map<string, string> labels = 5;
|
||||
}
|
||||
|
||||
// The response for [Firestore.Write][google.firestore.v1beta1.Firestore.Write].
|
||||
message WriteResponse {
|
||||
// The ID of the stream.
|
||||
// Only set on the first message, when a new stream was created.
|
||||
string stream_id = 1;
|
||||
|
||||
// A token that represents the position of this response in the stream.
|
||||
// This can be used by a client to resume the stream at this point.
|
||||
//
|
||||
// This field is always set.
|
||||
bytes stream_token = 2;
|
||||
|
||||
// The result of applying the writes.
|
||||
//
|
||||
// The i-th write result corresponds to the i-th write in the
// request.
|
||||
repeated WriteResult write_results = 3;
|
||||
|
||||
// The time at which the commit occurred.
|
||||
google.protobuf.Timestamp commit_time = 4;
|
||||
}
|
||||
|
||||
// A request for [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen]
|
||||
message ListenRequest {
|
||||
// The database name. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}`.
|
||||
string database = 1;
|
||||
|
||||
// The supported target changes.
|
||||
oneof target_change {
|
||||
// A target to add to this stream.
|
||||
Target add_target = 2;
|
||||
|
||||
// The ID of a target to remove from this stream.
|
||||
int32 remove_target = 3;
|
||||
}
|
||||
|
||||
// Labels associated with this target change.
|
||||
map<string, string> labels = 4;
|
||||
}
|
||||
|
||||
// The response for [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen].
|
||||
message ListenResponse {
|
||||
// The supported responses.
|
||||
oneof response_type {
|
||||
// Targets have changed.
|
||||
TargetChange target_change = 2;
|
||||
|
||||
// A [Document][google.firestore.v1beta1.Document] has changed.
|
||||
DocumentChange document_change = 3;
|
||||
|
||||
// A [Document][google.firestore.v1beta1.Document] has been deleted.
|
||||
DocumentDelete document_delete = 4;
|
||||
|
||||
// A [Document][google.firestore.v1beta1.Document] has been removed from a target (because it is no longer
|
||||
// relevant to that target).
|
||||
DocumentRemove document_remove = 6;
|
||||
|
||||
// A filter to apply to the set of documents previously returned for the
|
||||
// given target.
|
||||
//
|
||||
// Returned when documents may have been removed from the given target, but
|
||||
// the exact documents are unknown.
|
||||
ExistenceFilter filter = 5;
|
||||
}
|
||||
}
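
// Illustrative sketch, assuming the @google-cloud/firestore Node.js client;
// `db` is a placeholder. onSnapshot() opens a Listen stream, adds a target,
// and surfaces the resulting document and target changes as query snapshots.
//
//     const unsubscribe = db.collection('chatrooms')
//       .where('open', '==', true)
//       .onSnapshot(
//         (snapshot) => console.log('open rooms:', snapshot.size),
//         (err) => console.error(err));
//     // later: unsubscribe();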
|
||||
|
||||
// A specification of a set of documents to listen to.
|
||||
message Target {
|
||||
// A target specified by a set of document names.
|
||||
message DocumentsTarget {
|
||||
// The names of the documents to retrieve. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
// The request will fail if any of the documents is not a child resource of
// the given `database`. Duplicate names will be elided.
|
||||
repeated string documents = 2;
|
||||
}
|
||||
|
||||
// A target specified by a query.
|
||||
message QueryTarget {
|
||||
// The parent resource name. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}/documents` or
|
||||
// `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
// For example:
|
||||
// `projects/my-project/databases/my-database/documents` or
|
||||
// `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
|
||||
string parent = 1;
|
||||
|
||||
// The query to run.
|
||||
oneof query_type {
|
||||
// A structured query.
|
||||
StructuredQuery structured_query = 2;
|
||||
}
|
||||
}
|
||||
|
||||
// The type of target to listen to.
|
||||
oneof target_type {
|
||||
// A target specified by a query.
|
||||
QueryTarget query = 2;
|
||||
|
||||
// A target specified by a set of document names.
|
||||
DocumentsTarget documents = 3;
|
||||
}
|
||||
|
||||
// When to start listening.
|
||||
//
|
||||
// If not specified, all matching Documents are returned before any
|
||||
// subsequent changes.
|
||||
oneof resume_type {
|
||||
// A resume token from a prior [TargetChange][google.firestore.v1beta1.TargetChange] for an identical target.
|
||||
//
|
||||
// Using a resume token with a different target is unsupported and may fail.
|
||||
bytes resume_token = 4;
|
||||
|
||||
// Start listening after a specific `read_time`.
|
||||
//
|
||||
// The client must know the state of matching documents at this time.
|
||||
google.protobuf.Timestamp read_time = 11;
|
||||
}
|
||||
|
||||
// A client provided target ID.
|
||||
//
|
||||
// If not set, the server will assign an ID for the target.
|
||||
//
|
||||
// Used for resuming a target without changing IDs. The IDs can either be
|
||||
// client-assigned or be server-assigned in a previous stream. All targets
|
||||
// with client provided IDs must be added before adding a target that needs
|
||||
// a server-assigned id.
|
||||
int32 target_id = 5;
|
||||
|
||||
// If the target should be removed once it is current and consistent.
|
||||
bool once = 6;
|
||||
}
|
||||
|
||||
// Targets being watched have changed.
|
||||
message TargetChange {
|
||||
// The type of change.
|
||||
enum TargetChangeType {
|
||||
// No change has occurred. Used only to send an updated `resume_token`.
|
||||
NO_CHANGE = 0;
|
||||
|
||||
// The targets have been added.
|
||||
ADD = 1;
|
||||
|
||||
// The targets have been removed.
|
||||
REMOVE = 2;
|
||||
|
||||
// The targets reflect all changes committed before the targets were added
|
||||
// to the stream.
|
||||
//
|
||||
// This will be sent after or with a `read_time` that is greater than or
|
||||
// equal to the time at which the targets were added.
|
||||
//
|
||||
// Listeners can wait for this change if read-after-write semantics
|
||||
// are desired.
|
||||
CURRENT = 3;
|
||||
|
||||
// The targets have been reset, and a new initial state for the targets
|
||||
// will be returned in subsequent changes.
|
||||
//
|
||||
// After the initial state is complete, `CURRENT` will be returned even
|
||||
// if the target was previously indicated to be `CURRENT`.
|
||||
RESET = 4;
|
||||
}
|
||||
|
||||
// The type of change that occurred.
|
||||
TargetChangeType target_change_type = 1;
|
||||
|
||||
// The target IDs of targets that have changed.
|
||||
//
|
||||
// If empty, the change applies to all targets.
|
||||
//
|
||||
// For `target_change_type=ADD`, the order of the target IDs matches the order
|
||||
// of the requests to add the targets. This allows clients to unambiguously
|
||||
// associate server-assigned target IDs with added targets.
|
||||
//
|
||||
// For other states, the order of the target IDs is not defined.
|
||||
repeated int32 target_ids = 2;
|
||||
|
||||
// The error that resulted in this change, if applicable.
|
||||
google.rpc.Status cause = 3;
|
||||
|
||||
// A token that can be used to resume the stream for the given `target_ids`,
|
||||
// or all targets if `target_ids` is empty.
|
||||
//
|
||||
// Not set on every target change.
|
||||
bytes resume_token = 4;
|
||||
|
||||
// The consistent `read_time` for the given `target_ids` (omitted when the
|
||||
// target_ids are not at a consistent snapshot).
|
||||
//
|
||||
// The stream is guaranteed to send a `read_time` with `target_ids` empty
|
||||
// whenever the entire stream reaches a new consistent snapshot. ADD,
|
||||
// CURRENT, and RESET messages are guaranteed to (eventually) result in a
|
||||
// new consistent snapshot (while NO_CHANGE and REMOVE messages are not).
|
||||
//
|
||||
// For a given stream, `read_time` is guaranteed to be monotonically
|
||||
// increasing.
|
||||
google.protobuf.Timestamp read_time = 6;
|
||||
}
|
||||
|
||||
// The request for [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds].
|
||||
message ListCollectionIdsRequest {
|
||||
// The parent document. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
// For example:
|
||||
// `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
|
||||
string parent = 1;
|
||||
|
||||
// The maximum number of results to return.
|
||||
int32 page_size = 2;
|
||||
|
||||
// A page token. Must be a value from
|
||||
// [ListCollectionIdsResponse][google.firestore.v1beta1.ListCollectionIdsResponse].
|
||||
string page_token = 3;
|
||||
}
|
||||
|
||||
// The response from [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds].
|
||||
message ListCollectionIdsResponse {
|
||||
// The collection IDs.
|
||||
repeated string collection_ids = 1;
|
||||
|
||||
// A page token that may be used to continue the list.
|
||||
string next_page_token = 2;
|
||||
}
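
// Illustrative sketch, assuming a @google-cloud/firestore client version that
// exposes listCollections() (older releases named it getCollections()) and an
// async context; `db` is a placeholder.
//
//     const collections = await db.doc('chatrooms/my-chatroom').listCollections();
//     console.log(collections.map((col) => col.id));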
|
||||
235
express-server/node_modules/@google-cloud/firestore/build/protos/google/firestore/v1beta1/query.proto
generated
vendored
Normal file
@@ -0,0 +1,235 @@
|
||||
// Copyright 2018 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.firestore.v1beta1;
|
||||
|
||||
import "google/api/annotations.proto";
|
||||
import "google/firestore/v1beta1/document.proto";
|
||||
import "google/protobuf/wrappers.proto";
|
||||
|
||||
option csharp_namespace = "Google.Cloud.Firestore.V1Beta1";
|
||||
option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore";
|
||||
option java_multiple_files = true;
|
||||
option java_outer_classname = "QueryProto";
|
||||
option java_package = "com.google.firestore.v1beta1";
|
||||
option objc_class_prefix = "GCFS";
|
||||
option php_namespace = "Google\\Cloud\\Firestore\\V1beta1";
|
||||
|
||||
|
||||
// A Firestore query.
|
||||
message StructuredQuery {
|
||||
// A selection of a collection, such as `messages as m1`.
|
||||
message CollectionSelector {
|
||||
// The collection ID.
|
||||
// When set, selects only collections with this ID.
|
||||
string collection_id = 2;
|
||||
|
||||
// When false, selects only collections that are immediate children of
|
||||
// the `parent` specified in the containing `RunQueryRequest`.
|
||||
// When true, selects all descendant collections.
|
||||
bool all_descendants = 3;
|
||||
}
|
||||
|
||||
// A filter.
|
||||
message Filter {
|
||||
// The type of filter.
|
||||
oneof filter_type {
|
||||
// A composite filter.
|
||||
CompositeFilter composite_filter = 1;
|
||||
|
||||
// A filter on a document field.
|
||||
FieldFilter field_filter = 2;
|
||||
|
||||
// A filter that takes exactly one argument.
|
||||
UnaryFilter unary_filter = 3;
|
||||
}
|
||||
}
|
||||
|
||||
// A filter that merges multiple other filters using the given operator.
|
||||
message CompositeFilter {
|
||||
// A composite filter operator.
|
||||
enum Operator {
|
||||
// Unspecified. This value must not be used.
|
||||
OPERATOR_UNSPECIFIED = 0;
|
||||
|
||||
// The results are required to satisfy each of the combined filters.
|
||||
AND = 1;
|
||||
}
|
||||
|
||||
// The operator for combining multiple filters.
|
||||
Operator op = 1;
|
||||
|
||||
// The list of filters to combine.
|
||||
// Must contain at least one filter.
|
||||
repeated Filter filters = 2;
|
||||
}
|
||||
|
||||
// A filter on a specific field.
|
||||
message FieldFilter {
|
||||
// A field filter operator.
|
||||
enum Operator {
|
||||
// Unspecified. This value must not be used.
|
||||
OPERATOR_UNSPECIFIED = 0;
|
||||
|
||||
// Less than. Requires that the field come first in `order_by`.
|
||||
LESS_THAN = 1;
|
||||
|
||||
// Less than or equal. Requires that the field come first in `order_by`.
|
||||
LESS_THAN_OR_EQUAL = 2;
|
||||
|
||||
// Greater than. Requires that the field come first in `order_by`.
|
||||
GREATER_THAN = 3;
|
||||
|
||||
// Greater than or equal. Requires that the field come first in
|
||||
// `order_by`.
|
||||
GREATER_THAN_OR_EQUAL = 4;
|
||||
|
||||
// Equal.
|
||||
EQUAL = 5;
|
||||
|
||||
// Contains. Requires that the field is an array.
|
||||
ARRAY_CONTAINS = 7;
|
||||
}
|
||||
|
||||
// The field to filter by.
|
||||
FieldReference field = 1;
|
||||
|
||||
// The operator to filter by.
|
||||
Operator op = 2;
|
||||
|
||||
// The value to compare to.
|
||||
Value value = 3;
|
||||
}
|
||||
|
||||
// A filter with a single operand.
|
||||
message UnaryFilter {
|
||||
// A unary operator.
|
||||
enum Operator {
|
||||
// Unspecified. This value must not be used.
|
||||
OPERATOR_UNSPECIFIED = 0;
|
||||
|
||||
// Test if a field is equal to NaN.
|
||||
IS_NAN = 2;
|
||||
|
||||
// Test if an expression evaluates to Null.
|
||||
IS_NULL = 3;
|
||||
}
|
||||
|
||||
// The unary operator to apply.
|
||||
Operator op = 1;
|
||||
|
||||
// The argument to the filter.
|
||||
oneof operand_type {
|
||||
// The field to which to apply the operator.
|
||||
FieldReference field = 2;
|
||||
}
|
||||
}
|
||||
|
||||
// An order on a field.
|
||||
message Order {
|
||||
// The field to order by.
|
||||
FieldReference field = 1;
|
||||
|
||||
// The direction to order by. Defaults to `ASCENDING`.
|
||||
Direction direction = 2;
|
||||
}
|
||||
|
||||
// A reference to a field, such as `max(messages.time) as max_time`.
|
||||
message FieldReference {
|
||||
string field_path = 2;
|
||||
}
|
||||
|
||||
// The projection of a document's fields to return.
|
||||
message Projection {
|
||||
// The fields to return.
|
||||
//
|
||||
// If empty, all fields are returned. To only return the name
|
||||
// of the document, use `['__name__']`.
|
||||
repeated FieldReference fields = 2;
|
||||
}
|
||||
|
||||
// A sort direction.
|
||||
enum Direction {
|
||||
// Unspecified.
|
||||
DIRECTION_UNSPECIFIED = 0;
|
||||
|
||||
// Ascending.
|
||||
ASCENDING = 1;
|
||||
|
||||
// Descending.
|
||||
DESCENDING = 2;
|
||||
}
|
||||
|
||||
// The projection to return.
|
||||
Projection select = 1;
|
||||
|
||||
// The collections to query.
|
||||
repeated CollectionSelector from = 2;
|
||||
|
||||
// The filter to apply.
|
||||
Filter where = 3;
|
||||
|
||||
// The order to apply to the query results.
|
||||
//
|
||||
// Firestore guarantees a stable ordering through the following rules:
|
||||
//
|
||||
// * Any field required to appear in `order_by` that is not already
// specified in `order_by` is appended to the order, in field name order,
// by default.
|
||||
// * If an order on `__name__` is not specified, it is appended by default.
|
||||
//
|
||||
// Fields are appended with the same sort direction as the last order
|
||||
// specified, or 'ASCENDING' if no order was specified. For example:
|
||||
//
|
||||
// * `SELECT * FROM Foo ORDER BY A` becomes
|
||||
// `SELECT * FROM Foo ORDER BY A, __name__`
|
||||
// * `SELECT * FROM Foo ORDER BY A DESC` becomes
|
||||
// `SELECT * FROM Foo ORDER BY A DESC, __name__ DESC`
|
||||
// * `SELECT * FROM Foo WHERE A > 1` becomes
|
||||
// `SELECT * FROM Foo WHERE A > 1 ORDER BY A, __name__`
|
||||
repeated Order order_by = 4;
|
||||
|
||||
// A starting point for the query results.
|
||||
Cursor start_at = 7;
|
||||
|
||||
// An end point for the query results.
|
||||
Cursor end_at = 8;
|
||||
|
||||
// The number of results to skip.
|
||||
//
|
||||
// Applies before limit, but after all other constraints. Must be >= 0 if
|
||||
// specified.
|
||||
int32 offset = 6;
|
||||
|
||||
// The maximum number of results to return.
|
||||
//
|
||||
// Applies after all other constraints.
|
||||
// Must be >= 0 if specified.
|
||||
google.protobuf.Int32Value limit = 5;
|
||||
}
|
||||
|
||||
// A position in a query result set.
|
||||
message Cursor {
|
||||
// The values that represent a position, in the order they appear in
|
||||
// the order by clause of a query.
|
||||
//
|
||||
// Can contain fewer values than specified in the order by clause.
|
||||
repeated Value values = 1;
|
||||
|
||||
// If the position is just before or just after the given values, relative
|
||||
// to the sort order defined by the query.
|
||||
bool before = 2;
|
||||
}
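
// Illustrative sketch, assuming the @google-cloud/firestore Node.js client and
// an async context; `db` is a placeholder. orderBy()/startAfter() populate the
// `order_by` clause and the `start_at` cursor of a StructuredQuery.
//
//     const page1 = await db.collection('messages').orderBy('time').limit(50).get();
//     const last = page1.docs[page1.docs.length - 1];
//     const page2 = await db.collection('messages')
//       .orderBy('time')
//       .startAfter(last.get('time'))
//       .limit(50)
//       .get();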
|
||||
214
express-server/node_modules/@google-cloud/firestore/build/protos/google/firestore/v1beta1/write.proto
generated
vendored
Normal file
@@ -0,0 +1,214 @@
|
||||
// Copyright 2018 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.firestore.v1beta1;
|
||||
|
||||
import "google/api/annotations.proto";
|
||||
import "google/firestore/v1beta1/common.proto";
|
||||
import "google/firestore/v1beta1/document.proto";
|
||||
import "google/protobuf/timestamp.proto";
|
||||
|
||||
option csharp_namespace = "Google.Cloud.Firestore.V1Beta1";
|
||||
option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore";
|
||||
option java_multiple_files = true;
|
||||
option java_outer_classname = "WriteProto";
|
||||
option java_package = "com.google.firestore.v1beta1";
|
||||
option objc_class_prefix = "GCFS";
|
||||
option php_namespace = "Google\\Cloud\\Firestore\\V1beta1";
|
||||
|
||||
|
||||
// A write on a document.
|
||||
message Write {
|
||||
// The operation to execute.
|
||||
oneof operation {
|
||||
// A document to write.
|
||||
Document update = 1;
|
||||
|
||||
// A document name to delete. In the format:
|
||||
// `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
string delete = 2;
|
||||
|
||||
// Applies a transformation to a document.
|
||||
// At most one `transform` per document is allowed in a given request.
|
||||
// An `update` cannot follow a `transform` on the same document in a given
|
||||
// request.
|
||||
DocumentTransform transform = 6;
|
||||
}
|
||||
|
||||
// The fields to update in this write.
|
||||
//
|
||||
// This field can be set only when the operation is `update`.
|
||||
// If the mask is not set for an `update` and the document exists, any
|
||||
// existing data will be overwritten.
|
||||
// If the mask is set and the document on the server has fields not covered by
|
||||
// the mask, they are left unchanged.
|
||||
// Fields referenced in the mask, but not present in the input document, are
|
||||
// deleted from the document on the server.
|
||||
// The field paths in this mask must not contain a reserved field name.
|
||||
DocumentMask update_mask = 3;
|
||||
|
||||
// An optional precondition on the document.
|
||||
//
|
||||
// The write will fail if this is set and not met by the target document.
|
||||
Precondition current_document = 4;
|
||||
}
|
||||
|
||||
// A transformation of a document.
|
||||
message DocumentTransform {
|
||||
// A transformation of a field of the document.
|
||||
message FieldTransform {
|
||||
// A value that is calculated by the server.
|
||||
enum ServerValue {
|
||||
// Unspecified. This value must not be used.
|
||||
SERVER_VALUE_UNSPECIFIED = 0;
|
||||
|
||||
// The time at which the server processed the request, with millisecond
|
||||
// precision.
|
||||
REQUEST_TIME = 1;
|
||||
}
|
||||
|
||||
// The path of the field. See [Document.fields][google.firestore.v1beta1.Document.fields] for the field path syntax
|
||||
// reference.
|
||||
string field_path = 1;
|
||||
|
||||
// The transformation to apply on the field.
|
||||
oneof transform_type {
|
||||
// Sets the field to the given server value.
|
||||
ServerValue set_to_server_value = 2;
|
||||
|
||||
// Append the given elements in order if they are not already present in
|
||||
// the current field value.
|
||||
// If the field is not an array, or if the field does not yet exist, it is
|
||||
// first set to the empty array.
|
||||
//
|
||||
// Equivalent numbers of different types (e.g. 3L and 3.0) are
|
||||
// considered equal when checking if a value is missing.
|
||||
// NaN is equal to NaN, and Null is equal to Null.
|
||||
// If the input contains multiple equivalent values, only the first will
|
||||
// be considered.
|
||||
//
|
||||
// The corresponding transform_result will be the null value.
|
||||
ArrayValue append_missing_elements = 6;
|
||||
|
||||
// Remove all of the given elements from the array in the field.
|
||||
// If the field is not an array, or if the field does not yet exist, it is
|
||||
// set to the empty array.
|
||||
//
|
||||
// Equivalent numbers of different types (e.g. 3L and 3.0) are
|
||||
// considered equal when deciding whether an element should be removed.
|
||||
// NaN is equal to NaN, and Null is equal to Null.
|
||||
// This will remove all equivalent values if there are duplicates.
|
||||
//
|
||||
// The corresponding transform_result will be the null value.
|
||||
ArrayValue remove_all_from_array = 7;
|
||||
}
|
||||
}
|
||||
|
||||
// The name of the document to transform.
|
||||
string document = 1;
|
||||
|
||||
// The list of transformations to apply to the fields of the document, in
|
||||
// order.
|
||||
// This must not be empty.
|
||||
repeated FieldTransform field_transforms = 2;
|
||||
}
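
// Illustrative sketch, assuming a @google-cloud/firestore client version that
// exposes these FieldValue helpers and an async context; `db` is a
// placeholder. FieldValue sentinels are sent as field transforms rather than
// as literal values.
//
//     const {FieldValue} = require('@google-cloud/firestore');
//     await db.doc('chatrooms/my-chatroom').update({
//       lastSeen: FieldValue.serverTimestamp(),    // set_to_server_value
//       tags: FieldValue.arrayUnion('firestore'),  // append_missing_elements
//       old: FieldValue.arrayRemove('beta'),       // remove_all_from_array
//     });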
|
||||
|
||||
// The result of applying a write.
|
||||
message WriteResult {
|
||||
// The last update time of the document after applying the write. Not set
|
||||
// after a `delete`.
|
||||
//
|
||||
// If the write did not actually change the document, this will be the
|
||||
// previous update_time.
|
||||
google.protobuf.Timestamp update_time = 1;
|
||||
|
||||
// The results of applying each [DocumentTransform.FieldTransform][google.firestore.v1beta1.DocumentTransform.FieldTransform], in the
|
||||
// same order.
|
||||
repeated Value transform_results = 2;
|
||||
}
|
||||
|
||||
// A [Document][google.firestore.v1beta1.Document] has changed.
|
||||
//
|
||||
// May be the result of multiple [writes][google.firestore.v1beta1.Write], including deletes, that
|
||||
// ultimately resulted in a new value for the [Document][google.firestore.v1beta1.Document].
|
||||
//
|
||||
// Multiple [DocumentChange][google.firestore.v1beta1.DocumentChange] messages may be returned for the same logical
|
||||
// change, if multiple targets are affected.
|
||||
message DocumentChange {
|
||||
// The new state of the [Document][google.firestore.v1beta1.Document].
|
||||
//
|
||||
// If `mask` is set, contains only fields that were updated or added.
|
||||
Document document = 1;
|
||||
|
||||
// A set of target IDs of targets that match this document.
|
||||
repeated int32 target_ids = 5;
|
||||
|
||||
// A set of target IDs for targets that no longer match this document.
|
||||
repeated int32 removed_target_ids = 6;
|
||||
}
|
||||
|
||||
// A [Document][google.firestore.v1beta1.Document] has been deleted.
|
||||
//
|
||||
// May be the result of multiple [writes][google.firestore.v1beta1.Write], including updates, the
|
||||
// last of which deleted the [Document][google.firestore.v1beta1.Document].
|
||||
//
|
||||
// Multiple [DocumentDelete][google.firestore.v1beta1.DocumentDelete] messages may be returned for the same logical
|
||||
// delete, if multiple targets are affected.
|
||||
message DocumentDelete {
|
||||
// The resource name of the [Document][google.firestore.v1beta1.Document] that was deleted.
|
||||
string document = 1;
|
||||
|
||||
// A set of target IDs for targets that previously matched this entity.
|
||||
repeated int32 removed_target_ids = 6;
|
||||
|
||||
// The read timestamp at which the delete was observed.
|
||||
//
|
||||
// Greater or equal to the `commit_time` of the delete.
|
||||
google.protobuf.Timestamp read_time = 4;
|
||||
}
|
||||
|
||||
// A [Document][google.firestore.v1beta1.Document] has been removed from the view of the targets.
|
||||
//
|
||||
// Sent if the document is no longer relevant to a target and is out of view.
|
||||
// Can be sent instead of a DocumentDelete or a DocumentChange if the server
|
||||
// cannot send the new value of the document.
|
||||
//
|
||||
// Multiple [DocumentRemove][google.firestore.v1beta1.DocumentRemove] messages may be returned for the same logical
|
||||
// write or delete, if multiple targets are affected.
|
||||
message DocumentRemove {
|
||||
// The resource name of the [Document][google.firestore.v1beta1.Document] that has gone out of view.
|
||||
string document = 1;
|
||||
|
||||
// A set of target IDs for targets that previously matched this document.
|
||||
repeated int32 removed_target_ids = 2;
|
||||
|
||||
// The read timestamp at which the remove was observed.
|
||||
//
|
||||
// Greater or equal to the `commit_time` of the change/delete/remove.
|
||||
google.protobuf.Timestamp read_time = 4;
|
||||
}
|
||||
|
||||
// A digest of all the documents that match a given target.
|
||||
message ExistenceFilter {
|
||||
// The target ID to which this filter applies.
|
||||
int32 target_id = 1;
|
||||
|
||||
// The total count of documents that match [target_id][google.firestore.v1beta1.ExistenceFilter.target_id].
|
||||
//
|
||||
// If different from the count of documents in the client that match, the
|
||||
// client must manually determine which documents no longer match the target.
|
||||
int32 count = 2;
|
||||
}
|
||||
155
express-server/node_modules/@google-cloud/firestore/build/protos/google/protobuf/any.proto
generated
vendored
Normal file
@@ -0,0 +1,155 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
option go_package = "github.com/golang/protobuf/ptypes/any";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "AnyProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
|
||||
// `Any` contains an arbitrary serialized protocol buffer message along with a
|
||||
// URL that describes the type of the serialized message.
|
||||
//
|
||||
// Protobuf library provides support to pack/unpack Any values in the form
|
||||
// of utility functions or additional generated methods of the Any type.
|
||||
//
|
||||
// Example 1: Pack and unpack a message in C++.
|
||||
//
|
||||
// Foo foo = ...;
|
||||
// Any any;
|
||||
// any.PackFrom(foo);
|
||||
// ...
|
||||
// if (any.UnpackTo(&foo)) {
|
||||
// ...
|
||||
// }
|
||||
//
|
||||
// Example 2: Pack and unpack a message in Java.
|
||||
//
|
||||
// Foo foo = ...;
|
||||
// Any any = Any.pack(foo);
|
||||
// ...
|
||||
// if (any.is(Foo.class)) {
|
||||
// foo = any.unpack(Foo.class);
|
||||
// }
|
||||
//
|
||||
// Example 3: Pack and unpack a message in Python.
|
||||
//
|
||||
// foo = Foo(...)
|
||||
// any = Any()
|
||||
// any.Pack(foo)
|
||||
// ...
|
||||
// if any.Is(Foo.DESCRIPTOR):
|
||||
// any.Unpack(foo)
|
||||
// ...
|
||||
//
|
||||
// Example 4: Pack and unpack a message in Go
|
||||
//
|
||||
// foo := &pb.Foo{...}
|
||||
// any, err := ptypes.MarshalAny(foo)
|
||||
// ...
|
||||
// foo := &pb.Foo{}
|
||||
// if err := ptypes.UnmarshalAny(any, foo); err != nil {
|
||||
// ...
|
||||
// }
|
||||
//
|
||||
// The pack methods provided by the protobuf library will by default use
|
||||
// 'type.googleapis.com/full.type.name' as the type URL and the unpack
|
||||
// methods only use the fully qualified type name after the last '/'
|
||||
// in the type URL, for example "foo.bar.com/x/y.z" will yield type
|
||||
// name "y.z".
|
||||
//
|
||||
//
|
||||
// JSON
|
||||
// ====
|
||||
// The JSON representation of an `Any` value uses the regular
|
||||
// representation of the deserialized, embedded message, with an
|
||||
// additional field `@type` which contains the type URL. Example:
|
||||
//
|
||||
// package google.profile;
|
||||
// message Person {
|
||||
// string first_name = 1;
|
||||
// string last_name = 2;
|
||||
// }
|
||||
//
|
||||
// {
|
||||
// "@type": "type.googleapis.com/google.profile.Person",
|
||||
// "firstName": <string>,
|
||||
// "lastName": <string>
|
||||
// }
|
||||
//
|
||||
// If the embedded message type is well-known and has a custom JSON
|
||||
// representation, that representation will be embedded adding a field
|
||||
// `value` which holds the custom JSON in addition to the `@type`
|
||||
// field. Example (for message [google.protobuf.Duration][]):
|
||||
//
|
||||
// {
|
||||
// "@type": "type.googleapis.com/google.protobuf.Duration",
|
||||
// "value": "1.212s"
|
||||
// }
|
||||
//
|
||||
message Any {
|
||||
// A URL/resource name that uniquely identifies the type of the serialized
|
||||
// protocol buffer message. This string must contain at least
|
||||
// one "/" character. The last segment of the URL's path must represent
|
||||
// the fully qualified name of the type (as in
|
||||
// `path/google.protobuf.Duration`). The name should be in a canonical form
|
||||
// (e.g., leading "." is not accepted).
|
||||
//
|
||||
// In practice, teams usually precompile into the binary all types that they
|
||||
// expect it to use in the context of Any. However, for URLs which use the
|
||||
// scheme `http`, `https`, or no scheme, one can optionally set up a type
|
||||
// server that maps type URLs to message definitions as follows:
|
||||
//
|
||||
// * If no scheme is provided, `https` is assumed.
|
||||
// * An HTTP GET on the URL must yield a [google.protobuf.Type][]
|
||||
// value in binary format, or produce an error.
|
||||
// * Applications are allowed to cache lookup results based on the
|
||||
// URL, or have them precompiled into a binary to avoid any
|
||||
// lookup. Therefore, binary compatibility needs to be preserved
|
||||
// on changes to types. (Use versioned type names to manage
|
||||
// breaking changes.)
|
||||
//
|
||||
// Note: this functionality is not currently available in the official
|
||||
// protobuf release, and it is not used for type URLs beginning with
|
||||
// type.googleapis.com.
|
||||
//
|
||||
// Schemes other than `http`, `https` (or the empty scheme) might be
|
||||
// used with implementation specific semantics.
|
||||
//
|
||||
string type_url = 1;
|
||||
|
||||
// Must be a valid serialized protocol buffer of the above specified type.
|
||||
bytes value = 2;
|
||||
}
|
||||
52
express-server/node_modules/@google-cloud/firestore/build/protos/google/protobuf/empty.proto
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
option go_package = "github.com/golang/protobuf/ptypes/empty";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "EmptyProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
option cc_enable_arenas = true;
|
||||
|
||||
// A generic empty message that you can re-use to avoid defining duplicated
|
||||
// empty messages in your APIs. A typical example is to use it as the request
|
||||
// or the response type of an API method. For instance:
|
||||
//
|
||||
// service Foo {
|
||||
// rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
|
||||
// }
|
||||
//
|
||||
// The JSON representation for `Empty` is an empty JSON object `{}`.
|
||||
message Empty {}
|
||||
96
express-server/node_modules/@google-cloud/firestore/build/protos/google/protobuf/struct.proto
generated
vendored
Normal file
@@ -0,0 +1,96 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.protobuf;
|
||||
|
||||
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||
option cc_enable_arenas = true;
|
||||
option go_package = "github.com/golang/protobuf/ptypes/struct;structpb";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "StructProto";
|
||||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
|
||||
|
||||
// `Struct` represents a structured data value, consisting of fields
|
||||
// which map to dynamically typed values. In some languages, `Struct`
|
||||
// might be supported by a native representation. For example, in
|
||||
// scripting languages like JS, a struct is represented as an
|
||||
// object. The details of that representation are described together
|
||||
// with the proto support for the language.
|
||||
//
|
||||
// The JSON representation for `Struct` is a JSON object.
|
||||
message Struct {
|
||||
// Unordered map of dynamically typed values.
|
||||
map<string, Value> fields = 1;
|
||||
}
|
||||
|
||||
// `Value` represents a dynamically typed value which can be either
|
||||
// null, a number, a string, a boolean, a recursive struct value, or a
|
||||
// list of values. A producer of a value is expected to set one of these
// variants; absence of any variant indicates an error.
|
||||
//
|
||||
// The JSON representation for `Value` is a JSON value.
|
||||
message Value {
|
||||
// The kind of value.
|
||||
oneof kind {
|
||||
// Represents a null value.
|
||||
NullValue null_value = 1;
|
||||
// Represents a double value.
|
||||
double number_value = 2;
|
||||
// Represents a string value.
|
||||
string string_value = 3;
|
||||
// Represents a boolean value.
|
||||
bool bool_value = 4;
|
||||
// Represents a structured value.
|
||||
Struct struct_value = 5;
|
||||
// Represents a repeated `Value`.
|
||||
ListValue list_value = 6;
|
||||
}
|
||||
}
|
||||
|
||||
// `NullValue` is a singleton enumeration to represent the null value for the
|
||||
// `Value` type union.
|
||||
//
|
||||
// The JSON representation for `NullValue` is JSON `null`.
|
||||
enum NullValue {
|
||||
// Null value.
|
||||
NULL_VALUE = 0;
|
||||
}
|
||||
|
||||
// `ListValue` is a wrapper around a repeated field of values.
|
||||
//
|
||||
// The JSON representation for `ListValue` is a JSON array.
|
||||
message ListValue {
|
||||
// Repeated field of dynamically typed values.
|
||||
repeated Value values = 1;
|
||||
}
|
||||
137
express-server/node_modules/@google-cloud/firestore/build/protos/google/protobuf/timestamp.proto
generated
vendored
Normal file
@@ -0,0 +1,137 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

syntax = "proto3";

package google.protobuf;

option csharp_namespace = "Google.Protobuf.WellKnownTypes";
option cc_enable_arenas = true;
option go_package = "github.com/golang/protobuf/ptypes/timestamp";
option java_package = "com.google.protobuf";
option java_outer_classname = "TimestampProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";

// A Timestamp represents a point in time independent of any time zone or local
// calendar, encoded as a count of seconds and fractions of seconds at
// nanosecond resolution. The count is relative to an epoch at UTC midnight on
// January 1, 1970, in the proleptic Gregorian calendar which extends the
// Gregorian calendar backwards to year one.
//
// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
// second table is needed for interpretation, using a [24-hour linear
// smear](https://developers.google.com/time/smear).
//
// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
// restricting to that range, we ensure that we can convert to and from [RFC
// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
//
// # Examples
//
// Example 1: Compute Timestamp from POSIX `time()`.
//
//     Timestamp timestamp;
//     timestamp.set_seconds(time(NULL));
//     timestamp.set_nanos(0);
//
// Example 2: Compute Timestamp from POSIX `gettimeofday()`.
//
//     struct timeval tv;
//     gettimeofday(&tv, NULL);
//
//     Timestamp timestamp;
//     timestamp.set_seconds(tv.tv_sec);
//     timestamp.set_nanos(tv.tv_usec * 1000);
//
// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
//
//     FILETIME ft;
//     GetSystemTimeAsFileTime(&ft);
//     UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
//
//     // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
//     // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
//     Timestamp timestamp;
//     timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
//     timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
//
// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
//
//     long millis = System.currentTimeMillis();
//
//     Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
//         .setNanos((int) ((millis % 1000) * 1000000)).build();
//
//
// Example 5: Compute Timestamp from current time in Python.
//
//     timestamp = Timestamp()
//     timestamp.GetCurrentTime()
//
// # JSON Mapping
//
// In JSON format, the Timestamp type is encoded as a string in the
// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
// where {year} is always expressed using four digits while {month}, {day},
// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
// is required. A proto3 JSON serializer should always use UTC (as indicated by
// "Z") when printing the Timestamp type and a proto3 JSON parser should be
// able to accept both UTC and other timezones (as indicated by an offset).
//
// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
// 01:30 UTC on January 15, 2017.
//
// In JavaScript, one can convert a Date object to this format using the
// standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString]
// method. In Python, a standard `datetime.datetime` object can be converted
// to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime)
// with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one
// can use the Joda Time's [`ISODateTimeFormat.dateTime()`](
// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
// ) to obtain a formatter capable of generating timestamps in this format.
//
//
message Timestamp {

  // Represents seconds of UTC time since Unix epoch
  // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
  // 9999-12-31T23:59:59Z inclusive.
  int64 seconds = 1;

  // Non-negative fractions of a second at nanosecond resolution. Negative
  // second values with fractions must still have non-negative nanos values
  // that count forward in time. Must be from 0 to 999,999,999
  // inclusive.
  int32 nanos = 2;
}
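As a rough companion to the comment above (not part of the vendored file; `toTimestampProto` is a hypothetical helper), a JavaScript `Date` can be turned into the seconds/nanos pair described by `google.protobuf.Timestamp` like so:

// Hypothetical sketch: encode a JavaScript Date as the {seconds, nanos}
// pair used by google.protobuf.Timestamp. Millisecond precision only.
function toTimestampProto(date) {
  const millis = date.getTime();
  const seconds = Math.floor(millis / 1000);
  const nanos = (millis - seconds * 1000) * 1e6;
  return { seconds, nanos };
}

// Example: toTimestampProto(new Date('2017-01-15T01:30:15.010Z'))
// should yield { seconds: 1484443815, nanos: 10000000 }.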
123
express-server/node_modules/@google-cloud/firestore/build/protos/google/protobuf/wrappers.proto
generated
vendored
Normal file
123
express-server/node_modules/@google-cloud/firestore/build/protos/google/protobuf/wrappers.proto
generated
vendored
Normal file
@@ -0,0 +1,123 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Wrappers for primitive (non-message) types. These types are useful
// for embedding primitives in the `google.protobuf.Any` type and for places
// where we need to distinguish between the absence of a primitive
// typed field and its default value.
//
// These wrappers have no meaningful use within repeated fields as they lack
// the ability to detect presence on individual elements.
// These wrappers have no meaningful use within a map or a oneof since
// individual entries of a map or fields of a oneof can already detect presence.

syntax = "proto3";

package google.protobuf;

option csharp_namespace = "Google.Protobuf.WellKnownTypes";
option cc_enable_arenas = true;
option go_package = "github.com/golang/protobuf/ptypes/wrappers";
option java_package = "com.google.protobuf";
option java_outer_classname = "WrappersProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";

// Wrapper message for `double`.
//
// The JSON representation for `DoubleValue` is JSON number.
message DoubleValue {
  // The double value.
  double value = 1;
}

// Wrapper message for `float`.
//
// The JSON representation for `FloatValue` is JSON number.
message FloatValue {
  // The float value.
  float value = 1;
}

// Wrapper message for `int64`.
//
// The JSON representation for `Int64Value` is JSON string.
message Int64Value {
  // The int64 value.
  int64 value = 1;
}

// Wrapper message for `uint64`.
//
// The JSON representation for `UInt64Value` is JSON string.
message UInt64Value {
  // The uint64 value.
  uint64 value = 1;
}

// Wrapper message for `int32`.
//
// The JSON representation for `Int32Value` is JSON number.
message Int32Value {
  // The int32 value.
  int32 value = 1;
}

// Wrapper message for `uint32`.
//
// The JSON representation for `UInt32Value` is JSON number.
message UInt32Value {
  // The uint32 value.
  uint32 value = 1;
}

// Wrapper message for `bool`.
//
// The JSON representation for `BoolValue` is JSON `true` and `false`.
message BoolValue {
  // The bool value.
  bool value = 1;
}

// Wrapper message for `string`.
//
// The JSON representation for `StringValue` is JSON string.
message StringValue {
  // The string value.
  string value = 1;
}

// Wrapper message for `bytes`.
//
// The JSON representation for `BytesValue` is JSON string.
message BytesValue {
  // The bytes value.
  bytes value = 1;
}
92
express-server/node_modules/@google-cloud/firestore/build/protos/google/rpc/status.proto
generated
vendored
Normal file
92
express-server/node_modules/@google-cloud/firestore/build/protos/google/rpc/status.proto
generated
vendored
Normal file
@@ -0,0 +1,92 @@
// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.rpc;

import "google/protobuf/any.proto";

option go_package = "google.golang.org/genproto/googleapis/rpc/status;status";
option java_multiple_files = true;
option java_outer_classname = "StatusProto";
option java_package = "com.google.rpc";
option objc_class_prefix = "RPC";


// The `Status` type defines a logical error model that is suitable for different
// programming environments, including REST APIs and RPC APIs. It is used by
// [gRPC](https://github.com/grpc). The error model is designed to be:
//
// - Simple to use and understand for most users
// - Flexible enough to meet unexpected needs
//
// # Overview
//
// The `Status` message contains three pieces of data: error code, error message,
// and error details. The error code should be an enum value of
// [google.rpc.Code][google.rpc.Code], but it may accept additional error codes if needed. The
// error message should be a developer-facing English message that helps
// developers *understand* and *resolve* the error. If a localized user-facing
// error message is needed, put the localized message in the error details or
// localize it in the client. The optional error details may contain arbitrary
// information about the error. There is a predefined set of error detail types
// in the package `google.rpc` that can be used for common error conditions.
//
// # Language mapping
//
// The `Status` message is the logical representation of the error model, but it
// is not necessarily the actual wire format. When the `Status` message is
// exposed in different client libraries and different wire protocols, it can be
// mapped differently. For example, it will likely be mapped to some exceptions
// in Java, but more likely mapped to some error codes in C.
//
// # Other uses
//
// The error model and the `Status` message can be used in a variety of
// environments, either with or without APIs, to provide a
// consistent developer experience across different environments.
//
// Example uses of this error model include:
//
// - Partial errors. If a service needs to return partial errors to the client,
//     it may embed the `Status` in the normal response to indicate the partial
//     errors.
//
// - Workflow errors. A typical workflow has multiple steps. Each step may
//     have a `Status` message for error reporting.
//
// - Batch operations. If a client uses batch request and batch response, the
//     `Status` message should be used directly inside batch response, one for
//     each error sub-response.
//
// - Asynchronous operations. If an API call embeds asynchronous operation
//     results in its response, the status of those operations should be
//     represented directly using the `Status` message.
//
// - Logging. If some API errors are stored in logs, the message `Status` could
//     be used directly after any stripping needed for security/privacy reasons.
message Status {
  // The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code].
  int32 code = 1;

  // A developer-facing error message, which should be in English. Any
  // user-facing error message should be localized and sent in the
  // [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client.
  string message = 2;

  // A list of messages that carry the error details. There is a common set of
  // message types for APIs to use.
  repeated google.protobuf.Any details = 3;
}
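As an informal illustration (not part of the vendored file; the object literal and helper are hypothetical), an error that follows the `Status` shape above might be built and inspected in JavaScript like this:

// Hypothetical sketch of a Status-shaped error object as described above.
const status = {
  code: 5, // NOT_FOUND in google.rpc.Code
  message: 'Document "users/alice" was not found.',
  details: [], // repeated google.protobuf.Any
};

function describeStatus(s) {
  return `rpc error ${s.code}: ${s.message} (${s.details.length} detail(s))`;
}

console.log(describeStatus(status));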
71
express-server/node_modules/@google-cloud/firestore/build/protos/google/type/latlng.proto
generated
vendored
Normal file
71
express-server/node_modules/@google-cloud/firestore/build/protos/google/type/latlng.proto
generated
vendored
Normal file
@@ -0,0 +1,71 @@
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.type;

option go_package = "google.golang.org/genproto/googleapis/type/latlng;latlng";
option java_multiple_files = true;
option java_outer_classname = "LatLngProto";
option java_package = "com.google.type";
option objc_class_prefix = "GTP";


// An object representing a latitude/longitude pair. This is expressed as a pair
// of doubles representing degrees latitude and degrees longitude. Unless
// specified otherwise, this must conform to the
// <a href="http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf">WGS84
// standard</a>. Values must be within normalized ranges.
//
// Example of normalization code in Python:
//
//     def NormalizeLongitude(longitude):
//       """Wraps decimal degrees longitude to [-180.0, 180.0]."""
//       q, r = divmod(longitude, 360.0)
//       if r > 180.0 or (r == 180.0 and q <= -1.0):
//         return r - 360.0
//       return r
//
//     def NormalizeLatLng(latitude, longitude):
//       """Wraps decimal degrees latitude and longitude to
//          [-90.0, 90.0] and [-180.0, 180.0], respectively."""
//       r = latitude % 360.0
//       if r <= 90.0:
//         return r, NormalizeLongitude(longitude)
//       elif r >= 270.0:
//         return r - 360, NormalizeLongitude(longitude)
//       else:
//         return 180 - r, NormalizeLongitude(longitude + 180.0)
//
//     assert 180.0 == NormalizeLongitude(180.0)
//     assert -180.0 == NormalizeLongitude(-180.0)
//     assert -179.0 == NormalizeLongitude(181.0)
//     assert (0.0, 0.0) == NormalizeLatLng(360.0, 0.0)
//     assert (0.0, 0.0) == NormalizeLatLng(-360.0, 0.0)
//     assert (85.0, 180.0) == NormalizeLatLng(95.0, 0.0)
//     assert (-85.0, -170.0) == NormalizeLatLng(-95.0, 10.0)
//     assert (90.0, 10.0) == NormalizeLatLng(90.0, 10.0)
//     assert (-90.0, -10.0) == NormalizeLatLng(-90.0, -10.0)
//     assert (0.0, -170.0) == NormalizeLatLng(-180.0, 10.0)
//     assert (0.0, -170.0) == NormalizeLatLng(180.0, 10.0)
//     assert (-90.0, 10.0) == NormalizeLatLng(270.0, 10.0)
//     assert (90.0, 10.0) == NormalizeLatLng(-270.0, 10.0)
message LatLng {
  // The latitude in degrees. It must be in the range [-90.0, +90.0].
  double latitude = 1;

  // The longitude in degrees. It must be in the range [-180.0, +180.0].
  double longitude = 2;
}
73
express-server/node_modules/@google-cloud/firestore/build/protos/update.sh
generated
vendored
Normal file
73
express-server/node_modules/@google-cloud/firestore/build/protos/update.sh
generated
vendored
Normal file
@@ -0,0 +1,73 @@
#!/bin/bash

# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -euo pipefail
IFS=$'\n\t'

# Variables
PROTOS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
WORK_DIR=`mktemp -d`

# deletes the temp directory on exit
function cleanup {
  rm -rf "$WORK_DIR"
  echo "Deleted temp working directory $WORK_DIR"
}

# register the cleanup function to be called on the EXIT signal
trap cleanup EXIT

# Enter work dir
pushd "$WORK_DIR"

# Clone necessary git repos.
git clone https://github.com/googleapis/googleapis.git
git clone https://github.com/google/protobuf.git

# Copy necessary protos.
mkdir -p "${PROTOS_DIR}/google/api"
cp googleapis/google/api/{annotations.proto,http.proto} \
  "${PROTOS_DIR}/google/api/"

mkdir -p "${PROTOS_DIR}/google/firestore/v1beta1"
cp googleapis/google/firestore/v1beta1/*.proto \
  "${PROTOS_DIR}/google/firestore/v1beta1/"

mkdir -p "${PROTOS_DIR}/google/rpc"
cp googleapis/google/rpc/status.proto \
  "${PROTOS_DIR}/google/rpc/"

mkdir -p "${PROTOS_DIR}/google/type"
cp googleapis/google/type/latlng.proto \
  "${PROTOS_DIR}/google/type/"

mkdir -p "${PROTOS_DIR}/google/protobuf"
cp protobuf/src/google/protobuf/{any,empty,struct,timestamp,wrappers}.proto \
  "${PROTOS_DIR}/google/protobuf/"

# Generate the Protobuf typings
pbjs --proto_path=. --js_out=import_style=commonjs,binary:library \
  --target=static --no-create --no-encode --no-decode --no-verify \
  --no-convert --no-delimited --force-enum-string --force-number -o \
  firestore_proto_api.js "${PROTOS_DIR}/google/firestore/v1beta1/*.proto" \
  "${PROTOS_DIR}/google/protobuf/*.proto" "${PROTOS_DIR}/google/type/*.proto" \
  "${PROTOS_DIR}/google/rpc/*.proto" "${PROTOS_DIR}/google/api/*.proto"
pbts -o firestore_proto_api.d.ts firestore_proto_api.js

# Copy typings into source repo
cp {firestore_proto_api.d.ts,firestore_proto_api.js} ${PROTOS_DIR}

popd
145
express-server/node_modules/@google-cloud/firestore/build/src/backoff.js
generated
vendored
Normal file
145
express-server/node_modules/@google-cloud/firestore/build/src/backoff.js
generated
vendored
Normal file
@@ -0,0 +1,145 @@
"use strict";
/*!
 * Copyright 2017 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
Object.defineProperty(exports, "__esModule", { value: true });
const logger_1 = require("./logger");
/*
 * @module firestore/backoff
 * @private
 *
 * Contains backoff logic to facilitate RPC error handling. This class derives
 * its implementation from the Firestore Mobile Web Client.
 *
 * @see https://github.com/firebase/firebase-js-sdk/blob/master/packages/firestore/src/remote/backoff.ts
 */
/*!
 * The default initial backoff time in milliseconds after an error.
 * Set to 1s according to https://cloud.google.com/apis/design/errors.
 */
const DEFAULT_BACKOFF_INITIAL_DELAY_MS = 1000;
/*!
 * The default maximum backoff time in milliseconds.
 */
const DEFAULT_BACKOFF_MAX_DELAY_MS = 60 * 1000;
/*!
 * The default factor to increase the backoff by after each failed attempt.
 */
const DEFAULT_BACKOFF_FACTOR = 1.5;
/*!
 * The default jitter to distribute the backoff attempts by (0 means no
 * randomization, 1.0 means +/-50% randomization).
 */
const DEFAULT_JITTER_FACTOR = 1.0;
/*!
 * The timeout handler used by `ExponentialBackoff`.
 */
let delayExecution = setTimeout;
/**
 * Allows overriding of the timeout handler used by the exponential backoff
 * implementation. If not invoked, we default to `setTimeout()`.
 *
 * Used only in testing.
 *
 * @private
 * @param {function} handler A handler that matches the API of `setTimeout()`.
 */
function setTimeoutHandler(handler) {
    delayExecution = handler;
}
exports.setTimeoutHandler = setTimeoutHandler;
/**
 * A helper for running delayed tasks following an exponential backoff curve
 * between attempts.
 *
 * Each delay is made up of a "base" delay which follows the exponential
 * backoff curve, and a "jitter" (+/- 50% by default) that is calculated and
 * added to the base delay. This prevents clients from accidentally
 * synchronizing their delays causing spikes of load to the backend.
 *
 * @private
 */
class ExponentialBackoff {
    constructor(options = {}) {
        /**
         * The backoff delay of the current attempt.
         */
        this.currentBaseMs = 0;
        this.initialDelayMs = options.initialDelayMs !== undefined ?
            options.initialDelayMs :
            DEFAULT_BACKOFF_INITIAL_DELAY_MS;
        this.backoffFactor = options.backoffFactor !== undefined ?
            options.backoffFactor :
            DEFAULT_BACKOFF_FACTOR;
        this.maxDelayMs = options.maxDelayMs !== undefined ?
            options.maxDelayMs :
            DEFAULT_BACKOFF_MAX_DELAY_MS;
        this.jitterFactor = options.jitterFactor !== undefined ?
            options.jitterFactor :
            DEFAULT_JITTER_FACTOR;
    }
    /**
     * Resets the backoff delay.
     *
     * The very next backoffAndWait() will have no delay. If it is called again
     * (i.e. due to an error), initialDelayMs (plus jitter) will be used, and
     * subsequent ones will increase according to the backoffFactor.
     */
    reset() {
        this.currentBaseMs = 0;
    }
    /**
     * Resets the backoff delay to the maximum delay (e.g. for use after a
     * RESOURCE_EXHAUSTED error).
     */
    resetToMax() {
        this.currentBaseMs = this.maxDelayMs;
    }
    /**
     * Returns a promise that resolves after currentDelayMs, and increases the
     * delay for any subsequent attempts.
     *
     * @return A Promise that resolves when the current delay elapsed.
     */
    backoffAndWait() {
        // First schedule using the current base (which may be 0 and should be
        // honored as such).
        const delayWithJitterMs = this.currentBaseMs + this.jitterDelayMs();
        if (this.currentBaseMs > 0) {
            logger_1.logger('ExponentialBackoff.backoffAndWait', null, `Backing off for ${delayWithJitterMs} ms ` +
                `(base delay: ${this.currentBaseMs} ms)`);
        }
        // Apply backoff factor to determine next delay and ensure it is within
        // bounds.
        this.currentBaseMs *= this.backoffFactor;
        this.currentBaseMs = Math.max(this.currentBaseMs, this.initialDelayMs);
        this.currentBaseMs = Math.min(this.currentBaseMs, this.maxDelayMs);
        return new Promise(resolve => {
            delayExecution(resolve, delayWithJitterMs);
        });
    }
    /**
     * Returns a randomized "jitter" delay based on the current base and jitter
     * factor.
     *
     * @private
     * @returns {number} The jitter to apply based on the current delay.
     */
    jitterDelayMs() {
        return (Math.random() - 0.5) * this.jitterFactor * this.currentBaseMs;
    }
}
exports.ExponentialBackoff = ExponentialBackoff;
//# sourceMappingURL=backoff.js.map
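A brief usage sketch of the class above (the surrounding retry policy and the retried operation are hypothetical; only `ExponentialBackoff` and `backoffAndWait()` come from the module itself):

// Hypothetical usage sketch of ExponentialBackoff from this module.
const { ExponentialBackoff } = require('./backoff');

async function withRetries(operation, maxAttempts = 5) {
  const backoff = new ExponentialBackoff();
  for (let attempt = 1; attempt <= maxAttempts; ++attempt) {
    await backoff.backoffAndWait(); // no delay on the very first attempt
    try {
      return await operation();
    } catch (err) {
      if (attempt === maxAttempts) throw err; // give up after the last attempt
    }
  }
}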
215
express-server/node_modules/@google-cloud/firestore/build/src/convert.js
generated
vendored
Normal file
215
express-server/node_modules/@google-cloud/firestore/build/src/convert.js
generated
vendored
Normal file
@@ -0,0 +1,215 @@
"use strict";
/*!
 * Copyright 2017 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
Object.defineProperty(exports, "__esModule", { value: true });
const validate_1 = require("./validate");
const validate = validate_1.createValidator();
/*!
 * @module firestore/convert
 * @private
 *
 * This module contains utility functions to convert
 * `firestore.v1beta1.Documents` from Proto3 JSON to their equivalent
 * representation in Protobuf JS. Protobuf JS is the only encoding supported by
 * this client, and dependencies that use Proto3 JSON (such as the Google Cloud
 * Functions SDK) are supported through this conversion and its usage in
 * {@see Firestore#snapshot_}.
 */
/**
 * Converts an ISO 8601 or google.protobuf.Timestamp proto into Protobuf JS.
 *
 * @private
 * @param timestampValue The value to convert.
 * @param argumentName The argument name to use in the error message if the
 * conversion fails. If omitted, 'timestampValue' is used.
 * @return The value as expected by Protobuf JS or undefined if no input was
 * provided.
 */
function timestampFromJson(timestampValue, argumentName) {
    let timestampProto;
    if (typeof timestampValue === 'string') {
        const date = new Date(timestampValue);
        const seconds = Math.floor(date.getTime() / 1000);
        let nanos = 0;
        if (timestampValue.length > 20) {
            const nanoString = timestampValue.substring(20, timestampValue.length - 1);
            const trailingZeroes = 9 - nanoString.length;
            nanos = Number(nanoString) * Math.pow(10, trailingZeroes);
        }
        if (isNaN(seconds) || isNaN(nanos)) {
            argumentName = argumentName || 'timestampValue';
            throw new Error(`Specify a valid ISO 8601 timestamp for "${argumentName}".`);
        }
        timestampProto = {
            seconds: seconds || undefined,
            nanos: nanos || undefined,
        };
    }
    else if (timestampValue !== undefined) {
        validate.isObject('timestampValue', timestampValue);
        timestampProto = {
            seconds: timestampValue.seconds || undefined,
            nanos: timestampValue.nanos || undefined,
        };
    }
    return timestampProto;
}
exports.timestampFromJson = timestampFromJson;
/**
 * Converts a Proto3 JSON 'bytesValue' field into Protobuf JS.
 *
 * @private
 * @param bytesValue The value to convert.
 * @return The value as expected by Protobuf JS.
 */
function bytesFromJson(bytesValue) {
    if (typeof bytesValue === 'string') {
        return Buffer.from(bytesValue, 'base64');
    }
    else {
        return bytesValue;
    }
}
/**
 * Detects 'valueType' from a Proto3 JSON `firestore.v1beta1.Value` proto.
 *
 * @private
 * @param proto The `firestore.v1beta1.Value` proto.
 * @return The string value for 'valueType'.
 */
function detectValueType(proto) {
    if (proto.valueType) {
        return proto.valueType;
    }
    const detectedValues = [];
    if (proto.stringValue !== undefined) {
        detectedValues.push('stringValue');
    }
    if (proto.booleanValue !== undefined) {
        detectedValues.push('booleanValue');
    }
    if (proto.integerValue !== undefined) {
        detectedValues.push('integerValue');
    }
    if (proto.doubleValue !== undefined) {
        detectedValues.push('doubleValue');
    }
    if (proto.timestampValue !== undefined) {
        detectedValues.push('timestampValue');
    }
    if (proto.referenceValue !== undefined) {
        detectedValues.push('referenceValue');
    }
    if (proto.arrayValue !== undefined) {
        detectedValues.push('arrayValue');
    }
    if (proto.nullValue !== undefined) {
        detectedValues.push('nullValue');
    }
    if (proto.mapValue !== undefined) {
        detectedValues.push('mapValue');
    }
    if (proto.geoPointValue !== undefined) {
        detectedValues.push('geoPointValue');
    }
    if (proto.bytesValue !== undefined) {
        detectedValues.push('bytesValue');
    }
    if (detectedValues.length !== 1) {
        throw new Error(`Unable to infer type value from '${JSON.stringify(proto)}'.`);
    }
    return detectedValues[0];
}
exports.detectValueType = detectValueType;
/**
 * Converts a `firestore.v1beta1.Value` in Proto3 JSON encoding into the
 * Protobuf JS format expected by this client.
 *
 * @private
 * @param fieldValue The `firestore.v1beta1.Value` in Proto3 JSON format.
 * @return The `firestore.v1beta1.Value` in Protobuf JS format.
 */
function valueFromJson(fieldValue) {
    const valueType = detectValueType(fieldValue);
    switch (valueType) {
        case 'timestampValue':
            return {
                timestampValue: timestampFromJson(fieldValue.timestampValue),
            };
        case 'bytesValue':
            return {
                bytesValue: bytesFromJson(fieldValue.bytesValue),
            };
        case 'integerValue':
            return {
                integerValue: Number(fieldValue.integerValue),
            };
        case 'doubleValue':
            return {
                doubleValue: Number(fieldValue.doubleValue),
            };
        case 'arrayValue': {
            const arrayValue = [];
            if (Array.isArray(fieldValue.arrayValue.values)) {
                for (const value of fieldValue.arrayValue.values) {
                    arrayValue.push(valueFromJson(value));
                }
            }
            return {
                arrayValue: {
                    values: arrayValue,
                },
            };
        }
        case 'mapValue': {
            const mapValue = {};
            for (const prop in fieldValue.mapValue.fields) {
                if (fieldValue.mapValue.fields.hasOwnProperty(prop)) {
                    mapValue[prop] = valueFromJson(fieldValue.mapValue.fields[prop]);
                }
            }
            return {
                mapValue: {
                    fields: mapValue,
                },
            };
        }
        default:
            return fieldValue;
    }
}
exports.valueFromJson = valueFromJson;
/**
 * Converts a `firestore.v1beta1.Document` in Proto3 JSON encoding into the
 * Protobuf JS format expected by this client. This conversion creates a copy of
 * the underlying document.
 *
 * @private
 * @param document The `firestore.v1beta1.Document` in Proto3 JSON
 * format.
 * @return The `firestore.v1beta1.Document` in Protobuf JS format.
 */
function documentFromJson(document) {
    const result = {};
    for (const prop in document) {
        if (document.hasOwnProperty(prop)) {
            result[prop] = valueFromJson(document[prop]);
        }
    }
    return result;
}
exports.documentFromJson = documentFromJson;
//# sourceMappingURL=convert.js.map
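A short usage sketch of the conversion helpers above (the input literal is illustrative; `detectValueType` and `valueFromJson` are the module's own exports):

// Hypothetical usage sketch of the Proto3 JSON -> Protobuf JS helpers above.
const { valueFromJson, detectValueType } = require('./convert');

const proto3Json = {
  mapValue: {
    fields: {
      title: { stringValue: 'hello' },
      created: { timestampValue: '2017-01-15T01:30:15.010Z' },
    },
  },
};

console.log(detectValueType(proto3Json)); // 'mapValue'
const protobufJs = valueFromJson(proto3Json);
// protobufJs.mapValue.fields.created now holds a { seconds, nanos } pair.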
166
express-server/node_modules/@google-cloud/firestore/build/src/document-change.js
generated
vendored
Normal file
166
express-server/node_modules/@google-cloud/firestore/build/src/document-change.js
generated
vendored
Normal file
@@ -0,0 +1,166 @@
"use strict";
/*!
 * Copyright 2018 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
Object.defineProperty(exports, "__esModule", { value: true });
/**
 * A DocumentChange represents a change to the documents matching a query.
 * It contains the document affected and the type of change that occurred.
 *
 * @class
 */
class DocumentChange {
    /**
     * @private
     * @hideconstructor
     *
     * @param {string} type 'added' | 'removed' | 'modified'.
     * @param {QueryDocumentSnapshot} document The document.
     * @param {number} oldIndex The index in the documents array prior to this
     * change.
     * @param {number} newIndex The index in the documents array after this
     * change.
     */
    constructor(type, document, oldIndex, newIndex) {
        this._type = type;
        this._document = document;
        this._oldIndex = oldIndex;
        this._newIndex = newIndex;
    }
    /**
     * The type of change ('added', 'modified', or 'removed').
     *
     * @type {string}
     * @name DocumentChange#type
     * @readonly
     *
     * @example
     * let query = firestore.collection('col').where('foo', '==', 'bar');
     * let docsArray = [];
     *
     * let unsubscribe = query.onSnapshot(querySnapshot => {
     *   for (let change of querySnapshot.docChanges) {
     *     console.log(`Type of change is ${change.type}`);
     *   }
     * });
     *
     * // Remove this listener.
     * unsubscribe();
     */
    get type() {
        return this._type;
    }
    /**
     * The document affected by this change.
     *
     * @type {QueryDocumentSnapshot}
     * @name DocumentChange#doc
     * @readonly
     *
     * @example
     * let query = firestore.collection('col').where('foo', '==', 'bar');
     *
     * let unsubscribe = query.onSnapshot(querySnapshot => {
     *   for (let change of querySnapshot.docChanges) {
     *     console.log(change.doc.data());
     *   }
     * });
     *
     * // Remove this listener.
     * unsubscribe();
     */
    get doc() {
        return this._document;
    }
    /**
     * The index of the changed document in the result set immediately prior to
     * this DocumentChange (i.e. supposing that all prior DocumentChange objects
     * have been applied). Is -1 for 'added' events.
     *
     * @type {number}
     * @name DocumentChange#oldIndex
     * @readonly
     *
     * @example
     * let query = firestore.collection('col').where('foo', '==', 'bar');
     * let docsArray = [];
     *
     * let unsubscribe = query.onSnapshot(querySnapshot => {
     *   for (let change of querySnapshot.docChanges) {
     *     if (change.oldIndex !== -1) {
     *       docsArray.splice(change.oldIndex, 1);
     *     }
     *     if (change.newIndex !== -1) {
     *       docsArray.splice(change.newIndex, 0, change.doc);
     *     }
     *   }
     * });
     *
     * // Remove this listener.
     * unsubscribe();
     */
    get oldIndex() {
        return this._oldIndex;
    }
    /**
     * The index of the changed document in the result set immediately after
     * this DocumentChange (i.e. supposing that all prior DocumentChange
     * objects and the current DocumentChange object have been applied).
     * Is -1 for 'removed' events.
     *
     * @type {number}
     * @name DocumentChange#newIndex
     * @readonly
     *
     * @example
     * let query = firestore.collection('col').where('foo', '==', 'bar');
     * let docsArray = [];
     *
     * let unsubscribe = query.onSnapshot(querySnapshot => {
     *   for (let change of querySnapshot.docChanges) {
     *     if (change.oldIndex !== -1) {
     *       docsArray.splice(change.oldIndex, 1);
     *     }
     *     if (change.newIndex !== -1) {
     *       docsArray.splice(change.newIndex, 0, change.doc);
     *     }
     *   }
     * });
     *
     * // Remove this listener.
     * unsubscribe();
     */
    get newIndex() {
        return this._newIndex;
    }
    /**
     * Returns true if the data in this `DocumentChange` is equal to the provided
     * value.
     *
     * @param {*} other The value to compare against.
     * @return true if this `DocumentChange` is equal to the provided value.
     */
    isEqual(other) {
        if (this === other) {
            return true;
        }
        return (other instanceof DocumentChange && this._type === other._type &&
            this._oldIndex === other._oldIndex &&
            this._newIndex === other._newIndex &&
            this._document.isEqual(other._document));
    }
}
exports.DocumentChange = DocumentChange;
//# sourceMappingURL=document-change.js.map
955
express-server/node_modules/@google-cloud/firestore/build/src/document.js
generated
vendored
Normal file
955
express-server/node_modules/@google-cloud/firestore/build/src/document.js
generated
vendored
Normal file
@@ -0,0 +1,955 @@
|
||||
"use strict";
|
||||
/*!
|
||||
* Copyright 2017 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const assert = require("assert");
|
||||
const deepEqual = require('deep-equal');
|
||||
const is = require("is");
|
||||
const field_value_1 = require("./field-value");
|
||||
const path_1 = require("./path");
|
||||
const serializer_1 = require("./serializer");
|
||||
const timestamp_1 = require("./timestamp");
|
||||
/**
|
||||
* Returns a builder for DocumentSnapshot and QueryDocumentSnapshot instances.
|
||||
* Invoke `.build()' to assemble the final snapshot.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
class DocumentSnapshotBuilder {
|
||||
/**
|
||||
* Builds the DocumentSnapshot.
|
||||
*
|
||||
* @private
|
||||
* @returns Returns either a QueryDocumentSnapshot (if `fieldsProto` was
|
||||
* provided) or a DocumentSnapshot.
|
||||
*/
|
||||
build() {
|
||||
assert((this.fieldsProto !== undefined) === (this.createTime !== undefined), 'Create time should be set iff document exists.');
|
||||
assert((this.fieldsProto !== undefined) === (this.updateTime !== undefined), 'Update time should be set iff document exists.');
|
||||
return this.fieldsProto ?
|
||||
new QueryDocumentSnapshot(this.ref, this.fieldsProto, this.readTime, this.createTime, this.updateTime) :
|
||||
new DocumentSnapshot(this.ref, undefined, this.readTime);
|
||||
}
|
||||
}
|
||||
exports.DocumentSnapshotBuilder = DocumentSnapshotBuilder;
|
||||
/**
|
||||
* A DocumentSnapshot is an immutable representation for a document in a
|
||||
* Firestore database. The data can be extracted with
|
||||
* [data()]{@link DocumentSnapshot#data} or
|
||||
* [get(fieldPath)]{@link DocumentSnapshot#get} to get a
|
||||
* specific field.
|
||||
*
|
||||
* <p>For a DocumentSnapshot that points to a non-existing document, any data
|
||||
* access will return 'undefined'. You can use the
|
||||
* [exists]{@link DocumentSnapshot#exists} property to explicitly verify a
|
||||
* document's existence.
|
||||
*
|
||||
* @class
|
||||
*/
|
||||
class DocumentSnapshot {
|
||||
/**
|
||||
* @private
|
||||
* @hideconstructor
|
||||
*
|
||||
* @param ref The reference to the document.
|
||||
* @param fieldsProto The fields of the Firestore `Document` Protobuf backing
|
||||
* this document (or undefined if the document does not exist).
|
||||
* @param readTime The time when this snapshot was read (or undefined if
|
||||
* the document exists only locally).
|
||||
* @param createTime The time when the document was created (or undefined if
|
||||
* the document does not exist).
|
||||
* @param updateTime The time when the document was last updated (or undefined
|
||||
* if the document does not exist).
|
||||
*/
|
||||
constructor(ref, fieldsProto, readTime, createTime, updateTime) {
|
||||
this._ref = ref;
|
||||
this._fieldsProto = fieldsProto;
|
||||
this._serializer = ref.firestore._serializer;
|
||||
this._validator = ref.firestore._validator;
|
||||
this._readTime = readTime;
|
||||
this._createTime = createTime;
|
||||
this._updateTime = updateTime;
|
||||
}
|
||||
/**
|
||||
* Creates a DocumentSnapshot from an object.
|
||||
*
|
||||
* @private
|
||||
* @param ref The reference to the document.
|
||||
* @param obj The object to store in the DocumentSnapshot.
|
||||
* @return The created DocumentSnapshot.
|
||||
*/
|
||||
static fromObject(ref, obj) {
|
||||
const serializer = ref.firestore._serializer;
|
||||
return new DocumentSnapshot(ref, serializer.encodeFields(obj));
|
||||
}
|
||||
/**
|
||||
* Creates a DocumentSnapshot from an UpdateMap.
|
||||
*
|
||||
* This methods expands the top-level field paths in a JavaScript map and
|
||||
* turns { foo.bar : foobar } into { foo { bar : foobar }}
|
||||
*
|
||||
* @private
|
||||
* @param ref The reference to the document.
|
||||
* @param data The field/value map to expand.
|
||||
* @return The created DocumentSnapshot.
|
||||
*/
|
||||
static fromUpdateMap(ref, data) {
|
||||
const serializer = ref.firestore._serializer;
|
||||
/**
|
||||
* Merges 'value' at the field path specified by the path array into
|
||||
* 'target'.
|
||||
*/
|
||||
function merge(target, value, path, pos) {
|
||||
const key = path[pos];
|
||||
const isLast = pos === path.length - 1;
|
||||
if (target[key] === undefined) {
|
||||
if (isLast) {
|
||||
if (value instanceof field_value_1.FieldTransform) {
|
||||
// If there is already data at this path, we need to retain it.
|
||||
// Otherwise, we don't include it in the DocumentSnapshot.
|
||||
return !is.empty(target) ? target : null;
|
||||
}
|
||||
// The merge is done.
|
||||
const leafNode = serializer.encodeValue(value);
|
||||
if (leafNode) {
|
||||
target[key] = leafNode;
|
||||
}
|
||||
return target;
|
||||
}
|
||||
else {
|
||||
// We need to expand the target object.
|
||||
const childNode = {
|
||||
mapValue: {
|
||||
fields: {},
|
||||
},
|
||||
};
|
||||
const nestedValue = merge(childNode.mapValue.fields, value, path, pos + 1);
|
||||
if (nestedValue) {
|
||||
childNode.mapValue.fields = nestedValue;
|
||||
target[key] = childNode;
|
||||
return target;
|
||||
}
|
||||
else {
|
||||
return !is.empty(target) ? target : null;
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
assert(!isLast, 'Can\'t merge current value into a nested object');
|
||||
target[key].mapValue.fields =
|
||||
merge(target[key].mapValue.fields, value, path, pos + 1);
|
||||
return target;
|
||||
}
|
||||
}
|
||||
const res = {};
|
||||
data.forEach((value, key) => {
|
||||
const components = key.toArray();
|
||||
merge(res, value, components, 0);
|
||||
});
|
||||
return new DocumentSnapshot(ref, res);
|
||||
}
|
||||
/**
|
||||
* True if the document exists.
|
||||
*
|
||||
* @type {boolean}
|
||||
* @name DocumentSnapshot#exists
|
||||
* @readonly
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.get().then((documentSnapshot) => {
|
||||
* if (documentSnapshot.exists) {
|
||||
* console.log(`Data: ${JSON.stringify(documentSnapshot.data())}`);
|
||||
* }
|
||||
* });
|
||||
*/
|
||||
get exists() {
|
||||
return this._fieldsProto !== undefined;
|
||||
}
|
||||
/**
|
||||
* A [DocumentReference]{@link DocumentReference} for the document
|
||||
* stored in this snapshot.
|
||||
*
|
||||
* @type {DocumentReference}
|
||||
* @name DocumentSnapshot#ref
|
||||
* @readonly
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.get().then((documentSnapshot) => {
|
||||
* if (documentSnapshot.exists) {
|
||||
* console.log(`Found document at '${documentSnapshot.ref.path}'`);
|
||||
* }
|
||||
* });
|
||||
*/
|
||||
get ref() {
|
||||
return this._ref;
|
||||
}
|
||||
/**
|
||||
* The ID of the document for which this DocumentSnapshot contains data.
|
||||
*
|
||||
* @type {string}
|
||||
* @name DocumentSnapshot#id
|
||||
* @readonly
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.get().then((documentSnapshot) => {
|
||||
* if (documentSnapshot.exists) {
|
||||
* console.log(`Document found with name '${documentSnapshot.id}'`);
|
||||
* }
|
||||
* });
|
||||
*/
|
||||
get id() {
|
||||
return this._ref.id;
|
||||
}
|
||||
/**
|
||||
* The time the document was created. Undefined for documents that don't
|
||||
* exist.
|
||||
*
|
||||
* @type {Timestamp|undefined}
|
||||
* @name DocumentSnapshot#createTime
|
||||
* @readonly
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.get().then(documentSnapshot => {
|
||||
* if (documentSnapshot.exists) {
|
||||
* let createTime = documentSnapshot.createTime;
|
||||
* console.log(`Document created at '${createTime.toDate()}'`);
|
||||
* }
|
||||
* });
|
||||
*/
|
||||
get createTime() {
|
||||
return this._createTime;
|
||||
}
|
||||
/**
|
||||
* The time the document was last updated (at the time the snapshot was
|
||||
* generated). Undefined for documents that don't exist.
|
||||
*
|
||||
* @type {Timestamp|undefined}
|
||||
* @name DocumentSnapshot#updateTime
|
||||
* @readonly
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.get().then(documentSnapshot => {
|
||||
* if (documentSnapshot.exists) {
|
||||
* let updateTime = documentSnapshot.updateTime;
|
||||
* console.log(`Document updated at '${updateTime.toDate()}'`);
|
||||
* }
|
||||
* });
|
||||
*/
|
||||
get updateTime() {
|
||||
return this._updateTime;
|
||||
}
|
||||
/**
|
||||
* The time this snapshot was read.
|
||||
*
|
||||
* @type {Timestamp}
|
||||
* @name DocumentSnapshot#readTime
|
||||
* @readonly
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.get().then(documentSnapshot => {
|
||||
* let readTime = documentSnapshot.readTime;
|
||||
* console.log(`Document read at '${readTime.toDate()}'`);
|
||||
* });
|
||||
*/
|
||||
get readTime() {
|
||||
if (this._readTime === undefined) {
|
||||
throw new Error(`Called 'readTime' on a local document`);
|
||||
}
|
||||
return this._readTime;
|
||||
}
|
||||
/**
|
||||
* Retrieves all fields in the document as an object. Returns 'undefined' if
|
||||
* the document doesn't exist.
|
||||
*
|
||||
* @returns {DocumentData|undefined} An object containing all fields in the
|
||||
* document or 'undefined' if the document doesn't exist.
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.get().then(documentSnapshot => {
|
||||
* let data = documentSnapshot.data();
|
||||
* console.log(`Retrieved data: ${JSON.stringify(data)}`);
|
||||
* });
|
||||
*/
|
||||
data() {
|
||||
const fields = this._fieldsProto;
|
||||
if (fields === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
const obj = {};
|
||||
for (const prop in fields) {
|
||||
if (fields.hasOwnProperty(prop)) {
|
||||
obj[prop] = this._serializer.decodeValue(fields[prop]);
|
||||
}
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
/**
|
||||
* Retrieves the field specified by `field`.
|
||||
*
|
||||
* @param {string|FieldPath} field The field path
|
||||
* (e.g. 'foo' or 'foo.bar') to a specific field.
|
||||
* @returns {*} The data at the specified field location or undefined if no
|
||||
* such field exists.
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.set({ a: { b: 'c' }}).then(() => {
|
||||
* return documentRef.get();
|
||||
* }).then(documentSnapshot => {
|
||||
* let field = documentSnapshot.get('a.b');
|
||||
* console.log(`Retrieved field value: ${field}`);
|
||||
* });
|
||||
*/
|
||||
get(field) {
|
||||
this._validator.isFieldPath('field', field);
|
||||
const protoField = this.protoField(field);
|
||||
if (protoField === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
return this._serializer.decodeValue(protoField);
|
||||
}
|
||||
/**
|
||||
* Retrieves the field specified by 'fieldPath' in its Protobuf JS
|
||||
* representation.
|
||||
*
|
||||
* @private
|
||||
* @param field The path (e.g. 'foo' or 'foo.bar') to a specific field.
|
||||
* @returns The Protobuf-encoded data at the specified field location or
|
||||
* undefined if no such field exists.
|
||||
*/
|
||||
protoField(field) {
|
||||
let fields = this._fieldsProto;
|
||||
if (fields === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
const components = path_1.FieldPath.fromArgument(field).toArray();
|
||||
while (components.length > 1) {
|
||||
fields = fields[components.shift()];
|
||||
if (!fields || !fields.mapValue) {
|
||||
return undefined;
|
||||
}
|
||||
fields = fields.mapValue.fields;
|
||||
}
|
||||
return fields[components[0]];
|
||||
}
|
||||
/**
|
||||
* Checks whether this DocumentSnapshot contains any fields.
|
||||
*
|
||||
* @private
|
||||
* @return {boolean}
|
||||
*/
|
||||
get isEmpty() {
|
||||
return is.undefined(this._fieldsProto) || is.empty(this._fieldsProto);
|
||||
}
|
||||
/**
|
||||
* Convert a document snapshot to the Firestore 'Document' Protobuf.
|
||||
*
|
||||
* @private
|
||||
* @returns The document in the format the API expects.
|
||||
*/
|
||||
toProto() {
|
||||
return {
|
||||
update: {
|
||||
name: this._ref.formattedName,
|
||||
fields: this._fieldsProto,
|
||||
},
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Returns true if the document's data and path in this `DocumentSnapshot` is
|
||||
* equal to the provided value.
|
||||
*
|
||||
* @param {*} other The value to compare against.
|
||||
* @return {boolean} true if this `DocumentSnapshot` is equal to the provided
|
||||
* value.
|
||||
*/
|
||||
isEqual(other) {
|
||||
// Since the read time is different on every document read, we explicitly
|
||||
// ignore all document metadata in this comparison.
|
||||
return (this === other ||
|
||||
((other instanceof DocumentSnapshot) && this._ref.isEqual(other._ref) &&
|
||||
deepEqual(this._fieldsProto, other._fieldsProto, { strict: true })));
|
||||
}
|
||||
}
|
||||
exports.DocumentSnapshot = DocumentSnapshot;
|
||||
/**
|
||||
* A QueryDocumentSnapshot contains data read from a document in your
|
||||
* Firestore database as part of a query. The document is guaranteed to exist
|
||||
* and its data can be extracted with [data()]{@link QueryDocumentSnapshot#data}
|
||||
* or [get()]{@link DocumentSnapshot#get} to get a specific field.
|
||||
*
|
||||
* A QueryDocumentSnapshot offers the same API surface as a
|
||||
* {@link DocumentSnapshot}. Since query results contain only existing
|
||||
* documents, the [exists]{@link DocumentSnapshot#exists} property will
|
||||
* always be true and [data()]{@link QueryDocumentSnapshot#data} will never
|
||||
* return 'undefined'.
|
||||
*
|
||||
* @class
|
||||
* @extends DocumentSnapshot
|
||||
*/
|
||||
class QueryDocumentSnapshot extends DocumentSnapshot {
|
||||
/**
|
||||
* @private
|
||||
* @hideconstructor
|
||||
*
|
||||
* @param ref The reference to the document.
|
||||
* @param fieldsProto The fields of the Firestore `Document` Protobuf backing
|
||||
* this document.
|
||||
* @param readTime The time when this snapshot was read.
|
||||
* @param createTime The time when the document was created.
|
||||
* @param updateTime The time when the document was last updated.
|
||||
*/
|
||||
constructor(ref, fieldsProto, readTime, createTime, updateTime) {
|
||||
super(ref, fieldsProto, readTime, createTime, updateTime);
|
||||
}
|
||||
/**
|
||||
* The time the document was created.
|
||||
*
|
||||
* @type {Timestamp}
|
||||
* @name QueryDocumentSnapshot#createTime
|
||||
* @readonly
|
||||
* @override
|
||||
*
|
||||
* @example
|
||||
* let query = firestore.collection('col');
|
||||
*
|
||||
* query.get().then(querySnapshot => querySnapshot.forEach(snapshot => {
|
||||
* console.log(`Document created at '${snapshot.createTime.toDate()}'`);
|
||||
* }));
|
||||
*/
|
||||
get createTime() {
|
||||
return super.createTime;
|
||||
}
|
||||
/**
|
||||
* The time the document was last updated (at the time the snapshot was
|
||||
* generated).
|
||||
*
|
||||
* @type {Timestamp}
|
||||
* @name QueryDocumentSnapshot#updateTime
|
||||
* @readonly
|
||||
* @override
|
||||
*
|
||||
* @example
|
||||
* let query = firestore.collection('col');
|
||||
*
|
||||
* query.get().then(querySnapshot => querySnapshot.forEach(snapshot => {
|
||||
* console.log(`Document updated at '${snapshot.updateTime.toDate()}'`);
|
||||
* }));
|
||||
*/
|
||||
get updateTime() {
|
||||
return super.updateTime;
|
||||
}
|
||||
/**
|
||||
* Retrieves all fields in the document as an object.
|
||||
*
|
||||
* @override
|
||||
*
|
||||
* @returns {DocumentData} An object containing all fields in the document.
|
||||
*
|
||||
* @example
|
||||
* let query = firestore.collection('col');
|
||||
*
|
||||
* query.get().then(querySnapshot => querySnapshot.forEach(documentSnapshot => {
|
||||
* let data = documentSnapshot.data();
|
||||
* console.log(`Retrieved data: ${JSON.stringify(data)}`);
|
||||
* }));
|
||||
*/
|
||||
data() {
|
||||
const data = super.data();
|
||||
if (!data) {
|
||||
throw new Error('The data in a QueryDocumentSnapshot should always exist.');
|
||||
}
|
||||
return data;
|
||||
}
|
||||
}
|
||||
exports.QueryDocumentSnapshot = QueryDocumentSnapshot;
|
||||
/**
|
||||
* A Firestore Document Mask contains the field paths affected by an update.
|
||||
*
|
||||
* @class
|
||||
* @private
|
||||
*/
|
||||
class DocumentMask {
|
||||
/**
|
||||
* @private
|
||||
* @hideconstructor
|
||||
*
|
||||
* @param fieldPaths The field paths in this mask.
|
||||
*/
|
||||
constructor(fieldPaths) {
|
||||
this._sortedPaths = fieldPaths;
|
||||
this._sortedPaths.sort((a, b) => a.compareTo(b));
|
||||
}
|
||||
/**
|
||||
* Creates a document mask with the field paths of a document.
|
||||
*
|
||||
* @private
|
||||
* @param data A map with fields to modify. Only the keys are used to extract
|
||||
* the document mask.
|
||||
*/
|
||||
static fromUpdateMap(data) {
|
||||
const fieldPaths = [];
|
||||
data.forEach((value, key) => {
|
||||
if (!(value instanceof field_value_1.FieldTransform) || value.includeInDocumentMask) {
|
||||
fieldPaths.push(path_1.FieldPath.fromArgument(key));
|
||||
}
|
||||
});
|
||||
return new DocumentMask(fieldPaths);
|
||||
}
|
||||
/**
|
||||
* Creates a document mask from an array of field paths.
|
||||
*
|
||||
* @private
|
||||
* @param fieldMask A list of field paths.
|
||||
*/
|
||||
static fromFieldMask(fieldMask) {
|
||||
const fieldPaths = [];
|
||||
for (const fieldPath of fieldMask) {
|
||||
fieldPaths.push(path_1.FieldPath.fromArgument(fieldPath));
|
||||
}
|
||||
return new DocumentMask(fieldPaths);
|
||||
}
|
||||
/**
|
||||
* Creates a document mask with the field names of a document.
|
||||
*
|
||||
* @private
|
||||
* @param data An object with fields to modify. Only the keys are used to
|
||||
* extract the document mask.
|
||||
*/
|
||||
static fromObject(data) {
|
||||
const fieldPaths = [];
|
||||
function extractFieldPaths(currentData, currentPath) {
|
||||
let isEmpty = true;
|
||||
for (const key in currentData) {
|
||||
if (currentData.hasOwnProperty(key)) {
|
||||
isEmpty = false;
|
||||
// We don't split on dots since fromObject is called with
|
||||
// DocumentData.
|
||||
const childSegment = new path_1.FieldPath(key);
|
||||
const childPath = currentPath ? currentPath.append(childSegment) : childSegment;
|
||||
const value = currentData[key];
|
||||
if (value instanceof field_value_1.FieldTransform) {
|
||||
if (value.includeInDocumentMask) {
|
||||
fieldPaths.push(childPath);
|
||||
}
|
||||
}
|
||||
else if (serializer_1.isPlainObject(value)) {
|
||||
extractFieldPaths(value, childPath);
|
||||
}
|
||||
else {
|
||||
fieldPaths.push(childPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Add a field path for an explicitly updated empty map.
|
||||
if (currentPath && isEmpty) {
|
||||
fieldPaths.push(currentPath);
|
||||
}
|
||||
}
|
||||
extractFieldPaths(data);
|
||||
return new DocumentMask(fieldPaths);
|
||||
}
|
||||
/**
|
||||
* Returns true if this document mask contains no fields.
|
||||
*
|
||||
* @private
|
||||
* @return {boolean} Whether this document mask is empty.
|
||||
*/
|
||||
get isEmpty() {
|
||||
return this._sortedPaths.length === 0;
|
||||
}
|
||||
/**
|
||||
* Removes the specified values from a sorted field path array.
|
||||
*
|
||||
* @private
|
||||
* @param input A sorted array of FieldPaths.
|
||||
* @param values An array of FieldPaths to remove.
|
||||
*/
|
||||
static removeFromSortedArray(input, values) {
|
||||
for (let i = 0; i < input.length;) {
|
||||
let removed = false;
|
||||
for (const fieldPath of values) {
|
||||
if (input[i].isEqual(fieldPath)) {
|
||||
input.splice(i, 1);
|
||||
removed = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!removed) {
|
||||
++i;
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Removes the field path specified in 'fieldPaths' from this document mask.
|
||||
*
|
||||
* @private
|
||||
* @param fieldPaths An array of FieldPaths.
|
||||
*/
|
||||
removeFields(fieldPaths) {
|
||||
DocumentMask.removeFromSortedArray(this._sortedPaths, fieldPaths);
|
||||
}
|
||||
/**
|
||||
* Returns whether this document mask contains 'fieldPath'.
|
||||
*
|
||||
* @private
|
||||
* @param fieldPath The field path to test.
|
||||
* @return Whether this document mask contains 'fieldPath'.
|
||||
*/
|
||||
contains(fieldPath) {
|
||||
for (const sortedPath of this._sortedPaths) {
|
||||
const cmp = sortedPath.compareTo(fieldPath);
|
||||
if (cmp === 0) {
|
||||
return true;
|
||||
}
|
||||
else if (cmp > 0) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
/**
|
||||
* Removes all properties from 'data' that are not contained in this document
|
||||
* mask.
|
||||
*
|
||||
* @private
|
||||
* @param data An object to filter.
|
||||
* @return A shallow copy of the object filtered by this document mask.
|
||||
*/
|
||||
applyTo(data) {
|
||||
/*!
|
||||
* Applies this DocumentMask to 'data' and computes the list of field paths
|
||||
* that were specified in the mask but are not present in 'data'.
|
||||
*/
|
||||
const applyDocumentMask = data => {
|
||||
const remainingPaths = this._sortedPaths.slice(0);
|
||||
const processObject = (currentData, currentPath) => {
|
||||
let result = null;
|
||||
Object.keys(currentData).forEach(key => {
|
||||
const childPath = currentPath ? currentPath.append(key) : new path_1.FieldPath(key);
|
||||
if (this.contains(childPath)) {
|
||||
DocumentMask.removeFromSortedArray(remainingPaths, [childPath]);
|
||||
result = result || {};
|
||||
result[key] = currentData[key];
|
||||
}
|
||||
else if (is.object(currentData[key])) {
|
||||
const childObject = processObject(currentData[key], childPath);
|
||||
if (childObject) {
|
||||
result = result || {};
|
||||
result[key] = childObject;
|
||||
}
|
||||
}
|
||||
});
|
||||
return result;
|
||||
};
|
||||
// processObject() returns 'null' if the DocumentMask is empty.
|
||||
const filteredData = processObject(data) || {};
|
||||
return {
|
||||
filteredData,
|
||||
remainingPaths,
|
||||
};
|
||||
};
|
||||
const result = applyDocumentMask(data);
|
||||
if (result.remainingPaths.length !== 0) {
|
||||
throw new Error(`Input data is missing for field "${result.remainingPaths[0].toString()}".`);
|
||||
}
|
||||
return result.filteredData;
|
||||
}
|
||||
/**
|
||||
* Converts a document mask to the Firestore 'DocumentMask' Proto.
|
||||
*
|
||||
* @private
|
||||
* @returns A Firestore 'DocumentMask' Proto.
|
||||
*/
|
||||
toProto() {
|
||||
if (this.isEmpty) {
|
||||
return {};
|
||||
}
|
||||
const encodedPaths = [];
|
||||
for (const fieldPath of this._sortedPaths) {
|
||||
encodedPaths.push(fieldPath.formattedName);
|
||||
}
|
||||
return {
|
||||
fieldPaths: encodedPaths,
|
||||
};
|
||||
}
|
||||
}
|
||||
exports.DocumentMask = DocumentMask;
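/*!
 * Illustrative sketch, not invoked by the library: deriving a DocumentMask
 * from plain update data and filtering an object with it. Nested keys become
 * dot-separated field paths ('a.b' and 'c' in this example).
 */
function documentMaskUsageSketch() {
    const updateData = { a: { b: 'new-value' }, c: 1 };
    const mask = DocumentMask.fromObject(updateData);
    // applyTo() keeps only the properties named in the mask and throws if a
    // masked field is missing from the input data.
    return mask.applyTo(updateData);
}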
|
||||
/**
|
||||
* A Firestore Document Transform.
|
||||
*
|
||||
* A DocumentTransform contains pending server-side transforms and their
|
||||
* corresponding field paths.
|
||||
*
|
||||
* @private
|
||||
* @class
|
||||
*/
|
||||
class DocumentTransform {
|
||||
/**
|
||||
* @private
|
||||
* @hideconstructor
|
||||
*
|
||||
* @param ref The DocumentReference for this transform.
|
||||
* @param transforms A Map of FieldPaths to FieldTransforms.
|
||||
*/
|
||||
constructor(ref, transforms) {
|
||||
this._ref = ref;
|
||||
this._validator = ref.firestore._validator;
|
||||
this._transforms = transforms;
|
||||
}
|
||||
/**
|
||||
* Generates a DocumentTransform from a JavaScript object.
|
||||
*
|
||||
* @private
|
||||
* @param ref The `DocumentReference` to use for the DocumentTransform.
|
||||
* @param obj The object to extract the transformations from.
|
||||
* @returns The Document Transform.
|
||||
*/
|
||||
static fromObject(ref, obj) {
|
||||
const updateMap = new Map();
|
||||
for (const prop in obj) {
|
||||
if (obj.hasOwnProperty(prop)) {
|
||||
updateMap.set(new path_1.FieldPath(prop), obj[prop]);
|
||||
}
|
||||
}
|
||||
return DocumentTransform.fromUpdateMap(ref, updateMap);
|
||||
}
|
||||
/**
|
||||
* Generates a DocumentTransform from an Update Map.
|
||||
*
|
||||
* @private
|
||||
* @param ref The `DocumentReference` to use for the DocumentTransform.
|
||||
* @param data The update data to extract the transformations from.
|
||||
* @returns The Document Transform.
|
||||
*/
|
||||
static fromUpdateMap(ref, data) {
|
||||
const transforms = new Map();
|
||||
function encode_(val, path, allowTransforms) {
|
||||
if (val instanceof field_value_1.FieldTransform && val.includeInDocumentTransform) {
|
||||
if (allowTransforms) {
|
||||
transforms.set(path, val);
|
||||
}
|
||||
else {
|
||||
throw new Error(`${val.methodName}() is not supported inside of array values.`);
|
||||
}
|
||||
}
|
||||
else if (is.array(val)) {
|
||||
for (let i = 0; i < val.length; ++i) {
|
||||
// We need to verify that no array value contains a document transform
|
||||
encode_(val[i], path.append(String(i)), false);
|
||||
}
|
||||
}
|
||||
else if (serializer_1.isPlainObject(val)) {
|
||||
for (const prop in val) {
|
||||
if (val.hasOwnProperty(prop)) {
|
||||
encode_(val[prop], path.append(new path_1.FieldPath(prop)), allowTransforms);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
data.forEach((value, key) => {
|
||||
encode_(value, path_1.FieldPath.fromArgument(key), true);
|
||||
});
|
||||
return new DocumentTransform(ref, transforms);
|
||||
}
|
||||
/**
|
||||
* Whether this DocumentTransform contains any actionable transformations.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
get isEmpty() {
|
||||
return this._transforms.size === 0;
|
||||
}
|
||||
/**
|
||||
* Returns the array of fields in this DocumentTransform.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
get fields() {
|
||||
return Array.from(this._transforms.keys());
|
||||
}
|
||||
/** Validates the user provided field values in this document transform. */
|
||||
validate() {
|
||||
this._transforms.forEach(transform => transform.validate(this._validator));
|
||||
}
|
||||
/**
|
||||
* Converts a document transform to the Firestore 'DocumentTransform' Proto.
|
||||
*
|
||||
* @private
|
||||
* @param serializer The Firestore serializer
|
||||
* @returns A Firestore 'DocumentTransform' Proto or 'null' if this transform
|
||||
* is empty.
|
||||
*/
|
||||
toProto(serializer) {
|
||||
if (this.isEmpty) {
|
||||
return null;
|
||||
}
|
||||
const protoTransforms = [];
|
||||
this._transforms.forEach((transform, path) => {
|
||||
protoTransforms.push(transform.toProto(serializer, path));
|
||||
});
|
||||
return {
|
||||
transform: {
|
||||
document: this._ref.formattedName,
|
||||
fieldTransforms: protoTransforms,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
exports.DocumentTransform = DocumentTransform;
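/*!
 * Illustrative sketch, not invoked by the library, assuming `ref` is a
 * DocumentReference: only sentinel values such as
 * FieldValue.serverTimestamp() are collected as transforms; plain values are
 * left to the document mask and the update proto.
 */
function documentTransformSketch(ref) {
    const transform = DocumentTransform.fromObject(ref, {
        updatedAt: field_value_1.FieldValue.serverTimestamp(),
        title: 'hello',
    });
    // `fields` contains a single FieldPath for 'updatedAt'; 'title' is absent.
    return transform.fields;
}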
|
||||
/*!
|
||||
* A Firestore Precondition encapsulates options for database writes.
|
||||
*
|
||||
* @private
|
||||
* @class
|
||||
*/
|
||||
class Precondition {
|
||||
/**
|
||||
* @private
|
||||
* @hideconstructor
|
||||
*
|
||||
* @param options.exists - Whether the referenced document should exist in
|
||||
* Firestore.
|
||||
* @param options.lastUpdateTime - The last update time of the referenced
|
||||
* document in Firestore.
|
||||
* @param options
|
||||
*/
|
||||
constructor(options) {
|
||||
if (options !== undefined) {
|
||||
this._exists = options.exists;
|
||||
this._lastUpdateTime = options.lastUpdateTime;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Generates the Protobuf `Precondition` object for this precondition.
|
||||
*
|
||||
* @private
|
||||
* @returns The `Precondition` Protobuf object or 'null' if there are no
|
||||
* preconditions.
|
||||
*/
|
||||
toProto() {
|
||||
if (this.isEmpty) {
|
||||
return null;
|
||||
}
|
||||
const proto = {};
|
||||
if (this._lastUpdateTime !== undefined) {
|
||||
proto.updateTime = this._lastUpdateTime.toProto().timestampValue;
|
||||
}
|
||||
else {
|
||||
proto.exists = this._exists;
|
||||
}
|
||||
return proto;
|
||||
}
|
||||
/**
|
||||
* Whether this Precondition contains any enforcement.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
get isEmpty() {
|
||||
return this._exists === undefined && !this._lastUpdateTime;
|
||||
}
|
||||
}
|
||||
exports.Precondition = Precondition;
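/*!
 * Illustrative sketch, not invoked by the library: a write that requires the
 * document to already exist serializes to `{ exists: true }`, while an empty
 * Precondition serializes to 'null'.
 */
function preconditionSketch() {
    return [
        new Precondition({ exists: true }).toProto(), // { exists: true }
        new Precondition().toProto(),                 // null
    ];
}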
|
||||
/**
|
||||
* Validates the use of 'options' as a Precondition and enforces that 'exists'
|
||||
* and 'lastUpdateTime' use valid types.
|
||||
*
|
||||
* @private
|
||||
* @param options.exists Whether the referenced document should exist.
|
||||
* @param options.lastUpdateTime The last update time of the referenced
|
||||
* document in Firestore.
|
||||
* @param allowExist Whether to allow the 'exists' precondition.
|
||||
* @returns 'true' if the input is a valid Precondition.
|
||||
*/
|
||||
function validatePrecondition(precondition, allowExist) {
|
||||
if (!is.object(precondition)) {
|
||||
throw new Error('Input is not an object.');
|
||||
}
|
||||
let conditions = 0;
|
||||
if (precondition.exists !== undefined) {
|
||||
++conditions;
|
||||
if (!allowExist) {
|
||||
throw new Error('"exists" is not an allowed condition.');
|
||||
}
|
||||
if (!is.boolean(precondition.exists)) {
|
||||
throw new Error('"exists" is not a boolean.');
|
||||
}
|
||||
}
|
||||
if (precondition.lastUpdateTime !== undefined) {
|
||||
++conditions;
|
||||
if (!(precondition.lastUpdateTime instanceof timestamp_1.Timestamp)) {
|
||||
throw new Error('"lastUpdateTime" is not a Firestore Timestamp.');
|
||||
}
|
||||
}
|
||||
if (conditions > 1) {
|
||||
throw new Error('Input contains more than one condition.');
|
||||
}
|
||||
return true;
|
||||
}
|
||||
exports.validatePrecondition = validatePrecondition;
|
||||
/**
|
||||
* Validates the use of 'options' as SetOptions and enforces that 'merge' is a
|
||||
* boolean.
|
||||
*
|
||||
* @private
|
||||
* @param options.merge - Whether set() should merge the provided data into an
|
||||
* existing document.
|
||||
* @param options.mergeFields - If provided, set() only merges the specified
|
||||
* set of fields.
|
||||
* @returns 'true' if the input is a valid SetOptions object.
|
||||
*/
|
||||
function validateSetOptions(options) {
|
||||
if (!is.object(options)) {
|
||||
throw new Error('Input is not an object.');
|
||||
}
|
||||
if (options.merge !== undefined && !is.boolean(options.merge)) {
|
||||
throw new Error('"merge" is not a boolean.');
|
||||
}
|
||||
if (options.mergeFields !== undefined) {
|
||||
if (!is.array(options.mergeFields)) {
|
||||
throw new Error('"mergeFields" is not an array.');
|
||||
}
|
||||
for (let i = 0; i < options.mergeFields.length; ++i) {
|
||||
try {
|
||||
path_1.FieldPath.validateFieldPath(options.mergeFields[i]);
|
||||
}
|
||||
catch (err) {
|
||||
throw new Error(`Element at index ${i} is not a valid FieldPath. ${err.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (options.merge !== undefined && options.mergeFields !== undefined) {
|
||||
throw new Error('You cannot specify both "merge" and "mergeFields".');
|
||||
}
|
||||
return true;
|
||||
}
|
||||
exports.validateSetOptions = validateSetOptions;
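/*!
 * Illustrative sketch, not invoked by the library: the SetOptions shapes that
 * validateSetOptions() accepts and rejects.
 */
function setOptionsSketch() {
    validateSetOptions({ merge: true });               // ok
    validateSetOptions({ mergeFields: ['a', 'b.c'] }); // ok
    // validateSetOptions({ merge: true, mergeFields: ['a'] }); // throws: both given
    // validateSetOptions({ merge: 'yes' });                    // throws: not a boolean
}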
|
||||
//# sourceMappingURL=document.js.map
|
||||
320
express-server/node_modules/@google-cloud/firestore/build/src/field-value.js
generated
vendored
Normal file
320
express-server/node_modules/@google-cloud/firestore/build/src/field-value.js
generated
vendored
Normal file
@@ -0,0 +1,320 @@
|
||||
"use strict";
|
||||
/*!
|
||||
* Copyright 2018 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const deepEqual = require('deep-equal');
|
||||
const validate_1 = require("./validate");
|
||||
const validate = validate_1.createValidator();
|
||||
/**
|
||||
* Sentinel values that can be used when writing documents with set(), create()
|
||||
* or update().
|
||||
*
|
||||
* @class
|
||||
*/
|
||||
class FieldValue {
|
||||
/**
|
||||
* @private
|
||||
* @hideconstructor
|
||||
*/
|
||||
constructor() { }
|
||||
/**
|
||||
* Returns a sentinel for use with update() or set() with {merge:true} to mark
|
||||
* a field for deletion.
|
||||
*
|
||||
* @returns {FieldValue} The sentinel value to use in your objects.
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
* let data = { a: 'b', c: 'd' };
|
||||
*
|
||||
* documentRef.set(data).then(() => {
|
||||
* return documentRef.update({a: Firestore.FieldValue.delete()});
|
||||
* }).then(() => {
|
||||
* // Document now only contains { c: 'd' }
|
||||
* });
|
||||
*/
|
||||
static delete() {
|
||||
return DeleteTransform.DELETE_SENTINEL;
|
||||
}
|
||||
/**
|
||||
* Returns a sentinel used with set(), create() or update() to include a
|
||||
* server-generated timestamp in the written data.
|
||||
*
|
||||
* @return {FieldValue} The FieldValue sentinel for use in a call to set(),
|
||||
* create() or update().
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.set({
|
||||
* time: Firestore.FieldValue.serverTimestamp()
|
||||
* }).then(() => {
|
||||
* return documentRef.get();
|
||||
* }).then(doc => {
|
||||
* console.log(`Server time set to ${doc.get('time')}`);
|
||||
* });
|
||||
*/
|
||||
static serverTimestamp() {
|
||||
return ServerTimestampTransform.SERVER_TIMESTAMP_SENTINEL;
|
||||
}
|
||||
/**
|
||||
* Returns a special value that can be used with set(), create() or update()
|
||||
* that tells the server to union the given elements with any array value that
|
||||
* already exists on the server. Each specified element that doesn't already
|
||||
* exist in the array will be added to the end. If the field being modified is
|
||||
* not already an array it will be overwritten with an array containing
|
||||
* exactly the specified elements.
|
||||
*
|
||||
* @param {...*} elements The elements to union into the array.
|
||||
* @return {FieldValue} The FieldValue sentinel for use in a call to set(),
|
||||
* create() or update().
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.update(
|
||||
* 'array', Firestore.FieldValue.arrayUnion('foo')
|
||||
* ).then(() => {
|
||||
* return documentRef.get();
|
||||
* }).then(doc => {
|
||||
* // doc.get('array') contains field 'foo'
|
||||
* });
|
||||
*/
|
||||
static arrayUnion(...elements) {
|
||||
validate.minNumberOfArguments('FieldValue.arrayUnion', arguments, 1);
|
||||
return new ArrayUnionTransform(elements);
|
||||
}
|
||||
/**
|
||||
* Returns a special value that can be used with set(), create() or update()
|
||||
* that tells the server to remove the given elements from any array value
|
||||
* that already exists on the server. All instances of each element specified
|
||||
* will be removed from the array. If the field being modified is not already
|
||||
* an array it will be overwritten with an empty array.
|
||||
*
|
||||
* @param {...*} elements The elements to remove from the array.
|
||||
* @return {FieldValue} The FieldValue sentinel for use in a call to set(),
|
||||
* create() or update().
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.update(
|
||||
* 'array', Firestore.FieldValue.arrayRemove('foo')
|
||||
* ).then(() => {
|
||||
* return documentRef.get();
|
||||
* }).then(doc => {
|
||||
* // doc.get('array') no longer contains field 'foo'
|
||||
* });
|
||||
*/
|
||||
static arrayRemove(...elements) {
|
||||
validate.minNumberOfArguments('FieldValue.arrayRemove', arguments, 1);
|
||||
return new ArrayRemoveTransform(elements);
|
||||
}
|
||||
/**
|
||||
* Returns true if this `FieldValue` is equal to the provided value.
|
||||
*
|
||||
* @param {*} other The value to compare against.
|
||||
* @return {boolean} true if this `FieldValue` is equal to the provided value.
|
||||
*/
|
||||
isEqual(other) {
|
||||
return this === other;
|
||||
}
|
||||
}
|
||||
exports.FieldValue = FieldValue;
|
||||
/**
|
||||
* An internal interface shared by all field transforms.
|
||||
*
|
||||
* A 'FieldTransform' subclass should implement '.includeInDocumentMask',
|
||||
* '.includeInDocumentTransform' and 'toProto' (if '.includeInDocumentTransform'
|
||||
* is 'true').
|
||||
*
|
||||
* @private
|
||||
* @abstract
|
||||
*/
|
||||
class FieldTransform extends FieldValue {
|
||||
}
|
||||
exports.FieldTransform = FieldTransform;
|
||||
/**
|
||||
* A transform that deletes a field from a Firestore document.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
class DeleteTransform extends FieldTransform {
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
/**
|
||||
* Deletes are included in document masks.
|
||||
*/
|
||||
get includeInDocumentMask() {
|
||||
return true;
|
||||
}
|
||||
/**
|
||||
* Deletes are omitted from document transforms.
|
||||
*/
|
||||
get includeInDocumentTransform() {
|
||||
return false;
|
||||
}
|
||||
get methodName() {
|
||||
return 'FieldValue.delete';
|
||||
}
|
||||
validate() {
|
||||
return true;
|
||||
}
|
||||
toProto(serializer, fieldPath) {
|
||||
throw new Error('FieldValue.delete() should not be included in a FieldTransform');
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Sentinel value for a field delete.
|
||||
*/
|
||||
DeleteTransform.DELETE_SENTINEL = new DeleteTransform();
|
||||
exports.DeleteTransform = DeleteTransform;
|
||||
/**
|
||||
* A transform that sets a field to the Firestore server time.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
class ServerTimestampTransform extends FieldTransform {
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
/**
|
||||
* Server timestamps are omitted from document masks.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
get includeInDocumentMask() {
|
||||
return false;
|
||||
}
|
||||
/**
|
||||
* Server timestamps are included in document transforms.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
get includeInDocumentTransform() {
|
||||
return true;
|
||||
}
|
||||
get methodName() {
|
||||
return 'FieldValue.serverTimestamp';
|
||||
}
|
||||
validate() {
|
||||
return true;
|
||||
}
|
||||
toProto(serializer, fieldPath) {
|
||||
return {
|
||||
fieldPath: fieldPath.formattedName,
|
||||
setToServerValue: 'REQUEST_TIME',
|
||||
};
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Sentinel value for a server timestamp.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
ServerTimestampTransform.SERVER_TIMESTAMP_SENTINEL = new ServerTimestampTransform();
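/*!
 * Illustrative sketch, not invoked by the library: how the write pipeline
 * routes the sentinels. FieldValue.delete() only affects the document mask,
 * while FieldValue.serverTimestamp() only produces a field transform.
 */
function sentinelRoutingSketch() {
    const del = FieldValue.delete();
    const ts = FieldValue.serverTimestamp();
    return {
        deleteInMask: del.includeInDocumentMask,             // true
        deleteInTransform: del.includeInDocumentTransform,   // false
        timestampInMask: ts.includeInDocumentMask,           // false
        timestampInTransform: ts.includeInDocumentTransform, // true
    };
}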
|
||||
/**
|
||||
* Transforms an array value via a union operation.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
class ArrayUnionTransform extends FieldTransform {
|
||||
constructor(elements) {
|
||||
super();
|
||||
this.elements = elements;
|
||||
}
|
||||
/**
|
||||
* Array transforms are omitted from document masks.
|
||||
*/
|
||||
get includeInDocumentMask() {
|
||||
return false;
|
||||
}
|
||||
/**
|
||||
* Array transforms are included in document transforms.
|
||||
*/
|
||||
get includeInDocumentTransform() {
|
||||
return true;
|
||||
}
|
||||
get methodName() {
|
||||
return 'FieldValue.arrayUnion';
|
||||
}
|
||||
validate(validator) {
|
||||
let valid = true;
|
||||
for (let i = 0; valid && i < this.elements.length; ++i) {
|
||||
valid = validator.isArrayElement(i, this.elements[i], { allowDeletes: 'none', allowTransforms: false });
|
||||
}
|
||||
return valid;
|
||||
}
|
||||
toProto(serializer, fieldPath) {
|
||||
const encodedElements = serializer.encodeValue(this.elements).arrayValue;
|
||||
return {
|
||||
fieldPath: fieldPath.formattedName,
|
||||
appendMissingElements: encodedElements
|
||||
};
|
||||
}
|
||||
isEqual(other) {
|
||||
return (this === other ||
|
||||
(other instanceof ArrayUnionTransform &&
|
||||
deepEqual(this.elements, other.elements, { strict: true })));
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Transforms an array value via a remove operation.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
class ArrayRemoveTransform extends FieldTransform {
|
||||
constructor(elements) {
|
||||
super();
|
||||
this.elements = elements;
|
||||
}
|
||||
/**
|
||||
* Array transforms are omitted from document masks.
|
||||
*/
|
||||
get includeInDocumentMask() {
|
||||
return false;
|
||||
}
|
||||
/**
|
||||
* Array transforms are included in document transforms.
|
||||
*/
|
||||
get includeInDocumentTransform() {
|
||||
return true;
|
||||
}
|
||||
get methodName() {
|
||||
return 'FieldValue.arrayRemove';
|
||||
}
|
||||
validate(validator) {
|
||||
let valid = true;
|
||||
for (let i = 0; valid && i < this.elements.length; ++i) {
|
||||
valid = validator.isArrayElement(i, this.elements[i], { allowDeletes: 'none', allowTransforms: false });
|
||||
}
|
||||
return valid;
|
||||
}
|
||||
toProto(serializer, fieldPath) {
|
||||
const encodedElements = serializer.encodeValue(this.elements).arrayValue;
|
||||
return {
|
||||
fieldPath: fieldPath.formattedName,
|
||||
removeAllFromArray: encodedElements
|
||||
};
|
||||
}
|
||||
isEqual(other) {
|
||||
return (this === other ||
|
||||
(other instanceof ArrayRemoveTransform &&
|
||||
deepEqual(this.elements, other.elements, { strict: true })));
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=field-value.js.map
|
||||
101
express-server/node_modules/@google-cloud/firestore/build/src/geo-point.js
generated
vendored
Normal file
101
express-server/node_modules/@google-cloud/firestore/build/src/geo-point.js
generated
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
"use strict";
|
||||
/*!
|
||||
* Copyright 2018 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const validate_1 = require("./validate");
|
||||
const validate = validate_1.createValidator();
|
||||
/**
|
||||
* An immutable object representing a geographic location in Firestore. The
|
||||
* location is represented as a latitude/longitude pair.
|
||||
*
|
||||
* @class
|
||||
*/
|
||||
class GeoPoint {
|
||||
/**
|
||||
* Creates a [GeoPoint]{@link GeoPoint}.
|
||||
*
|
||||
* @param {number} latitude The latitude as a number between -90 and 90.
|
||||
* @param {number} longitude The longitude as a number between -180 and 180.
|
||||
*
|
||||
* @example
|
||||
* let data = {
|
||||
* google: new Firestore.GeoPoint(37.422, 122.084)
|
||||
* };
|
||||
*
|
||||
* firestore.doc('col/doc').set(data).then(() => {
|
||||
* console.log(`Location is ${data.google.latitude}, ` +
|
||||
* `${data.google.longitude}`);
|
||||
* });
|
||||
*/
|
||||
constructor(latitude, longitude) {
|
||||
validate.isNumber('latitude', latitude, -90, 90);
|
||||
validate.isNumber('longitude', longitude, -180, 180);
|
||||
this._latitude = latitude;
|
||||
this._longitude = longitude;
|
||||
}
|
||||
/**
|
||||
* The latitude as a number between -90 and 90.
|
||||
*
|
||||
* @type {number}
|
||||
* @name GeoPoint#latitude
|
||||
* @readonly
|
||||
*/
|
||||
get latitude() {
|
||||
return this._latitude;
|
||||
}
|
||||
/**
|
||||
* The longitude as a number between -180 and 180.
|
||||
*
|
||||
* @type {number}
|
||||
* @name GeoPoint#longitude
|
||||
* @readonly
|
||||
*/
|
||||
get longitude() {
|
||||
return this._longitude;
|
||||
}
|
||||
/**
|
||||
* Returns true if this `GeoPoint` is equal to the provided value.
|
||||
*
|
||||
* @param {*} other The value to compare against.
|
||||
* @return {boolean} true if this `GeoPoint` is equal to the provided value.
|
||||
*/
|
||||
isEqual(other) {
|
||||
return (this === other ||
|
||||
(other instanceof GeoPoint && this.latitude === other.latitude &&
|
||||
this.longitude === other.longitude));
|
||||
}
|
||||
/**
|
||||
* Converts the GeoPoint to a google.type.LatLng proto.
|
||||
* @private
|
||||
*/
|
||||
toProto() {
|
||||
return {
|
||||
geoPointValue: {
|
||||
latitude: this.latitude,
|
||||
longitude: this.longitude,
|
||||
}
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Converts a google.type.LatLng proto to its GeoPoint representation.
|
||||
* @private
|
||||
*/
|
||||
static fromProto(proto) {
|
||||
return new GeoPoint(proto.latitude || 0, proto.longitude || 0);
|
||||
}
|
||||
}
|
||||
exports.GeoPoint = GeoPoint;
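/*!
 * Illustrative sketch, not invoked by the library: toProto() nests the
 * coordinates under `geoPointValue`, and fromProto() accepts the inner
 * LatLng message, defaulting missing fields to 0.
 */
function geoPointRoundTripSketch() {
    const point = new GeoPoint(37.422, -122.084);
    const restored = GeoPoint.fromProto(point.toProto().geoPointValue);
    return restored.isEqual(point); // true
}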
|
||||
//# sourceMappingURL=geo-point.js.map
|
||||
1224
express-server/node_modules/@google-cloud/firestore/build/src/index.js
generated
vendored
Normal file
1224
express-server/node_modules/@google-cloud/firestore/build/src/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
58
express-server/node_modules/@google-cloud/firestore/build/src/logger.js
generated
vendored
Normal file
58
express-server/node_modules/@google-cloud/firestore/build/src/logger.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
"use strict";
|
||||
/*!
|
||||
* Copyright 2018 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const util = require("util");
|
||||
const validate_1 = require("./validate");
|
||||
const validate = validate_1.createValidator();
|
||||
/*! The Firestore library version */
|
||||
let libVersion;
|
||||
/*! The external function used to emit logs. */
|
||||
let logFunction = (msg) => { };
|
||||
/**
|
||||
* Log function to use for debug output. By default, we don't perform any
|
||||
* logging.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
function logger(methodName, requestTag, logMessage, ...additionalArgs) {
|
||||
requestTag = requestTag || '#####';
|
||||
const formattedMessage = util.format(logMessage, ...additionalArgs);
|
||||
const time = new Date().toISOString();
|
||||
logFunction(`Firestore (${libVersion}) ${time} ${requestTag} [${methodName}]: ` +
|
||||
formattedMessage);
|
||||
}
|
||||
exports.logger = logger;
|
||||
/**
|
||||
* Sets the log function for all active Firestore instances.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
function setLogFunction(logger) {
|
||||
validate.isFunction('logger', logger);
|
||||
logFunction = logger;
|
||||
}
|
||||
exports.setLogFunction = setLogFunction;
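/*!
 * Illustrative sketch, not invoked by the library: forwarding debug output to
 * the console. Each message is already prefixed with the library version,
 * timestamp, request tag and method name.
 */
function loggingSketch() {
    setLogFunction(message => console.log(message));
}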
|
||||
/**
|
||||
* Sets the library version used in the log output of all active Firestore instances.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
function setLibVersion(version) {
|
||||
libVersion = version;
|
||||
}
|
||||
exports.setLibVersion = setLibVersion;
|
||||
//# sourceMappingURL=logger.js.map
|
||||
231
express-server/node_modules/@google-cloud/firestore/build/src/order.js
generated
vendored
Normal file
231
express-server/node_modules/@google-cloud/firestore/build/src/order.js
generated
vendored
Normal file
@@ -0,0 +1,231 @@
|
||||
"use strict";
|
||||
/*!
|
||||
* Copyright 2017 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const convert_1 = require("./convert");
|
||||
const path_1 = require("./path");
|
||||
const validate_1 = require("./validate");
|
||||
/*!
|
||||
* The type order as defined by the backend.
|
||||
*/
|
||||
var TypeOrder;
|
||||
(function (TypeOrder) {
|
||||
TypeOrder[TypeOrder["NULL"] = 0] = "NULL";
|
||||
TypeOrder[TypeOrder["BOOLEAN"] = 1] = "BOOLEAN";
|
||||
TypeOrder[TypeOrder["NUMBER"] = 2] = "NUMBER";
|
||||
TypeOrder[TypeOrder["TIMESTAMP"] = 3] = "TIMESTAMP";
|
||||
TypeOrder[TypeOrder["STRING"] = 4] = "STRING";
|
||||
TypeOrder[TypeOrder["BLOB"] = 5] = "BLOB";
|
||||
TypeOrder[TypeOrder["REF"] = 6] = "REF";
|
||||
TypeOrder[TypeOrder["GEO_POINT"] = 7] = "GEO_POINT";
|
||||
TypeOrder[TypeOrder["ARRAY"] = 8] = "ARRAY";
|
||||
TypeOrder[TypeOrder["OBJECT"] = 9] = "OBJECT";
|
||||
})(TypeOrder || (TypeOrder = {}));
|
||||
/*!
|
||||
* @private
|
||||
*/
|
||||
function typeOrder(val) {
|
||||
const valueType = convert_1.detectValueType(val);
|
||||
switch (valueType) {
|
||||
case 'nullValue':
|
||||
return TypeOrder.NULL;
|
||||
case 'integerValue':
|
||||
return TypeOrder.NUMBER;
|
||||
case 'doubleValue':
|
||||
return TypeOrder.NUMBER;
|
||||
case 'stringValue':
|
||||
return TypeOrder.STRING;
|
||||
case 'booleanValue':
|
||||
return TypeOrder.BOOLEAN;
|
||||
case 'arrayValue':
|
||||
return TypeOrder.ARRAY;
|
||||
case 'timestampValue':
|
||||
return TypeOrder.TIMESTAMP;
|
||||
case 'geoPointValue':
|
||||
return TypeOrder.GEO_POINT;
|
||||
case 'bytesValue':
|
||||
return TypeOrder.BLOB;
|
||||
case 'referenceValue':
|
||||
return TypeOrder.REF;
|
||||
case 'mapValue':
|
||||
return TypeOrder.OBJECT;
|
||||
default:
|
||||
throw validate_1.customObjectError(val);
|
||||
}
|
||||
}
|
||||
/*!
|
||||
* @private
|
||||
*/
|
||||
function primitiveComparator(left, right) {
|
||||
if (left < right) {
|
||||
return -1;
|
||||
}
|
||||
if (left > right) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
exports.primitiveComparator = primitiveComparator;
|
||||
/*!
|
||||
* Utility function to compare doubles (using Firestore semantics for NaN).
|
||||
* @private
|
||||
*/
|
||||
function compareNumbers(left, right) {
|
||||
if (left < right) {
|
||||
return -1;
|
||||
}
|
||||
if (left > right) {
|
||||
return 1;
|
||||
}
|
||||
if (left === right) {
|
||||
return 0;
|
||||
}
|
||||
// one or both are NaN.
|
||||
if (isNaN(left)) {
|
||||
return isNaN(right) ? 0 : -1;
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
/*!
|
||||
* @private
|
||||
*/
|
||||
function compareNumberProtos(left, right) {
|
||||
let leftValue, rightValue;
|
||||
if (left.integerValue !== undefined) {
|
||||
leftValue = Number(left.integerValue);
|
||||
}
|
||||
else {
|
||||
leftValue = Number(left.doubleValue);
|
||||
}
|
||||
if (right.integerValue !== undefined) {
|
||||
rightValue = Number(right.integerValue);
|
||||
}
|
||||
else {
|
||||
rightValue = Number(right.doubleValue);
|
||||
}
|
||||
return compareNumbers(leftValue, rightValue);
|
||||
}
|
||||
/*!
|
||||
* @private
|
||||
*/
|
||||
function compareTimestamps(left, right) {
|
||||
const seconds = primitiveComparator(left.seconds || 0, right.seconds || 0);
|
||||
if (seconds !== 0) {
|
||||
return seconds;
|
||||
}
|
||||
return primitiveComparator(left.nanos || 0, right.nanos || 0);
|
||||
}
|
||||
/*!
|
||||
* @private
|
||||
*/
|
||||
function compareBlobs(left, right) {
|
||||
if (!(left instanceof Buffer) || !(right instanceof Buffer)) {
|
||||
throw new Error('Blobs can only be compared if they are Buffers.');
|
||||
}
|
||||
return Buffer.compare(left, right);
|
||||
}
|
||||
/*!
|
||||
* @private
|
||||
*/
|
||||
function compareReferenceProtos(left, right) {
|
||||
const leftPath = path_1.ResourcePath.fromSlashSeparatedString(left.referenceValue);
|
||||
const rightPath = path_1.ResourcePath.fromSlashSeparatedString(right.referenceValue);
|
||||
return leftPath.compareTo(rightPath);
|
||||
}
|
||||
/*!
|
||||
* @private
|
||||
*/
|
||||
function compareGeoPoints(left, right) {
|
||||
return (primitiveComparator(left.latitude || 0, right.latitude || 0) ||
|
||||
primitiveComparator(left.longitude || 0, right.longitude || 0));
|
||||
}
|
||||
/*!
|
||||
* @private
|
||||
*/
|
||||
function compareArrays(left, right) {
|
||||
for (let i = 0; i < left.length && i < right.length; i++) {
|
||||
const valueComparison = compare(left[i], right[i]);
|
||||
if (valueComparison !== 0) {
|
||||
return valueComparison;
|
||||
}
|
||||
}
|
||||
// If all the values matched so far, just check the length.
|
||||
return primitiveComparator(left.length, right.length);
|
||||
}
|
||||
/*!
|
||||
* @private
|
||||
*/
|
||||
function compareObjects(left, right) {
|
||||
// This requires iterating over the keys in the object in order and doing a
|
||||
// deep comparison.
|
||||
const leftKeys = Object.keys(left);
|
||||
const rightKeys = Object.keys(right);
|
||||
leftKeys.sort();
|
||||
rightKeys.sort();
|
||||
for (let i = 0; i < leftKeys.length && i < rightKeys.length; i++) {
|
||||
const keyComparison = primitiveComparator(leftKeys[i], rightKeys[i]);
|
||||
if (keyComparison !== 0) {
|
||||
return keyComparison;
|
||||
}
|
||||
const key = leftKeys[i];
|
||||
const valueComparison = compare(left[key], right[key]);
|
||||
if (valueComparison !== 0) {
|
||||
return valueComparison;
|
||||
}
|
||||
}
|
||||
// If all the keys matched so far, just check the length.
|
||||
return primitiveComparator(leftKeys.length, rightKeys.length);
|
||||
}
|
||||
/*!
|
||||
* @private
|
||||
*/
|
||||
function compare(left, right) {
|
||||
// First compare the types.
|
||||
const leftType = typeOrder(left);
|
||||
const rightType = typeOrder(right);
|
||||
const typeComparison = primitiveComparator(leftType, rightType);
|
||||
if (typeComparison !== 0) {
|
||||
return typeComparison;
|
||||
}
|
||||
// So they are the same type.
|
||||
switch (leftType) {
|
||||
case TypeOrder.NULL:
|
||||
// Nulls are all equal.
|
||||
return 0;
|
||||
case TypeOrder.BOOLEAN:
|
||||
return primitiveComparator(left.booleanValue, right.booleanValue);
|
||||
case TypeOrder.STRING:
|
||||
return primitiveComparator(left.stringValue, right.stringValue);
|
||||
case TypeOrder.NUMBER:
|
||||
return compareNumberProtos(left, right);
|
||||
case TypeOrder.TIMESTAMP:
|
||||
return compareTimestamps(left.timestampValue, right.timestampValue);
|
||||
case TypeOrder.BLOB:
|
||||
return compareBlobs(left.bytesValue, right.bytesValue);
|
||||
case TypeOrder.REF:
|
||||
return compareReferenceProtos(left, right);
|
||||
case TypeOrder.GEO_POINT:
|
||||
return compareGeoPoints(left.geoPointValue, right.geoPointValue);
|
||||
case TypeOrder.ARRAY:
|
||||
return compareArrays(left.arrayValue.values || [], right.arrayValue.values || []);
|
||||
case TypeOrder.OBJECT:
|
||||
return compareObjects(left.mapValue.fields || {}, right.mapValue.fields || {});
|
||||
default:
|
||||
throw new Error(`Encountered unknown type order: ${leftType}`);
|
||||
}
|
||||
}
|
||||
exports.compare = compare;
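/*!
 * Illustrative sketch, not invoked by the library, assuming proto3 JSON value
 * shapes: values of different types order by type (NULL < BOOLEAN < NUMBER <
 * ... < OBJECT); values of the same type order by value.
 */
function compareSketch() {
    const byType = compare({ nullValue: 'NULL_VALUE' }, { booleanValue: true }); // -1
    const byValue = compare({ integerValue: '2' }, { doubleValue: 2.5 });        // -1
    return [byType, byValue];
}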
|
||||
//# sourceMappingURL=order.js.map
|
||||
513
express-server/node_modules/@google-cloud/firestore/build/src/path.js
generated
vendored
Normal file
513
express-server/node_modules/@google-cloud/firestore/build/src/path.js
generated
vendored
Normal file
@@ -0,0 +1,513 @@
|
||||
"use strict";
|
||||
/*!
|
||||
* Copyright 2017 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const is = require("is");
|
||||
const validate_1 = require("./validate");
|
||||
const validate = validate_1.createValidator();
|
||||
/*!
|
||||
* A regular expression to verify an absolute Resource Path in Firestore. It
|
||||
* extracts the project ID, the database name and the relative resource path
|
||||
* if available.
|
||||
*
|
||||
* @type {RegExp}
|
||||
*/
|
||||
const RESOURCE_PATH_RE =
|
||||
// Note: [\s\S] matches all characters including newlines.
|
||||
/^projects\/([^/]*)\/databases\/([^/]*)(?:\/documents\/)?([\s\S]*)$/;
|
||||
/*!
|
||||
* A regular expression to verify whether a field name can be passed to the
|
||||
* backend without escaping.
|
||||
*
|
||||
* @type {RegExp}
|
||||
*/
|
||||
const UNESCAPED_FIELD_NAME_RE = /^[_a-zA-Z][_a-zA-Z0-9]*$/;
|
||||
/*!
|
||||
* A regular expression to verify field paths that are passed to the API as
|
||||
* strings. Field paths that do not match this expression have to be provided
|
||||
* as a [FieldPath]{@link FieldPath} object.
|
||||
*
|
||||
* @type {RegExp}
|
||||
*/
|
||||
const FIELD_PATH_RE = /^[^*~/[\]]+$/;
|
||||
/**
|
||||
* An abstract class representing a Firestore path.
|
||||
*
|
||||
* Subclasses have to implement `split()` and `canonicalString()`.
|
||||
*
|
||||
* @private
|
||||
* @class
|
||||
*/
|
||||
class Path {
|
||||
/**
|
||||
* Creates a new Path with the given segments.
|
||||
*
|
||||
* @private
|
||||
* @hideconstructor
|
||||
* @param segments Sequence of parts of a path.
|
||||
*/
|
||||
constructor(segments) {
|
||||
this.segments = segments;
|
||||
}
|
||||
/**
|
||||
* String representation as expected by the proto API.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
get formattedName() {
|
||||
return this.canonicalString();
|
||||
}
|
||||
/**
|
||||
* Returns the number of segments of this field path.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
get size() {
|
||||
return this.segments.length;
|
||||
}
|
||||
/**
|
||||
* Create a child path beneath the current level.
|
||||
*
|
||||
* @private
|
||||
* @param relativePath Relative path to append to the current path.
|
||||
* @returns The new path.
|
||||
*/
|
||||
append(relativePath) {
|
||||
if (relativePath instanceof Path) {
|
||||
return this.construct(this.segments.concat(relativePath.segments));
|
||||
}
|
||||
return this.construct(this.segments.concat(this.split(relativePath)));
|
||||
}
|
||||
/**
|
||||
* Returns the path of the parent node.
|
||||
*
|
||||
* @private
|
||||
* @returns The new path or null if we are already at the root.
|
||||
*/
|
||||
parent() {
|
||||
if (this.segments.length === 0) {
|
||||
return null;
|
||||
}
|
||||
return this.construct(this.segments.slice(0, this.segments.length - 1));
|
||||
}
|
||||
/**
|
||||
* Checks whether the current path is a prefix of the specified path.
|
||||
*
|
||||
* @private
|
||||
* @param other The path to check against.
|
||||
* @returns 'true' iff the current path is a prefix match with 'other'.
|
||||
*/
|
||||
isPrefixOf(other) {
|
||||
if (other.segments.length < this.segments.length) {
|
||||
return false;
|
||||
}
|
||||
for (let i = 0; i < this.segments.length; i++) {
|
||||
if (this.segments[i] !== other.segments[i]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
/**
|
||||
* Returns a string representation of this path.
|
||||
*
|
||||
* @private
|
||||
* @returns A string representing this path.
|
||||
*/
|
||||
toString() {
|
||||
return this.formattedName;
|
||||
}
|
||||
/**
|
||||
* Compare the current path against another Path object.
|
||||
*
|
||||
* @private
|
||||
* @param other The path to compare to.
|
||||
* @returns -1 if current < other, 1 if current > other, 0 if equal
|
||||
*/
|
||||
compareTo(other) {
|
||||
const len = Math.min(this.segments.length, other.segments.length);
|
||||
for (let i = 0; i < len; i++) {
|
||||
if (this.segments[i] < other.segments[i]) {
|
||||
return -1;
|
||||
}
|
||||
if (this.segments[i] > other.segments[i]) {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
if (this.segments.length < other.segments.length) {
|
||||
return -1;
|
||||
}
|
||||
if (this.segments.length > other.segments.length) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
/**
|
||||
* Returns a copy of the underlying segments.
|
||||
*
|
||||
* @private
|
||||
* @returns A copy of the segments that make up this path.
|
||||
*/
|
||||
toArray() {
|
||||
return this.segments.slice();
|
||||
}
|
||||
/**
|
||||
* Returns true if this `Path` is equal to the provided value.
|
||||
*
|
||||
* @private
|
||||
* @param other The value to compare against.
|
||||
* @return true if this `Path` is equal to the provided value.
|
||||
*/
|
||||
isEqual(other) {
|
||||
return (this === other ||
|
||||
(other instanceof this.constructor && this.compareTo(other) === 0));
|
||||
}
|
||||
}
|
||||
/**
|
||||
* A slash-separated path for navigating resources (documents and collections)
|
||||
* within Firestore.
|
||||
*
|
||||
* @private
|
||||
* @class
|
||||
*/
|
||||
class ResourcePath extends Path {
|
||||
/**
|
||||
* Constructs a Firestore Resource Path.
|
||||
*
|
||||
* @private
|
||||
* @hideconstructor
|
||||
*
|
||||
* @param projectId The Firestore project id.
|
||||
* @param databaseId The Firestore database id.
|
||||
* @param segments Sequence of names of the parts of the path.
|
||||
*/
|
||||
constructor(projectId, databaseId, ...segments) {
|
||||
super(segments);
|
||||
this.projectId = projectId;
|
||||
this.databaseId = databaseId;
|
||||
}
|
||||
/**
|
||||
* String representation of the path relative to the database root.
|
||||
*
|
||||
* @private
|
||||
* @type {string}
|
||||
*/
|
||||
get relativeName() {
|
||||
return this.segments.join('/');
|
||||
}
|
||||
/**
|
||||
* Indicates whether this ResourcePath points to a document.
|
||||
*
|
||||
* @private
|
||||
* @type {boolean}
|
||||
*/
|
||||
get isDocument() {
|
||||
return this.segments.length > 0 && this.segments.length % 2 === 0;
|
||||
}
|
||||
/**
|
||||
* Indicates whether this ResourcePath points to a collection.
|
||||
*
|
||||
* @private
|
||||
* @type {boolean}
|
||||
*/
|
||||
get isCollection() {
|
||||
return this.segments.length % 2 === 1;
|
||||
}
|
||||
/**
|
||||
* The last component of the path.
|
||||
*
|
||||
* @private
|
||||
* @type {string|null}
|
||||
*/
|
||||
get id() {
|
||||
if (this.segments.length > 0) {
|
||||
return this.segments[this.segments.length - 1];
|
||||
}
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* Returns true if the given string can be used as a relative or absolute
|
||||
* resource path.
|
||||
*
|
||||
* @private
|
||||
* @param {string} resourcePath The path to validate.
|
||||
* @throws if the string can't be used as a resource path.
|
||||
* @returns {boolean} 'true' when the path is valid.
|
||||
*/
|
||||
static validateResourcePath(resourcePath) {
|
||||
if (!is.string(resourcePath) || resourcePath === '') {
|
||||
throw new Error(`Path must be a non-empty string.`);
|
||||
}
|
||||
if (resourcePath.indexOf('//') >= 0) {
|
||||
throw new Error('Paths must not contain //.');
|
||||
}
|
||||
return true;
|
||||
}
|
||||
/**
|
||||
* Creates a resource path from an absolute Firestore path.
|
||||
*
|
||||
* @private
|
||||
* @param {string} absolutePath A string representation of a Resource Path.
|
||||
* @returns {ResourcePath} The new ResourcePath.
|
||||
*/
|
||||
static fromSlashSeparatedString(absolutePath) {
|
||||
const elements = RESOURCE_PATH_RE.exec(absolutePath);
|
||||
if (elements) {
|
||||
const project = elements[1];
|
||||
const database = elements[2];
|
||||
const path = elements[3];
|
||||
return new ResourcePath(project, database).append(path);
|
||||
}
|
||||
throw new Error(`Resource name '${absolutePath}' is not valid.`);
|
||||
}
|
||||
/**
|
||||
* Splits a string into path segments, using slashes as separators.
|
||||
*
|
||||
* @private
|
||||
* @override
|
||||
* @param {string} relativePath The path to split.
|
||||
* @returns {Array.<string>} - The split path segments.
|
||||
*/
|
||||
split(relativePath) {
|
||||
// We may have an empty segment at the beginning or end if they had a
|
||||
// leading or trailing slash (which we allow).
|
||||
return relativePath.split('/').filter(segment => segment.length > 0);
|
||||
}
|
||||
/**
|
||||
* String representation of a ResourcePath as expected by the API.
|
||||
*
|
||||
* @private
|
||||
* @override
|
||||
* @returns {string} The representation as expected by the API.
|
||||
*/
|
||||
canonicalString() {
|
||||
let components = [
|
||||
'projects',
|
||||
this.projectId,
|
||||
'databases',
|
||||
this.databaseId,
|
||||
];
|
||||
if (this.segments.length > 0) {
|
||||
components = components.concat('documents', this.segments);
|
||||
}
|
||||
return components.join('/');
|
||||
}
|
||||
/**
|
||||
* Constructs a new instance of ResourcePath. We need this instead of using
|
||||
* the normal constructor because polymorphic 'this' doesn't work on static
|
||||
* methods.
|
||||
*
|
||||
* @private
|
||||
* @override
|
||||
* @param {Array.<string>} segments Sequence of names of the parts of the
|
||||
* path.
|
||||
* @returns {ResourcePath} The newly created ResourcePath.
|
||||
*/
|
||||
construct(segments) {
|
||||
return new ResourcePath(this.projectId, this.databaseId, ...segments);
|
||||
}
|
||||
/**
|
||||
* Compare the current path against another ResourcePath object.
|
||||
*
|
||||
* @private
|
||||
* @override
|
||||
* @param {ResourcePath} other The path to compare to.
|
||||
* @returns {number} -1 if current < other, 1 if current > other, 0 if equal
|
||||
*/
|
||||
compareTo(other) {
|
||||
// Ignore DocumentReference with {{projectId}} placeholders and assume that
|
||||
// the resolved IDs match the provided ResourcePath. We could alternatively
|
||||
// try to resolve the Project ID here, but this is asynchronous as it
|
||||
// requires Disk I/O.
|
||||
if (this.projectId !== '{{projectId}}' &&
|
||||
other.projectId !== '{{projectId}}') {
|
||||
if (this.projectId < other.projectId) {
|
||||
return -1;
|
||||
}
|
||||
if (this.projectId > other.projectId) {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
if (this.databaseId < other.databaseId) {
|
||||
return -1;
|
||||
}
|
||||
if (this.databaseId > other.databaseId) {
|
||||
return 1;
|
||||
}
|
||||
return super.compareTo(other);
|
||||
}
|
||||
/**
|
||||
* Converts this ResourcePath to the Firestore Proto representation.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
toProto() {
|
||||
return {
|
||||
referenceValue: this.formattedName,
|
||||
};
|
||||
}
|
||||
}
|
||||
exports.ResourcePath = ResourcePath;
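/*!
 * Illustrative sketch, not invoked by the library (project and database IDs
 * are placeholders): parsing an absolute resource name into its parts.
 */
function resourcePathSketch() {
    const path = ResourcePath.fromSlashSeparatedString(
        'projects/my-project/databases/(default)/documents/col/doc');
    return {
        isDocument: path.isDocument,     // true, an even number of segments
        relativeName: path.relativeName, // 'col/doc'
        id: path.id,                     // 'doc'
    };
}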
|
||||
/**
|
||||
* A dot-separated path for navigating sub-objects within a document.
|
||||
*
|
||||
* @class
|
||||
*/
|
||||
class FieldPath extends Path {
|
||||
/**
|
||||
* Constructs a Firestore Field Path.
|
||||
*
|
||||
* @param {...string|string[]} segments Sequence of field names that form
|
||||
* this path.
|
||||
*
|
||||
* @example
|
||||
* let query = firestore.collection('col');
|
||||
* let fieldPath = new FieldPath('f.o.o', 'bar');
|
||||
*
|
||||
* query.where(fieldPath, '==', 42).get().then(snapshot => {
|
||||
* snapshot.forEach(document => {
|
||||
* console.log(`Document contains {'f.o.o' : {'bar' : 42}}`);
|
||||
* });
|
||||
* });
|
||||
*/
|
||||
constructor(...segments) {
|
||||
validate.minNumberOfArguments('FieldPath', arguments, 1);
|
||||
const elements = is.array(segments[0]) ?
|
||||
segments[0] :
|
||||
segments;
|
||||
for (let i = 0; i < elements.length; ++i) {
|
||||
validate.isString(i, elements[i]);
|
||||
if (elements[i].length === 0) {
|
||||
throw new Error(`Element at index ${i} should not be an empty string.`);
|
||||
}
|
||||
}
|
||||
super(elements);
|
||||
}
|
||||
/**
|
||||
* A special FieldPath value to refer to the ID of a document. It can be used
|
||||
* in queries to sort or filter by the document ID.
|
||||
*
|
||||
* @returns {FieldPath}
|
||||
*/
|
||||
static documentId() {
|
||||
return FieldPath._DOCUMENT_ID;
|
||||
}
|
||||
/**
|
||||
* Returns true if the provided value can be used as a field path argument.
|
||||
*
|
||||
* @private
|
||||
* @param fieldPath The value to verify.
|
||||
* @throws if the string can't be used as a field path.
|
||||
* @returns 'true' when the path is valid.
|
||||
*/
|
||||
static validateFieldPath(fieldPath) {
|
||||
if (!(fieldPath instanceof FieldPath)) {
|
||||
if (fieldPath === undefined) {
|
||||
throw new Error('Path cannot be omitted.');
|
||||
}
|
||||
if (is.object(fieldPath) &&
|
||||
fieldPath.constructor.name === 'FieldPath') {
|
||||
throw validate_1.customObjectError(fieldPath);
|
||||
}
|
||||
if (typeof fieldPath !== 'string') {
|
||||
throw new Error('Paths can only be specified as strings or via a FieldPath object.');
|
||||
}
|
||||
if (fieldPath.indexOf('..') >= 0) {
|
||||
throw new Error(`Paths must not contain ".." in them.`);
|
||||
}
|
||||
if (fieldPath.startsWith('.') || fieldPath.endsWith('.')) {
|
||||
throw new Error(`Paths must not start or end with ".".`);
|
||||
}
|
||||
if (!FIELD_PATH_RE.test(fieldPath)) {
|
||||
throw new Error(`Paths can't be empty and must not contain "*~/[]".`);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
/**
|
||||
* Turns a field path argument into a [FieldPath]{@link FieldPath}.
|
||||
* Supports FieldPaths as input (which are passed through) and dot-separated
|
||||
* strings.
|
||||
*
|
||||
* @private
|
||||
* @param {string|FieldPath} fieldPath The FieldPath to create.
|
||||
* @returns {FieldPath} A field path representation.
|
||||
*/
|
||||
static fromArgument(fieldPath) {
|
||||
// validateFieldPath() is used in all public API entry points to validate
|
||||
// that fromArgument() is only called with a Field Path or a string.
|
||||
return fieldPath instanceof FieldPath ?
|
||||
fieldPath :
|
||||
new FieldPath(...fieldPath.split('.'));
|
||||
}
|
||||
/**
|
||||
* String representation of a FieldPath as expected by the API.
|
||||
*
|
||||
* @private
|
||||
* @override
|
||||
* @returns {string} The representation as expected by the API.
|
||||
*/
|
||||
canonicalString() {
|
||||
return this.segments
|
||||
.map(str => {
|
||||
return UNESCAPED_FIELD_NAME_RE.test(str) ?
|
||||
str :
|
||||
'`' + str.replace('\\', '\\\\').replace('`', '\\`') + '`';
|
||||
})
|
||||
.join('.');
|
||||
}
|
||||
/**
|
||||
* Splits a string into path segments, using dots as separators.
|
||||
*
|
||||
* @private
|
||||
* @override
|
||||
* @param {string} fieldPath The path to split.
|
||||
* @returns {Array.<string>} - The split path segments.
|
||||
*/
|
||||
split(fieldPath) {
|
||||
return fieldPath.split('.');
|
||||
}
|
||||
/**
|
||||
* Constructs a new instance of FieldPath. We need this instead of using
|
||||
* the normal constructor because polymorphic 'this' doesn't work on static
|
||||
* methods.
|
||||
*
|
||||
* @private
|
||||
* @override
|
||||
* @param {Array.<string>} segments Sequence of field names.
|
||||
* @returns {ResourcePath} The newly created FieldPath.
|
||||
*/
|
||||
construct(segments) {
|
||||
return new FieldPath(...segments);
|
||||
}
|
||||
/**
|
||||
* Returns true if this `FieldPath` is equal to the provided value.
|
||||
*
|
||||
* @param {*} other The value to compare against.
|
||||
* @return {boolean} true if this `FieldPath` is equal to the provided value.
|
||||
*/
|
||||
isEqual(other) {
|
||||
return super.isEqual(other);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* A special sentinel value to refer to the ID of a document.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
FieldPath._DOCUMENT_ID = new FieldPath('__name__');
|
||||
exports.FieldPath = FieldPath;
|
||||
//# sourceMappingURL=path.js.map
|
||||
122
express-server/node_modules/@google-cloud/firestore/build/src/pool.js
generated
vendored
Normal file
122
express-server/node_modules/@google-cloud/firestore/build/src/pool.js
generated
vendored
Normal file
@@ -0,0 +1,122 @@
|
||||
"use strict";
|
||||
/*!
|
||||
* Copyright 2018 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const assert = require("assert");
|
||||
/**
|
||||
* An auto-resizing pool that distributes concurrent operations over multiple
|
||||
* clients of type `T`.
|
||||
*
|
||||
* ClientPool is used within Firestore to manage a pool of GAPIC clients and
|
||||
* automatically initializes multiple clients if we issue more than 100
|
||||
* concurrent operations.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
class ClientPool {
|
||||
/**
|
||||
* @param concurrentOperationLimit The number of operations that each client
|
||||
* can handle.
|
||||
* @param clientFactory A factory function called as needed when new clients
|
||||
* are required.
|
||||
*/
|
||||
constructor(concurrentOperationLimit, clientFactory) {
|
||||
this.concurrentOperationLimit = concurrentOperationLimit;
|
||||
this.clientFactory = clientFactory;
|
||||
/** Stores each active clients and how many operations it has outstanding. */
|
||||
this.activeClients = new Map();
|
||||
}
|
||||
/**
|
||||
* Returns an already existing client if it has less than the maximum number
|
||||
* of concurrent operations or initializes and returns a new client.
|
||||
*/
|
||||
acquire() {
|
||||
let selectedClient = null;
|
||||
let selectedRequestCount = 0;
|
||||
this.activeClients.forEach((requestCount, client) => {
|
||||
if (!selectedClient && requestCount < this.concurrentOperationLimit) {
|
||||
selectedClient = client;
|
||||
selectedRequestCount = requestCount;
|
||||
}
|
||||
});
|
||||
if (!selectedClient) {
|
||||
selectedClient = this.clientFactory();
|
||||
assert(!this.activeClients.has(selectedClient), 'The provided client factory returned an existing instance');
|
||||
}
|
||||
this.activeClients.set(selectedClient, selectedRequestCount + 1);
|
||||
return selectedClient;
|
||||
}
|
||||
/**
|
||||
* Reduces the number of operations for the provided client, potentially
|
||||
* removing it from the pool of active clients.
|
||||
*/
|
||||
release(client) {
|
||||
let requestCount = this.activeClients.get(client) || 0;
|
||||
assert(requestCount > 0, 'No active request');
|
||||
requestCount = requestCount - 1;
|
||||
this.activeClients.set(client, requestCount);
|
||||
if (requestCount === 0) {
|
||||
this.garbageCollect();
|
||||
}
|
||||
}
|
||||
/**
|
||||
* The number of currently registered clients.
|
||||
*
|
||||
* @return Number of currently registered clients.
|
||||
*/
|
||||
// Visible for testing.
|
||||
get size() {
|
||||
return this.activeClients.size;
|
||||
}
|
||||
/**
|
||||
* Runs the provided operation in this pool. This function may create an
|
||||
* additional client if all existing clients already operate at the concurrent
|
||||
* operation limit.
|
||||
*
|
||||
* @param op A callback function that returns a Promise. The client T will
|
||||
* be returned to the pool when callback finishes.
|
||||
* @return A Promise that resolves with the result of `op`.
|
||||
*/
|
||||
run(op) {
|
||||
const client = this.acquire();
|
||||
return op(client)
|
||||
.catch(err => {
|
||||
this.release(client);
|
||||
return Promise.reject(err);
|
||||
})
|
||||
.then(res => {
|
||||
this.release(client);
|
||||
return res;
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Deletes clients that are no longer executing operations. Keeps up to one
|
||||
* idle client to reduce future initialization costs.
|
||||
*/
|
||||
garbageCollect() {
|
||||
let idleClients = 0;
|
||||
this.activeClients.forEach((requestCount, client) => {
|
||||
if (requestCount === 0) {
|
||||
++idleClients;
|
||||
if (idleClients > 1) {
|
||||
this.activeClients.delete(client);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.ClientPool = ClientPool;
|
||||
//# sourceMappingURL=pool.js.map
|
||||
1778
express-server/node_modules/@google-cloud/firestore/build/src/reference.js
generated
vendored
Normal file
1778
express-server/node_modules/@google-cloud/firestore/build/src/reference.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
224
express-server/node_modules/@google-cloud/firestore/build/src/serializer.js
generated
vendored
Normal file
224
express-server/node_modules/@google-cloud/firestore/build/src/serializer.js
generated
vendored
Normal file
@@ -0,0 +1,224 @@
|
||||
"use strict";
|
||||
/*!
|
||||
* Copyright 2018 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const is = require("is");
|
||||
const timestamp_1 = require("./timestamp");
|
||||
const field_value_1 = require("./field-value");
|
||||
const validate_1 = require("./validate");
|
||||
const path_1 = require("./path");
|
||||
const convert_1 = require("./convert");
|
||||
const geo_point_1 = require("./geo-point");
|
||||
/**
|
||||
* Serializer that is used to convert between JavaScripts types and their
|
||||
* Firestore Protobuf representation.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
class Serializer {
|
||||
constructor(firestore) {
|
||||
// Instead of storing the `firestore` object, we store just a reference to
|
||||
// its `.doc()` method. This avoid a circular reference, which breaks
|
||||
// JSON.stringify().
|
||||
this.createReference = path => firestore.doc(path);
|
||||
this.timestampsInSnapshots = !!firestore._settings.timestampsInSnapshots;
|
||||
}
|
||||
/**
|
||||
* Encodes a JavaScrip object into the Firestore 'Fields' representation.
|
||||
*
|
||||
* @private
|
||||
* @param obj The object to encode.
|
||||
* @returns The Firestore 'Fields' representation
|
||||
*/
|
||||
encodeFields(obj) {
|
||||
const fields = {};
|
||||
for (const prop in obj) {
|
||||
if (obj.hasOwnProperty(prop)) {
|
||||
const val = this.encodeValue(obj[prop]);
|
||||
if (val) {
|
||||
fields[prop] = val;
|
||||
}
|
||||
}
|
||||
}
|
||||
return fields;
|
||||
}
|
||||
/**
|
||||
* Encodes a JavaScript value into the Firestore 'Value' representation.
|
||||
*
|
||||
* @private
|
||||
* @param val The object to encode
|
||||
* @returns The Firestore Proto or null if we are deleting a field.
|
||||
*/
|
||||
encodeValue(val) {
|
||||
if (val instanceof field_value_1.FieldTransform) {
|
||||
return null;
|
||||
}
|
||||
if (is.string(val)) {
|
||||
return {
|
||||
stringValue: val,
|
||||
};
|
||||
}
|
||||
if (typeof val === 'boolean') {
|
||||
return {
|
||||
booleanValue: val,
|
||||
};
|
||||
}
|
||||
if (typeof val === 'number' && is.integer(val)) {
|
||||
return {
|
||||
integerValue: val,
|
||||
};
|
||||
}
|
||||
// Integers are handled above, the remaining numbers are treated as doubles
|
||||
if (is.number(val)) {
|
||||
return {
|
||||
doubleValue: val,
|
||||
};
|
||||
}
|
||||
if (is.date(val)) {
|
||||
const timestamp = timestamp_1.Timestamp.fromDate(val);
|
||||
return {
|
||||
timestampValue: {
|
||||
seconds: timestamp.seconds,
|
||||
nanos: timestamp.nanoseconds,
|
||||
},
|
||||
};
|
||||
}
|
||||
if (val === null) {
|
||||
return {
|
||||
nullValue: 'NULL_VALUE',
|
||||
};
|
||||
}
|
||||
if (val instanceof Buffer || val instanceof Uint8Array) {
|
||||
return {
|
||||
bytesValue: val,
|
||||
};
|
||||
}
|
||||
if (typeof val === 'object' && 'toProto' in val &&
|
||||
typeof val.toProto === 'function') {
|
||||
return val.toProto();
|
||||
}
|
||||
if (val instanceof Array) {
|
||||
const array = {
|
||||
arrayValue: {},
|
||||
};
|
||||
if (val.length > 0) {
|
||||
array.arrayValue.values = [];
|
||||
for (let i = 0; i < val.length; ++i) {
|
||||
const enc = this.encodeValue(val[i]);
|
||||
if (enc) {
|
||||
array.arrayValue.values.push(enc);
|
||||
}
|
||||
}
|
||||
}
|
||||
return array;
|
||||
}
|
||||
if (typeof val === 'object' && isPlainObject(val)) {
|
||||
const map = {
|
||||
mapValue: {},
|
||||
};
|
||||
// If we encounter an empty object, we always need to send it to make sure
|
||||
// the server creates a map entry.
|
||||
if (!is.empty(val)) {
|
||||
map.mapValue.fields = this.encodeFields(val);
|
||||
if (is.empty(map.mapValue.fields)) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
throw validate_1.customObjectError(val);
|
||||
}
|
||||
/**
|
||||
* Decodes a single Firestore 'Value' Protobuf.
|
||||
*
|
||||
* @private
|
||||
* @param proto A Firestore 'Value' Protobuf.
|
||||
* @returns The converted JS type.
|
||||
*/
|
||||
decodeValue(proto) {
|
||||
const valueType = convert_1.detectValueType(proto);
|
||||
switch (valueType) {
|
||||
case 'stringValue': {
|
||||
return proto.stringValue;
|
||||
}
|
||||
case 'booleanValue': {
|
||||
return proto.booleanValue;
|
||||
}
|
||||
case 'integerValue': {
|
||||
return Number(proto.integerValue);
|
||||
}
|
||||
case 'doubleValue': {
|
||||
return Number(proto.doubleValue);
|
||||
}
|
||||
case 'timestampValue': {
|
||||
const timestamp = timestamp_1.Timestamp.fromProto(proto.timestampValue);
|
||||
return this.timestampsInSnapshots ? timestamp : timestamp.toDate();
|
||||
}
|
||||
case 'referenceValue': {
|
||||
const resourcePath = path_1.ResourcePath.fromSlashSeparatedString(proto.referenceValue);
|
||||
return this.createReference(resourcePath.relativeName);
|
||||
}
|
||||
case 'arrayValue': {
|
||||
const array = [];
|
||||
if (is.array(proto.arrayValue.values)) {
|
||||
for (const value of proto.arrayValue.values) {
|
||||
array.push(this.decodeValue(value));
|
||||
}
|
||||
}
|
||||
return array;
|
||||
}
|
||||
case 'nullValue': {
|
||||
return null;
|
||||
}
|
||||
case 'mapValue': {
|
||||
const obj = {};
|
||||
const fields = proto.mapValue.fields;
|
||||
for (const prop in fields) {
|
||||
if (fields.hasOwnProperty(prop)) {
|
||||
obj[prop] = this.decodeValue(fields[prop]);
|
||||
}
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
case 'geoPointValue': {
|
||||
return geo_point_1.GeoPoint.fromProto(proto.geoPointValue);
|
||||
}
|
||||
case 'bytesValue': {
|
||||
return proto.bytesValue;
|
||||
}
|
||||
default: {
|
||||
throw new Error('Cannot decode type from Firestore Value: ' +
|
||||
JSON.stringify(proto));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.Serializer = Serializer;
|
||||
/**
|
||||
* Verifies that 'obj' is a plain JavaScript object that can be encoded as a
|
||||
* 'Map' in Firestore.
|
||||
*
|
||||
* @private
|
||||
* @param input The argument to verify.
|
||||
* @returns 'true' if the input can be a treated as a plain object.
|
||||
*/
|
||||
function isPlainObject(input) {
|
||||
return (typeof input === 'object' && input !== null &&
|
||||
(Object.getPrototypeOf(input) === Object.prototype ||
|
||||
Object.getPrototypeOf(input) === null));
|
||||
}
|
||||
exports.isPlainObject = isPlainObject;
|
||||
//# sourceMappingURL=serializer.js.map
|
||||
222
express-server/node_modules/@google-cloud/firestore/build/src/timestamp.js
generated
vendored
Normal file
222
express-server/node_modules/@google-cloud/firestore/build/src/timestamp.js
generated
vendored
Normal file
@@ -0,0 +1,222 @@
|
||||
"use strict";
|
||||
/*!
|
||||
* Copyright 2018 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const validate_1 = require("./validate");
|
||||
const validate = validate_1.createValidator();
|
||||
/*!
|
||||
* Number of nanoseconds in a millisecond.
|
||||
*
|
||||
* @type {number}
|
||||
*/
|
||||
const MS_TO_NANOS = 1000000;
|
||||
/**
|
||||
* A Timestamp represents a point in time independent of any time zone or
|
||||
* calendar, represented as seconds and fractions of seconds at nanosecond
|
||||
* resolution in UTC Epoch time. It is encoded using the Proleptic Gregorian
|
||||
* Calendar which extends the Gregorian calendar backwards to year one. It is
|
||||
* encoded assuming all minutes are 60 seconds long, i.e. leap seconds are
|
||||
* "smeared" so that no leap second table is needed for interpretation. Range is
|
||||
* from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.
|
||||
*
|
||||
* @see https://github.com/google/protobuf/blob/master/src/google/protobuf/timestamp.proto
|
||||
*/
|
||||
class Timestamp {
|
||||
/**
|
||||
* Creates a new timestamp with the current date, with millisecond precision.
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.set({ updateTime:Firestore.Timestamp.now() });
|
||||
*
|
||||
* @return {Timestamp} A new `Timestamp` representing the current date.
|
||||
*/
|
||||
static now() {
|
||||
return Timestamp.fromMillis(Date.now());
|
||||
}
|
||||
/**
|
||||
* Creates a new timestamp from the given date.
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* let date = Date.parse('01 Jan 2000 00:00:00 GMT');
|
||||
* documentRef.set({ startTime:Firestore.Timestamp.fromDate(date) });
|
||||
*
|
||||
* @param {Date} date The date to initialize the `Timestamp` from.
|
||||
* @return {Timestamp} A new `Timestamp` representing the same point in time
|
||||
* as the given date.
|
||||
*/
|
||||
static fromDate(date) {
|
||||
return Timestamp.fromMillis(date.getTime());
|
||||
}
|
||||
/**
|
||||
* Creates a new timestamp from the given number of milliseconds.
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.set({ startTime:Firestore.Timestamp.fromMillis(42) });
|
||||
*
|
||||
* @param {number} milliseconds Number of milliseconds since Unix epoch
|
||||
* 1970-01-01T00:00:00Z.
|
||||
* @return {Timestamp} A new `Timestamp` representing the same point in time
|
||||
* as the given number of milliseconds.
|
||||
*/
|
||||
static fromMillis(milliseconds) {
|
||||
const seconds = Math.floor(milliseconds / 1000);
|
||||
const nanos = (milliseconds - seconds * 1000) * MS_TO_NANOS;
|
||||
return new Timestamp(seconds, nanos);
|
||||
}
|
||||
/**
|
||||
* Generates a `Timestamp` object from a Timestamp proto.
|
||||
*
|
||||
* @private
|
||||
* @param {Object} timestamp The `Timestamp` Protobuf object.
|
||||
*/
|
||||
static fromProto(timestamp) {
|
||||
return new Timestamp(Number(timestamp.seconds || 0), Number(timestamp.nanos || 0));
|
||||
}
|
||||
/**
|
||||
* Creates a new timestamp.
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.set({ startTime:new Firestore.Timestamp(42, 0) });
|
||||
*
|
||||
* @param {number} seconds The number of seconds of UTC time since Unix epoch
|
||||
* 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
|
||||
* 9999-12-31T23:59:59Z inclusive.
|
||||
* @param {number} nanoseconds The non-negative fractions of a second at
|
||||
* nanosecond resolution. Negative second values with fractions must still
|
||||
* have non-negative nanoseconds values that count forward in time. Must be
|
||||
* from 0 to 999,999,999 inclusive.
|
||||
*/
|
||||
constructor(seconds, nanoseconds) {
|
||||
validate.isInteger('seconds', seconds);
|
||||
validate.isInteger('nanoseconds', nanoseconds, 0, 999999999);
|
||||
this._seconds = seconds;
|
||||
this._nanoseconds = nanoseconds;
|
||||
}
|
||||
/**
|
||||
* The number of seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z.
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.get().then(snap => {
|
||||
* let updatedAt = snap.updateTime;
|
||||
* console.log(`Updated at ${updated.seconds}s ${updated.nanoseconds}ns`);
|
||||
* });
|
||||
*
|
||||
* @type {number}
|
||||
*/
|
||||
get seconds() {
|
||||
return this._seconds;
|
||||
}
|
||||
/**
|
||||
* The non-negative fractions of a second at nanosecond resolution.
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.get().then(snap => {
|
||||
* let updated = snap.updateTime;
|
||||
* console.log(`Updated at ${updated.seconds}s ${updated.nanoseconds}ns`);
|
||||
* });
|
||||
*
|
||||
* @type {number}
|
||||
*/
|
||||
get nanoseconds() {
|
||||
return this._nanoseconds;
|
||||
}
|
||||
/**
|
||||
* Returns a new `Date` corresponding to this timestamp. This may lose
|
||||
* precision.
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.get().then(snap => {
|
||||
* console.log(`Document updated at: ${snap.updateTime.toDate()}`);
|
||||
* });
|
||||
*
|
||||
* @return {Date} JavaScript `Date` object representing the same point in time
|
||||
* as this `Timestamp`, with millisecond precision.
|
||||
*/
|
||||
toDate() {
|
||||
return new Date(this._seconds * 1000 + Math.round(this._nanoseconds / MS_TO_NANOS));
|
||||
}
|
||||
/**
|
||||
* Returns the number of milliseconds since Unix epoch 1970-01-01T00:00:00Z.
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.get().then(snap => {
|
||||
* let startTime = snap.get('startTime');
|
||||
* let endTime = snap.get('endTime');
|
||||
* console.log(`Duration: ${endTime - startTime}`);
|
||||
* });
|
||||
*
|
||||
* @return {number} The point in time corresponding to this timestamp,
|
||||
* represented as the number of milliseconds since Unix epoch
|
||||
* 1970-01-01T00:00:00Z.
|
||||
*/
|
||||
toMillis() {
|
||||
return this._seconds * 1000 + Math.floor(this._nanoseconds / MS_TO_NANOS);
|
||||
}
|
||||
/**
|
||||
* Returns 'true' if this `Timestamp` is equal to the provided one.
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.get().then(snap => {
|
||||
* if (snap.createTime.isEqual(snap.updateTime)) {
|
||||
* console.log('Document is in its initial state.');
|
||||
* }
|
||||
* });
|
||||
*
|
||||
* @param {any} other The `Timestamp` to compare against.
|
||||
* @return {boolean} 'true' if this `Timestamp` is equal to the provided one.
|
||||
*/
|
||||
isEqual(other) {
|
||||
return (this === other ||
|
||||
(other instanceof Timestamp && this._seconds === other.seconds &&
|
||||
this._nanoseconds === other.nanoseconds));
|
||||
}
|
||||
/**
|
||||
* Generates the Protobuf `Timestamp` object for this timestamp.
|
||||
*
|
||||
* @private
|
||||
* @returns {Object} The `Timestamp` Protobuf object.
|
||||
*/
|
||||
toProto() {
|
||||
const timestamp = {};
|
||||
if (this.seconds) {
|
||||
timestamp.seconds = this.seconds;
|
||||
}
|
||||
if (this.nanoseconds) {
|
||||
timestamp.nanos = this.nanoseconds;
|
||||
}
|
||||
return { timestampValue: timestamp };
|
||||
}
|
||||
}
|
||||
exports.Timestamp = Timestamp;
|
||||
//# sourceMappingURL=timestamp.js.map
|
||||
308
express-server/node_modules/@google-cloud/firestore/build/src/transaction.js
generated
vendored
Normal file
308
express-server/node_modules/@google-cloud/firestore/build/src/transaction.js
generated
vendored
Normal file
@@ -0,0 +1,308 @@
|
||||
"use strict";
|
||||
/*!
|
||||
* Copyright 2017 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const reference_1 = require("./reference");
|
||||
const util_1 = require("./util");
|
||||
const util_2 = require("./util");
|
||||
/*!
|
||||
* Error message for transactional reads that were executed after performing
|
||||
* writes.
|
||||
*/
|
||||
const READ_AFTER_WRITE_ERROR_MSG = 'Firestore transactions require all reads to be executed before all writes.';
|
||||
/*!
|
||||
* Transactions can be retried if the initial stream opening errors out.
|
||||
*/
|
||||
const ALLOW_RETRIES = true;
|
||||
/**
|
||||
* A reference to a transaction.
|
||||
*
|
||||
* The Transaction object passed to a transaction's updateFunction provides
|
||||
* the methods to read and write data within the transaction context. See
|
||||
* [runTransaction()]{@link Firestore#runTransaction}.
|
||||
*
|
||||
* @class
|
||||
*/
|
||||
class Transaction {
|
||||
/**
|
||||
* @private
|
||||
* @hideconstructor
|
||||
*
|
||||
* @param firestore The Firestore Database client.
|
||||
* @param previousTransaction If available, the failed transaction that is
|
||||
* being retried.
|
||||
*/
|
||||
constructor(firestore, previousTransaction) {
|
||||
this._firestore = firestore;
|
||||
this._validator = firestore._validator;
|
||||
this._previousTransaction = previousTransaction;
|
||||
this._writeBatch = firestore.batch();
|
||||
this._requestTag =
|
||||
previousTransaction ? previousTransaction.requestTag : util_2.requestTag();
|
||||
}
|
||||
/**
|
||||
* Retrieve a document or a query result from the database. Holds a
|
||||
* pessimistic lock on all returned documents.
|
||||
*
|
||||
* @param {DocumentReference|Query} refOrQuery The document or query to
|
||||
* return.
|
||||
* @returns {Promise} A Promise that resolves with a DocumentSnapshot or
|
||||
* QuerySnapshot for the returned documents.
|
||||
*
|
||||
* @example
|
||||
* firestore.runTransaction(transaction => {
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
* return transaction.get(documentRef).then(doc => {
|
||||
* if (doc.exists) {
|
||||
* transaction.update(documentRef, { count: doc.get('count') + 1 });
|
||||
* } else {
|
||||
* transaction.create(documentRef, { count: 1 });
|
||||
* }
|
||||
* });
|
||||
* });
|
||||
*/
|
||||
get(refOrQuery) {
|
||||
if (!this._writeBatch.isEmpty) {
|
||||
throw new Error(READ_AFTER_WRITE_ERROR_MSG);
|
||||
}
|
||||
if (refOrQuery instanceof reference_1.DocumentReference) {
|
||||
return this._firestore
|
||||
.getAll_([refOrQuery], /* fieldMask= */ null, this._requestTag, this._transactionId)
|
||||
.then(res => {
|
||||
return Promise.resolve(res[0]);
|
||||
});
|
||||
}
|
||||
if (refOrQuery instanceof reference_1.Query) {
|
||||
return refOrQuery._get(this._transactionId);
|
||||
}
|
||||
throw new Error('Argument "refOrQuery" must be a DocumentRef or a Query.');
|
||||
}
|
||||
/**
|
||||
* Retrieves multiple documents from Firestore. Holds a pessimistic lock on
|
||||
* all returned documents.
|
||||
*
|
||||
* @param {DocumentReference} documentRef A `DocumentReference` to receive.
|
||||
* @param {Array.<DocumentReference|ReadOptions>} moreDocumentRefsOrReadOptions
|
||||
* Additional `DocumentReferences` to receive, followed by an optional field
|
||||
* mask.
|
||||
* @returns {Promise<Array.<DocumentSnapshot>>} A Promise that
|
||||
* contains an array with the resulting document snapshots.
|
||||
*
|
||||
* @example
|
||||
* let firstDoc = firestore.doc('col/doc1');
|
||||
* let secondDoc = firestore.doc('col/doc2');
|
||||
* let resultDoc = firestore.doc('col/doc3');
|
||||
*
|
||||
* firestore.runTransaction(transaction => {
|
||||
* return transaction.getAll(firstDoc, secondDoc).then(docs => {
|
||||
* transaction.set(resultDoc, {
|
||||
* sum: docs[1].get('count') + docs[2].get('count')
|
||||
* });
|
||||
* });
|
||||
* });
|
||||
*/
|
||||
getAll(documentRef, ...moreDocumentRefsOrReadOptions) {
|
||||
if (!this._writeBatch.isEmpty) {
|
||||
throw new Error(READ_AFTER_WRITE_ERROR_MSG);
|
||||
}
|
||||
this._validator.minNumberOfArguments('Transaction.getAll', arguments, 1);
|
||||
const { documents, fieldMask } = util_1.parseGetAllArguments(this._validator, [documentRef, ...moreDocumentRefsOrReadOptions]);
|
||||
return this._firestore.getAll_(documents, fieldMask, this._requestTag, this._transactionId);
|
||||
}
|
||||
/**
|
||||
* Create the document referred to by the provided
|
||||
* [DocumentReference]{@link DocumentReference}. The operation will
|
||||
* fail the transaction if a document exists at the specified location.
|
||||
*
|
||||
* @param {DocumentReference} documentRef A reference to the document to be
|
||||
* created.
|
||||
* @param {DocumentData} data The object data to serialize as the document.
|
||||
* @returns {Transaction} This Transaction instance. Used for
|
||||
* chaining method calls.
|
||||
*
|
||||
* @example
|
||||
* firestore.runTransaction(transaction => {
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
* return transaction.get(documentRef).then(doc => {
|
||||
* if (!doc.exists) {
|
||||
* transaction.create(documentRef, { foo: 'bar' });
|
||||
* }
|
||||
* });
|
||||
* });
|
||||
*/
|
||||
create(documentRef, data) {
|
||||
this._writeBatch.create(documentRef, data);
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Writes to the document referred to by the provided
|
||||
* [DocumentReference]{@link DocumentReference}. If the document
|
||||
* does not exist yet, it will be created. If you pass
|
||||
* [SetOptions]{@link SetOptions}, the provided data can be merged into the
|
||||
* existing document.
|
||||
*
|
||||
* @param {DocumentReference} documentRef A reference to the document to be
|
||||
* set.
|
||||
* @param {DocumentData} data The object to serialize as the document.
|
||||
* @param {SetOptions=} options An object to configure the set behavior.
|
||||
* @param {boolean=} options.merge - If true, set() merges the values
|
||||
* specified in its data argument. Fields omitted from this set() call
|
||||
* remain untouched.
|
||||
* @param {Array.<string|FieldPath>=} options.mergeFields - If provided,
|
||||
* set() only replaces the specified field paths. Any field path that is not
|
||||
* specified is ignored and remains untouched.
|
||||
* @returns {Transaction} This Transaction instance. Used for
|
||||
* chaining method calls.
|
||||
*
|
||||
* @example
|
||||
* firestore.runTransaction(transaction => {
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
* transaction.set(documentRef, { foo: 'bar' });
|
||||
* return Promise.resolve();
|
||||
* });
|
||||
*/
|
||||
set(documentRef, data, options) {
|
||||
this._writeBatch.set(documentRef, data, options);
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Updates fields in the document referred to by the provided
|
||||
* [DocumentReference]{@link DocumentReference}. The update will
|
||||
* fail if applied to a document that does not exist.
|
||||
*
|
||||
* The update() method accepts either an object with field paths encoded as
|
||||
* keys and field values encoded as values, or a variable number of arguments
|
||||
* that alternate between field paths and field values. Nested fields can be
|
||||
* updated by providing dot-separated field path strings or by providing
|
||||
* FieldPath objects.
|
||||
*
|
||||
* A Precondition restricting this update can be specified as the last
|
||||
* argument.
|
||||
*
|
||||
* @param {DocumentReference} documentRef A reference to the document to be
|
||||
* updated.
|
||||
* @param {UpdateData|string|FieldPath} dataOrField An object
|
||||
* containing the fields and values with which to update the document
|
||||
* or the path of the first field to update.
|
||||
* @param {
|
||||
* ...(Precondition|*|string|FieldPath)} preconditionOrValues -
|
||||
* An alternating list of field paths and values to update or a Precondition
|
||||
* to to enforce on this update.
|
||||
* @returns {Transaction} This Transaction instance. Used for
|
||||
* chaining method calls.
|
||||
*
|
||||
* @example
|
||||
* firestore.runTransaction(transaction => {
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
* return transaction.get(documentRef).then(doc => {
|
||||
* if (doc.exists) {
|
||||
* transaction.update(documentRef, { count: doc.get('count') + 1 });
|
||||
* } else {
|
||||
* transaction.create(documentRef, { count: 1 });
|
||||
* }
|
||||
* });
|
||||
* });
|
||||
*/
|
||||
update(documentRef, dataOrField, ...preconditionOrValues) {
|
||||
this._validator.minNumberOfArguments('update', arguments, 2);
|
||||
preconditionOrValues = Array.prototype.slice.call(arguments, 2);
|
||||
this._writeBatch.update.apply(this._writeBatch, [documentRef, dataOrField].concat(preconditionOrValues));
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Deletes the document referred to by the provided [DocumentReference]
|
||||
* {@link DocumentReference}.
|
||||
*
|
||||
* @param {DocumentReference} documentRef A reference to the document to be
|
||||
* deleted.
|
||||
* @param {Precondition=} precondition A precondition to enforce for this
|
||||
* delete.
|
||||
* @param {Timestamp=} precondition.lastUpdateTime If set, enforces that the
|
||||
* document was last updated at lastUpdateTime. Fails the transaction if the
|
||||
* document doesn't exist or was last updated at a different time.
|
||||
* @returns {Transaction} This Transaction instance. Used for
|
||||
* chaining method calls.
|
||||
*
|
||||
* @example
|
||||
* firestore.runTransaction(transaction => {
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
* transaction.delete(documentRef);
|
||||
* return Promise.resolve();
|
||||
* });
|
||||
*/
|
||||
delete(documentRef, precondition) {
|
||||
this._writeBatch.delete(documentRef, precondition);
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Starts a transaction and obtains the transaction id from the server.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
begin() {
|
||||
const request = {
|
||||
database: this._firestore.formattedName,
|
||||
};
|
||||
if (this._previousTransaction) {
|
||||
// tslint:disable-next-line no-any
|
||||
request.options = {
|
||||
readWrite: {
|
||||
retryTransaction: this._previousTransaction._transactionId,
|
||||
},
|
||||
};
|
||||
}
|
||||
return this._firestore
|
||||
.request('beginTransaction', request, this._requestTag, ALLOW_RETRIES)
|
||||
.then(resp => {
|
||||
this._transactionId = resp.transaction;
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Commits all queued-up changes in this transaction and releases all locks.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
commit() {
|
||||
return this._writeBatch
|
||||
.commit_({
|
||||
transactionId: this._transactionId,
|
||||
requestTag: this._requestTag,
|
||||
})
|
||||
.then(() => { });
|
||||
}
|
||||
/**
|
||||
* Releases all locks and rolls back this transaction.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
rollback() {
|
||||
const request = {
|
||||
database: this._firestore.formattedName,
|
||||
transaction: this._transactionId,
|
||||
};
|
||||
return this._firestore.request('rollback', request, this._requestTag, /* allowRetries= */ false);
|
||||
}
|
||||
/**
|
||||
* Returns the tag to use with with all request for this Transaction.
|
||||
* @private
|
||||
* @return A unique client-generated identifier for this transaction.
|
||||
*/
|
||||
get requestTag() {
|
||||
return this._requestTag;
|
||||
}
|
||||
}
|
||||
exports.Transaction = Transaction;
|
||||
//# sourceMappingURL=transaction.js.map
|
||||
21
express-server/node_modules/@google-cloud/firestore/build/src/types.js
generated
vendored
Normal file
21
express-server/node_modules/@google-cloud/firestore/build/src/types.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
"use strict";
|
||||
/*!
|
||||
* Copyright 2018 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
class GrpcError extends Error {
|
||||
}
|
||||
exports.GrpcError = GrpcError;
|
||||
//# sourceMappingURL=types.js.map
|
||||
91
express-server/node_modules/@google-cloud/firestore/build/src/util.js
generated
vendored
Normal file
91
express-server/node_modules/@google-cloud/firestore/build/src/util.js
generated
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
"use strict";
|
||||
/*!
|
||||
* Copyright 2018 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path_1 = require("./path");
|
||||
const serializer_1 = require("./serializer");
|
||||
/**
|
||||
* Generate a unique client-side identifier.
|
||||
*
|
||||
* Used for the creation of new documents.
|
||||
*
|
||||
* @private
|
||||
* @returns {string} A unique 20-character wide identifier.
|
||||
*/
|
||||
function autoId() {
|
||||
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
|
||||
let autoId = '';
|
||||
for (let i = 0; i < 20; i++) {
|
||||
autoId += chars.charAt(Math.floor(Math.random() * chars.length));
|
||||
}
|
||||
return autoId;
|
||||
}
|
||||
exports.autoId = autoId;
|
||||
/**
|
||||
* Generate a short and semi-random client-side identifier.
|
||||
*
|
||||
* Used for the creation of request tags.
|
||||
*
|
||||
* @private
|
||||
* @returns {string} A random 5-character wide identifier.
|
||||
*/
|
||||
function requestTag() {
|
||||
return autoId().substr(0, 5);
|
||||
}
|
||||
exports.requestTag = requestTag;
|
||||
/**
|
||||
* Parses the arguments for the `getAll()` call supported by both the Firestore
|
||||
* and Transaction class.
|
||||
*
|
||||
* @private
|
||||
* @param validator The argument validator to use.
|
||||
* @param documentRefsOrReadOptions An array of document references followed by
|
||||
* an optional ReadOptions object.
|
||||
*/
|
||||
function parseGetAllArguments(validator, documentRefsOrReadOptions) {
|
||||
let documents;
|
||||
let readOptions = undefined;
|
||||
// In the original release of the SDK, getAll() was documented to accept
|
||||
// either a varargs list of DocumentReferences or a single array of
|
||||
// DocumentReferences. To support this usage in the TypeScript client, we have
|
||||
// to manually verify the arguments to determine which input the user
|
||||
// provided.
|
||||
const usesDeprecatedArgumentStyle = Array.isArray(documentRefsOrReadOptions[0]);
|
||||
if (usesDeprecatedArgumentStyle) {
|
||||
documents = documentRefsOrReadOptions[0];
|
||||
readOptions = documentRefsOrReadOptions[1];
|
||||
}
|
||||
else {
|
||||
if (documentRefsOrReadOptions.length > 0 &&
|
||||
serializer_1.isPlainObject(documentRefsOrReadOptions[documentRefsOrReadOptions.length - 1])) {
|
||||
readOptions = documentRefsOrReadOptions.pop();
|
||||
documents = documentRefsOrReadOptions;
|
||||
}
|
||||
else {
|
||||
documents = documentRefsOrReadOptions;
|
||||
}
|
||||
}
|
||||
for (let i = 0; i < documents.length; ++i) {
|
||||
validator.isDocumentReference(i, documents[i]);
|
||||
}
|
||||
validator.isOptionalReadOptions('options', readOptions);
|
||||
const fieldMask = readOptions && readOptions.fieldMask ?
|
||||
readOptions.fieldMask.map(fieldPath => path_1.FieldPath.fromArgument(fieldPath)) :
|
||||
null;
|
||||
return { fieldMask, documents };
|
||||
}
|
||||
exports.parseGetAllArguments = parseGetAllArguments;
|
||||
//# sourceMappingURL=util.js.map
|
||||
108
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/doc/google/firestore/v1beta1/doc_common.js
generated
vendored
Normal file
108
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/doc/google/firestore/v1beta1/doc_common.js
generated
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
"use strict";
|
||||
// Copyright 2018 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Note: this file is purely for documentation. Any contents are not expected
|
||||
// to be loaded as the JS file.
|
||||
/**
|
||||
* A set of field paths on a document.
|
||||
* Used to restrict a get or update operation on a document to a subset of its
|
||||
* fields.
|
||||
* This is different from standard field masks, as this is always scoped to a
|
||||
* Document, and takes in account the dynamic nature of Value.
|
||||
*
|
||||
* @property {string[]} fieldPaths
|
||||
* The list of field paths in the mask. See Document.fields for a field
|
||||
* path syntax reference.
|
||||
*
|
||||
* @typedef DocumentMask
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.DocumentMask definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/common.proto}
|
||||
*/
|
||||
const DocumentMask = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* A precondition on a document, used for conditional operations.
|
||||
*
|
||||
* @property {boolean} exists
|
||||
* When set to `true`, the target document must exist.
|
||||
* When set to `false`, the target document must not exist.
|
||||
*
|
||||
* @property {Object} updateTime
|
||||
* When set, the target document must exist and have been last updated at
|
||||
* that time.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @typedef Precondition
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.Precondition definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/common.proto}
|
||||
*/
|
||||
const Precondition = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* Options for creating a new transaction.
|
||||
*
|
||||
* @property {Object} readOnly
|
||||
* The transaction can only be used for read operations.
|
||||
*
|
||||
* This object should have the same structure as [ReadOnly]{@link
|
||||
* google.firestore.v1beta1.ReadOnly}
|
||||
*
|
||||
* @property {Object} readWrite
|
||||
* The transaction can be used for both read and write operations.
|
||||
*
|
||||
* This object should have the same structure as [ReadWrite]{@link
|
||||
* google.firestore.v1beta1.ReadWrite}
|
||||
*
|
||||
* @typedef TransactionOptions
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.TransactionOptions definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/common.proto}
|
||||
*/
|
||||
const TransactionOptions = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
/**
|
||||
* Options for a transaction that can be used to read and write documents.
|
||||
*
|
||||
* @property {string} retryTransaction
|
||||
* An optional transaction to retry.
|
||||
*
|
||||
* @typedef ReadWrite
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.TransactionOptions.ReadWrite definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/common.proto}
|
||||
*/
|
||||
ReadWrite: {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
},
|
||||
/**
|
||||
* Options for a transaction that can only be used to read documents.
|
||||
*
|
||||
* @property {Object} readTime
|
||||
* Reads documents at the given time.
|
||||
* This may not be older than 60 seconds.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @typedef ReadOnly
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.TransactionOptions.ReadOnly definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/common.proto}
|
||||
*/
|
||||
ReadOnly: {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
}
|
||||
};
|
||||
//# sourceMappingURL=doc_common.js.map
|
||||
185
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/doc/google/firestore/v1beta1/doc_document.js
generated
vendored
Normal file
185
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/doc/google/firestore/v1beta1/doc_document.js
generated
vendored
Normal file
@@ -0,0 +1,185 @@
|
||||
"use strict";
|
||||
// Copyright 2018 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Note: this file is purely for documentation. Any contents are not expected
|
||||
// to be loaded as the JS file.
|
||||
/**
|
||||
* A Firestore document.
|
||||
*
|
||||
* Must not exceed 1 MiB - 4 bytes.
|
||||
*
|
||||
* @property {string} name
|
||||
* The resource name of the document, for example
|
||||
* `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
*
|
||||
* @property {Object.<string, Object>} fields
|
||||
* The document's fields.
|
||||
*
|
||||
* The map keys represent field names.
|
||||
*
|
||||
* A simple field name contains only characters `a` to `z`, `A` to `Z`,
|
||||
* `0` to `9`, or `_`, and must not start with `0` to `9`. For example,
|
||||
* `foo_bar_17`.
|
||||
*
|
||||
* Field names matching the regular expression `__.*__` are reserved. Reserved
|
||||
* field names are forbidden except in certain documented contexts. The map
|
||||
* keys, represented as UTF-8, must not exceed 1,500 bytes and cannot be
|
||||
* empty.
|
||||
*
|
||||
* Field paths may be used in other contexts to refer to structured fields
|
||||
* defined here. For `map_value`, the field path is represented by the simple
|
||||
* or quoted field names of the containing fields, delimited by `.`. For
|
||||
* example, the structured field
|
||||
* `"foo" : { map_value: { "x&y" : { string_value: "hello" }}}` would be
|
||||
* represented by the field path `foo.x&y`.
|
||||
*
|
||||
* Within a field path, a quoted field name starts and ends with `` ` `` and
|
||||
* may contain any character. Some characters, including `` ` ``, must be
|
||||
* escaped using a `\`. For example, `` `x&y` `` represents `x&y` and
|
||||
* `` `bak\`tik` `` represents `` bak`tik ``.
|
||||
*
|
||||
* @property {Object} createTime
|
||||
* Output only. The time at which the document was created.
|
||||
*
|
||||
* This value increases monotonically when a document is deleted then
|
||||
* recreated. It can also be compared to values from other documents and
|
||||
* the `read_time` of a query.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @property {Object} updateTime
|
||||
* Output only. The time at which the document was last changed.
|
||||
*
|
||||
* This value is initially set to the `create_time` then increases
|
||||
* monotonically with each change to the document. It can also be
|
||||
* compared to values from other documents and the `read_time` of a query.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @typedef Document
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.Document definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/document.proto}
|
||||
*/
|
||||
const Document = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* A message that can hold any of the supported value types.
|
||||
*
|
||||
* @property {number} nullValue
|
||||
* A null value.
|
||||
*
|
||||
* The number should be among the values of [NullValue]{@link
|
||||
* google.protobuf.NullValue}
|
||||
*
|
||||
* @property {boolean} booleanValue
|
||||
* A boolean value.
|
||||
*
|
||||
* @property {number} integerValue
|
||||
* An integer value.
|
||||
*
|
||||
* @property {number} doubleValue
|
||||
* A double value.
|
||||
*
|
||||
* @property {Object} timestampValue
|
||||
* A timestamp value.
|
||||
*
|
||||
* Precise only to microseconds. When stored, any additional precision is
|
||||
* rounded down.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @property {string} stringValue
|
||||
* A string value.
|
||||
*
|
||||
* The string, represented as UTF-8, must not exceed 1 MiB - 89 bytes.
|
||||
* Only the first 1,500 bytes of the UTF-8 representation are considered by
|
||||
* queries.
|
||||
*
|
||||
* @property {string} bytesValue
|
||||
* A bytes value.
|
||||
*
|
||||
* Must not exceed 1 MiB - 89 bytes.
|
||||
* Only the first 1,500 bytes are considered by queries.
|
||||
*
|
||||
* @property {string} referenceValue
|
||||
* A reference to a document. For example:
|
||||
* `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
*
|
||||
* @property {Object} geoPointValue
|
||||
* A geo point value representing a point on the surface of Earth.
|
||||
*
|
||||
* This object should have the same structure as [LatLng]{@link
|
||||
* google.type.LatLng}
|
||||
*
|
||||
* @property {Object} arrayValue
|
||||
* An array value.
|
||||
*
|
||||
* Cannot directly contain another array value, though can contain an
|
||||
* map which contains another array.
|
||||
*
|
||||
* This object should have the same structure as [ArrayValue]{@link
|
||||
* google.firestore.v1beta1.ArrayValue}
|
||||
*
|
||||
* @property {Object} mapValue
|
||||
* A map value.
|
||||
*
|
||||
* This object should have the same structure as [MapValue]{@link
|
||||
* google.firestore.v1beta1.MapValue}
|
||||
*
|
||||
* @typedef Value
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.Value definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/document.proto}
|
||||
*/
|
||||
const Value = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* An array value.
|
||||
*
|
||||
* @property {Object[]} values
|
||||
* Values in the array.
|
||||
*
|
||||
* This object should have the same structure as [Value]{@link
|
||||
* google.firestore.v1beta1.Value}
|
||||
*
|
||||
* @typedef ArrayValue
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.ArrayValue definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/document.proto}
|
||||
*/
|
||||
const ArrayValue = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* A map value.
|
||||
*
|
||||
* @property {Object.<string, Object>} fields
|
||||
* The map's fields.
|
||||
*
|
||||
* The map keys represent field names. Field names matching the regular
|
||||
* expression `__.*__` are reserved. Reserved field names are forbidden except
|
||||
* in certain documented contexts. The map keys, represented as UTF-8, must
|
||||
* not exceed 1,500 bytes and cannot be empty.
|
||||
*
|
||||
* @typedef MapValue
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.MapValue definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/document.proto}
|
||||
*/
|
||||
const MapValue = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
//# sourceMappingURL=doc_document.js.map
|
||||
882
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/doc/google/firestore/v1beta1/doc_firestore.js
generated
vendored
Normal file
882
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/doc/google/firestore/v1beta1/doc_firestore.js
generated
vendored
Normal file
@@ -0,0 +1,882 @@
|
||||
"use strict";
|
||||
// Copyright 2018 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Note: this file is purely for documentation. Any contents are not expected
|
||||
// to be loaded as the JS file.
|
||||
/**
|
||||
* The request for Firestore.GetDocument.
|
||||
*
|
||||
* @property {string} name
|
||||
* The resource name of the Document to get. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
*
|
||||
* @property {Object} mask
|
||||
* The fields to return. If not set, returns all fields.
|
||||
*
|
||||
* If the document has a field that is not present in this mask, that field
|
||||
* will not be returned in the response.
|
||||
*
|
||||
* This object should have the same structure as [DocumentMask]{@link
|
||||
* google.firestore.v1beta1.DocumentMask}
|
||||
*
|
||||
* @property {string} transaction
|
||||
* Reads the document in a transaction.
|
||||
*
|
||||
* @property {Object} readTime
|
||||
* Reads the version of the document at the given time.
|
||||
* This may not be older than 60 seconds.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @typedef GetDocumentRequest
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.GetDocumentRequest definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const GetDocumentRequest = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* The request for Firestore.ListDocuments.
|
||||
*
|
||||
* @property {string} parent
|
||||
* The parent resource name. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}/documents` or
|
||||
* `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
* For example:
|
||||
* `projects/my-project/databases/my-database/documents` or
|
||||
* `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
|
||||
*
|
||||
* @property {string} collectionId
|
||||
* The collection ID, relative to `parent`, to list. For example: `chatrooms`
|
||||
* or `messages`.
|
||||
*
|
||||
* @property {number} pageSize
|
||||
* The maximum number of documents to return.
|
||||
*
|
||||
* @property {string} pageToken
|
||||
* The `next_page_token` value returned from a previous List request, if any.
|
||||
*
|
||||
* @property {string} orderBy
|
||||
* The order to sort results by. For example: `priority desc, name`.
|
||||
*
|
||||
* @property {Object} mask
|
||||
* The fields to return. If not set, returns all fields.
|
||||
*
|
||||
* If a document has a field that is not present in this mask, that field
|
||||
* will not be returned in the response.
|
||||
*
|
||||
* This object should have the same structure as [DocumentMask]{@link
|
||||
* google.firestore.v1beta1.DocumentMask}
|
||||
*
|
||||
* @property {string} transaction
|
||||
* Reads documents in a transaction.
|
||||
*
|
||||
* @property {Object} readTime
|
||||
* Reads documents as they were at the given time.
|
||||
* This may not be older than 60 seconds.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @property {boolean} showMissing
|
||||
* If the list should show missing documents. A missing document is a
|
||||
* document that does not exist but has sub-documents. These documents will
|
||||
* be returned with a key but will not have fields, Document.create_time,
|
||||
* or Document.update_time set.
|
||||
*
|
||||
* Requests with `show_missing` may not specify `where` or
|
||||
* `order_by`.
|
||||
*
|
||||
* @typedef ListDocumentsRequest
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.ListDocumentsRequest definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const ListDocumentsRequest = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
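// A hedged sketch of a ListDocumentsRequest. The `v1beta1` export path and the
// resource names are assumptions; with auto-pagination the generated client
// follows `next_page_token` for you, so `pageSize` is a per-page hint rather
// than a result cap.
async function listDocumentsExample() {
  const {v1beta1} = require('@google-cloud/firestore');
  const client = new v1beta1.FirestoreClient();
  const request = {
    parent: 'projects/my-project/databases/(default)/documents',
    collectionId: 'chatrooms',
    pageSize: 50,
    orderBy: 'priority desc, name',
    mask: {fieldPaths: ['topic']},   // omit to return all fields
  };
  const [documents] = await client.listDocuments(request);
  return documents;
}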
|
||||
/**
|
||||
* The response for Firestore.ListDocuments.
|
||||
*
|
||||
* @property {Object[]} documents
|
||||
* The Documents found.
|
||||
*
|
||||
* This object should have the same structure as [Document]{@link
|
||||
* google.firestore.v1beta1.Document}
|
||||
*
|
||||
* @property {string} nextPageToken
|
||||
* The next page token.
|
||||
*
|
||||
* @typedef ListDocumentsResponse
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.ListDocumentsResponse definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const ListDocumentsResponse = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* The request for Firestore.CreateDocument.
|
||||
*
|
||||
* @property {string} parent
|
||||
* The parent resource. For example:
|
||||
* `projects/{project_id}/databases/{database_id}/documents` or
|
||||
* `projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}`
|
||||
*
|
||||
* @property {string} collectionId
|
||||
* The collection ID, relative to `parent`, in which to create the document.
|
||||
* For example: `chatrooms`.
|
||||
*
|
||||
* @property {string} documentId
|
||||
* The client-assigned document ID to use for this document.
|
||||
*
|
||||
* Optional. If not specified, an ID will be assigned by the service.
|
||||
*
|
||||
* @property {Object} document
|
||||
* The document to create. `name` must not be set.
|
||||
*
|
||||
* This object should have the same structure as [Document]{@link
|
||||
* google.firestore.v1beta1.Document}
|
||||
*
|
||||
* @property {Object} mask
|
||||
* The fields to return. If not set, returns all fields.
|
||||
*
|
||||
* If the document has a field that is not present in this mask, that field
|
||||
* will not be returned in the response.
|
||||
*
|
||||
* This object should have the same structure as [DocumentMask]{@link
|
||||
* google.firestore.v1beta1.DocumentMask}
|
||||
*
|
||||
* @typedef CreateDocumentRequest
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.CreateDocumentRequest definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const CreateDocumentRequest = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* The request for Firestore.UpdateDocument.
|
||||
*
|
||||
* @property {Object} document
|
||||
* The updated document.
|
||||
* Creates the document if it does not already exist.
|
||||
*
|
||||
* This object should have the same structure as [Document]{@link
|
||||
* google.firestore.v1beta1.Document}
|
||||
*
|
||||
* @property {Object} updateMask
|
||||
* The fields to update.
|
||||
* None of the field paths in the mask may contain a reserved name.
|
||||
*
|
||||
* If the document exists on the server and has fields not referenced in the
|
||||
* mask, they are left unchanged.
|
||||
* Fields referenced in the mask, but not present in the input document, are
|
||||
* deleted from the document on the server.
|
||||
*
|
||||
* This object should have the same structure as [DocumentMask]{@link
|
||||
* google.firestore.v1beta1.DocumentMask}
|
||||
*
|
||||
* @property {Object} mask
|
||||
* The fields to return. If not set, returns all fields.
|
||||
*
|
||||
* If the document has a field that is not present in this mask, that field
|
||||
* will not be returned in the response.
|
||||
*
|
||||
* This object should have the same structure as [DocumentMask]{@link
|
||||
* google.firestore.v1beta1.DocumentMask}
|
||||
*
|
||||
* @property {Object} currentDocument
|
||||
* An optional precondition on the document.
|
||||
* The request will fail if this is set and not met by the target document.
|
||||
*
|
||||
* This object should have the same structure as [Precondition]{@link
|
||||
* google.firestore.v1beta1.Precondition}
|
||||
*
|
||||
* @typedef UpdateDocumentRequest
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.UpdateDocumentRequest definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const UpdateDocumentRequest = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
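// A sketch of an UpdateDocumentRequest showing the updateMask semantics
// described above: `title` is written, while `draft` is named in the mask but
// absent from the document, so it is deleted on the server. Resource names and
// the `v1beta1` export path are assumptions.
async function updateDocumentExample() {
  const {v1beta1} = require('@google-cloud/firestore');
  const client = new v1beta1.FirestoreClient();
  const request = {
    document: {
      name: 'projects/my-project/databases/(default)/documents/chatrooms/my-chatroom',
      fields: {title: {stringValue: 'Release notes'}},
    },
    updateMask: {fieldPaths: ['title', 'draft']},
    currentDocument: {exists: true},   // optional precondition: fail if missing
  };
  const [updated] = await client.updateDocument(request);
  return updated;
}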
|
||||
/**
|
||||
* The request for Firestore.DeleteDocument.
|
||||
*
|
||||
* @property {string} name
|
||||
* The resource name of the Document to delete. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
*
|
||||
* @property {Object} currentDocument
|
||||
* An optional precondition on the document.
|
||||
* The request will fail if this is set and not met by the target document.
|
||||
*
|
||||
* This object should have the same structure as [Precondition]{@link
|
||||
* google.firestore.v1beta1.Precondition}
|
||||
*
|
||||
* @typedef DeleteDocumentRequest
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.DeleteDocumentRequest definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const DeleteDocumentRequest = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
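// A short sketch of a DeleteDocumentRequest guarded by a Precondition, so the
// delete only succeeds if the document still exists. The document name and
// `v1beta1` export path are assumptions.
async function deleteDocumentExample() {
  const {v1beta1} = require('@google-cloud/firestore');
  const client = new v1beta1.FirestoreClient();
  await client.deleteDocument({
    name: 'projects/my-project/databases/(default)/documents/chatrooms/my-chatroom',
    currentDocument: {exists: true},
  });
}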
|
||||
/**
|
||||
* The request for Firestore.BatchGetDocuments.
|
||||
*
|
||||
* @property {string} database
|
||||
* The database name. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}`.
|
||||
*
|
||||
* @property {string[]} documents
|
||||
* The names of the documents to retrieve. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
* The request will fail if any of the documents is not a child resource of the
|
||||
* given `database`. Duplicate names will be elided.
|
||||
*
|
||||
* @property {Object} mask
|
||||
* The fields to return. If not set, returns all fields.
|
||||
*
|
||||
* If a document has a field that is not present in this mask, that field will
|
||||
* not be returned in the response.
|
||||
*
|
||||
* This object should have the same structure as [DocumentMask]{@link
|
||||
* google.firestore.v1beta1.DocumentMask}
|
||||
*
|
||||
* @property {string} transaction
|
||||
* Reads documents in a transaction.
|
||||
*
|
||||
* @property {Object} newTransaction
|
||||
* Starts a new transaction and reads the documents.
|
||||
* Defaults to a read-only transaction.
|
||||
* The new transaction ID will be returned as the first response in the
|
||||
* stream.
|
||||
*
|
||||
* This object should have the same structure as [TransactionOptions]{@link
|
||||
* google.firestore.v1beta1.TransactionOptions}
|
||||
*
|
||||
* @property {Object} readTime
|
||||
* Reads documents as they were at the given time.
|
||||
* This may not be older than 60 seconds.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @typedef BatchGetDocumentsRequest
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.BatchGetDocumentsRequest definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const BatchGetDocumentsRequest = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
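// BatchGetDocuments is a server-streaming call: each response carries either a
// `found` document or a `missing` name, plus the read time. A hedged sketch;
// the `v1beta1` export path and document names are assumptions.
function batchGetDocumentsExample() {
  const {v1beta1} = require('@google-cloud/firestore');
  const client = new v1beta1.FirestoreClient();
  const database = 'projects/my-project/databases/(default)';
  const stream = client.batchGetDocuments({
    database,
    documents: [
      `${database}/documents/chatrooms/room-a`,
      `${database}/documents/chatrooms/room-b`,
    ],
  });
  stream.on('data', response => {
    if (response.found) {
      console.log('found', response.found.name);
    } else {
      console.log('missing', response.missing);
    }
  });
  stream.on('error', console.error);
  stream.on('end', () => console.log('done'));
}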
|
||||
/**
|
||||
* The streamed response for Firestore.BatchGetDocuments.
|
||||
*
|
||||
* @property {Object} found
|
||||
* A document that was requested.
|
||||
*
|
||||
* This object should have the same structure as [Document]{@link
|
||||
* google.firestore.v1beta1.Document}
|
||||
*
|
||||
* @property {string} missing
|
||||
* A document name that was requested but does not exist. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
*
|
||||
* @property {string} transaction
|
||||
* The transaction that was started as part of this request.
|
||||
* Will only be set in the first response, and only if
|
||||
* BatchGetDocumentsRequest.new_transaction was set in the request.
|
||||
*
|
||||
* @property {Object} readTime
|
||||
* The time at which the document was read.
|
||||
* This may be monotonically increasing; in this case, the previous documents in
|
||||
* the result stream are guaranteed not to have changed between their
|
||||
* read_time and this one.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @typedef BatchGetDocumentsResponse
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.BatchGetDocumentsResponse definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const BatchGetDocumentsResponse = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* The request for Firestore.BeginTransaction.
|
||||
*
|
||||
* @property {string} database
|
||||
* The database name. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}`.
|
||||
*
|
||||
* @property {Object} options
|
||||
* The options for the transaction.
|
||||
* Defaults to a read-write transaction.
|
||||
*
|
||||
* This object should have the same structure as [TransactionOptions]{@link
|
||||
* google.firestore.v1beta1.TransactionOptions}
|
||||
*
|
||||
* @typedef BeginTransactionRequest
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.BeginTransactionRequest definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const BeginTransactionRequest = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* The response for Firestore.BeginTransaction.
|
||||
*
|
||||
* @property {string} transaction
|
||||
* The transaction that was started.
|
||||
*
|
||||
* @typedef BeginTransactionResponse
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.BeginTransactionResponse definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const BeginTransactionResponse = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* The request for Firestore.Commit.
|
||||
*
|
||||
* @property {string} database
|
||||
* The database name. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}`.
|
||||
*
|
||||
* @property {Object[]} writes
|
||||
* The writes to apply.
|
||||
*
|
||||
* Always executed atomically and in order.
|
||||
*
|
||||
* This object should have the same structure as [Write]{@link
|
||||
* google.firestore.v1beta1.Write}
|
||||
*
|
||||
* @property {string} transaction
|
||||
* If set, applies all writes in this transaction, and commits it.
|
||||
*
|
||||
* @typedef CommitRequest
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.CommitRequest definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const CommitRequest = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* The response for Firestore.Commit.
|
||||
*
|
||||
* @property {Object[]} writeResults
|
||||
* The result of applying the writes.
|
||||
*
|
||||
* The i-th write result corresponds to the i-th write in the
|
||||
* request.
|
||||
*
|
||||
* This object should have the same structure as [WriteResult]{@link
|
||||
* google.firestore.v1beta1.WriteResult}
|
||||
*
|
||||
* @property {Object} commitTime
|
||||
* The time at which the commit occurred.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @typedef CommitResponse
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.CommitResponse definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const CommitResponse = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* The request for Firestore.Rollback.
|
||||
*
|
||||
* @property {string} database
|
||||
* The database name. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}`.
|
||||
*
|
||||
* @property {string} transaction
|
||||
* The transaction to roll back.
|
||||
*
|
||||
* @typedef RollbackRequest
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.RollbackRequest definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const RollbackRequest = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
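// The BeginTransaction / Commit / Rollback trio sketched end to end: start a
// read-write transaction, attach its id to a Commit, and roll back if the
// commit fails. Resource names and the `v1beta1` export path are assumptions;
// the high-level `Firestore#runTransaction` wraps this flow for you.
async function transactionLifecycleExample() {
  const {v1beta1} = require('@google-cloud/firestore');
  const client = new v1beta1.FirestoreClient();
  const database = 'projects/my-project/databases/(default)';
  const [{transaction}] = await client.beginTransaction({
    database,
    options: {readWrite: {}},          // defaults to read-write when omitted
  });
  try {
    const [response] = await client.commit({
      database,
      transaction,
      writes: [{
        update: {
          name: `${database}/documents/chatrooms/my-chatroom`,
          fields: {topic: {stringValue: 'transactions'}},
        },
      }],
    });
    return response.commitTime;
  } catch (err) {
    await client.rollback({database, transaction});
    throw err;
  }
}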
|
||||
/**
|
||||
* The request for Firestore.RunQuery.
|
||||
*
|
||||
* @property {string} parent
|
||||
* The parent resource name. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}/documents` or
|
||||
* `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
* For example:
|
||||
* `projects/my-project/databases/my-database/documents` or
|
||||
* `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
|
||||
*
|
||||
* @property {Object} structuredQuery
|
||||
* A structured query.
|
||||
*
|
||||
* This object should have the same structure as [StructuredQuery]{@link
|
||||
* google.firestore.v1beta1.StructuredQuery}
|
||||
*
|
||||
* @property {string} transaction
|
||||
* Reads documents in a transaction.
|
||||
*
|
||||
* @property {Object} newTransaction
|
||||
* Starts a new transaction and reads the documents.
|
||||
* Defaults to a read-only transaction.
|
||||
* The new transaction ID will be returned as the first response in the
|
||||
* stream.
|
||||
*
|
||||
* This object should have the same structure as [TransactionOptions]{@link
|
||||
* google.firestore.v1beta1.TransactionOptions}
|
||||
*
|
||||
* @property {Object} readTime
|
||||
* Reads documents as they were at the given time.
|
||||
* This may not be older than 60 seconds.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @typedef RunQueryRequest
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.RunQueryRequest definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const RunQueryRequest = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
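// RunQuery is a server-streaming call that yields one RunQueryResponse per
// matching document (plus progress/transaction responses with no `document`).
// A hedged sketch; the `v1beta1` export path, collection, and field names are
// assumptions.
function runQueryExample() {
  const {v1beta1} = require('@google-cloud/firestore');
  const client = new v1beta1.FirestoreClient();
  const stream = client.runQuery({
    parent: 'projects/my-project/databases/(default)/documents',
    structuredQuery: {
      from: [{collectionId: 'chatrooms'}],
      where: {
        fieldFilter: {
          field: {fieldPath: 'topic'},
          op: 'EQUAL',
          value: {stringValue: 'firestore'},
        },
      },
      limit: {value: 10},              // google.protobuf.Int32Value
    },
  });
  stream.on('data', response => {
    if (response.document) {
      console.log(response.document.name, 'read at', response.readTime);
    }
  });
  stream.on('error', console.error);
}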
|
||||
/**
|
||||
* The response for Firestore.RunQuery.
|
||||
*
|
||||
* @property {string} transaction
|
||||
* The transaction that was started as part of this request.
|
||||
* Can only be set in the first response, and only if
|
||||
* RunQueryRequest.new_transaction was set in the request.
|
||||
* If set, no other fields will be set in this response.
|
||||
*
|
||||
* @property {Object} document
|
||||
* A query result.
|
||||
* Not set when reporting partial progress.
|
||||
*
|
||||
* This object should have the same structure as [Document]{@link
|
||||
* google.firestore.v1beta1.Document}
|
||||
*
|
||||
* @property {Object} readTime
|
||||
* The time at which the document was read. This may be monotonically
|
||||
* increasing; in this case, the previous documents in the result stream are
|
||||
* guaranteed not to have changed between their `read_time` and this one.
|
||||
*
|
||||
* If the query returns no results, a response with `read_time` and no
|
||||
* `document` will be sent, and this represents the time at which the query
|
||||
* was run.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @property {number} skippedResults
|
||||
* The number of results that have been skipped due to an offset between
|
||||
* the last response and the current response.
|
||||
*
|
||||
* @typedef RunQueryResponse
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.RunQueryResponse definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const RunQueryResponse = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* The request for Firestore.Write.
|
||||
*
|
||||
* The first request creates a stream, or resumes an existing one from a token.
|
||||
*
|
||||
* When creating a new stream, the server replies with a response containing
|
||||
* only an ID and a token, to use in the next request.
|
||||
*
|
||||
* When resuming a stream, the server first streams any responses later than the
|
||||
* given token, then a response containing only an up-to-date token, to use in
|
||||
* the next request.
|
||||
*
|
||||
* @property {string} database
|
||||
* The database name. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}`.
|
||||
* This is only required in the first message.
|
||||
*
|
||||
* @property {string} streamId
|
||||
* The ID of the write stream to resume.
|
||||
* This may only be set in the first message. When left empty, a new write
|
||||
* stream will be created.
|
||||
*
|
||||
* @property {Object[]} writes
|
||||
* The writes to apply.
|
||||
*
|
||||
* Always executed atomically and in order.
|
||||
* This must be empty on the first request.
|
||||
* This may be empty on the last request.
|
||||
* This must not be empty on all other requests.
|
||||
*
|
||||
* This object should have the same structure as [Write]{@link
|
||||
* google.firestore.v1beta1.Write}
|
||||
*
|
||||
* @property {string} streamToken
|
||||
* A stream token that was previously sent by the server.
|
||||
*
|
||||
* The client should set this field to the token from the most recent
|
||||
* WriteResponse it has received. This acknowledges that the client has
|
||||
* received responses up to this token. After sending this token, earlier
|
||||
* tokens may not be used anymore.
|
||||
*
|
||||
* The server may close the stream if there are too many unacknowledged
|
||||
* responses.
|
||||
*
|
||||
* Leave this field unset when creating a new stream. To resume a stream at
|
||||
* a specific point, set this field and the `stream_id` field.
|
||||
*
|
||||
* @property {Object.<string, string>} labels
|
||||
* Labels associated with this write request.
|
||||
*
|
||||
* @typedef WriteRequest
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.WriteRequest definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const WriteRequest = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
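// The Write RPC is a bidirectional stream; the sketch below follows the
// handshake described above: send a first request with only the database to
// obtain a stream id and token, then send writes tagged with the most recent
// `streamToken`. This is illustrative only -- the library normally drives this
// RPC internally, and the `v1beta1` export path and names are assumptions.
function writeStreamExample() {
  const {v1beta1} = require('@google-cloud/firestore');
  const client = new v1beta1.FirestoreClient();
  const database = 'projects/my-project/databases/(default)';
  const stream = client.write();
  stream.on('data', response => {
    // The first response carries streamId/streamToken and no write results.
    if (!response.writeResults || response.writeResults.length === 0) {
      stream.write({
        database,
        streamId: response.streamId,
        streamToken: response.streamToken,
        writes: [{
          update: {
            name: `${database}/documents/chatrooms/my-chatroom`,
            fields: {topic: {stringValue: 'write stream'}},
          },
        }],
      });
    } else {
      stream.end();
    }
  });
  stream.on('error', console.error);
  // The first request creates the stream: database only, no writes.
  stream.write({database});
}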
|
||||
/**
|
||||
* The response for Firestore.Write.
|
||||
*
|
||||
* @property {string} streamId
|
||||
* The ID of the stream.
|
||||
* Only set on the first message, when a new stream was created.
|
||||
*
|
||||
* @property {string} streamToken
|
||||
* A token that represents the position of this response in the stream.
|
||||
* This can be used by a client to resume the stream at this point.
|
||||
*
|
||||
* This field is always set.
|
||||
*
|
||||
* @property {Object[]} writeResults
|
||||
* The result of applying the writes.
|
||||
*
|
||||
* The i-th write result corresponds to the i-th write in the
|
||||
* request.
|
||||
*
|
||||
* This object should have the same structure as [WriteResult]{@link
|
||||
* google.firestore.v1beta1.WriteResult}
|
||||
*
|
||||
* @property {Object} commitTime
|
||||
* The time at which the commit occurred.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @typedef WriteResponse
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.WriteResponse definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const WriteResponse = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* A request for Firestore.Listen
|
||||
*
|
||||
* @property {string} database
|
||||
* The database name. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}`.
|
||||
*
|
||||
* @property {Object} addTarget
|
||||
* A target to add to this stream.
|
||||
*
|
||||
* This object should have the same structure as [Target]{@link
|
||||
* google.firestore.v1beta1.Target}
|
||||
*
|
||||
* @property {number} removeTarget
|
||||
* The ID of a target to remove from this stream.
|
||||
*
|
||||
* @property {Object.<string, string>} labels
|
||||
* Labels associated with this target change.
|
||||
*
|
||||
* @typedef ListenRequest
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.ListenRequest definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const ListenRequest = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* The response for Firestore.Listen.
|
||||
*
|
||||
* @property {Object} targetChange
|
||||
* Targets have changed.
|
||||
*
|
||||
* This object should have the same structure as [TargetChange]{@link
|
||||
* google.firestore.v1beta1.TargetChange}
|
||||
*
|
||||
* @property {Object} documentChange
|
||||
* A Document has changed.
|
||||
*
|
||||
* This object should have the same structure as [DocumentChange]{@link
|
||||
* google.firestore.v1beta1.DocumentChange}
|
||||
*
|
||||
* @property {Object} documentDelete
|
||||
* A Document has been deleted.
|
||||
*
|
||||
* This object should have the same structure as [DocumentDelete]{@link
|
||||
* google.firestore.v1beta1.DocumentDelete}
|
||||
*
|
||||
* @property {Object} documentRemove
|
||||
* A Document has been removed from a target (because it is no longer
|
||||
* relevant to that target).
|
||||
*
|
||||
* This object should have the same structure as [DocumentRemove]{@link
|
||||
* google.firestore.v1beta1.DocumentRemove}
|
||||
*
|
||||
* @property {Object} filter
|
||||
* A filter to apply to the set of documents previously returned for the
|
||||
* given target.
|
||||
*
|
||||
* Returned when documents may have been removed from the given target, but
|
||||
* the exact documents are unknown.
|
||||
*
|
||||
* This object should have the same structure as [ExistenceFilter]{@link
|
||||
* google.firestore.v1beta1.ExistenceFilter}
|
||||
*
|
||||
* @typedef ListenResponse
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.ListenResponse definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const ListenResponse = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
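// Listen is also a bidirectional stream: the client adds targets with
// ListenRequest messages and the server answers with the target changes,
// document changes, deletes, removes, and filters documented above. A hedged
// sketch; the `v1beta1` export path, target id, and names are assumptions.
function listenExample() {
  const {v1beta1} = require('@google-cloud/firestore');
  const client = new v1beta1.FirestoreClient();
  const database = 'projects/my-project/databases/(default)';
  const stream = client.listen();
  stream.on('data', response => {
    if (response.targetChange) {
      console.log('target change:', response.targetChange.targetChangeType);
    } else if (response.documentChange) {
      console.log('changed:', response.documentChange.document.name);
    } else if (response.documentDelete) {
      console.log('deleted:', response.documentDelete.document);
    }
  });
  stream.on('error', console.error);
  stream.write({
    database,
    addTarget: {
      targetId: 1,                      // client-assigned target id
      documents: {
        documents: [`${database}/documents/chatrooms/my-chatroom`],
      },
    },
  });
}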
|
||||
/**
|
||||
* A specification of a set of documents to listen to.
|
||||
*
|
||||
* @property {Object} query
|
||||
* A target specified by a query.
|
||||
*
|
||||
* This object should have the same structure as [QueryTarget]{@link
|
||||
* google.firestore.v1beta1.QueryTarget}
|
||||
*
|
||||
* @property {Object} documents
|
||||
* A target specified by a set of document names.
|
||||
*
|
||||
* This object should have the same structure as [DocumentsTarget]{@link
|
||||
* google.firestore.v1beta1.DocumentsTarget}
|
||||
*
|
||||
* @property {string} resumeToken
|
||||
* A resume token from a prior TargetChange for an identical target.
|
||||
*
|
||||
* Using a resume token with a different target is unsupported and may fail.
|
||||
*
|
||||
* @property {Object} readTime
|
||||
* Start listening after a specific `read_time`.
|
||||
*
|
||||
* The client must know the state of matching documents at this time.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @property {number} targetId
|
||||
* A client provided target ID.
|
||||
*
|
||||
* If not set, the server will assign an ID for the target.
|
||||
*
|
||||
* Used for resuming a target without changing IDs. The IDs can either be
|
||||
* client-assigned or be server-assigned in a previous stream. All targets
|
||||
* with client provided IDs must be added before adding a target that needs
|
||||
* a server-assigned id.
|
||||
*
|
||||
* @property {boolean} once
|
||||
* If the target should be removed once it is current and consistent.
|
||||
*
|
||||
* @typedef Target
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.Target definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const Target = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
/**
|
||||
* A target specified by a set of document names.
|
||||
*
|
||||
* @property {string[]} documents
|
||||
* The names of the documents to retrieve. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
* The request will fail if any of the documents is not a child resource of
|
||||
* the given `database`. Duplicate names will be elided.
|
||||
*
|
||||
* @typedef DocumentsTarget
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.Target.DocumentsTarget definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
DocumentsTarget: {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
},
|
||||
/**
|
||||
* A target specified by a query.
|
||||
*
|
||||
* @property {string} parent
|
||||
* The parent resource name. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}/documents` or
|
||||
* `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
* For example:
|
||||
* `projects/my-project/databases/my-database/documents` or
|
||||
* `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
|
||||
*
|
||||
* @property {Object} structuredQuery
|
||||
* A structured query.
|
||||
*
|
||||
* This object should have the same structure as [StructuredQuery]{@link
|
||||
* google.firestore.v1beta1.StructuredQuery}
|
||||
*
|
||||
* @typedef QueryTarget
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.Target.QueryTarget definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
QueryTarget: {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
}
|
||||
};
|
||||
/**
|
||||
* Targets being watched have changed.
|
||||
*
|
||||
* @property {number} targetChangeType
|
||||
* The type of change that occurred.
|
||||
*
|
||||
* The number should be among the values of [TargetChangeType]{@link
|
||||
* google.firestore.v1beta1.TargetChangeType}
|
||||
*
|
||||
* @property {number[]} targetIds
|
||||
* The target IDs of targets that have changed.
|
||||
*
|
||||
* If empty, the change applies to all targets.
|
||||
*
|
||||
* For `target_change_type=ADD`, the order of the target IDs matches the order
|
||||
* of the requests to add the targets. This allows clients to unambiguously
|
||||
* associate server-assigned target IDs with added targets.
|
||||
*
|
||||
* For other states, the order of the target IDs is not defined.
|
||||
*
|
||||
* @property {Object} cause
|
||||
* The error that resulted in this change, if applicable.
|
||||
*
|
||||
* This object should have the same structure as [Status]{@link
|
||||
* google.rpc.Status}
|
||||
*
|
||||
* @property {string} resumeToken
|
||||
* A token that can be used to resume the stream for the given `target_ids`,
|
||||
* or all targets if `target_ids` is empty.
|
||||
*
|
||||
* Not set on every target change.
|
||||
*
|
||||
* @property {Object} readTime
|
||||
* The consistent `read_time` for the given `target_ids` (omitted when the
|
||||
* target_ids are not at a consistent snapshot).
|
||||
*
|
||||
* The stream is guaranteed to send a `read_time` with `target_ids` empty
|
||||
* whenever the entire stream reaches a new consistent snapshot. ADD,
|
||||
* CURRENT, and RESET messages are guaranteed to (eventually) result in a
|
||||
* new consistent snapshot (while NO_CHANGE and REMOVE messages are not).
|
||||
*
|
||||
* For a given stream, `read_time` is guaranteed to be monotonically
|
||||
* increasing.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @typedef TargetChange
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.TargetChange definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const TargetChange = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
/**
|
||||
* The type of change.
|
||||
*
|
||||
* @enum {number}
|
||||
* @memberof google.firestore.v1beta1
|
||||
*/
|
||||
TargetChangeType: {
|
||||
/**
|
||||
* No change has occurred. Used only to send an updated `resume_token`.
|
||||
*/
|
||||
NO_CHANGE: 0,
|
||||
/**
|
||||
* The targets have been added.
|
||||
*/
|
||||
ADD: 1,
|
||||
/**
|
||||
* The targets have been removed.
|
||||
*/
|
||||
REMOVE: 2,
|
||||
/**
|
||||
* The targets reflect all changes committed before the targets were added
|
||||
* to the stream.
|
||||
*
|
||||
* This will be sent after or with a `read_time` that is greater than or
|
||||
* equal to the time at which the targets were added.
|
||||
*
|
||||
* Listeners can wait for this change if read-after-write semantics
|
||||
* are desired.
|
||||
*/
|
||||
CURRENT: 3,
|
||||
/**
|
||||
* The targets have been reset, and a new initial state for the targets
|
||||
* will be returned in subsequent changes.
|
||||
*
|
||||
* After the initial state is complete, `CURRENT` will be returned even
|
||||
* if the target was previously indicated to be `CURRENT`.
|
||||
*/
|
||||
RESET: 4
|
||||
}
|
||||
};
|
||||
/**
|
||||
* The request for Firestore.ListCollectionIds.
|
||||
*
|
||||
* @property {string} parent
|
||||
* The parent document. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
* For example:
|
||||
* `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`
|
||||
*
|
||||
* @property {number} pageSize
|
||||
* The maximum number of results to return.
|
||||
*
|
||||
* @property {string} pageToken
|
||||
* A page token. Must be a value from
|
||||
* ListCollectionIdsResponse.
|
||||
*
|
||||
* @typedef ListCollectionIdsRequest
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.ListCollectionIdsRequest definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const ListCollectionIdsRequest = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* The response from Firestore.ListCollectionIds.
|
||||
*
|
||||
* @property {string[]} collectionIds
|
||||
* The collection ids.
|
||||
*
|
||||
* @property {string} nextPageToken
|
||||
* A page token that may be used to continue the list.
|
||||
*
|
||||
* @typedef ListCollectionIdsResponse
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.ListCollectionIdsResponse definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/firestore.proto}
|
||||
*/
|
||||
const ListCollectionIdsResponse = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
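// A short sketch of a ListCollectionIdsRequest; with auto-pagination the
// generated client follows `nextPageToken` for you. The parent document name
// and the `v1beta1` export path are assumptions.
async function listCollectionIdsExample() {
  const {v1beta1} = require('@google-cloud/firestore');
  const client = new v1beta1.FirestoreClient();
  const [collectionIds] = await client.listCollectionIds({
    parent:
      'projects/my-project/databases/(default)/documents/chatrooms/my-chatroom',
    pageSize: 100,
  });
  return collectionIds;
}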
|
||||
//# sourceMappingURL=doc_firestore.js.map
|
||||
384
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/doc/google/firestore/v1beta1/doc_query.js
generated
vendored
Normal file
384
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/doc/google/firestore/v1beta1/doc_query.js
generated
vendored
Normal file
@@ -0,0 +1,384 @@
|
||||
"use strict";
|
||||
// Copyright 2018 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Note: this file is purely for documentation. Any contents are not expected
|
||||
// to be loaded as the JS file.
|
||||
/**
|
||||
* A Firestore query.
|
||||
*
|
||||
* @property {Object} select
|
||||
* The projection to return.
|
||||
*
|
||||
* This object should have the same structure as [Projection]{@link
|
||||
* google.firestore.v1beta1.Projection}
|
||||
*
|
||||
* @property {Object[]} from
|
||||
* The collections to query.
|
||||
*
|
||||
* This object should have the same structure as [CollectionSelector]{@link
|
||||
* google.firestore.v1beta1.CollectionSelector}
|
||||
*
|
||||
* @property {Object} where
|
||||
* The filter to apply.
|
||||
*
|
||||
* This object should have the same structure as [Filter]{@link
|
||||
* google.firestore.v1beta1.Filter}
|
||||
*
|
||||
* @property {Object[]} orderBy
|
||||
* The order to apply to the query results.
|
||||
*
|
||||
* Firestore guarantees a stable ordering through the following rules:
|
||||
*
|
||||
* * Any field required to appear in `order_by`, that is not already
|
||||
* specified in `order_by`, is appended to the order in field name order
|
||||
* by default.
|
||||
* * If an order on `__name__` is not specified, it is appended by default.
|
||||
*
|
||||
* Fields are appended with the same sort direction as the last order
|
||||
* specified, or 'ASCENDING' if no order was specified. For example:
|
||||
*
|
||||
* * `SELECT * FROM Foo ORDER BY A` becomes
|
||||
* `SELECT * FROM Foo ORDER BY A, __name__`
|
||||
* * `SELECT * FROM Foo ORDER BY A DESC` becomes
|
||||
* `SELECT * FROM Foo ORDER BY A DESC, __name__ DESC`
|
||||
* * `SELECT * FROM Foo WHERE A > 1` becomes
|
||||
* `SELECT * FROM Foo WHERE A > 1 ORDER BY A, __name__`
|
||||
*
|
||||
* This object should have the same structure as [Order]{@link
|
||||
* google.firestore.v1beta1.Order}
|
||||
*
|
||||
* @property {Object} startAt
|
||||
* A starting point for the query results.
|
||||
*
|
||||
* This object should have the same structure as [Cursor]{@link
|
||||
* google.firestore.v1beta1.Cursor}
|
||||
*
|
||||
* @property {Object} endAt
|
||||
* An end point for the query results.
|
||||
*
|
||||
* This object should have the same structure as [Cursor]{@link
|
||||
* google.firestore.v1beta1.Cursor}
|
||||
*
|
||||
* @property {number} offset
|
||||
* The number of results to skip.
|
||||
*
|
||||
* Applies before limit, but after all other constraints. Must be >= 0 if
|
||||
* specified.
|
||||
*
|
||||
* @property {Object} limit
|
||||
* The maximum number of results to return.
|
||||
*
|
||||
* Applies after all other constraints.
|
||||
* Must be >= 0 if specified.
|
||||
*
|
||||
* This object should have the same structure as [Int32Value]{@link
|
||||
* google.protobuf.Int32Value}
|
||||
*
|
||||
* @typedef StructuredQuery
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.StructuredQuery definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/query.proto}
|
||||
*/
|
||||
const StructuredQuery = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
/**
|
||||
* A selection of a collection, such as `messages as m1`.
|
||||
*
|
||||
* @property {string} collectionId
|
||||
* The collection ID.
|
||||
* When set, selects only collections with this ID.
|
||||
*
|
||||
* @property {boolean} allDescendants
|
||||
* When false, selects only collections that are immediate children of
|
||||
* the `parent` specified in the containing `RunQueryRequest`.
|
||||
* When true, selects all descendant collections.
|
||||
*
|
||||
* @typedef CollectionSelector
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.StructuredQuery.CollectionSelector definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/query.proto}
|
||||
*/
|
||||
CollectionSelector: {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
},
|
||||
/**
|
||||
* A filter.
|
||||
*
|
||||
* @property {Object} compositeFilter
|
||||
* A composite filter.
|
||||
*
|
||||
* This object should have the same structure as [CompositeFilter]{@link
|
||||
* google.firestore.v1beta1.CompositeFilter}
|
||||
*
|
||||
* @property {Object} fieldFilter
|
||||
* A filter on a document field.
|
||||
*
|
||||
* This object should have the same structure as [FieldFilter]{@link
|
||||
* google.firestore.v1beta1.FieldFilter}
|
||||
*
|
||||
* @property {Object} unaryFilter
|
||||
* A filter that takes exactly one argument.
|
||||
*
|
||||
* This object should have the same structure as [UnaryFilter]{@link
|
||||
* google.firestore.v1beta1.UnaryFilter}
|
||||
*
|
||||
* @typedef Filter
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.StructuredQuery.Filter definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/query.proto}
|
||||
*/
|
||||
Filter: {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
},
|
||||
/**
|
||||
* A filter that merges multiple other filters using the given operator.
|
||||
*
|
||||
* @property {number} op
|
||||
* The operator for combining multiple filters.
|
||||
*
|
||||
* The number should be among the values of [Operator]{@link
|
||||
* google.firestore.v1beta1.Operator}
|
||||
*
|
||||
* @property {Object[]} filters
|
||||
* The list of filters to combine.
|
||||
* Must contain at least one filter.
|
||||
*
|
||||
* This object should have the same structure as [Filter]{@link
|
||||
* google.firestore.v1beta1.Filter}
|
||||
*
|
||||
* @typedef CompositeFilter
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.StructuredQuery.CompositeFilter definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/query.proto}
|
||||
*/
|
||||
CompositeFilter: {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
/**
|
||||
* A composite filter operator.
|
||||
*
|
||||
* @enum {number}
|
||||
* @memberof google.firestore.v1beta1
|
||||
*/
|
||||
Operator: {
|
||||
/**
|
||||
* Unspecified. This value must not be used.
|
||||
*/
|
||||
OPERATOR_UNSPECIFIED: 0,
|
||||
/**
|
||||
* The results are required to satisfy each of the combined filters.
|
||||
*/
|
||||
AND: 1
|
||||
}
|
||||
},
|
||||
/**
|
||||
* A filter on a specific field.
|
||||
*
|
||||
* @property {Object} field
|
||||
* The field to filter by.
|
||||
*
|
||||
* This object should have the same structure as [FieldReference]{@link
|
||||
* google.firestore.v1beta1.FieldReference}
|
||||
*
|
||||
* @property {number} op
|
||||
* The operator to filter by.
|
||||
*
|
||||
* The number should be among the values of [Operator]{@link
|
||||
* google.firestore.v1beta1.Operator}
|
||||
*
|
||||
* @property {Object} value
|
||||
* The value to compare to.
|
||||
*
|
||||
* This object should have the same structure as [Value]{@link
|
||||
* google.firestore.v1beta1.Value}
|
||||
*
|
||||
* @typedef FieldFilter
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.StructuredQuery.FieldFilter definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/query.proto}
|
||||
*/
|
||||
FieldFilter: {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
/**
|
||||
* A field filter operator.
|
||||
*
|
||||
* @enum {number}
|
||||
* @memberof google.firestore.v1beta1
|
||||
*/
|
||||
Operator: {
|
||||
/**
|
||||
* Unspecified. This value must not be used.
|
||||
*/
|
||||
OPERATOR_UNSPECIFIED: 0,
|
||||
/**
|
||||
* Less than. Requires that the field come first in `order_by`.
|
||||
*/
|
||||
LESS_THAN: 1,
|
||||
/**
|
||||
* Less than or equal. Requires that the field come first in `order_by`.
|
||||
*/
|
||||
LESS_THAN_OR_EQUAL: 2,
|
||||
/**
|
||||
* Greater than. Requires that the field come first in `order_by`.
|
||||
*/
|
||||
GREATER_THAN: 3,
|
||||
/**
|
||||
* Greater than or equal. Requires that the field come first in
|
||||
* `order_by`.
|
||||
*/
|
||||
GREATER_THAN_OR_EQUAL: 4,
|
||||
/**
|
||||
* Equal.
|
||||
*/
|
||||
EQUAL: 5,
|
||||
/**
|
||||
* Contains. Requires that the field is an array.
|
||||
*/
|
||||
ARRAY_CONTAINS: 7
|
||||
}
|
||||
},
|
||||
/**
|
||||
* A filter with a single operand.
|
||||
*
|
||||
* @property {number} op
|
||||
* The unary operator to apply.
|
||||
*
|
||||
* The number should be among the values of [Operator]{@link
|
||||
* google.firestore.v1beta1.Operator}
|
||||
*
|
||||
* @property {Object} field
|
||||
* The field to which to apply the operator.
|
||||
*
|
||||
* This object should have the same structure as [FieldReference]{@link
|
||||
* google.firestore.v1beta1.FieldReference}
|
||||
*
|
||||
* @typedef UnaryFilter
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.StructuredQuery.UnaryFilter definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/query.proto}
|
||||
*/
|
||||
UnaryFilter: {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
/**
|
||||
* A unary operator.
|
||||
*
|
||||
* @enum {number}
|
||||
* @memberof google.firestore.v1beta1
|
||||
*/
|
||||
Operator: {
|
||||
/**
|
||||
* Unspecified. This value must not be used.
|
||||
*/
|
||||
OPERATOR_UNSPECIFIED: 0,
|
||||
/**
|
||||
* Test if a field is equal to NaN.
|
||||
*/
|
||||
IS_NAN: 2,
|
||||
/**
|
||||
* Test if an expression evaluates to Null.
|
||||
*/
|
||||
IS_NULL: 3
|
||||
}
|
||||
},
|
||||
/**
|
||||
* An order on a field.
|
||||
*
|
||||
* @property {Object} field
|
||||
* The field to order by.
|
||||
*
|
||||
* This object should have the same structure as [FieldReference]{@link
|
||||
* google.firestore.v1beta1.FieldReference}
|
||||
*
|
||||
* @property {number} direction
|
||||
* The direction to order by. Defaults to `ASCENDING`.
|
||||
*
|
||||
* The number should be among the values of [Direction]{@link
|
||||
* google.firestore.v1beta1.Direction}
|
||||
*
|
||||
* @typedef Order
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.StructuredQuery.Order definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/query.proto}
|
||||
*/
|
||||
Order: {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
},
|
||||
/**
|
||||
* A reference to a field, such as `max(messages.time) as max_time`.
|
||||
*
|
||||
* @property {string} fieldPath
|
||||
*
|
||||
* @typedef FieldReference
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.StructuredQuery.FieldReference definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/query.proto}
|
||||
*/
|
||||
FieldReference: {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
},
|
||||
/**
|
||||
* The projection of a document's fields to return.
|
||||
*
|
||||
* @property {Object[]} fields
|
||||
* The fields to return.
|
||||
*
|
||||
* If empty, all fields are returned. To only return the name
|
||||
* of the document, use `['__name__']`.
|
||||
*
|
||||
* This object should have the same structure as [FieldReference]{@link
|
||||
* google.firestore.v1beta1.FieldReference}
|
||||
*
|
||||
* @typedef Projection
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.StructuredQuery.Projection definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/query.proto}
|
||||
*/
|
||||
Projection: {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
},
|
||||
/**
|
||||
* A sort direction.
|
||||
*
|
||||
* @enum {number}
|
||||
* @memberof google.firestore.v1beta1
|
||||
*/
|
||||
Direction: {
|
||||
/**
|
||||
* Unspecified.
|
||||
*/
|
||||
DIRECTION_UNSPECIFIED: 0,
|
||||
/**
|
||||
* Ascending.
|
||||
*/
|
||||
ASCENDING: 1,
|
||||
/**
|
||||
* Descending.
|
||||
*/
|
||||
DESCENDING: 2
|
||||
}
|
||||
};
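// A plain request-shaped StructuredQuery, illustrating how the pieces above
// fit together: a projection, a collection selector, a composite AND filter,
// an explicit ordering, and offset/limit. The property names mirror the ones
// documented in this file; the collection and field names are assumptions.
const exampleStructuredQuery = {
  select: {fields: [{fieldPath: '__name__'}, {fieldPath: 'priority'}]},
  from: [{collectionId: 'tasks', allDescendants: false}],
  where: {
    compositeFilter: {
      op: 'AND',
      filters: [
        {fieldFilter: {field: {fieldPath: 'priority'}, op: 'GREATER_THAN', value: {integerValue: '1'}}},
        {unaryFilter: {op: 'IS_NULL', field: {fieldPath: 'assignee'}}},
      ],
    },
  },
  // `__name__` would be appended implicitly; listing it keeps the order explicit.
  orderBy: [
    {field: {fieldPath: 'priority'}, direction: 'DESCENDING'},
    {field: {fieldPath: '__name__'}, direction: 'DESCENDING'},
  ],
  offset: 0,
  limit: {value: 20},
};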
|
||||
/**
|
||||
* A position in a query result set.
|
||||
*
|
||||
* @property {Object[]} values
|
||||
* The values that represent a position, in the order they appear in
|
||||
* the order by clause of a query.
|
||||
*
|
||||
* Can contain fewer values than specified in the order by clause.
|
||||
*
|
||||
* This object should have the same structure as [Value]{@link
|
||||
* google.firestore.v1beta1.Value}
|
||||
*
|
||||
* @property {boolean} before
|
||||
* If the position is just before or just after the given values, relative
|
||||
* to the sort order defined by the query.
|
||||
*
|
||||
* @typedef Cursor
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.Cursor definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/query.proto}
|
||||
*/
|
||||
const Cursor = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
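// A Cursor positions the result set relative to the order-by values. The
// sketch below starts at (priority = 2), inclusive, for a query ordered by
// `priority` then `__name__`; the field and collection names are assumptions.
const exampleCursorQuery = {
  from: [{collectionId: 'tasks'}],
  orderBy: [
    {field: {fieldPath: 'priority'}, direction: 'ASCENDING'},
    {field: {fieldPath: '__name__'}, direction: 'ASCENDING'},
  ],
  // `before: true` makes the position inclusive of the given values; fewer
  // values than order-by clauses are allowed.
  startAt: {values: [{integerValue: '2'}], before: true},
};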
|
||||
//# sourceMappingURL=doc_query.js.map
|
||||
296
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/doc/google/firestore/v1beta1/doc_write.js
generated
vendored
Normal file
296
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/doc/google/firestore/v1beta1/doc_write.js
generated
vendored
Normal file
@@ -0,0 +1,296 @@
|
||||
"use strict";
|
||||
// Copyright 2018 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Note: this file is purely for documentation. Any contents are not expected
|
||||
// to be loaded as the JS file.
|
||||
/**
|
||||
* A write on a document.
|
||||
*
|
||||
* @property {Object} update
|
||||
* A document to write.
|
||||
*
|
||||
* This object should have the same structure as [Document]{@link
|
||||
* google.firestore.v1beta1.Document}
|
||||
*
|
||||
* @property {string} delete
|
||||
* A document name to delete. In the format:
|
||||
* `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
|
||||
*
|
||||
* @property {Object} transform
|
||||
* Applies a transformation to a document.
|
||||
* At most one `transform` per document is allowed in a given request.
|
||||
* An `update` cannot follow a `transform` on the same document in a given
|
||||
* request.
|
||||
*
|
||||
* This object should have the same structure as [DocumentTransform]{@link
|
||||
* google.firestore.v1beta1.DocumentTransform}
|
||||
*
|
||||
* @property {Object} updateMask
|
||||
* The fields to update in this write.
|
||||
*
|
||||
* This field can be set only when the operation is `update`.
|
||||
* If the mask is not set for an `update` and the document exists, any
|
||||
* existing data will be overwritten.
|
||||
* If the mask is set and the document on the server has fields not covered by
|
||||
* the mask, they are left unchanged.
|
||||
* Fields referenced in the mask, but not present in the input document, are
|
||||
* deleted from the document on the server.
|
||||
* The field paths in this mask must not contain a reserved field name.
|
||||
*
|
||||
* This object should have the same structure as [DocumentMask]{@link
|
||||
* google.firestore.v1beta1.DocumentMask}
|
||||
*
|
||||
* @property {Object} currentDocument
|
||||
* An optional precondition on the document.
|
||||
*
|
||||
* The write will fail if this is set and not met by the target document.
|
||||
*
|
||||
* This object should have the same structure as [Precondition]{@link
|
||||
* google.firestore.v1beta1.Precondition}
|
||||
*
|
||||
* @typedef Write
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.Write definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/write.proto}
|
||||
*/
|
||||
const Write = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
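// The `writes` array of a CommitRequest is built from Write messages like the
// ones below: an `update` constrained by an `updateMask` and a precondition,
// followed by a `delete`. Document and field names are assumptions.
const exampleWrites = [
  {
    update: {
      name: 'projects/my-project/databases/(default)/documents/chatrooms/my-chatroom',
      fields: {topic: {stringValue: 'writes'}},
    },
    // Only `topic` is touched; other existing fields are left unchanged.
    updateMask: {fieldPaths: ['topic']},
    currentDocument: {exists: true},
  },
  {
    delete: 'projects/my-project/databases/(default)/documents/chatrooms/old-room',
  },
];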
|
||||
/**
|
||||
* A transformation of a document.
|
||||
*
|
||||
* @property {string} document
|
||||
* The name of the document to transform.
|
||||
*
|
||||
* @property {Object[]} fieldTransforms
|
||||
* The list of transformations to apply to the fields of the document, in
|
||||
* order.
|
||||
* This must not be empty.
|
||||
*
|
||||
* This object should have the same structure as [FieldTransform]{@link
|
||||
* google.firestore.v1beta1.FieldTransform}
|
||||
*
|
||||
* @typedef DocumentTransform
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.DocumentTransform definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/write.proto}
|
||||
*/
|
||||
const DocumentTransform = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
/**
|
||||
* A transformation of a field of the document.
|
||||
*
|
||||
* @property {string} fieldPath
|
||||
* The path of the field. See Document.fields for the field path syntax
|
||||
* reference.
|
||||
*
|
||||
* @property {number} setToServerValue
|
||||
* Sets the field to the given server value.
|
||||
*
|
||||
* The number should be among the values of [ServerValue]{@link
|
||||
* google.firestore.v1beta1.ServerValue}
|
||||
*
|
||||
* @property {Object} appendMissingElements
|
||||
* Append the given elements in order if they are not already present in
|
||||
* the current field value.
|
||||
* If the field is not an array, or if the field does not yet exist, it is
|
||||
* first set to the empty array.
|
||||
*
|
||||
* Equivalent numbers of different types (e.g. 3L and 3.0) are
|
||||
* considered equal when checking if a value is missing.
|
||||
* NaN is equal to NaN, and Null is equal to Null.
|
||||
* If the input contains multiple equivalent values, only the first will
|
||||
* be considered.
|
||||
*
|
||||
* The corresponding transform_result will be the null value.
|
||||
*
|
||||
* This object should have the same structure as [ArrayValue]{@link
|
||||
* google.firestore.v1beta1.ArrayValue}
|
||||
*
|
||||
* @property {Object} removeAllFromArray
|
||||
* Remove all of the given elements from the array in the field.
|
||||
* If the field is not an array, or if the field does not yet exist, it is
|
||||
* set to the empty array.
|
||||
*
|
||||
* Equivalent numbers of different types (e.g. 3L and 3.0) are
|
||||
* considered equal when deciding whether an element should be removed.
|
||||
* NaN is equal to NaN, and Null is equal to Null.
|
||||
* This will remove all equivalent values if there are duplicates.
|
||||
*
|
||||
* The corresponding transform_result will be the null value.
|
||||
*
|
||||
* This object should have the same structure as [ArrayValue]{@link
|
||||
* google.firestore.v1beta1.ArrayValue}
|
||||
*
|
||||
* @typedef FieldTransform
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.DocumentTransform.FieldTransform definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/write.proto}
|
||||
*/
|
||||
FieldTransform: {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
/**
|
||||
* A value that is calculated by the server.
|
||||
*
|
||||
* @enum {number}
|
||||
* @memberof google.firestore.v1beta1
|
||||
*/
|
||||
ServerValue: {
|
||||
/**
|
||||
* Unspecified. This value must not be used.
|
||||
*/
|
||||
SERVER_VALUE_UNSPECIFIED: 0,
|
||||
/**
|
||||
* The time at which the server processed the request, with millisecond
|
||||
* precision.
|
||||
*/
|
||||
REQUEST_TIME: 1
|
||||
}
|
||||
}
|
||||
};
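// A transform Write applies FieldTransforms server-side; the sketch pairs a
// REQUEST_TIME server value with an appendMissingElements array union, as
// described above. The document and field names are assumptions.
const exampleTransformWrite = {
  transform: {
    document: 'projects/my-project/databases/(default)/documents/chatrooms/my-chatroom',
    fieldTransforms: [
      {fieldPath: 'lastSeen', setToServerValue: 'REQUEST_TIME'},
      {
        fieldPath: 'tags',
        appendMissingElements: {values: [{stringValue: 'firestore'}]},
      },
    ],
  },
};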
|
||||
/**
|
||||
* The result of applying a write.
|
||||
*
|
||||
* @property {Object} updateTime
|
||||
* The last update time of the document after applying the write. Not set
|
||||
* after a `delete`.
|
||||
*
|
||||
* If the write did not actually change the document, this will be the
|
||||
* previous update_time.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @property {Object[]} transformResults
|
||||
* The results of applying each DocumentTransform.FieldTransform, in the
|
||||
* same order.
|
||||
*
|
||||
* This object should have the same structure as [Value]{@link
|
||||
* google.firestore.v1beta1.Value}
|
||||
*
|
||||
* @typedef WriteResult
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.WriteResult definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/write.proto}
|
||||
*/
|
||||
const WriteResult = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* A Document has changed.
|
||||
*
|
||||
* May be the result of multiple writes, including deletes, that
|
||||
* ultimately resulted in a new value for the Document.
|
||||
*
|
||||
* Multiple DocumentChange messages may be returned for the same logical
|
||||
* change, if multiple targets are affected.
|
||||
*
|
||||
* @property {Object} document
|
||||
* The new state of the Document.
|
||||
*
|
||||
* If `mask` is set, contains only fields that were updated or added.
|
||||
*
|
||||
* This object should have the same structure as [Document]{@link
|
||||
* google.firestore.v1beta1.Document}
|
||||
*
|
||||
* @property {number[]} targetIds
|
||||
* A set of target IDs of targets that match this document.
|
||||
*
|
||||
* @property {number[]} removedTargetIds
|
||||
* A set of target IDs for targets that no longer match this document.
|
||||
*
|
||||
* @typedef DocumentChange
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.DocumentChange definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/write.proto}
|
||||
*/
|
||||
const DocumentChange = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* A Document has been deleted.
|
||||
*
|
||||
* May be the result of multiple writes, including updates, the
|
||||
* last of which deleted the Document.
|
||||
*
|
||||
* Multiple DocumentDelete messages may be returned for the same logical
|
||||
* delete, if multiple targets are affected.
|
||||
*
|
||||
* @property {string} document
|
||||
* The resource name of the Document that was deleted.
|
||||
*
|
||||
* @property {number[]} removedTargetIds
|
||||
* A set of target IDs for targets that previously matched this entity.
|
||||
*
|
||||
* @property {Object} readTime
|
||||
* The read timestamp at which the delete was observed.
|
||||
*
|
||||
* Greater or equal to the `commit_time` of the delete.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @typedef DocumentDelete
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.DocumentDelete definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/write.proto}
|
||||
*/
|
||||
const DocumentDelete = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* A Document has been removed from the view of the targets.
|
||||
*
|
||||
* Sent if the document is no longer relevant to a target and is out of view.
|
||||
* Can be sent instead of a DocumentDelete or a DocumentChange if the server
|
||||
* cannot send the new value of the document.
|
||||
*
|
||||
* Multiple DocumentRemove messages may be returned for the same logical
|
||||
* write or delete, if multiple targets are affected.
|
||||
*
|
||||
* @property {string} document
|
||||
* The resource name of the Document that has gone out of view.
|
||||
*
|
||||
* @property {number[]} removedTargetIds
|
||||
* A set of target IDs for targets that previously matched this document.
|
||||
*
|
||||
* @property {Object} readTime
|
||||
* The read timestamp at which the remove was observed.
|
||||
*
|
||||
* Greater or equal to the `commit_time` of the change/delete/remove.
|
||||
*
|
||||
* This object should have the same structure as [Timestamp]{@link
|
||||
* google.protobuf.Timestamp}
|
||||
*
|
||||
* @typedef DocumentRemove
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.DocumentRemove definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/write.proto}
|
||||
*/
|
||||
const DocumentRemove = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* A digest of all the documents that match a given target.
|
||||
*
|
||||
* @property {number} targetId
|
||||
* The target ID to which this filter applies.
|
||||
*
|
||||
* @property {number} count
|
||||
* The total count of documents that match target_id.
|
||||
*
|
||||
* If different from the count of documents in the client that match, the
|
||||
* client must manually determine which documents no longer match the target.
|
||||
*
|
||||
* @typedef ExistenceFilter
|
||||
* @memberof google.firestore.v1beta1
|
||||
* @see [google.firestore.v1beta1.ExistenceFilter definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/firestore/v1beta1/write.proto}
|
||||
*/
|
||||
const ExistenceFilter = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
//# sourceMappingURL=doc_write.js.map
|
||||
136
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/doc/google/protobuf/doc_any.js
generated
vendored
Normal file
@@ -0,0 +1,136 @@
|
||||
"use strict";
|
||||
// Copyright 2018 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Note: this file is purely for documentation. Any contents are not expected
|
||||
// to be loaded as the JS file.
|
||||
/**
|
||||
* `Any` contains an arbitrary serialized protocol buffer message along with a
|
||||
* URL that describes the type of the serialized message.
|
||||
*
|
||||
* Protobuf library provides support to pack/unpack Any values in the form
|
||||
* of utility functions or additional generated methods of the Any type.
|
||||
*
|
||||
* Example 1: Pack and unpack a message in C++.
|
||||
*
|
||||
* Foo foo = ...;
|
||||
* Any any;
|
||||
* any.PackFrom(foo);
|
||||
* ...
|
||||
* if (any.UnpackTo(&foo)) {
|
||||
* ...
|
||||
* }
|
||||
*
|
||||
* Example 2: Pack and unpack a message in Java.
|
||||
*
|
||||
* Foo foo = ...;
|
||||
* Any any = Any.pack(foo);
|
||||
* ...
|
||||
* if (any.is(Foo.class)) {
|
||||
* foo = any.unpack(Foo.class);
|
||||
* }
|
||||
*
|
||||
* Example 3: Pack and unpack a message in Python.
|
||||
*
|
||||
* foo = Foo(...)
|
||||
* any = Any()
|
||||
* any.Pack(foo)
|
||||
* ...
|
||||
* if any.Is(Foo.DESCRIPTOR):
|
||||
* any.Unpack(foo)
|
||||
* ...
|
||||
*
|
||||
* Example 4: Pack and unpack a message in Go
|
||||
*
|
||||
* foo := &pb.Foo{...}
|
||||
* any, err := ptypes.MarshalAny(foo)
|
||||
* ...
|
||||
* foo := &pb.Foo{}
|
||||
* if err := ptypes.UnmarshalAny(any, foo); err != nil {
|
||||
* ...
|
||||
* }
|
||||
*
|
||||
* The pack methods provided by protobuf library will by default use
|
||||
* 'type.googleapis.com/full.type.name' as the type URL and the unpack
|
||||
* methods only use the fully qualified type name after the last '/'
|
||||
* in the type URL, for example "foo.bar.com/x/y.z" will yield type
|
||||
* name "y.z".
|
||||
*
|
||||
*
|
||||
* # JSON
|
||||
*
|
||||
* The JSON representation of an `Any` value uses the regular
|
||||
* representation of the deserialized, embedded message, with an
|
||||
* additional field `@type` which contains the type URL. Example:
|
||||
*
|
||||
* package google.profile;
|
||||
* message Person {
|
||||
* string first_name = 1;
|
||||
* string last_name = 2;
|
||||
* }
|
||||
*
|
||||
* {
|
||||
* "@type": "type.googleapis.com/google.profile.Person",
|
||||
* "firstName": <string>,
|
||||
* "lastName": <string>
|
||||
* }
|
||||
*
|
||||
* If the embedded message type is well-known and has a custom JSON
|
||||
* representation, that representation will be embedded adding a field
|
||||
* `value` which holds the custom JSON in addition to the `@type`
|
||||
* field. Example (for message google.protobuf.Duration):
|
||||
*
|
||||
* {
|
||||
* "@type": "type.googleapis.com/google.protobuf.Duration",
|
||||
* "value": "1.212s"
|
||||
* }
|
||||
*
|
||||
* @property {string} typeUrl
|
||||
* A URL/resource name that uniquely identifies the type of the serialized
|
||||
* protocol buffer message. The last segment of the URL's path must represent
|
||||
* the fully qualified name of the type (as in
|
||||
* `path/google.protobuf.Duration`). The name should be in a canonical form
|
||||
* (e.g., leading "." is not accepted).
|
||||
*
|
||||
* In practice, teams usually precompile into the binary all types that they
|
||||
* expect it to use in the context of Any. However, for URLs which use the
|
||||
* scheme `http`, `https`, or no scheme, one can optionally set up a type
|
||||
* server that maps type URLs to message definitions as follows:
|
||||
*
|
||||
* * If no scheme is provided, `https` is assumed.
|
||||
* * An HTTP GET on the URL must yield a google.protobuf.Type
|
||||
* value in binary format, or produce an error.
|
||||
* * Applications are allowed to cache lookup results based on the
|
||||
* URL, or have them precompiled into a binary to avoid any
|
||||
* lookup. Therefore, binary compatibility needs to be preserved
|
||||
* on changes to types. (Use versioned type names to manage
|
||||
* breaking changes.)
|
||||
*
|
||||
* Note: this functionality is not currently available in the official
|
||||
* protobuf release, and it is not used for type URLs beginning with
|
||||
* type.googleapis.com.
|
||||
*
|
||||
* Schemes other than `http`, `https` (or the empty scheme) might be
|
||||
* used with implementation specific semantics.
|
||||
*
|
||||
* @property {string} value
|
||||
* Must be a valid serialized protocol buffer of the above specified type.
|
||||
*
|
||||
* @typedef Any
|
||||
* @memberof google.protobuf
|
||||
* @see [google.protobuf.Any definition in proto format]{@link https://github.com/google/protobuf/blob/master/src/google/protobuf/any.proto}
|
||||
*/
|
||||
const Any = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
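// Sketch (not part of the generated documentation): extracting the message
// type name from an Any type URL, following the rule described above: only
// the segment after the last '/' is used.
function typeNameFromTypeUrl(typeUrl) {
  return typeUrl.substring(typeUrl.lastIndexOf('/') + 1);
}
// typeNameFromTypeUrl('type.googleapis.com/google.protobuf.Duration')
//   => 'google.protobuf.Duration'
// typeNameFromTypeUrl('foo.bar.com/x/y.z') => 'y.z'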
||||
//# sourceMappingURL=doc_any.js.map
|
||||
34
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/doc/google/protobuf/doc_empty.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
"use strict";
|
||||
// Copyright 2018 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Note: this file is purely for documentation. Any contents are not expected
|
||||
// to be loaded as the JS file.
|
||||
/**
|
||||
* A generic empty message that you can re-use to avoid defining duplicated
|
||||
* empty messages in your APIs. A typical example is to use it as the request
|
||||
* or the response type of an API method. For instance:
|
||||
*
|
||||
* service Foo {
|
||||
* rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
|
||||
* }
|
||||
*
|
||||
* The JSON representation for `Empty` is empty JSON object `{}`.
|
||||
* @typedef Empty
|
||||
* @memberof google.protobuf
|
||||
* @see [google.protobuf.Empty definition in proto format]{@link https://github.com/google/protobuf/blob/master/src/google/protobuf/empty.proto}
|
||||
*/
|
||||
const Empty = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
//# sourceMappingURL=doc_empty.js.map
|
||||
118
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/doc/google/protobuf/doc_timestamp.js
generated
vendored
Normal file
@@ -0,0 +1,118 @@
|
||||
"use strict";
|
||||
// Copyright 2018 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Note: this file is purely for documentation. Any contents are not expected
|
||||
// to be loaded as the JS file.
|
||||
/**
|
||||
* A Timestamp represents a point in time independent of any time zone
|
||||
* or calendar, represented as seconds and fractions of seconds at
|
||||
* nanosecond resolution in UTC Epoch time. It is encoded using the
|
||||
* Proleptic Gregorian Calendar which extends the Gregorian calendar
|
||||
* backwards to year one. It is encoded assuming all minutes are 60
|
||||
* seconds long, i.e. leap seconds are "smeared" so that no leap second
|
||||
* table is needed for interpretation. Range is from
|
||||
* 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.
|
||||
* By restricting to that range, we ensure that we can convert to
|
||||
* and from RFC 3339 date strings.
|
||||
* See
|
||||
* [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt).
|
||||
*
|
||||
* # Examples
|
||||
*
|
||||
* Example 1: Compute Timestamp from POSIX `time()`.
|
||||
*
|
||||
* Timestamp timestamp;
|
||||
* timestamp.set_seconds(time(NULL));
|
||||
* timestamp.set_nanos(0);
|
||||
*
|
||||
* Example 2: Compute Timestamp from POSIX `gettimeofday()`.
|
||||
*
|
||||
* struct timeval tv;
|
||||
* gettimeofday(&tv, NULL);
|
||||
*
|
||||
* Timestamp timestamp;
|
||||
* timestamp.set_seconds(tv.tv_sec);
|
||||
* timestamp.set_nanos(tv.tv_usec * 1000);
|
||||
*
|
||||
* Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
|
||||
*
|
||||
* FILETIME ft;
|
||||
* GetSystemTimeAsFileTime(&ft);
|
||||
* UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
|
||||
*
|
||||
* // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
|
||||
* // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
|
||||
* Timestamp timestamp;
|
||||
* timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
|
||||
* timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
|
||||
*
|
||||
* Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
|
||||
*
|
||||
* long millis = System.currentTimeMillis();
|
||||
*
|
||||
* Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
|
||||
* .setNanos((int) ((millis % 1000) * 1000000)).build();
|
||||
*
|
||||
*
|
||||
* Example 5: Compute Timestamp from current time in Python.
|
||||
*
|
||||
* timestamp = Timestamp()
|
||||
* timestamp.GetCurrentTime()
|
||||
*
|
||||
* # JSON Mapping
|
||||
*
|
||||
* In JSON format, the Timestamp type is encoded as a string in the
|
||||
* [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
|
||||
* format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
|
||||
* where {year} is always expressed using four digits while {month}, {day},
|
||||
* {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
|
||||
* seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
|
||||
* are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
|
||||
* is required. A proto3 JSON serializer should always use UTC (as indicated by
|
||||
* "Z") when printing the Timestamp type and a proto3 JSON parser should be
|
||||
* able to accept both UTC and other timezones (as indicated by an offset).
|
||||
*
|
||||
* For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
|
||||
* 01:30 UTC on January 15, 2017.
|
||||
*
|
||||
* In JavaScript, one can convert a Date object to this format using the
|
||||
* standard
|
||||
* [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
|
||||
* method. In Python, a standard `datetime.datetime` object can be converted
|
||||
* to this format using
|
||||
* [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
|
||||
* the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
|
||||
* the Joda Time's
|
||||
* [`ISODateTimeFormat.dateTime()`](http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime--)
|
||||
* to obtain a formatter capable of generating timestamps in this format.
|
||||
*
|
||||
* @property {number} seconds
|
||||
* Represents seconds of UTC time since Unix epoch
|
||||
* 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
|
||||
* 9999-12-31T23:59:59Z inclusive.
|
||||
*
|
||||
* @property {number} nanos
|
||||
* Non-negative fractions of a second at nanosecond resolution. Negative
|
||||
* second values with fractions must still have non-negative nanos values
|
||||
* that count forward in time. Must be from 0 to 999,999,999
|
||||
* inclusive.
|
||||
*
|
||||
* @typedef Timestamp
|
||||
* @memberof google.protobuf
|
||||
* @see [google.protobuf.Timestamp definition in proto format]{@link https://github.com/google/protobuf/blob/master/src/google/protobuf/timestamp.proto}
|
||||
*/
|
||||
const Timestamp = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
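// Sketch (not part of the generated documentation): converting between a
// JavaScript Date and a Timestamp-shaped {seconds, nanos} object, mirroring
// the millisecond-based Example 4 above. The helper names are hypothetical,
// and for simplicity this assumes dates at or after the Unix epoch.
function timestampFromDate(date) {
  const millis = date.getTime();
  return {
    seconds: Math.floor(millis / 1000),
    nanos: (millis % 1000) * 1e6,
  };
}
function dateFromTimestamp(timestamp) {
  return new Date(timestamp.seconds * 1000 + timestamp.nanos / 1e6);
}
// timestampFromDate(new Date('2017-01-15T01:30:15.010Z'))
//   => { seconds: 1484443815, nanos: 10000000 }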
||||
//# sourceMappingURL=doc_timestamp.js.map
|
||||
152
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/doc/google/protobuf/doc_wrappers.js
generated
vendored
Normal file
@@ -0,0 +1,152 @@
|
||||
"use strict";
|
||||
// Copyright 2018 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Note: this file is purely for documentation. Any contents are not expected
|
||||
// to be loaded as the JS file.
|
||||
/**
|
||||
* Wrapper message for `double`.
|
||||
*
|
||||
* The JSON representation for `DoubleValue` is JSON number.
|
||||
*
|
||||
* @property {number} value
|
||||
* The double value.
|
||||
*
|
||||
* @typedef DoubleValue
|
||||
* @memberof google.protobuf
|
||||
* @see [google.protobuf.DoubleValue definition in proto format]{@link https://github.com/google/protobuf/blob/master/src/google/protobuf/wrappers.proto}
|
||||
*/
|
||||
const DoubleValue = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* Wrapper message for `float`.
|
||||
*
|
||||
* The JSON representation for `FloatValue` is JSON number.
|
||||
*
|
||||
* @property {number} value
|
||||
* The float value.
|
||||
*
|
||||
* @typedef FloatValue
|
||||
* @memberof google.protobuf
|
||||
* @see [google.protobuf.FloatValue definition in proto format]{@link https://github.com/google/protobuf/blob/master/src/google/protobuf/wrappers.proto}
|
||||
*/
|
||||
const FloatValue = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* Wrapper message for `int64`.
|
||||
*
|
||||
* The JSON representation for `Int64Value` is JSON string.
|
||||
*
|
||||
* @property {number} value
|
||||
* The int64 value.
|
||||
*
|
||||
* @typedef Int64Value
|
||||
* @memberof google.protobuf
|
||||
* @see [google.protobuf.Int64Value definition in proto format]{@link https://github.com/google/protobuf/blob/master/src/google/protobuf/wrappers.proto}
|
||||
*/
|
||||
const Int64Value = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* Wrapper message for `uint64`.
|
||||
*
|
||||
* The JSON representation for `UInt64Value` is JSON string.
|
||||
*
|
||||
* @property {number} value
|
||||
* The uint64 value.
|
||||
*
|
||||
* @typedef UInt64Value
|
||||
* @memberof google.protobuf
|
||||
* @see [google.protobuf.UInt64Value definition in proto format]{@link https://github.com/google/protobuf/blob/master/src/google/protobuf/wrappers.proto}
|
||||
*/
|
||||
const UInt64Value = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* Wrapper message for `int32`.
|
||||
*
|
||||
* The JSON representation for `Int32Value` is JSON number.
|
||||
*
|
||||
* @property {number} value
|
||||
* The int32 value.
|
||||
*
|
||||
* @typedef Int32Value
|
||||
* @memberof google.protobuf
|
||||
* @see [google.protobuf.Int32Value definition in proto format]{@link https://github.com/google/protobuf/blob/master/src/google/protobuf/wrappers.proto}
|
||||
*/
|
||||
const Int32Value = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* Wrapper message for `uint32`.
|
||||
*
|
||||
* The JSON representation for `UInt32Value` is JSON number.
|
||||
*
|
||||
* @property {number} value
|
||||
* The uint32 value.
|
||||
*
|
||||
* @typedef UInt32Value
|
||||
* @memberof google.protobuf
|
||||
* @see [google.protobuf.UInt32Value definition in proto format]{@link https://github.com/google/protobuf/blob/master/src/google/protobuf/wrappers.proto}
|
||||
*/
|
||||
const UInt32Value = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* Wrapper message for `bool`.
|
||||
*
|
||||
* The JSON representation for `BoolValue` is JSON `true` and `false`.
|
||||
*
|
||||
* @property {boolean} value
|
||||
* The bool value.
|
||||
*
|
||||
* @typedef BoolValue
|
||||
* @memberof google.protobuf
|
||||
* @see [google.protobuf.BoolValue definition in proto format]{@link https://github.com/google/protobuf/blob/master/src/google/protobuf/wrappers.proto}
|
||||
*/
|
||||
const BoolValue = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* Wrapper message for `string`.
|
||||
*
|
||||
* The JSON representation for `StringValue` is JSON string.
|
||||
*
|
||||
* @property {string} value
|
||||
* The string value.
|
||||
*
|
||||
* @typedef StringValue
|
||||
* @memberof google.protobuf
|
||||
* @see [google.protobuf.StringValue definition in proto format]{@link https://github.com/google/protobuf/blob/master/src/google/protobuf/wrappers.proto}
|
||||
*/
|
||||
const StringValue = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
||||
/**
|
||||
* Wrapper message for `bytes`.
|
||||
*
|
||||
* The JSON representation for `BytesValue` is JSON string.
|
||||
*
|
||||
* @property {string} value
|
||||
* The bytes value.
|
||||
*
|
||||
* @typedef BytesValue
|
||||
* @memberof google.protobuf
|
||||
* @see [google.protobuf.BytesValue definition in proto format]{@link https://github.com/google/protobuf/blob/master/src/google/protobuf/wrappers.proto}
|
||||
*/
|
||||
const BytesValue = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
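// Sketch (not part of the generated documentation): why Int64Value and
// UInt64Value map to JSON strings while the 32-bit wrappers map to JSON
// numbers: 64-bit integers can exceed JavaScript's safe integer range.
const largeInt64AsJsonString = '9007199254740993';     // 2^53 + 1, exact as a string
const lossyAsNumber = Number(largeInt64AsJsonString);  // 9007199254740992, precision lost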
||||
//# sourceMappingURL=doc_wrappers.js.map
|
||||
93
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/doc/google/rpc/doc_status.js
generated
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
"use strict";
|
||||
// Copyright 2018 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Note: this file is purely for documentation. Any contents are not expected
|
||||
// to be loaded as the JS file.
|
||||
/**
|
||||
* The `Status` type defines a logical error model that is suitable for
|
||||
* different programming environments, including REST APIs and RPC APIs. It is
|
||||
* used by [gRPC](https://github.com/grpc). The error model is designed to be:
|
||||
*
|
||||
* - Simple to use and understand for most users
|
||||
* - Flexible enough to meet unexpected needs
|
||||
*
|
||||
* # Overview
|
||||
*
|
||||
* The `Status` message contains three pieces of data: error code, error
|
||||
* message, and error details. The error code should be an enum value of
|
||||
* google.rpc.Code, but it may accept additional error codes if needed. The
|
||||
* error message should be a developer-facing English message that helps
|
||||
* developers *understand* and *resolve* the error. If a localized user-facing
|
||||
* error message is needed, put the localized message in the error details or
|
||||
* localize it in the client. The optional error details may contain arbitrary
|
||||
* information about the error. There is a predefined set of error detail types
|
||||
* in the package `google.rpc` that can be used for common error conditions.
|
||||
*
|
||||
* # Language mapping
|
||||
*
|
||||
* The `Status` message is the logical representation of the error model, but it
|
||||
* is not necessarily the actual wire format. When the `Status` message is
|
||||
* exposed in different client libraries and different wire protocols, it can be
|
||||
* mapped differently. For example, it will likely be mapped to some exceptions
|
||||
* in Java, but more likely mapped to some error codes in C.
|
||||
*
|
||||
* # Other uses
|
||||
*
|
||||
* The error model and the `Status` message can be used in a variety of
|
||||
* environments, either with or without APIs, to provide a
|
||||
* consistent developer experience across different environments.
|
||||
*
|
||||
* Example uses of this error model include:
|
||||
*
|
||||
* - Partial errors. If a service needs to return partial errors to the client,
|
||||
* it may embed the `Status` in the normal response to indicate the partial
|
||||
* errors.
|
||||
*
|
||||
* - Workflow errors. A typical workflow has multiple steps. Each step may
|
||||
* have a `Status` message for error reporting.
|
||||
*
|
||||
* - Batch operations. If a client uses batch request and batch response, the
|
||||
* `Status` message should be used directly inside batch response, one for
|
||||
* each error sub-response.
|
||||
*
|
||||
* - Asynchronous operations. If an API call embeds asynchronous operation
|
||||
* results in its response, the status of those operations should be
|
||||
* represented directly using the `Status` message.
|
||||
*
|
||||
* - Logging. If some API errors are stored in logs, the message `Status` could
|
||||
* be used directly after any stripping needed for security/privacy reasons.
|
||||
*
|
||||
* @property {number} code
|
||||
* The status code, which should be an enum value of google.rpc.Code.
|
||||
*
|
||||
* @property {string} message
|
||||
* A developer-facing error message, which should be in English. Any
|
||||
* user-facing error message should be localized and sent in the
|
||||
* google.rpc.Status.details field, or localized by the client.
|
||||
*
|
||||
* @property {Object[]} details
|
||||
* A list of messages that carry the error details. There is a common set of
|
||||
* message types for APIs to use.
|
||||
*
|
||||
* This object should have the same structure as [Any]{@link
|
||||
* google.protobuf.Any}
|
||||
*
|
||||
* @typedef Status
|
||||
* @memberof google.rpc
|
||||
* @see [google.rpc.Status definition in proto format]{@link https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto}
|
||||
*/
|
||||
const Status = {
|
||||
// This is for documentation. Actual contents will be loaded by gRPC.
|
||||
};
|
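// Sketch (not part of the generated documentation): a plain object shaped like
// the Status message described above. The code 5 (google.rpc.Code.NOT_FOUND)
// and the message text are hypothetical; `details` would carry Any-shaped
// objects when present.
const exampleStatus = {
  code: 5,
  message: 'Document "users/alice" was not found.',
  details: [],
};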
||||
//# sourceMappingURL=doc_status.js.map
|
||||
1394
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/firestore_client.js
generated
vendored
Normal file
File diff suppressed because it is too large
100
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/firestore_client_config.json
generated
vendored
Normal file
@@ -0,0 +1,100 @@
|
||||
{
|
||||
"interfaces": {
|
||||
"google.firestore.v1beta1.Firestore": {
|
||||
"retry_codes": {
|
||||
"idempotent": [
|
||||
"DEADLINE_EXCEEDED",
|
||||
"UNAVAILABLE"
|
||||
],
|
||||
"non_idempotent": []
|
||||
},
|
||||
"retry_params": {
|
||||
"default": {
|
||||
"initial_retry_delay_millis": 100,
|
||||
"retry_delay_multiplier": 1.3,
|
||||
"max_retry_delay_millis": 60000,
|
||||
"initial_rpc_timeout_millis": 20000,
|
||||
"rpc_timeout_multiplier": 1.0,
|
||||
"max_rpc_timeout_millis": 20000,
|
||||
"total_timeout_millis": 600000
|
||||
},
|
||||
"streaming": {
|
||||
"initial_retry_delay_millis": 100,
|
||||
"retry_delay_multiplier": 1.3,
|
||||
"max_retry_delay_millis": 60000,
|
||||
"initial_rpc_timeout_millis": 300000,
|
||||
"rpc_timeout_multiplier": 1.0,
|
||||
"max_rpc_timeout_millis": 300000,
|
||||
"total_timeout_millis": 600000
|
||||
}
|
||||
},
|
||||
"methods": {
|
||||
"GetDocument": {
|
||||
"timeout_millis": 60000,
|
||||
"retry_codes_name": "idempotent",
|
||||
"retry_params_name": "default"
|
||||
},
|
||||
"ListDocuments": {
|
||||
"timeout_millis": 60000,
|
||||
"retry_codes_name": "idempotent",
|
||||
"retry_params_name": "default"
|
||||
},
|
||||
"CreateDocument": {
|
||||
"timeout_millis": 60000,
|
||||
"retry_codes_name": "non_idempotent",
|
||||
"retry_params_name": "default"
|
||||
},
|
||||
"UpdateDocument": {
|
||||
"timeout_millis": 60000,
|
||||
"retry_codes_name": "non_idempotent",
|
||||
"retry_params_name": "default"
|
||||
},
|
||||
"DeleteDocument": {
|
||||
"timeout_millis": 60000,
|
||||
"retry_codes_name": "idempotent",
|
||||
"retry_params_name": "default"
|
||||
},
|
||||
"BatchGetDocuments": {
|
||||
"timeout_millis": 300000,
|
||||
"retry_codes_name": "idempotent",
|
||||
"retry_params_name": "streaming"
|
||||
},
|
||||
"BeginTransaction": {
|
||||
"timeout_millis": 60000,
|
||||
"retry_codes_name": "idempotent",
|
||||
"retry_params_name": "default"
|
||||
},
|
||||
"Commit": {
|
||||
"timeout_millis": 60000,
|
||||
"retry_codes_name": "non_idempotent",
|
||||
"retry_params_name": "default"
|
||||
},
|
||||
"Rollback": {
|
||||
"timeout_millis": 60000,
|
||||
"retry_codes_name": "idempotent",
|
||||
"retry_params_name": "default"
|
||||
},
|
||||
"RunQuery": {
|
||||
"timeout_millis": 60000,
|
||||
"retry_codes_name": "idempotent",
|
||||
"retry_params_name": "default"
|
||||
},
|
||||
"Write": {
|
||||
"timeout_millis": 86400000,
|
||||
"retry_codes_name": "non_idempotent",
|
||||
"retry_params_name": "streaming"
|
||||
},
|
||||
"Listen": {
|
||||
"timeout_millis": 86400000,
|
||||
"retry_codes_name": "idempotent",
|
||||
"retry_params_name": "streaming"
|
||||
},
|
||||
"ListCollectionIds": {
|
||||
"timeout_millis": 60000,
|
||||
"retry_codes_name": "idempotent",
|
||||
"retry_params_name": "default"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
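// Sketch (not part of the library): how the `retry_params` above translate
// into retry delays, assuming the usual exponential-backoff interpretation
// (delay_n = min(initial * multiplier^n, max), before any jitter).
function retryDelaysMillis(params, attempts) {
  const delays = [];
  let delay = params.initial_retry_delay_millis;
  for (let i = 0; i < attempts; i++) {
    delays.push(Math.min(delay, params.max_retry_delay_millis));
    delay *= params.retry_delay_multiplier;
  }
  return delays;
}
// retryDelaysMillis({
//   initial_retry_delay_millis: 100,
//   retry_delay_multiplier: 1.3,
//   max_retry_delay_millis: 60000,
// }, 5)
//   => approximately [100, 130, 169, 219.7, 285.61]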
||||
17
express-server/node_modules/@google-cloud/firestore/build/src/v1beta1/index.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
// Copyright 2018 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
'use strict';
|
||||
const FirestoreClient = require('./firestore_client');
|
||||
module.exports = FirestoreClient;
|
||||
//# sourceMappingURL=index.js.map
|
||||
182
express-server/node_modules/@google-cloud/firestore/build/src/validate.js
generated
vendored
Normal file
@@ -0,0 +1,182 @@
|
||||
"use strict";
|
||||
/*!
|
||||
* Copyright 2017 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const is = require("is");
|
||||
/**
|
||||
* Formats the given word as plural conditionally given the preceding number.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
function formatPlural(num, str) {
|
||||
return `${num} ${str}` + (num === 1 ? '' : 's');
|
||||
}
|
||||
/**
|
||||
* Provides argument validation for the Firestore Public API. Exposes validators
|
||||
* for strings, integers, numbers, objects and functions by default and can be
|
||||
* extended to provide custom validators.
|
||||
*
|
||||
* The exported validation functions follow the naming convention is{Type} and
|
||||
* isOptional{Type}, such as "isString" and "isOptionalString".
|
||||
*
|
||||
* To register custom validators, invoke the constructor with a mapping
|
||||
* from type names to validation functions. Validation functions return 'true'
|
||||
* for valid inputs and may throw errors with custom validation messages for
|
||||
* easier diagnosis.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
class Validator {
|
||||
/**
|
||||
* Create a new Validator, optionally registering the custom validators as
|
||||
* provided.
|
||||
*
|
||||
* @param customValidators A list of custom validators to register.
|
||||
*/
|
||||
constructor(customValidators) {
|
||||
const validators = Object.assign({
|
||||
function: is.function,
|
||||
integer: (value, min, max) => {
|
||||
min = is.defined(min) ? min : -Infinity;
|
||||
max = is.defined(max) ? max : Infinity;
|
||||
if (!is.integer(value)) {
|
||||
return false;
|
||||
}
|
||||
if (value < min || value > max) {
|
||||
throw new Error(`Value must be within [${min}, ${max}] inclusive, but was: ${value}`);
|
||||
}
|
||||
return true;
|
||||
},
|
||||
number: (value, min, max) => {
|
||||
min = is.defined(min) ? min : -Infinity;
|
||||
max = is.defined(max) ? max : Infinity;
|
||||
if (!is.number(value) || is.nan(value)) {
|
||||
return false;
|
||||
}
|
||||
if (value < min || value > max) {
|
||||
throw new Error(`Value must be within [${min}, ${max}] inclusive, but was: ${value}`);
|
||||
}
|
||||
return true;
|
||||
},
|
||||
object: is.object,
|
||||
string: is.string,
|
||||
boolean: is.boolean
|
||||
}, customValidators);
|
||||
const register = type => {
|
||||
const camelCase = type.substring(0, 1).toUpperCase() + type.substring(1);
|
||||
this[`is${camelCase}`] = (argumentName, ...values) => {
|
||||
let valid = false;
|
||||
let message = is.number(argumentName) ?
|
||||
`Argument at index ${argumentName} is not a valid ${type}.` :
|
||||
`Argument "${argumentName}" is not a valid ${type}.`;
|
||||
try {
|
||||
valid = validators[type].call(null, ...values);
|
||||
}
|
||||
catch (err) {
|
||||
message += ` ${err.message}`;
|
||||
}
|
||||
if (valid !== true) {
|
||||
throw new Error(message);
|
||||
}
|
||||
};
|
||||
this[`isOptional${camelCase}`] = function (argumentName, value) {
|
||||
if (is.defined(value)) {
|
||||
this[`is${camelCase}`].apply(null, arguments);
|
||||
}
|
||||
};
|
||||
};
|
||||
for (const type in validators) {
|
||||
if (validators.hasOwnProperty(type)) {
|
||||
register(type);
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Verifies that 'args' has at least 'minSize' elements.
|
||||
*
|
||||
* @param {string} funcName The function name to use in the error message.
|
||||
* @param {Array.<*>} args The array (or array-like structure) to verify.
|
||||
* @param {number} minSize The minimum number of elements to enforce.
|
||||
* @throws if the expectation is not met.
|
||||
* @returns {boolean} 'true' when the minimum number of elements is available.
|
||||
*/
|
||||
minNumberOfArguments(funcName, args, minSize) {
|
||||
if (args.length < minSize) {
|
||||
throw new Error(`Function "${funcName}()" requires at least ` +
|
||||
`${formatPlural(minSize, 'argument')}.`);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
/**
|
||||
* Verifies that 'args' has at most 'maxSize' elements.
|
||||
*
|
||||
* @param {string} funcName The function name to use in the error message.
|
||||
* @param {Array.<*>} args The array (or array-like structure) to verify.
|
||||
* @param {number} maxSize The maximum number of elements to enforce.
|
||||
* @throws if the expectation is not met.
|
||||
* @returns {boolean} 'true' when only the maximum number of elements is
|
||||
* specified.
|
||||
*/
|
||||
maxNumberOfArguments(funcName, args, maxSize) {
|
||||
if (args.length > maxSize) {
|
||||
throw new Error(`Function "${funcName}()" accepts at most ` +
|
||||
`${formatPlural(maxSize, 'argument')}.`);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
exports.Validator = Validator;
|
||||
function customObjectError(val, path) {
|
||||
const fieldPathMessage = path ? ` (found in field ${path.toString()})` : '';
|
||||
if (is.object(val) && val.constructor.name !== 'Object') {
|
||||
const typeName = val.constructor.name;
|
||||
switch (typeName) {
|
||||
case 'DocumentReference':
|
||||
case 'FieldPath':
|
||||
case 'FieldValue':
|
||||
case 'GeoPoint':
|
||||
case 'Timestamp':
|
||||
return new Error(`Detected an object of type "${typeName}" that doesn't match the ` +
|
||||
`expected instance${fieldPathMessage}. Please ensure that the ` +
|
||||
'Firestore types you are using are from the same NPM package.');
|
||||
default:
|
||||
return new Error(`Couldn't serialize object of type "${typeName}"${fieldPathMessage}. Firestore doesn't support JavaScript ` +
|
||||
'objects with custom prototypes (i.e. objects that were created ' +
|
||||
'via the "new" operator).');
|
||||
}
|
||||
}
|
||||
else if (!is.object(val)) {
|
||||
throw new Error(`Input is not a plain JavaScript object${fieldPathMessage}.`);
|
||||
}
|
||||
else {
|
||||
return new Error(`Invalid use of type "${typeof val}" as a Firestore argument${fieldPathMessage}.`);
|
||||
}
|
||||
}
|
||||
exports.customObjectError = customObjectError;
|
||||
/**
|
||||
* Create a new Validator, optionally registering the custom validators as
|
||||
* provided.
|
||||
*
|
||||
* @private
|
||||
* @param customValidators A list of custom validators to register.
|
||||
*/
|
||||
function createValidator(customValidators) {
|
||||
// This function exists to change the type of `Validator` to `any` so that
|
||||
// consumers can call the custom validator functions.
|
||||
return new Validator(customValidators);
|
||||
}
|
||||
exports.createValidator = createValidator;
|
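// Usage sketch (not part of the library): how the validators generated above
// might be invoked. The argument names and the custom 'path' validator are
// illustrative assumptions.
const validate = createValidator({
  path: value => typeof value === 'string' && value.length > 0,
});
validate.isString('documentName', 'users/alice');         // passes
validate.isOptionalNumber('limit', undefined);             // passes, value is optional
validate.isPath('relativePath', 'users/alice');            // uses the custom validator
validate.minNumberOfArguments('get', ['users/alice'], 1);  // passes
// validate.isInteger('limit', 1.5) would throw:
//   Argument "limit" is not a valid integer.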
||||
//# sourceMappingURL=validate.js.map
|
||||
634
express-server/node_modules/@google-cloud/firestore/build/src/watch.js
generated
vendored
Normal file
@@ -0,0 +1,634 @@
|
||||
"use strict";
|
||||
/*!
|
||||
* Copyright 2017 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const assert = require("assert");
|
||||
const rbtree = require("functional-red-black-tree");
|
||||
const through2 = require("through2");
|
||||
const backoff_1 = require("./backoff");
|
||||
const document_1 = require("./document");
|
||||
const document_change_1 = require("./document-change");
|
||||
const logger_1 = require("./logger");
|
||||
const path_1 = require("./path");
|
||||
const timestamp_1 = require("./timestamp");
|
||||
const util_1 = require("./util");
|
||||
const types_1 = require("./types");
|
||||
/*!
|
||||
* Target ID used by watch. Watch uses a fixed target id since we only support
|
||||
* one target per stream.
|
||||
*
|
||||
* @private
|
||||
* @type {number}
|
||||
*/
|
||||
const WATCH_TARGET_ID = 0x1;
|
||||
/*!
|
||||
* The change type for document change events.
|
||||
*/
|
||||
// tslint:disable-next-line:variable-name
|
||||
const ChangeType = {
|
||||
added: 'added',
|
||||
modified: 'modified',
|
||||
removed: 'removed',
|
||||
};
|
||||
/*!
|
||||
* List of GRPC Error Codes.
|
||||
*
|
||||
* This corresponds to
|
||||
* {@link https://github.com/grpc/grpc/blob/master/doc/statuscodes.md}.
|
||||
*/
|
||||
const GRPC_STATUS_CODE = {
|
||||
// Not an error; returned on success.
|
||||
OK: 0,
|
||||
// The operation was cancelled (typically by the caller).
|
||||
CANCELLED: 1,
|
||||
// Unknown error. An example of where this error may be returned is if a
|
||||
// Status value received from another address space belongs to an error-space
|
||||
// that is not known in this address space. Also errors raised by APIs that
|
||||
// do not return enough error information may be converted to this error.
|
||||
UNKNOWN: 2,
|
||||
// Client specified an invalid argument. Note that this differs from
|
||||
// FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments that are
|
||||
// problematic regardless of the state of the system (e.g., a malformed file
|
||||
// name).
|
||||
INVALID_ARGUMENT: 3,
|
||||
// Deadline expired before operation could complete. For operations that
|
||||
// change the state of the system, this error may be returned even if the
|
||||
// operation has completed successfully. For example, a successful response
|
||||
// from a server could have been delayed long enough for the deadline to
|
||||
// expire.
|
||||
DEADLINE_EXCEEDED: 4,
|
||||
// Some requested entity (e.g., file or directory) was not found.
|
||||
NOT_FOUND: 5,
|
||||
// Some entity that we attempted to create (e.g., file or directory) already
|
||||
// exists.
|
||||
ALREADY_EXISTS: 6,
|
||||
// The caller does not have permission to execute the specified operation.
|
||||
// PERMISSION_DENIED must not be used for rejections caused by exhausting
|
||||
// some resource (use RESOURCE_EXHAUSTED instead for those errors).
|
||||
// PERMISSION_DENIED must not be used if the caller can not be identified
|
||||
// (use UNAUTHENTICATED instead for those errors).
|
||||
PERMISSION_DENIED: 7,
|
||||
// The request does not have valid authentication credentials for the
|
||||
// operation.
|
||||
UNAUTHENTICATED: 16,
|
||||
// Some resource has been exhausted, perhaps a per-user quota, or perhaps the
|
||||
// entire file system is out of space.
|
||||
RESOURCE_EXHAUSTED: 8,
|
||||
// Operation was rejected because the system is not in a state required for
|
||||
// the operation's execution. For example, directory to be deleted may be
|
||||
// non-empty, an rmdir operation is applied to a non-directory, etc.
|
||||
//
|
||||
// A litmus test that may help a service implementor in deciding
|
||||
// between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE:
|
||||
// (a) Use UNAVAILABLE if the client can retry just the failing call.
|
||||
// (b) Use ABORTED if the client should retry at a higher-level
|
||||
// (e.g., restarting a read-modify-write sequence).
|
||||
// (c) Use FAILED_PRECONDITION if the client should not retry until
|
||||
// the system state has been explicitly fixed. E.g., if an "rmdir"
|
||||
// fails because the directory is non-empty, FAILED_PRECONDITION
|
||||
// should be returned since the client should not retry unless
|
||||
// they have first fixed up the directory by deleting files from it.
|
||||
// (d) Use FAILED_PRECONDITION if the client performs conditional
|
||||
// REST Get/Update/Delete on a resource and the resource on the
|
||||
// server does not match the condition. E.g., conflicting
|
||||
// read-modify-write on the same resource.
|
||||
FAILED_PRECONDITION: 9,
|
||||
// The operation was aborted, typically due to a concurrency issue like
|
||||
// sequencer check failures, transaction aborts, etc.
|
||||
//
|
||||
// See litmus test above for deciding between FAILED_PRECONDITION, ABORTED,
|
||||
// and UNAVAILABLE.
|
||||
ABORTED: 10,
|
||||
// Operation was attempted past the valid range. E.g., seeking or reading
|
||||
// past end of file.
|
||||
//
|
||||
// Unlike INVALID_ARGUMENT, this error indicates a problem that may be fixed
|
||||
// if the system state changes. For example, a 32-bit file system will
|
||||
// generate INVALID_ARGUMENT if asked to read at an offset that is not in the
|
||||
// range [0,2^32-1], but it will generate OUT_OF_RANGE if asked to read from
|
||||
// an offset past the current file size.
|
||||
//
|
||||
// There is a fair bit of overlap between FAILED_PRECONDITION and
|
||||
// OUT_OF_RANGE. We recommend using OUT_OF_RANGE (the more specific error)
|
||||
// when it applies so that callers who are iterating through a space can
|
||||
// easily look for an OUT_OF_RANGE error to detect when they are done.
|
||||
OUT_OF_RANGE: 11,
|
||||
// Operation is not implemented or not supported/enabled in this service.
|
||||
UNIMPLEMENTED: 12,
|
||||
// Internal errors. Means some invariant expected by the underlying system has
|
||||
// been broken. If you see one of these errors, something is very broken.
|
||||
INTERNAL: 13,
|
||||
// The service is currently unavailable. This is most likely a transient
|
||||
// condition and may be corrected by retrying with a backoff.
|
||||
//
|
||||
// See litmus test above for deciding between FAILED_PRECONDITION, ABORTED,
|
||||
// and UNAVAILABLE.
|
||||
UNAVAILABLE: 14,
|
||||
// Unrecoverable data loss or corruption.
|
||||
DATA_LOSS: 15,
|
||||
// Force users to include a default branch:
|
||||
DO_NOT_USE: -1,
|
||||
};
|
||||
/*!
|
||||
* The comparator used for document watches (which should always get called with
|
||||
* the same document).
|
||||
*/
|
||||
const DOCUMENT_WATCH_COMPARATOR = (doc1, doc2) => {
|
||||
assert(doc1 === doc2, 'Document watches only support one document.');
|
||||
return 0;
|
||||
};
|
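// Usage sketch (not part of the library, never invoked here): how a Watch
// instance created via the class defined below is typically consumed.
// `documentRef` is assumed to be a DocumentReference from this package.
function exampleDocumentListen(documentRef) {
  const watch = Watch.forDocument(documentRef);
  const unsubscribe = watch.onSnapshot(
      () => {
        // Called every time a new snapshot is available.
      },
      err => {
        // Called once if the listen fails or is cancelled.
      });
  // Calling the returned function cancels the snapshot listener.
  return unsubscribe;
}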
||||
/**
|
||||
* Watch provides listen functionality and exposes the 'onSnapshot' observer. It
|
||||
* can be used with a valid Firestore Listen target.
|
||||
*
|
||||
* @class
|
||||
* @private
|
||||
*/
|
||||
class Watch {
|
||||
/**
|
||||
* @private
|
||||
* @hideconstructor
|
||||
*
|
||||
* @param firestore The Firestore Database client.
|
||||
* @param target A Firestore 'Target' proto denoting the target to listen on.
|
||||
* @param comparator A comparator for QueryDocumentSnapshots that is used to
|
||||
* order the document snapshots returned by this watch.
|
||||
*/
|
||||
constructor(firestore, target, comparator) {
|
||||
this._firestore = firestore;
|
||||
this._target = target;
|
||||
this._comparator = comparator;
|
||||
this._backoff = new backoff_1.ExponentialBackoff();
|
||||
this._requestTag = util_1.requestTag();
|
||||
}
|
||||
/**
|
||||
* Creates a new Watch instance to listen on DocumentReferences.
|
||||
*
|
||||
* @private
|
||||
* @param documentRef - The document reference for this watch.
|
||||
* @returns A newly created Watch instance.
|
||||
*/
|
||||
static forDocument(documentRef) {
|
||||
return new Watch(documentRef.firestore, {
|
||||
documents: {
|
||||
documents: [documentRef.formattedName],
|
||||
},
|
||||
targetId: WATCH_TARGET_ID,
|
||||
}, DOCUMENT_WATCH_COMPARATOR);
|
||||
}
|
||||
/**
|
||||
* Creates a new Watch instance to listen on Queries.
|
||||
*
|
||||
* @private
|
||||
* @param query The query used for this watch.
|
||||
* @returns A newly created Watch instance.
|
||||
*/
|
||||
static forQuery(query) {
|
||||
return new Watch(query.firestore, {
|
||||
query: query.toProto(),
|
||||
targetId: WATCH_TARGET_ID,
|
||||
}, query.comparator());
|
||||
}
|
||||
/**
|
||||
* Determines whether an error is considered permanent and should not be
|
||||
* retried. Errors that don't provide a GRPC error code are always considered
|
||||
* transient in this context.
|
||||
*
|
||||
* @private
|
||||
* @param error An error object.
|
||||
* @return Whether the error is permanent.
|
||||
*/
|
||||
isPermanentError(error) {
|
||||
if (error.code === undefined) {
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Unable to determine error code: ', error);
|
||||
return false;
|
||||
}
|
||||
switch (error.code) {
|
||||
case GRPC_STATUS_CODE.CANCELLED:
|
||||
case GRPC_STATUS_CODE.UNKNOWN:
|
||||
case GRPC_STATUS_CODE.DEADLINE_EXCEEDED:
|
||||
case GRPC_STATUS_CODE.RESOURCE_EXHAUSTED:
|
||||
case GRPC_STATUS_CODE.INTERNAL:
|
||||
case GRPC_STATUS_CODE.UNAVAILABLE:
|
||||
case GRPC_STATUS_CODE.UNAUTHENTICATED:
|
||||
return false;
|
||||
default:
|
||||
return true;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Determines whether we need to initiate a longer backoff due to system
|
||||
* overload.
|
||||
*
|
||||
* @private
|
||||
* @param error A GRPC Error object that exposes an error code.
|
||||
* @return Whether we need to back off our retries.
|
||||
*/
|
||||
isResourceExhaustedError(error) {
|
||||
return error.code === GRPC_STATUS_CODE.RESOURCE_EXHAUSTED;
|
||||
}
|
||||
/**
|
||||
* Starts a watch and attaches a listener for document change events.
|
||||
*
|
||||
* @private
|
||||
* @param onNext A callback to be called every time a new snapshot is
|
||||
* available.
|
||||
* @param onError A callback to be called if the listen fails or is cancelled.
|
||||
* No further callbacks will occur.
|
||||
*
|
||||
* @returns An unsubscribe function that can be called to cancel the snapshot
|
||||
* listener.
|
||||
*/
|
||||
onSnapshot(onNext, onError) {
|
||||
// The sorted tree of QueryDocumentSnapshots as sent in the last snapshot.
|
||||
// We only look at the keys.
|
||||
let docTree = rbtree(this._comparator);
|
||||
// A map of document names to QueryDocumentSnapshots for the last sent
|
||||
// snapshot.
|
||||
let docMap = new Map();
|
||||
// The accumulated map of document changes (keyed by document name) for the
|
||||
// current snapshot.
|
||||
const changeMap = new Map();
|
||||
// The current state of the query results.
|
||||
let current = false;
|
||||
// We need this to track whether we've pushed an initial set of changes,
|
||||
// since we should push those even when there are no changes, if there
|
||||
// aren't docs.
|
||||
let hasPushed = false;
|
||||
// The server assigns and updates the resume token.
|
||||
let resumeToken = null;
|
||||
// Indicates whether we are interested in data from the stream. Set to false
|
||||
// in the 'unsubscribe()' callback.
|
||||
let isActive = true;
|
||||
// Sentinel value for a document remove.
|
||||
const REMOVED = {};
|
||||
const request = {
|
||||
database: this._firestore.formattedName,
|
||||
addTarget: this._target,
|
||||
};
|
||||
// We may need to replace the underlying stream on reset events.
|
||||
// This is the one that will be returned and will proxy the current one.
|
||||
const stream = through2.obj();
|
||||
// The current stream to the backend.
|
||||
let currentStream = null;
|
||||
/** Helper to clear the docs on RESET or filter mismatch. */
|
||||
const resetDocs = () => {
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Resetting documents');
|
||||
changeMap.clear();
|
||||
resumeToken = null;
|
||||
docTree.forEach(snapshot => {
|
||||
// Mark each document as deleted. If documents are not deleted, they
|
||||
// will be sent again by the server.
|
||||
changeMap.set(snapshot.ref.formattedName, REMOVED);
|
||||
});
|
||||
current = false;
|
||||
};
|
||||
/** Closes the stream and calls onError() if the stream is still active. */
|
||||
const closeStream = (err) => {
|
||||
if (currentStream) {
|
||||
currentStream.unpipe(stream);
|
||||
currentStream.end();
|
||||
currentStream = null;
|
||||
}
|
||||
stream.end();
|
||||
if (isActive) {
|
||||
isActive = false;
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Invoking onError: ', err);
|
||||
onError(err);
|
||||
}
|
||||
};
|
||||
/**
|
||||
* Re-opens the stream unless the specified error is considered permanent.
|
||||
* Clears the change map.
|
||||
*/
|
||||
const maybeReopenStream = (err) => {
|
||||
if (isActive && !this.isPermanentError(err)) {
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Stream ended, re-opening after retryable error: ', err);
|
||||
request.addTarget.resumeToken = resumeToken;
|
||||
changeMap.clear();
|
||||
if (this.isResourceExhaustedError(err)) {
|
||||
this._backoff.resetToMax();
|
||||
}
|
||||
resetStream();
|
||||
}
|
||||
else {
|
||||
closeStream(err);
|
||||
}
|
||||
};
|
||||
/** Helper to restart the outgoing stream to the backend. */
|
||||
const resetStream = () => {
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Opening new stream');
|
||||
if (currentStream) {
|
||||
currentStream.unpipe(stream);
|
||||
currentStream.end();
|
||||
currentStream = null;
|
||||
initStream();
|
||||
}
|
||||
};
|
||||
/**
|
||||
* Initializes a new stream to the backend with backoff.
|
||||
*/
|
||||
const initStream = () => {
|
||||
this._backoff.backoffAndWait().then(() => {
|
||||
if (!isActive) {
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Not initializing inactive stream');
|
||||
return;
|
||||
}
|
||||
// Note that we need to call the internal _listen API to pass additional
|
||||
// header values in readWriteStream.
|
||||
this._firestore
|
||||
.readWriteStream('listen', request, this._requestTag, true)
|
||||
.then(backendStream => {
|
||||
if (!isActive) {
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Closing inactive stream');
|
||||
backendStream.end();
|
||||
return;
|
||||
}
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Opened new stream');
|
||||
currentStream = backendStream;
|
||||
currentStream.on('error', err => {
|
||||
maybeReopenStream(err);
|
||||
});
|
||||
currentStream.on('end', () => {
|
||||
const err = new types_1.GrpcError('Stream ended unexpectedly');
|
||||
err.code = GRPC_STATUS_CODE.UNKNOWN;
|
||||
maybeReopenStream(err);
|
||||
});
|
||||
currentStream.pipe(stream);
|
||||
currentStream.resume();
|
||||
})
|
||||
.catch(closeStream);
|
||||
});
|
||||
};
|
||||
/**
|
||||
* Checks if the current target id is included in the list of target ids.
|
||||
* If no targetIds are provided, returns true.
|
||||
*/
|
||||
function affectsTarget(targetIds, currentId) {
|
||||
if (targetIds === undefined || targetIds.length === 0) {
|
||||
return true;
|
||||
}
|
||||
for (const targetId of targetIds) {
|
||||
if (targetId === currentId) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
/** Splits up document changes into removals, additions, and updates. */
|
||||
function extractChanges(docMap, changes, readTime) {
|
||||
const deletes = [];
|
||||
const adds = [];
|
||||
const updates = [];
|
||||
changes.forEach((value, name) => {
|
||||
if (value === REMOVED) {
|
||||
if (docMap.has(name)) {
|
||||
deletes.push(name);
|
||||
}
|
||||
}
|
||||
else if (docMap.has(name)) {
|
||||
value.readTime = readTime;
|
||||
updates.push(value.build());
|
||||
}
|
||||
else {
|
||||
value.readTime = readTime;
|
||||
adds.push(value.build());
|
||||
}
|
||||
});
|
||||
return { deletes, adds, updates };
|
||||
}
|
||||
/**
|
||||
* Applies the mutations in changeMap to both the document tree and the
|
||||
* document lookup map. Modifies docMap in-place and returns the updated
|
||||
* state.
|
||||
*/
|
||||
const computeSnapshot = (docTree, docMap, changes) => {
|
||||
let updatedTree = docTree;
|
||||
const updatedMap = docMap;
|
||||
assert(docTree.length === docMap.size, 'The document tree and document ' +
|
||||
'map should have the same number of entries.');
|
||||
/**
|
||||
* Applies a document delete to the document tree and the document
|
||||
* map. Returns the corresponding DocumentChange event.
|
||||
*/
|
||||
function deleteDoc(name) {
|
||||
assert(updatedMap.has(name), 'Document to delete does not exist');
|
||||
const oldDocument = updatedMap.get(name);
|
||||
const existing = updatedTree.find(oldDocument);
|
||||
const oldIndex = existing.index;
|
||||
updatedTree = existing.remove();
|
||||
updatedMap.delete(name);
|
||||
return new document_change_1.DocumentChange(ChangeType.removed, oldDocument, oldIndex, -1);
|
||||
}
|
||||
/**
|
||||
* Applies a document add to the document tree and the document map.
|
||||
* Returns the corresponding DocumentChange event.
|
||||
*/
|
||||
function addDoc(newDocument) {
|
||||
const name = newDocument.ref.formattedName;
|
||||
assert(!updatedMap.has(name), 'Document to add already exists');
|
||||
updatedTree = updatedTree.insert(newDocument, null);
|
||||
const newIndex = updatedTree.find(newDocument).index;
|
||||
updatedMap.set(name, newDocument);
|
||||
return new document_change_1.DocumentChange(ChangeType.added, newDocument, -1, newIndex);
|
||||
}
|
||||
/**
|
||||
* Applies a document modification to the document tree and the
|
||||
* document map. Returns the DocumentChange event for successful
|
||||
* modifications.
|
||||
*/
|
||||
function modifyDoc(newDocument) {
|
||||
const name = newDocument.ref.formattedName;
|
||||
assert(updatedMap.has(name), 'Document to modify does not exist');
|
||||
const oldDocument = updatedMap.get(name);
|
||||
if (!oldDocument.updateTime.isEqual(newDocument.updateTime)) {
|
||||
const removeChange = deleteDoc(name);
|
||||
const addChange = addDoc(newDocument);
|
||||
return new document_change_1.DocumentChange(ChangeType.modified, newDocument, removeChange.oldIndex, addChange.newIndex);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
// Process the sorted changes in the order that is expected by our
|
||||
// clients (removals, additions, and then modifications). We also need
|
||||
// to sort the individual changes to assure that oldIndex/newIndex
|
||||
// keep incrementing.
|
||||
const appliedChanges = [];
|
||||
changes.deletes.sort((name1, name2) => {
|
||||
// Deletes are sorted based on the order of the existing document.
|
||||
return this._comparator(updatedMap.get(name1), updatedMap.get(name2));
|
||||
});
|
||||
changes.deletes.forEach(name => {
|
||||
const change = deleteDoc(name);
|
||||
appliedChanges.push(change);
|
||||
});
|
||||
changes.adds.sort(this._comparator);
|
||||
changes.adds.forEach(snapshot => {
|
||||
const change = addDoc(snapshot);
|
||||
appliedChanges.push(change);
|
||||
});
|
||||
changes.updates.sort(this._comparator);
|
||||
changes.updates.forEach(snapshot => {
|
||||
const change = modifyDoc(snapshot);
|
||||
if (change) {
|
||||
appliedChanges.push(change);
|
||||
}
|
||||
});
|
||||
assert(updatedTree.length === updatedMap.size, 'The updated document ' +
|
||||
'tree and document map should have the same number of entries.');
|
||||
return { updatedTree, updatedMap, appliedChanges };
|
||||
};
|
||||
/**
|
||||
* Assembles a new snapshot from the current set of changes and invokes the
|
||||
* user's callback. Clears the current changes on completion.
|
||||
*/
|
||||
const push = (readTime, nextResumeToken) => {
|
||||
const changes = extractChanges(docMap, changeMap, readTime);
|
||||
const diff = computeSnapshot(docTree, docMap, changes);
|
||||
if (!hasPushed || diff.appliedChanges.length > 0) {
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Sending snapshot with %d changes and %d documents', String(diff.appliedChanges.length), diff.updatedTree.length);
|
||||
onNext(readTime, diff.updatedTree.length, () => diff.updatedTree.keys, () => diff.appliedChanges);
|
||||
hasPushed = true;
|
||||
}
|
||||
docTree = diff.updatedTree;
|
||||
docMap = diff.updatedMap;
|
||||
changeMap.clear();
|
||||
resumeToken = nextResumeToken;
|
||||
};
|
||||
/**
|
||||
* Returns the current count of all documents, including the changes from
|
||||
* the current changeMap.
|
||||
*/
|
||||
function currentSize() {
|
||||
const changes = extractChanges(docMap, changeMap, timestamp_1.Timestamp.now());
|
||||
return docMap.size + changes.adds.length - changes.deletes.length;
|
||||
}
|
||||
initStream();
|
||||
stream
|
||||
.on('data', (proto) => {
|
||||
if (proto.targetChange) {
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Processing target change');
|
||||
const change = proto.targetChange;
|
||||
const noTargetIds = !change.targetIds || change.targetIds.length === 0;
|
||||
if (change.targetChangeType === 'NO_CHANGE') {
|
||||
if (noTargetIds && change.readTime && current) {
|
||||
// This means everything is up-to-date, so emit the current
|
||||
// set of docs as a snapshot, if there were changes.
|
||||
push(timestamp_1.Timestamp.fromProto(change.readTime), change.resumeToken);
|
||||
}
|
||||
}
|
||||
else if (change.targetChangeType === 'ADD') {
|
||||
if (WATCH_TARGET_ID !== change.targetIds[0]) {
|
||||
closeStream(Error('Unexpected target ID sent by server'));
|
||||
}
|
||||
}
|
||||
else if (change.targetChangeType === 'REMOVE') {
|
||||
let code = 13;
|
||||
let message = 'internal error';
|
||||
if (change.cause) {
|
||||
code = change.cause.code;
|
||||
message = change.cause.message;
|
||||
}
|
||||
// @todo: Surface a .code property on the exception.
|
||||
closeStream(new Error('Error ' + code + ': ' + message));
|
||||
}
|
||||
else if (change.targetChangeType === 'RESET') {
|
||||
// Whatever changes have happened so far no longer matter.
|
||||
resetDocs();
|
||||
}
|
||||
else if (change.targetChangeType === 'CURRENT') {
|
||||
current = true;
|
||||
}
|
||||
else {
|
||||
closeStream(new Error('Unknown target change type: ' + JSON.stringify(change)));
|
||||
}
|
||||
if (change.resumeToken &&
|
||||
affectsTarget(change.targetIds, WATCH_TARGET_ID)) {
|
||||
this._backoff.reset();
|
||||
}
|
||||
}
|
||||
else if (proto.documentChange) {
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Processing change event');
|
||||
// No other targetIds can show up here, but we still need to see
|
||||
// if the targetId was in the added list or removed list.
|
||||
const targetIds = proto.documentChange.targetIds || [];
|
||||
const removedTargetIds = proto.documentChange.removedTargetIds || [];
|
||||
let changed = false;
|
||||
let removed = false;
|
||||
for (let i = 0; i < targetIds.length; i++) {
|
||||
if (targetIds[i] === WATCH_TARGET_ID) {
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
for (let i = 0; i < removedTargetIds.length; i++) {
|
||||
if (removedTargetIds[i] === WATCH_TARGET_ID) {
|
||||
removed = true;
|
||||
}
|
||||
}
|
||||
const document = proto.documentChange.document;
|
||||
const name = document.name;
|
||||
if (changed) {
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Received document change');
|
||||
const snapshot = new document_1.DocumentSnapshotBuilder();
|
||||
snapshot.ref = this._firestore.doc(path_1.ResourcePath.fromSlashSeparatedString(name).relativeName);
|
||||
snapshot.fieldsProto = document.fields || {};
|
||||
snapshot.createTime =
|
||||
timestamp_1.Timestamp.fromProto(document.createTime);
|
||||
snapshot.updateTime =
|
||||
timestamp_1.Timestamp.fromProto(document.updateTime);
|
||||
changeMap.set(name, snapshot);
|
||||
}
|
||||
else if (removed) {
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Received document remove');
|
||||
changeMap.set(name, REMOVED);
|
||||
}
|
||||
}
|
||||
else if (proto.documentDelete || proto.documentRemove) {
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Processing remove event');
|
||||
const name = (proto.documentDelete || proto.documentRemove).document;
|
||||
changeMap.set(name, REMOVED);
|
||||
}
|
||||
else if (proto.filter) {
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Processing filter update');
|
||||
if (proto.filter.count !== currentSize()) {
|
||||
// We need to remove all the current results.
|
||||
resetDocs();
|
||||
// The filter didn't match, so re-issue the query.
|
||||
resetStream();
|
||||
}
|
||||
}
|
||||
else {
|
||||
closeStream(new Error('Unknown listen response type: ' + JSON.stringify(proto)));
|
||||
}
|
||||
})
|
||||
.on('end', () => {
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Processing stream end');
|
||||
if (currentStream) {
|
||||
// Pass the event on to the underlying stream.
|
||||
currentStream.end();
|
||||
}
|
||||
});
|
||||
return () => {
|
||||
logger_1.logger('Watch.onSnapshot', this._requestTag, 'Ending stream');
|
||||
// Prevent further callbacks.
|
||||
isActive = false;
|
||||
onNext = () => { };
|
||||
onError = () => { };
|
||||
stream.end();
|
||||
};
|
||||
}
|
||||
}
|
||||
exports.Watch = Watch;
|
||||
//# sourceMappingURL=watch.js.map
|
||||
507
express-server/node_modules/@google-cloud/firestore/build/src/write-batch.js
generated
vendored
Normal file
507
express-server/node_modules/@google-cloud/firestore/build/src/write-batch.js
generated
vendored
Normal file
@@ -0,0 +1,507 @@
|
||||
"use strict";
|
||||
/*!
|
||||
* Copyright 2017 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const assert = require("assert");
|
||||
const document_1 = require("./document");
|
||||
const logger_1 = require("./logger");
|
||||
const path_1 = require("./path");
|
||||
const serializer_1 = require("./serializer");
|
||||
const timestamp_1 = require("./timestamp");
|
||||
const util_1 = require("./util");
|
||||
/*!
|
||||
* Google Cloud Functions terminates idle connections after two minutes. After
|
||||
* longer periods of idleness, we issue transactional commits to allow for
|
||||
* retries.
|
||||
*/
|
||||
const GCF_IDLE_TIMEOUT_MS = 110 * 1000;
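// Illustrative sketch (assumption, mirroring _shouldCreateTransaction() further
// below): the idleness check effectively reduces to
//
//   Date.now() - firestore._lastSuccessfulRequest > GCF_IDLE_TIMEOUT_MS
//
// i.e. after roughly 110 seconds without a successful request, commits are
// wrapped in a transaction so they can be retried if GCF dropped the connection.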
|
||||
/**
|
||||
* A WriteResult wraps the write time set by the Firestore servers on sets(),
|
||||
* updates(), and creates().
|
||||
*
|
||||
* @class
|
||||
*/
|
||||
class WriteResult {
|
||||
/**
|
||||
* @private
|
||||
* @hideconstructor
|
||||
*
|
||||
* @param _writeTime The time of the corresponding document write.
|
||||
*/
|
||||
constructor(_writeTime) {
|
||||
this._writeTime = _writeTime;
|
||||
}
|
||||
/**
|
||||
* The write time as set by the Firestore servers.
|
||||
*
|
||||
* @type {Timestamp}
|
||||
* @name WriteResult#writeTime
|
||||
* @readonly
|
||||
*
|
||||
* @example
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* documentRef.set({foo: 'bar'}).then(writeResult => {
|
||||
* console.log(`Document written at: ${writeResult.writeTime.toDate()}`);
|
||||
* });
|
||||
*/
|
||||
get writeTime() {
|
||||
return this._writeTime;
|
||||
}
|
||||
/**
|
||||
* Returns true if this `WriteResult` is equal to the provided value.
|
||||
*
|
||||
* @param {*} other The value to compare against.
|
||||
* @return true if this `WriteResult` is equal to the provided value.
|
||||
*/
|
||||
isEqual(other) {
|
||||
return (this === other ||
|
||||
(other instanceof WriteResult &&
|
||||
this._writeTime.isEqual(other._writeTime)));
|
||||
}
|
||||
}
|
||||
exports.WriteResult = WriteResult;
|
||||
/**
|
||||
* A Firestore WriteBatch that can be used to atomically commit multiple write
|
||||
* operations at once.
|
||||
*
|
||||
* @class
|
||||
*/
|
||||
class WriteBatch {
|
||||
/**
|
||||
* @private
|
||||
* @hideconstructor
|
||||
*
|
||||
* @param {Firestore} firestore The Firestore Database client.
|
||||
*/
|
||||
constructor(firestore) {
|
||||
this._writes = [];
|
||||
this._committed = false;
|
||||
this._firestore = firestore;
|
||||
this._validator = firestore._validator;
|
||||
this._serializer = new serializer_1.Serializer(firestore);
|
||||
}
|
||||
/**
|
||||
* Checks if this write batch has any pending operations.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
get isEmpty() {
|
||||
return this._writes.length === 0;
|
||||
}
|
||||
/**
|
||||
* Throws an error if this batch has already been committed.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
verifyNotCommitted() {
|
||||
if (this._committed) {
|
||||
throw new Error('Cannot modify a WriteBatch that has been committed.');
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Create a document with the provided object values. This will fail the batch
|
||||
* if a document exists at its location.
|
||||
*
|
||||
* @param {DocumentReference} documentRef A reference to the document to be
|
||||
* created.
|
||||
* @param {DocumentData} data The object to serialize as the document.
|
||||
* @returns {WriteBatch} This WriteBatch instance. Used for chaining
|
||||
* method calls.
|
||||
*
|
||||
* @example
|
||||
* let writeBatch = firestore.batch();
|
||||
* let documentRef = firestore.collection('col').doc();
|
||||
*
|
||||
* writeBatch.create(documentRef, {foo: 'bar'});
|
||||
*
|
||||
* writeBatch.commit().then(() => {
|
||||
* console.log('Successfully executed batch.');
|
||||
* });
|
||||
*/
|
||||
create(documentRef, data) {
|
||||
this._validator.isDocumentReference('documentRef', documentRef);
|
||||
this._validator.isDocument('data', data, {
|
||||
allowEmpty: true,
|
||||
allowDeletes: 'none',
|
||||
allowTransforms: true,
|
||||
});
|
||||
this.verifyNotCommitted();
|
||||
const document = document_1.DocumentSnapshot.fromObject(documentRef, data);
|
||||
const precondition = new document_1.Precondition({ exists: false });
|
||||
const transform = document_1.DocumentTransform.fromObject(documentRef, data);
|
||||
transform.validate();
|
||||
this._writes.push({
|
||||
write: !document.isEmpty || transform.isEmpty ? document.toProto() : null,
|
||||
transform: transform.toProto(this._serializer),
|
||||
precondition: precondition.toProto(),
|
||||
});
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Deletes a document from the database.
|
||||
*
|
||||
* @param {DocumentReference} documentRef A reference to the document to be
|
||||
* deleted.
|
||||
* @param {Precondition=} precondition A precondition to enforce for this
|
||||
* delete.
|
||||
* @param {Timestamp=} precondition.lastUpdateTime If set, enforces that the
|
||||
* document was last updated at lastUpdateTime. Fails the batch if the
|
||||
* document doesn't exist or was last updated at a different time.
|
||||
* @returns {WriteBatch} This WriteBatch instance. Used for chaining
|
||||
* method calls.
|
||||
*
|
||||
* @example
|
||||
* let writeBatch = firestore.batch();
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* writeBatch.delete(documentRef);
|
||||
*
|
||||
* writeBatch.commit().then(() => {
|
||||
* console.log('Successfully executed batch.');
|
||||
* });
|
||||
*/
|
||||
delete(documentRef, precondition) {
|
||||
this._validator.isDocumentReference('documentRef', documentRef);
|
||||
this._validator.isOptionalDeletePrecondition('precondition', precondition);
|
||||
this.verifyNotCommitted();
|
||||
const conditions = new document_1.Precondition(precondition);
|
||||
this._writes.push({
|
||||
write: {
|
||||
delete: documentRef.formattedName,
|
||||
},
|
||||
precondition: conditions.toProto(),
|
||||
});
|
||||
return this;
|
||||
}
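// Illustrative sketch (assumption): enforcing the optional lastUpdateTime
// precondition documented above, where `snapshot` is a previously fetched
// DocumentSnapshot:
//
//   writeBatch.delete(documentRef, {lastUpdateTime: snapshot.updateTime});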
|
||||
/**
|
||||
* Write to the document referred to by the provided
|
||||
* [DocumentReference]{@link DocumentReference}.
|
||||
* If the document does not exist yet, it will be created. If you pass
|
||||
* [SetOptions]{@link SetOptions}, the provided data can be merged
|
||||
* into the existing document.
|
||||
*
|
||||
* @param {DocumentReference} documentRef A reference to the document to be
|
||||
* set.
|
||||
* @param {DocumentData} data The object to serialize as the document.
|
||||
* @param {SetOptions=} options An object to configure the set behavior.
|
||||
* @param {boolean=} options.merge - If true, set() merges the values
|
||||
* specified in its data argument. Fields omitted from this set() call
|
||||
* remain untouched.
|
||||
* @param {Array.<string|FieldPath>=} options.mergeFields - If provided,
|
||||
* set() only replaces the specified field paths. Any field path that is not
|
||||
* specified is ignored and remains untouched.
|
||||
* @returns {WriteBatch} This WriteBatch instance. Used for chaining
|
||||
* method calls.
|
||||
*
|
||||
* @example
|
||||
* let writeBatch = firestore.batch();
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* writeBatch.set(documentRef, {foo: 'bar'});
|
||||
*
|
||||
* writeBatch.commit().then(() => {
|
||||
* console.log('Successfully executed batch.');
|
||||
* });
|
||||
*/
|
||||
set(documentRef, data, options) {
|
||||
this._validator.isOptionalSetOptions('options', options);
|
||||
const mergeLeaves = options && options.merge === true;
|
||||
const mergePaths = options && options.mergeFields;
|
||||
this._validator.isDocumentReference('documentRef', documentRef);
|
||||
this._validator.isDocument('data', data, {
|
||||
allowEmpty: true,
|
||||
allowDeletes: mergePaths || mergeLeaves ? 'all' : 'none',
|
||||
allowTransforms: true,
|
||||
});
|
||||
this.verifyNotCommitted();
|
||||
let documentMask;
|
||||
if (mergePaths) {
|
||||
documentMask = document_1.DocumentMask.fromFieldMask(options.mergeFields);
|
||||
data = documentMask.applyTo(data);
|
||||
}
|
||||
const transform = document_1.DocumentTransform.fromObject(documentRef, data);
|
||||
transform.validate();
|
||||
const document = document_1.DocumentSnapshot.fromObject(documentRef, data);
|
||||
if (mergePaths) {
|
||||
documentMask.removeFields(transform.fields);
|
||||
}
|
||||
else {
|
||||
documentMask = document_1.DocumentMask.fromObject(data);
|
||||
}
|
||||
const hasDocumentData = !document.isEmpty || !documentMask.isEmpty;
|
||||
let write;
|
||||
if (!mergePaths && !mergeLeaves) {
|
||||
write = document.toProto();
|
||||
}
|
||||
else if (hasDocumentData || transform.isEmpty) {
|
||||
write = document.toProto();
|
||||
write.updateMask = documentMask.toProto(this._serializer);
|
||||
}
|
||||
this._writes.push({
|
||||
write,
|
||||
transform: transform.toProto(this._serializer),
|
||||
});
|
||||
return this;
|
||||
}
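// Illustrative sketch (assumption): the merge behaviors documented above.
//
//   // Merge every field present in the data argument:
//   writeBatch.set(documentRef, {foo: 'bar'}, {merge: true});
//   // Replace only the listed field paths; other fields remain untouched:
//   writeBatch.set(documentRef, {foo: 'bar', baz: 42}, {mergeFields: ['foo']});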
|
||||
/**
|
||||
* Update fields of the document referred to by the provided
|
||||
* [DocumentReference]{@link DocumentReference}. If the document
|
||||
* doesn't yet exist, the update fails and the entire batch will be rejected.
|
||||
*
|
||||
* The update() method accepts either an object with field paths encoded as
|
||||
* keys and field values encoded as values, or a variable number of arguments
|
||||
* that alternate between field paths and field values. Nested fields can be
|
||||
* updated by providing dot-separated field path strings or by providing
|
||||
* FieldPath objects.
|
||||
*
|
||||
* A Precondition restricting this update can be specified as the last
|
||||
* argument.
|
||||
*
|
||||
* @param {DocumentReference} documentRef A reference to the document to be
|
||||
* updated.
|
||||
* @param {UpdateData|string|FieldPath} dataOrField An object
|
||||
* containing the fields and values with which to update the document
|
||||
* or the path of the first field to update.
|
||||
* @param {...(Precondition|*|string|FieldPath)} preconditionOrValues -
|
||||
* An alternating list of field paths and values to update or a Precondition
|
||||
* to restrict this update.
|
||||
* @returns {WriteBatch} This WriteBatch instance. Used for chaining
|
||||
* method calls.
|
||||
*
|
||||
* @example
|
||||
* let writeBatch = firestore.batch();
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* writeBatch.update(documentRef, {foo: 'bar'});
|
||||
*
|
||||
* writeBatch.commit().then(() => {
|
||||
* console.log('Successfully executed batch.');
|
||||
* });
|
||||
*/
|
||||
update(documentRef, dataOrField, ...preconditionOrValues) {
|
||||
this._validator.minNumberOfArguments('update', arguments, 2);
|
||||
this._validator.isDocumentReference('documentRef', documentRef);
|
||||
this.verifyNotCommitted();
|
||||
const updateMap = new Map();
|
||||
let precondition = new document_1.Precondition({ exists: true });
|
||||
const argumentError = 'Update() requires either a single JavaScript ' +
|
||||
'object or an alternating list of field/value pairs that can be ' +
|
||||
'followed by an optional precondition.';
|
||||
const usesVarargs = typeof dataOrField === 'string' || dataOrField instanceof path_1.FieldPath;
|
||||
if (usesVarargs) {
|
||||
try {
|
||||
for (let i = 1; i < arguments.length; i += 2) {
|
||||
if (i === arguments.length - 1) {
|
||||
this._validator.isUpdatePrecondition(i, arguments[i]);
|
||||
precondition = new document_1.Precondition(arguments[i]);
|
||||
}
|
||||
else {
|
||||
this._validator.isFieldPath(i, arguments[i]);
|
||||
this._validator.minNumberOfArguments('update', arguments, i + 1);
|
||||
const fieldPath = path_1.FieldPath.fromArgument(arguments[i]);
|
||||
this._validator.isFieldValue(i, arguments[i + 1], {
|
||||
allowDeletes: 'root',
|
||||
allowTransforms: true,
|
||||
}, fieldPath);
|
||||
updateMap.set(fieldPath, arguments[i + 1]);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
logger_1.logger('WriteBatch.update', null, 'Varargs validation failed:', err);
|
||||
// We catch the validation error here and re-throw to provide a better
|
||||
// error message.
|
||||
throw new Error(`${argumentError} ${err.message}`);
|
||||
}
|
||||
}
|
||||
else {
|
||||
try {
|
||||
this._validator.isDocument('dataOrField', dataOrField, {
|
||||
allowEmpty: false,
|
||||
allowDeletes: 'root',
|
||||
allowTransforms: true,
|
||||
});
|
||||
this._validator.maxNumberOfArguments('update', arguments, 3);
|
||||
Object.keys(dataOrField).forEach(key => {
|
||||
this._validator.isFieldPath(key, key);
|
||||
updateMap.set(path_1.FieldPath.fromArgument(key), dataOrField[key]);
|
||||
});
|
||||
if (preconditionOrValues.length > 0) {
|
||||
this._validator.isUpdatePrecondition('preconditionOrValues', preconditionOrValues[0]);
|
||||
precondition = new document_1.Precondition(preconditionOrValues[0]);
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
logger_1.logger('WriteBatch.update', null, 'Non-varargs validation failed:', err);
|
||||
// We catch the validation error here and prefix the error with a custom
|
||||
// message to describe the usage of update() better.
|
||||
throw new Error(`${argumentError} ${err.message}`);
|
||||
}
|
||||
}
|
||||
this._validator.isUpdateMap('dataOrField', updateMap);
|
||||
const document = document_1.DocumentSnapshot.fromUpdateMap(documentRef, updateMap);
|
||||
const documentMask = document_1.DocumentMask.fromUpdateMap(updateMap);
|
||||
let write = null;
|
||||
if (!document.isEmpty || !documentMask.isEmpty) {
|
||||
write = document.toProto();
|
||||
write.updateMask = documentMask.toProto();
|
||||
}
|
||||
const transform = document_1.DocumentTransform.fromUpdateMap(documentRef, updateMap);
|
||||
transform.validate();
|
||||
this._writes.push({
|
||||
write,
|
||||
transform: transform.toProto(this._serializer),
|
||||
precondition: precondition.toProto(),
|
||||
});
|
||||
return this;
|
||||
}
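// Illustrative sketch (assumption): the alternating field/value form of
// update() described above, with an optional precondition as the final
// argument. `FieldPath` stands for the library's exported FieldPath class and
// `snapshot` for a previously fetched DocumentSnapshot:
//
//   writeBatch.update(documentRef,
//       'foo', 'bar',
//       new FieldPath('nested', 'count'), 42,
//       {lastUpdateTime: snapshot.updateTime});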
|
||||
/**
|
||||
* Atomically commits all pending operations to the database and verifies all
|
||||
* preconditions. Fails the entire write if any precondition is not met.
|
||||
*
|
||||
* @returns {Promise.<Array.<WriteResult>>} A Promise that resolves
|
||||
* when this batch completes.
|
||||
*
|
||||
* @example
|
||||
* let writeBatch = firestore.batch();
|
||||
* let documentRef = firestore.doc('col/doc');
|
||||
*
|
||||
* writeBatch.set(documentRef, {foo: 'bar'});
|
||||
*
|
||||
* writeBatch.commit().then(() => {
|
||||
* console.log('Successfully executed batch.');
|
||||
* });
|
||||
*/
|
||||
commit() {
|
||||
return this.commit_();
|
||||
}
|
||||
/**
|
||||
* Commit method that takes an optional transaction ID.
|
||||
*
|
||||
* @private
|
||||
* @param commitOptions Options to use for this commit.
|
||||
* @param commitOptions.transactionId The transaction ID of this commit.
|
||||
* @param commitOptions.requestTag A unique client-assigned identifier for
|
||||
* this request.
|
||||
* @returns A Promise that resolves when this batch completes.
|
||||
*/
|
||||
commit_(commitOptions) {
|
||||
// Note: We don't call `verifyNotCommitted()` to allow for retries.
|
||||
const explicitTransaction = commitOptions && commitOptions.transactionId;
|
||||
const tag = (commitOptions && commitOptions.requestTag) || util_1.requestTag();
|
||||
const request = {
|
||||
database: this._firestore.formattedName,
|
||||
};
|
||||
// On GCF, we periodically force transactional commits to allow for
|
||||
// request retries in case GCF closes our backend connection.
|
||||
if (!explicitTransaction && this._shouldCreateTransaction()) {
|
||||
logger_1.logger('WriteBatch.commit', tag, 'Using transaction for commit');
|
||||
return this._firestore
|
||||
.request('beginTransaction', request, tag, true)
|
||||
.then(resp => {
|
||||
return this.commit_({ transactionId: resp.transaction });
|
||||
});
|
||||
}
|
||||
request.writes = [];
|
||||
for (const req of this._writes) {
|
||||
assert(req.write || req.transform, 'Either a write or transform must be set');
|
||||
if (req.precondition) {
|
||||
(req.write || req.transform).currentDocument = req.precondition;
|
||||
}
|
||||
if (req.write) {
|
||||
request.writes.push(req.write);
|
||||
}
|
||||
if (req.transform) {
|
||||
request.writes.push(req.transform);
|
||||
}
|
||||
}
|
||||
logger_1.logger('WriteBatch.commit', tag, 'Sending %d writes', request.writes.length);
|
||||
if (explicitTransaction) {
|
||||
request.transaction = explicitTransaction;
|
||||
}
|
||||
this._committed = true;
|
||||
return this._firestore
|
||||
.request('commit', request, tag, /* allowRetries= */ false)
|
||||
.then(resp => {
|
||||
const writeResults = [];
|
||||
if (request.writes.length > 0) {
|
||||
assert(Array.isArray(resp.writeResults) &&
|
||||
request.writes.length === resp.writeResults.length, `Expected one write result per operation, but got ${resp.writeResults.length} results for ${request.writes.length} operations.`);
|
||||
const commitTime = timestamp_1.Timestamp.fromProto(resp.commitTime);
|
||||
let offset = 0;
|
||||
for (let i = 0; i < this._writes.length; ++i) {
|
||||
const writeRequest = this._writes[i];
|
||||
// Don't return two write results for a write that contains a
|
||||
// transform, as the fact that we have to split one write
|
||||
// operation into two distinct write requests is an implementation
|
||||
// detail.
|
||||
if (writeRequest.write && writeRequest.transform) {
|
||||
// The document transform is always sent last and produces the
|
||||
// latest update time.
|
||||
++offset;
|
||||
}
|
||||
const writeResult = resp.writeResults[i + offset];
|
||||
writeResults.push(new WriteResult(writeResult.updateTime ?
|
||||
timestamp_1.Timestamp.fromProto(writeResult.updateTime) :
|
||||
commitTime));
|
||||
}
|
||||
}
|
||||
return writeResults;
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Determines whether we should issue a transactional commit. On GCF, this
|
||||
* happens after two minutes of idleness.
|
||||
*
|
||||
* @private
|
||||
* @returns Whether to use a transaction.
|
||||
*/
|
||||
_shouldCreateTransaction() {
|
||||
if (!this._firestore._preferTransactions) {
|
||||
return false;
|
||||
}
|
||||
if (this._firestore._lastSuccessfulRequest) {
|
||||
const now = new Date().getTime();
|
||||
return now - this._firestore._lastSuccessfulRequest > GCF_IDLE_TIMEOUT_MS;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
exports.WriteBatch = WriteBatch;
|
||||
/*!
|
||||
* Validates that the update data does not contain any ambiguous field
|
||||
* definitions (such as 'a.b' and 'a').
|
||||
*
|
||||
* @param data An update map with field/value pairs.
|
||||
* @returns 'true' if the input is a valid update map.
|
||||
*/
|
||||
function validateUpdateMap(data) {
|
||||
const fields = [];
|
||||
data.forEach((value, key) => {
|
||||
fields.push(key);
|
||||
});
|
||||
fields.sort((left, right) => left.compareTo(right));
|
||||
for (let i = 1; i < fields.length; ++i) {
|
||||
if (fields[i - 1].isPrefixOf(fields[i])) {
|
||||
throw new Error(`Field "${fields[i - 1]}" was specified multiple times.`);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
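// Illustrative sketch (assumption): an update map containing both the field
// paths 'a' and 'a.b' sorts 'a' directly before 'a.b'; isPrefixOf() then
// detects the overlap and the map is rejected with
// 'Field "a" was specified multiple times.'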
|
||||
exports.validateUpdateMap = validateUpdateMap;
|
||||
//# sourceMappingURL=write-batch.js.map
|
||||
15
express-server/node_modules/@google-cloud/firestore/node_modules/.bin/pbjs
generated
vendored
Normal file
15
express-server/node_modules/@google-cloud/firestore/node_modules/.bin/pbjs
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
"$basedir/node" "$basedir/../protobufjs/bin/pbjs" "$@"
|
||||
ret=$?
|
||||
else
|
||||
node "$basedir/../protobufjs/bin/pbjs" "$@"
|
||||
ret=$?
|
||||
fi
|
||||
exit $ret
|
||||
7
express-server/node_modules/@google-cloud/firestore/node_modules/.bin/pbjs.cmd
generated
vendored
Normal file
7
express-server/node_modules/@google-cloud/firestore/node_modules/.bin/pbjs.cmd
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
@IF EXIST "%~dp0\node.exe" (
|
||||
"%~dp0\node.exe" "%~dp0\..\protobufjs\bin\pbjs" %*
|
||||
) ELSE (
|
||||
@SETLOCAL
|
||||
@SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||
node "%~dp0\..\protobufjs\bin\pbjs" %*
|
||||
)
|
||||
15
express-server/node_modules/@google-cloud/firestore/node_modules/.bin/pbts
generated
vendored
Normal file
15
express-server/node_modules/@google-cloud/firestore/node_modules/.bin/pbts
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
"$basedir/node" "$basedir/../protobufjs/bin/pbts" "$@"
|
||||
ret=$?
|
||||
else
|
||||
node "$basedir/../protobufjs/bin/pbts" "$@"
|
||||
ret=$?
|
||||
fi
|
||||
exit $ret
|
||||
7
express-server/node_modules/@google-cloud/firestore/node_modules/.bin/pbts.cmd
generated
vendored
Normal file
7
express-server/node_modules/@google-cloud/firestore/node_modules/.bin/pbts.cmd
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
@IF EXIST "%~dp0\node.exe" (
|
||||
"%~dp0\node.exe" "%~dp0\..\protobufjs\bin\pbts" %*
|
||||
) ELSE (
|
||||
@SETLOCAL
|
||||
@SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||
node "%~dp0\..\protobufjs\bin\pbts" %*
|
||||
)
|
||||
21
express-server/node_modules/@google-cloud/firestore/node_modules/@types/node/LICENSE
generated
vendored
Normal file
21
express-server/node_modules/@google-cloud/firestore/node_modules/@types/node/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
|
||||
16
express-server/node_modules/@google-cloud/firestore/node_modules/@types/node/README.md
generated
vendored
Normal file
16
express-server/node_modules/@google-cloud/firestore/node_modules/@types/node/README.md
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
# Installation
|
||||
> `npm install --save @types/node`
|
||||
|
||||
# Summary
|
||||
This package contains type definitions for Node.js (http://nodejs.org/).
|
||||
|
||||
# Details
|
||||
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node
|
||||
|
||||
Additional Details
|
||||
* Last updated: Wed, 19 Dec 2018 18:17:49 GMT
|
||||
* Dependencies: none
|
||||
* Global values: Buffer, NodeJS, SlowBuffer, Symbol, __dirname, __filename, clearImmediate, clearInterval, clearTimeout, console, exports, global, module, process, require, setImmediate, setInterval, setTimeout
|
||||
|
||||
# Credits
|
||||
These definitions were written by Microsoft TypeScript <https://github.com/Microsoft>, DefinitelyTyped <https://github.com/DefinitelyTyped>, Alberto Schiabel <https://github.com/jkomyno>, Alexander T. <https://github.com/a-tarasyuk>, Alvis HT Tang <https://github.com/alvis>, Andrew Makarov <https://github.com/r3nya>, Bruno Scheufler <https://github.com/brunoscheufler>, Chigozirim C. <https://github.com/smac89>, Christian Vaagland Tellnes <https://github.com/tellnes>, Deividas Bakanas <https://github.com/DeividasBakanas>, Eugene Y. Q. Shen <https://github.com/eyqs>, Flarna <https://github.com/Flarna>, Hannes Magnusson <https://github.com/Hannes-Magnusson-CK>, Hoàng Văn Khải <https://github.com/KSXGitHub>, Huw <https://github.com/hoo29>, Kelvin Jin <https://github.com/kjin>, Klaus Meinhardt <https://github.com/ajafff>, Lishude <https://github.com/islishude>, Mariusz Wiktorczyk <https://github.com/mwiktorczyk>, Matthieu Sieben <https://github.com/matthieusieben>, Mohsen Azimi <https://github.com/mohsen1>, Nicolas Even <https://github.com/n-e>, Nicolas Voigt <https://github.com/octo-sniffle>, Parambir Singh <https://github.com/parambirs>, Sebastian Silbermann <https://github.com/eps1lon>, Simon Schick <https://github.com/SimonSchick>, Thomas den Hollander <https://github.com/ThomasdenH>, Wilco Bakker <https://github.com/WilcoBakker>, wwwy3y3 <https://github.com/wwwy3y3>, Zane Hannan AU <https://github.com/ZaneHannanAU>, Jeremie Rodriguez <https://github.com/jeremiergz>, Samuel Ainsworth <https://github.com/samuela>.
|
||||
9219
express-server/node_modules/@google-cloud/firestore/node_modules/@types/node/index.d.ts
generated
vendored
Normal file
9219
express-server/node_modules/@google-cloud/firestore/node_modules/@types/node/index.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
3163
express-server/node_modules/@google-cloud/firestore/node_modules/@types/node/inspector.d.ts
generated
vendored
Normal file
3163
express-server/node_modules/@google-cloud/firestore/node_modules/@types/node/inspector.d.ts
generated
vendored
Normal file
File diff suppressed because it is too large
176
express-server/node_modules/@google-cloud/firestore/node_modules/@types/node/package.json
generated
vendored
Normal file
176
express-server/node_modules/@google-cloud/firestore/node_modules/@types/node/package.json
generated
vendored
Normal file
@@ -0,0 +1,176 @@
|
||||
{
|
||||
"_from": "@types/node@^10.1.0",
|
||||
"_id": "@types/node@10.12.18",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-fh+pAqt4xRzPfqA6eh3Z2y6fyZavRIumvjhaCL753+TVkGKGhpPeyrJG2JftD0T9q4GF00KjefsQ+PQNDdWQaQ==",
|
||||
"_location": "/@google-cloud/firestore/@types/node",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "range",
|
||||
"registry": true,
|
||||
"raw": "@types/node@^10.1.0",
|
||||
"name": "@types/node",
|
||||
"escapedName": "@types%2fnode",
|
||||
"scope": "@types",
|
||||
"rawSpec": "^10.1.0",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "^10.1.0"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/@google-cloud/firestore/protobufjs"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.18.tgz",
|
||||
"_shasum": "1d3ca764718915584fcd9f6344621b7672665c67",
|
||||
"_spec": "@types/node@^10.1.0",
|
||||
"_where": "D:\\Desktop\\Git\\Firebase\\SmartShopperFirebase\\node_modules\\@google-cloud\\firestore\\node_modules\\protobufjs",
|
||||
"bugs": {
|
||||
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped/issues"
|
||||
},
|
||||
"bundleDependencies": false,
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Microsoft TypeScript",
|
||||
"url": "https://github.com/Microsoft"
|
||||
},
|
||||
{
|
||||
"name": "DefinitelyTyped",
|
||||
"url": "https://github.com/DefinitelyTyped"
|
||||
},
|
||||
{
|
||||
"name": "Alberto Schiabel",
|
||||
"url": "https://github.com/jkomyno"
|
||||
},
|
||||
{
|
||||
"name": "Alexander T.",
|
||||
"url": "https://github.com/a-tarasyuk"
|
||||
},
|
||||
{
|
||||
"name": "Alvis HT Tang",
|
||||
"url": "https://github.com/alvis"
|
||||
},
|
||||
{
|
||||
"name": "Andrew Makarov",
|
||||
"url": "https://github.com/r3nya"
|
||||
},
|
||||
{
|
||||
"name": "Bruno Scheufler",
|
||||
"url": "https://github.com/brunoscheufler"
|
||||
},
|
||||
{
|
||||
"name": "Chigozirim C.",
|
||||
"url": "https://github.com/smac89"
|
||||
},
|
||||
{
|
||||
"name": "Christian Vaagland Tellnes",
|
||||
"url": "https://github.com/tellnes"
|
||||
},
|
||||
{
|
||||
"name": "Deividas Bakanas",
|
||||
"url": "https://github.com/DeividasBakanas"
|
||||
},
|
||||
{
|
||||
"name": "Eugene Y. Q. Shen",
|
||||
"url": "https://github.com/eyqs"
|
||||
},
|
||||
{
|
||||
"name": "Flarna",
|
||||
"url": "https://github.com/Flarna"
|
||||
},
|
||||
{
|
||||
"name": "Hannes Magnusson",
|
||||
"url": "https://github.com/Hannes-Magnusson-CK"
|
||||
},
|
||||
{
|
||||
"name": "Hoàng Văn Khải",
|
||||
"url": "https://github.com/KSXGitHub"
|
||||
},
|
||||
{
|
||||
"name": "Huw",
|
||||
"url": "https://github.com/hoo29"
|
||||
},
|
||||
{
|
||||
"name": "Kelvin Jin",
|
||||
"url": "https://github.com/kjin"
|
||||
},
|
||||
{
|
||||
"name": "Klaus Meinhardt",
|
||||
"url": "https://github.com/ajafff"
|
||||
},
|
||||
{
|
||||
"name": "Lishude",
|
||||
"url": "https://github.com/islishude"
|
||||
},
|
||||
{
|
||||
"name": "Mariusz Wiktorczyk",
|
||||
"url": "https://github.com/mwiktorczyk"
|
||||
},
|
||||
{
|
||||
"name": "Matthieu Sieben",
|
||||
"url": "https://github.com/matthieusieben"
|
||||
},
|
||||
{
|
||||
"name": "Mohsen Azimi",
|
||||
"url": "https://github.com/mohsen1"
|
||||
},
|
||||
{
|
||||
"name": "Nicolas Even",
|
||||
"url": "https://github.com/n-e"
|
||||
},
|
||||
{
|
||||
"name": "Nicolas Voigt",
|
||||
"url": "https://github.com/octo-sniffle"
|
||||
},
|
||||
{
|
||||
"name": "Parambir Singh",
|
||||
"url": "https://github.com/parambirs"
|
||||
},
|
||||
{
|
||||
"name": "Sebastian Silbermann",
|
||||
"url": "https://github.com/eps1lon"
|
||||
},
|
||||
{
|
||||
"name": "Simon Schick",
|
||||
"url": "https://github.com/SimonSchick"
|
||||
},
|
||||
{
|
||||
"name": "Thomas den Hollander",
|
||||
"url": "https://github.com/ThomasdenH"
|
||||
},
|
||||
{
|
||||
"name": "Wilco Bakker",
|
||||
"url": "https://github.com/WilcoBakker"
|
||||
},
|
||||
{
|
||||
"name": "wwwy3y3",
|
||||
"url": "https://github.com/wwwy3y3"
|
||||
},
|
||||
{
|
||||
"name": "Zane Hannan AU",
|
||||
"url": "https://github.com/ZaneHannanAU"
|
||||
},
|
||||
{
|
||||
"name": "Jeremie Rodriguez",
|
||||
"url": "https://github.com/jeremiergz"
|
||||
},
|
||||
{
|
||||
"name": "Samuel Ainsworth",
|
||||
"url": "https://github.com/samuela"
|
||||
}
|
||||
],
|
||||
"dependencies": {},
|
||||
"deprecated": false,
|
||||
"description": "TypeScript definitions for Node.js",
|
||||
"homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped#readme",
|
||||
"license": "MIT",
|
||||
"main": "",
|
||||
"name": "@types/node",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/DefinitelyTyped/DefinitelyTyped.git"
|
||||
},
|
||||
"scripts": {},
|
||||
"typeScriptVersion": "2.0",
|
||||
"types": "index",
|
||||
"typesPublisherContentHash": "2ab4e2583634afae0837756aa0330daeca55b67e8b9947d540c0efdd33becf3d",
|
||||
"version": "10.12.18"
|
||||
}
|
||||
202
express-server/node_modules/@google-cloud/firestore/node_modules/long/LICENSE
generated
vendored
Normal file
202
express-server/node_modules/@google-cloud/firestore/node_modules/long/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
246
express-server/node_modules/@google-cloud/firestore/node_modules/long/README.md
generated
vendored
Normal file
246
express-server/node_modules/@google-cloud/firestore/node_modules/long/README.md
generated
vendored
Normal file
@@ -0,0 +1,246 @@
long.js
=======

A Long class for representing a 64 bit two's-complement integer value derived from the [Closure Library](https://github.com/google/closure-library)
for stand-alone use and extended with unsigned support.

[Build Status](https://travis-ci.org/dcodeIO/long.js)

Background
----------

As of [ECMA-262 5th Edition](http://ecma262-5.com/ELS5_HTML.htm#Section_8.5), "all the positive and negative integers
whose magnitude is no greater than 2<sup>53</sup> are representable in the Number type", which is "representing the
double-precision 64-bit format IEEE 754 values as specified in the IEEE Standard for Binary Floating-Point Arithmetic".
The [maximum safe integer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)
in JavaScript is 2<sup>53</sup>-1.

Example: 2<sup>64</sup>-1 is 1844674407370955**1615** but in JavaScript it evaluates to 1844674407370955**2000**.

Furthermore, bitwise operators in JavaScript "deal only with integers in the range −2<sup>31</sup> through
2<sup>31</sup>−1, inclusive, or in the range 0 through 2<sup>32</sup>−1, inclusive. These operators accept any value of
the Number type but first convert each such value to one of 2<sup>32</sup> integer values."

In some use cases, however, it is required to be able to reliably work with and perform bitwise operations on the full
64 bits. This is where long.js comes into play.
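
For illustration, a minimal sketch of the difference (assuming the package has been installed as `long` from npm):

```javascript
var Long = require("long");

// Plain Numbers lose precision beyond 2^53 - 1 ...
console.log(Number.MAX_SAFE_INTEGER);            // 9007199254740991
console.log(Math.pow(2, 64) - 1);                // 18446744073709552000 (inexact)

// ... while a Long keeps all 64 bits:
console.log(Long.MAX_UNSIGNED_VALUE.toString()); // "18446744073709551615"

// Bitwise operators on Numbers are truncated to 32 bits ...
console.log(Math.pow(2, 32) | 0);                // 0
// ... but Long shifts operate on the full 64 bits:
console.log(Long.UONE.shiftLeft(32).toString()); // "4294967296"
```
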
Usage
-----

The class is compatible with CommonJS and AMD loaders and is exposed globally as `Long` if neither is available.

```javascript
var Long = require("long");

var longVal = new Long(0xFFFFFFFF, 0x7FFFFFFF);

console.log(longVal.toString());
...
```
API
---

### Constructor

* new **Long**(low: `number`, high: `number`, unsigned?: `boolean`)<br />
  Constructs a 64 bit two's-complement integer, given its low and high 32 bit values as *signed* integers. See the from* functions below for more convenient ways of constructing Longs.

### Fields

* Long#**low**: `number`<br />
  The low 32 bits as a signed value.

* Long#**high**: `number`<br />
  The high 32 bits as a signed value.

* Long#**unsigned**: `boolean`<br />
  Whether unsigned or not.

### Constants

* Long.**ZERO**: `Long`<br />
  Signed zero.

* Long.**ONE**: `Long`<br />
  Signed one.

* Long.**NEG_ONE**: `Long`<br />
  Signed negative one.

* Long.**UZERO**: `Long`<br />
  Unsigned zero.

* Long.**UONE**: `Long`<br />
  Unsigned one.

* Long.**MAX_VALUE**: `Long`<br />
  Maximum signed value.

* Long.**MIN_VALUE**: `Long`<br />
  Minimum signed value.

* Long.**MAX_UNSIGNED_VALUE**: `Long`<br />
  Maximum unsigned value.
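
As a quick reference, the boundary constants print as follows (an illustrative check, again assuming `long` from npm):

```javascript
var Long = require("long");

console.log(Long.MAX_VALUE.toString());          // "9223372036854775807"  (2^63 - 1)
console.log(Long.MIN_VALUE.toString());          // "-9223372036854775808" (-2^63)
console.log(Long.MAX_UNSIGNED_VALUE.toString()); // "18446744073709551615" (2^64 - 1)
console.log(Long.ZERO.equals(Long.UZERO));       // true (same bits; only signedness differs)
```
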
### Utility

* Long.**isLong**(obj: `*`): `boolean`<br />
  Tests if the specified object is a Long.

* Long.**fromBits**(lowBits: `number`, highBits: `number`, unsigned?: `boolean`): `Long`<br />
  Returns a Long representing the 64 bit integer that comes by concatenating the given low and high bits. Each is assumed to use 32 bits.

* Long.**fromBytes**(bytes: `number[]`, unsigned?: `boolean`, le?: `boolean`): `Long`<br />
  Creates a Long from its byte representation.

* Long.**fromBytesLE**(bytes: `number[]`, unsigned?: `boolean`): `Long`<br />
  Creates a Long from its little endian byte representation.

* Long.**fromBytesBE**(bytes: `number[]`, unsigned?: `boolean`): `Long`<br />
  Creates a Long from its big endian byte representation.

* Long.**fromInt**(value: `number`, unsigned?: `boolean`): `Long`<br />
  Returns a Long representing the given 32 bit integer value.

* Long.**fromNumber**(value: `number`, unsigned?: `boolean`): `Long`<br />
  Returns a Long representing the given value, provided that it is a finite number. Otherwise, zero is returned.

* Long.**fromString**(str: `string`, unsigned?: `boolean`, radix?: `number`)<br />
  Long.**fromString**(str: `string`, radix: `number`)<br />
  Returns a Long representation of the given string, written using the specified radix.

* Long.**fromValue**(val: `*`, unsigned?: `boolean`): `Long`<br />
  Converts the specified value to a Long using the appropriate from* function for its type.
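
A brief sketch of the from* helpers in practice (values chosen so the results are easy to verify by hand):

```javascript
var Long = require("long");

// 2^32 expressed three equivalent ways:
var a = Long.fromString("4294967296");   // parsed from a decimal string
var b = Long.fromBits(0, 1);             // low = 0, high = 1  ->  1 * 2^32 + 0
var c = Long.fromNumber(4294967296);     // from a JS number (still exact, below 2^53)

console.log(Long.isLong(a));                     // true
console.log(a.equals(b) && b.equals(c));         // true
console.log(Long.fromValue("255").toString(16)); // "ff"
```
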
### Methods

* Long#**add**(addend: `Long | number | string`): `Long`<br />
  Returns the sum of this and the specified Long.

* Long#**and**(other: `Long | number | string`): `Long`<br />
  Returns the bitwise AND of this Long and the specified.

* Long#**compare**/**comp**(other: `Long | number | string`): `number`<br />
  Compares this Long's value with the specified's. Returns `0` if they are the same, `1` if this one is greater and `-1` if the given one is greater.

* Long#**divide**/**div**(divisor: `Long | number | string`): `Long`<br />
  Returns this Long divided by the specified.

* Long#**equals**/**eq**(other: `Long | number | string`): `boolean`<br />
  Tests if this Long's value equals the specified's.

* Long#**getHighBits**(): `number`<br />
  Gets the high 32 bits as a signed integer.

* Long#**getHighBitsUnsigned**(): `number`<br />
  Gets the high 32 bits as an unsigned integer.

* Long#**getLowBits**(): `number`<br />
  Gets the low 32 bits as a signed integer.

* Long#**getLowBitsUnsigned**(): `number`<br />
  Gets the low 32 bits as an unsigned integer.

* Long#**getNumBitsAbs**(): `number`<br />
  Gets the number of bits needed to represent the absolute value of this Long.

* Long#**greaterThan**/**gt**(other: `Long | number | string`): `boolean`<br />
  Tests if this Long's value is greater than the specified's.

* Long#**greaterThanOrEqual**/**gte**/**ge**(other: `Long | number | string`): `boolean`<br />
  Tests if this Long's value is greater than or equal to the specified's.

* Long#**isEven**(): `boolean`<br />
  Tests if this Long's value is even.

* Long#**isNegative**(): `boolean`<br />
  Tests if this Long's value is negative.

* Long#**isOdd**(): `boolean`<br />
  Tests if this Long's value is odd.

* Long#**isPositive**(): `boolean`<br />
  Tests if this Long's value is positive.

* Long#**isZero**/**eqz**(): `boolean`<br />
  Tests if this Long's value equals zero.

* Long#**lessThan**/**lt**(other: `Long | number | string`): `boolean`<br />
  Tests if this Long's value is less than the specified's.

* Long#**lessThanOrEqual**/**lte**/**le**(other: `Long | number | string`): `boolean`<br />
  Tests if this Long's value is less than or equal to the specified's.

* Long#**modulo**/**mod**/**rem**(divisor: `Long | number | string`): `Long`<br />
  Returns this Long modulo the specified.

* Long#**multiply**/**mul**(multiplier: `Long | number | string`): `Long`<br />
  Returns the product of this and the specified Long.

* Long#**negate**/**neg**(): `Long`<br />
  Negates this Long's value.

* Long#**not**(): `Long`<br />
  Returns the bitwise NOT of this Long.

* Long#**notEquals**/**neq**/**ne**(other: `Long | number | string`): `boolean`<br />
  Tests if this Long's value differs from the specified's.

* Long#**or**(other: `Long | number | string`): `Long`<br />
  Returns the bitwise OR of this Long and the specified.

* Long#**shiftLeft**/**shl**(numBits: `Long | number | string`): `Long`<br />
  Returns this Long with bits shifted to the left by the given amount.

* Long#**shiftRight**/**shr**(numBits: `Long | number | string`): `Long`<br />
  Returns this Long with bits arithmetically shifted to the right by the given amount.

* Long#**shiftRightUnsigned**/**shru**/**shr_u**(numBits: `Long | number | string`): `Long`<br />
  Returns this Long with bits logically shifted to the right by the given amount.

* Long#**subtract**/**sub**(subtrahend: `Long | number | string`): `Long`<br />
  Returns the difference of this and the specified Long.

* Long#**toBytes**(le?: `boolean`): `number[]`<br />
  Converts this Long to its byte representation.

* Long#**toBytesLE**(): `number[]`<br />
  Converts this Long to its little endian byte representation.

* Long#**toBytesBE**(): `number[]`<br />
  Converts this Long to its big endian byte representation.

* Long#**toInt**(): `number`<br />
  Converts the Long to a 32 bit integer, assuming it is a 32 bit integer.

* Long#**toNumber**(): `number`<br />
  Converts the Long to the nearest floating-point representation of this value (double, 53 bit mantissa).

* Long#**toSigned**(): `Long`<br />
  Converts this Long to signed.

* Long#**toString**(radix?: `number`): `string`<br />
  Converts the Long to a string written in the specified radix.

* Long#**toUnsigned**(): `Long`<br />
  Converts this Long to unsigned.

* Long#**xor**(other: `Long | number | string`): `Long`<br />
  Returns the bitwise XOR of this Long and the given one.
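
Several of these methods can be chained; a minimal sketch (assuming `long` is installed from npm):

```javascript
var Long = require("long");

var value = Long.fromInt(1).shiftLeft(40);           // 2^40
console.log(value.toString());                       // "1099511627776"

// 2^40 is a power of two, so v & (v - 1) is zero:
console.log(value.and(value.subtract(1)).isZero());  // true
console.log(value.compare(Long.MAX_VALUE));          // -1 (less than the maximum signed value)
console.log(value.negate().isNegative());            // true

// Converting back out of Long:
console.log(value.toNumber());                       // 1099511627776 (still exact, below 2^53)
console.log(value.toBytesBE());                      // [ 0, 0, 1, 0, 0, 0, 0, 0 ]
```
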
Building
--------

To build an UMD bundle to `dist/long.js`, run:

```
$> npm install
$> npm run build
```

Running the [tests](./tests):

```
$> npm test
```
2
express-server/node_modules/@google-cloud/firestore/node_modules/long/dist/long.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
express-server/node_modules/@google-cloud/firestore/node_modules/long/dist/long.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
express-server/node_modules/@google-cloud/firestore/node_modules/long/index.js
generated
vendored
Normal file
@@ -0,0 +1 @@
module.exports = require("./src/long");
63
express-server/node_modules/@google-cloud/firestore/node_modules/long/package.json
generated
vendored
Normal file
@@ -0,0 +1,63 @@
{
  "_from": "long@^4.0.0",
  "_id": "long@4.0.0",
  "_inBundle": false,
  "_integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==",
  "_location": "/@google-cloud/firestore/long",
  "_phantomChildren": {},
  "_requested": {
    "type": "range",
    "registry": true,
    "raw": "long@^4.0.0",
    "name": "long",
    "escapedName": "long",
    "rawSpec": "^4.0.0",
    "saveSpec": null,
    "fetchSpec": "^4.0.0"
  },
  "_requiredBy": [
    "/@google-cloud/firestore/protobufjs"
  ],
  "_resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
  "_shasum": "9a7b71cfb7d361a194ea555241c92f7468d5bf28",
  "_spec": "long@^4.0.0",
  "_where": "D:\\Desktop\\Git\\Firebase\\SmartShopperFirebase\\node_modules\\@google-cloud\\firestore\\node_modules\\protobufjs",
  "author": {
    "name": "Daniel Wirtz",
    "email": "dcode@dcode.io"
  },
  "bugs": {
    "url": "https://github.com/dcodeIO/long.js/issues"
  },
  "bundleDependencies": false,
  "dependencies": {},
  "deprecated": false,
  "description": "A Long class for representing a 64-bit two's-complement integer value.",
  "devDependencies": {
    "webpack": "^3.10.0"
  },
  "files": [
    "index.js",
    "LICENSE",
    "README.md",
    "src/long.js",
    "dist/long.js",
    "dist/long.js.map"
  ],
  "homepage": "https://github.com/dcodeIO/long.js#readme",
  "keywords": [
    "math"
  ],
  "license": "Apache-2.0",
  "main": "src/long.js",
  "name": "long",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/dcodeIO/long.js.git"
  },
  "scripts": {
    "build": "webpack",
    "test": "node tests"
  },
  "version": "4.0.0"
}
1323
express-server/node_modules/@google-cloud/firestore/node_modules/long/src/long.js
generated
vendored
Normal file
File diff suppressed because it is too large
935
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/CHANGELOG.md
generated
vendored
Normal file
@@ -0,0 +1,935 @@
|
||||
# [6.8.8](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.8)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3001425b0d896d14188307cd0cc84ce195ad9e04) Persist recent index.d.ts changes in JSDoc<br />
|
||||
|
||||
# [6.8.7](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.7)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e8449c4bf1269a2cc423708db6f0b47a383d33f0) Fix package browser field descriptor ([#1046](https://github.com/dcodeIO/protobuf.js/issues/1046))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/996b3fa0c598ecc73302bfc39208c44830f07b1a) Fix static codegen issues with uglifyjs3<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a06317139b92fdd8c6b3b188fb7b9704dc8ccbf1) Fix lint issues / pbts on windows<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a927a6646e8fdddebcb3e13bc8b28b041b3ee40a) Fix empty 'bytes' field decoding, now using Buffer where applicable ([#1020](https://github.com/dcodeIO/protobuf.js/issues/1020))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f13a81fb41fbef2ce9dcee13f23b7276c83fbcfd) Fix circular dependency of Namespace and Enum ([#994](https://github.com/dcodeIO/protobuf.js/issues/994))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c05c58fad61c16e5ce20ca19758e4782cdd5d2e3) Ignore optional commas in aggregate options ([#999](https://github.com/dcodeIO/protobuf.js/issues/999))<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/36fc964b8db1e4372c76b1baf9f03857cd875b07) Make Message<T> have a default type param ([#1086](https://github.com/dcodeIO/protobuf.js/issues/1086))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/996b3fa0c598ecc73302bfc39208c44830f07b1a) Explicitly define service method names when generating static code, see [#857](https://github.com/dcodeIO/protobuf.js/issues/857)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/07c5d59e1da8c5533a39007ba332928206281408) Also handle services in ext/descriptor ([#1001](https://github.com/dcodeIO/protobuf.js/issues/1001))<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2c5ef95818a310243f88ffba0331cd47ee603c0a) Extend list of ignored ESLint rules for pbjs, fixes [#1085](https://github.com/dcodeIO/protobuf.js/issues/1085)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8576b49ad3e55b8beae2a8f044c51040484eef12) Fix declared return type of pbjs/pbts callback ([#1025](https://github.com/dcodeIO/protobuf.js/issues/1025))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9fceaa69667895e609a3ed78eb2efa7a0ecfb890) Added an option to pbts to allow custom imports ([#1038](https://github.com/dcodeIO/protobuf.js/issues/1038))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/65d113b0079fa2570837f3cf95268ce24714a248) Get node executable path from process.execPath ([#1018](https://github.com/dcodeIO/protobuf.js/issues/1018))<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b611875cfbc1f98d8973a2e86f1506de84f00049) Slim down CI testing and remove some not ultimately necesssary dependencies with audit issues<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/812b38ddabb35e154f9ff94f32ad8ce2a70310f1) Move global handling to util, see [#995](https://github.com/dcodeIO/protobuf.js/issues/995)<br />
|
||||
|
||||
# [6.8.6](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.6)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2ee1028d631a328e152d7e09f2a0e0c5c83dc2aa) Fix typeRefRe being vulnerable to ReDoS<br />
|
||||
|
||||
# [6.8.5](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.6)
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/462132f222d8febb8211d839635aad5b82dc6315) Preserve comments when serializing/deserializing with toJSON and fromJSON. ([#983](https://github.com/dcodeIO/protobuf.js/issues/983))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d29c0caa715a14214fc755b3cf10ac119cdaf199) Add more details to some frequent error messages ([#962](https://github.com/dcodeIO/protobuf.js/issues/962))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8400f87ad8ed2b47e659bc8bb6c3cf2467802425) Add IParseOptions#alternateCommentMode ([#968](https://github.com/dcodeIO/protobuf.js/issues/968))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d6e3b9e218896ec1910e02448b5ee87e4d96ede6) Added field_mask to built-in common wrappers ([#982](https://github.com/dcodeIO/protobuf.js/issues/982))<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/635fef013fbb3523536d92c690ffd7d84829db35) Remove code climate config in order to use 'in-app' config instead<br />
|
||||
|
||||
# [6.8.4](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.4)
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/69440c023e6962c644715a0c95363ddf19db648f) Update jsdoc dependency (pinned vulnerable marked)<br />
|
||||
|
||||
# [6.8.3](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.3)
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cc991a058b0636f3454166c76de7b664cf23a8f4) Use correct safeProp in json-module target, see [#956](https://github.com/dcodeIO/protobuf.js/issues/956)<br />
|
||||
|
||||
# [6.8.2](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.2)
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6fc6481d790648e9e2169a961ad31a732398c911) Include dist files in npm package, see [#955](https://github.com/dcodeIO/protobuf.js/issues/955)<br />
|
||||
|
||||
# [6.8.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.1)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/db2dd49f6aab6ecd606eee334b95cc0969e483c2) Prevent invalid JSDoc names when generating service methods, see [#870](https://github.com/dcodeIO/protobuf.js/issues/870)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/62297998d681357ada70fb370b99bac5573e5054) Prevent parse errors when generating service method names, see [#870](https://github.com/dcodeIO/protobuf.js/issues/870)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/478f332e0fc1d0c318a70b1514b1d59c8c200c37) Support parsing nested option-values with or without ':' ([#951](https://github.com/dcodeIO/protobuf.js/issues/951), fixes [#946](https://github.com/dcodeIO/protobuf.js/issues/946))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/83477ca8e0e1f814ac79a642ea656f047563613a) Add support for reserved keyword in enums ([#950](https://github.com/dcodeIO/protobuf.js/issues/950), fixes [#949](https://github.com/dcodeIO/protobuf.js/issues/949))<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c482a5b76fd57769eae4308793e3ff8725264664) Unified safe property escapes and added a test for [#834](https://github.com/dcodeIO/protobuf.js/issues/834)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1724581c36ecc4fc166ea14a9dd57af5e093a467) Fix codegen if type name starts with "Object"<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/adecd544c5fcbeba28d502645f895024e3552970) Fixed dependency for json-module to use "light".<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2a8dd74fca70d4e6fb41328a7cee81d1d50ad7ad) Basic support for URL prefixes in google.protobuf.Any types.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/be78a3d9bc8d9618950c77f9e261b422670042ce) fixed 'error is not defined linter warning when using static/static-module and es6<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c712447b309ae81134c7afd60f8dfa5ecd3be230) Fixed wrong type_url for any type (no leading '.' allowed).<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/145bda25ee1de2c0678ce7b8a093669ec2526b1d) Fixed fromObject() for google.protobuf.Any types.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7dec43d9d847481ad93fca498fd970b3a4a14b11) Handle case where 'extendee' is undefined in ext/descriptor<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/20a26271423319085d321878edc5166a5449e68a) Sanitize CR-only line endings (coming from jsdoc?)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/19d2af12b5db5a0f668f50b0cae3ee0f8a7affc2) Make sure enum typings become generated ([#884](https://github.com/dcodeIO/protobuf.js/issues/884) didn't solve this)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a2c72c08b0265b112d367fa3d33407ff0de955b9) Remove exclude and include patterns from jsdoc config<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9afb8a2ff27c1e0a999d7331f3f65f568f5cced5) Skip defaults when generating proto3<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/952c7d1b478cc7c6de82475a17a1387992e8651f) Wait for both the 'end' and 'close' event to happen before finishing in pbts, see [#863](https://github.com/dcodeIO/protobuf.js/issues/863)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ed7e2e71f5cde27c4128f4f2e3f4782cc51fbec7) Accept null for optional fields in generated static code<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/27cc66a539251216ef10aea04652d58113949df9) Annotate TS classes with @implements<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/05e7e0636727008c72549459b8594fa0442d346f) Annotate virtual oneofs as string literal unions<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/685adb0e7ef0f50e4b93a105013547884957cc98) Also check for reserved ids and names in enums<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/843d0d5b927968025ca11babff28495dd3bb2863) Also support 'reserved' in enum descriptors<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a8376b57fb0a858adff9dc8a1d1b5372eff9d85c) Include just relevant files in npm package, fixes [#781](https://github.com/dcodeIO/protobuf.js/issues/781)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bda1bc6917c681516f6be8be8f0e84ba1262c4ce) Fix travis build<br />
|
||||
|
||||
# [6.8.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.8.0)
|
||||
|
||||
## Breaking
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ff858003f525db542cbb270777b6fab3a230c9bb) Replaced Buffer and Long types with interfaces and removed stubs<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22f907c49adbbdf09b72bde5299271dbe0ee9cbe) Removed Message#toObject in favor of having just the static version (unnecessary static code otherwise)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c97b61811248df002f1fb93557b982bc0aa27309) Everything uses interfaces now instead of typedefs (SomethingProperties is now ISomething)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b9f179064f3ddf683f13e0d4e17840301be64010) ReflectionObject#toJSON properly omits explicit undefined values<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a6f98b5e74f9e9142f9be3ba0683caeaff916c4) Initial implementation of TypeScript decorators<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a6f98b5e74f9e9142f9be3ba0683caeaff916c4) Refactored protobuf.Class away<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a6f98b5e74f9e9142f9be3ba0683caeaff916c4) TypeScript definitions now have (a lot of) generics<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a6f98b5e74f9e9142f9be3ba0683caeaff916c4) Removed deprecated features<br />
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c306d19d806eb697913ffa2b8613f650127a4c50) Added 'undefined' besides 'null' as a valid value of an optional field, fixes [#826](https://github.com/dcodeIO/protobuf.js/issues/826)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c5518c3bac0da9c2045e6f1baf0dee915afb4221) Fixed an issue with codegen typings, see [#819](https://github.com/dcodeIO/protobuf.js/issues/819)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/66d149e92ff1baddfdfd4b6a88ca9bcea6fc6195) Ported utf8 chunking mechanism to base64 as well, fixes [#800](https://github.com/dcodeIO/protobuf.js/issues/800)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e1f9d9856c98a0f0eb1aa8bdf4ac0df467bee8b9) Also be more verbose when defining properties for ES6, fixes [#820](https://github.com/dcodeIO/protobuf.js/issues/820)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cf36097305ab02047be5014eabeccc3154e18bde) Generate more verbose JSDoc comments for ES6 support, fixes [#820](https://github.com/dcodeIO/protobuf.js/issues/820)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f2959795330966f13cb65bbb6034c88a01fc0bcc) Emit a maximum of one error var when generating verifiers, fixes [#786](https://github.com/dcodeIO/protobuf.js/issues/786)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3b848a10b39c1897ca1ea3b5149ef72ae43fcd11) Fixed missing semicolon after 'extensions' and 'reserved' when generating proto files, fixes [#810](https://github.com/dcodeIO/protobuf.js/issues/810)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/eb1b40497e14a09facbc370676f486bed1376f52) Call npm with '--no-bin-links' when installing CLI deps, fixes [#823](https://github.com/dcodeIO/protobuf.js/issues/823)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/429de19d851477f1df2804d5bc0be30228cd0924) Fix Reader argument conversion in static module<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/03194c203d6ff61ae825e66f8a29ca204fa503b9) Use JSDoc, they said, it documents code, they said. Fixes [#770](https://github.com/dcodeIO/protobuf.js/issues/770)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ec6a133ff541c638517e00f47b772990207c8640) parser should not confuse previous trailing line comments with comments for the next declaration, see [#762](https://github.com/dcodeIO/protobuf.js/issues/762)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0589ace4dc9e5c565ff996cf6e6bf94e63f43c4e) Types should not clear constructor with cache (fixes decorators)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/056ecc3834a3b323aaaa676957efcbe3f52365a0) Namespace#lookup should also check in nested namespaces (wtf)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ed34b093839652db2ff7b84db87857fc57d96038) Reader#bytes should also support plain arrays<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/514afcfa890aa598e93254576c4fd6062e0eff3b) Fix markdown for pipe in code in table<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/17c2797592bc4effd9aaae3ba9777c9550bb75ac) Upgrade to codegen 2<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/57d7d35ddbb9e3a28c396b4ef1ae3b150eeb8035) ext/descriptor enables interoperability between reflection and descriptor.proto (experimental), see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3939667ef1f37b025bd7f9476015890496d50e00) Added 'json' conversion option for proto3 JSON mapping compatibility of NaN and Infinity + additional documentation of util.toJSONOptions, see [#351](https://github.com/dcodeIO/protobuf.js/issues/351)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4eac28c7d3acefb0af7b82c62cf8d19bf3e7d37b) Use protobuf/minimal when pbjs target is static-module<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3a959453fe63706c38ebbacda208e1f25f27dc99) Added closure wrapper<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/13bf9c2635e6a1a2711670fc8e28ae9d7b8d1c8f) Various improvements to statically generated JSDoc, also fixes [#772](https://github.com/dcodeIO/protobuf.js/issues/772)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ffdc93c7cf7c8a716316b00864ea7c510e05b0c8) Check incompatible properties for namespaces only in tsd-jsdoc<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fb3f9c70436d4f81bcd0bf62b71af4d253390e4f) Additional tsd-jsdoc handling of properties inside of namespaces and TS specific API exposure<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2dcae25c99e2ed8afd01e27d21b106633b8c31b9) Several improvements to tsd-jsdoc emitted comments<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ff858003f525db542cbb270777b6fab3a230c9bb) Further TypeScript definition improvements<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22f907c49adbbdf09b72bde5299271dbe0ee9cbe) Relieved tsd files from unnecessary comments<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22f907c49adbbdf09b72bde5299271dbe0ee9cbe) Generate TS namespaces for vars and functions with properties<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b355115e619c6595ac9d91897cfe628ef0e46054) Prefer @tstype over @type when generating typedefs (tsd-jsdoc)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/23f4b990375efcac2c144592cf4ca558722dcf2d) Replaced nullable types with explicit type|null for better tooling compatibility, also fixes [#766](https://github.com/dcodeIO/protobuf.js/issues/766) and fixes 767<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6493f52013c92a34b8305a25068ec7b8c4c29d54) Added more info to ext/descriptor README, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ef92da3768d8746dbfe72e77232f78b879fc811d) Additional notes on ext/descriptor<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b646cf7499791a41b75eef2de1a80fb558d4159e) Updated CHANGELOG so everyone knows what's going on (and soon, breaking)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/35a663757efe188bea552aef017837bc6c6a481a) Additional docs on TS/decorators usage<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9726be0888a9461721447677e9dece16a682b9f6) Updated dist files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9726be0888a9461721447677e9dece16a682b9f6) Added package-lock.json<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/114f7ea9fa3813003afc3ebb453b2dd2262808e1) Minor formatting<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8a6e464954b472fdbb4d46d9270fe3b4b3c7272d) Generate files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/42f8a97630bcb30d197b0f1d6cbdd96879d27f96) Remove the no-constructor arg<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6446247cd7edbb77f03dc42c557f568811286a39) Remove the ctor option.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2059ee0f6f951575d5c5d2dc5eb06b6fa34e27aa) Add support to generate types for JSON object.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7445da0f8cb2e450eff17723f25f366daaf3bbbb) aspromise performance pass<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3f8b74ba6726567eaf68c4d447c120f75eac042f) codegen 2 performance pass, [#653](https://github.com/dcodeIO/protobuf.js/issues/653) might benefit<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d44a7eec2fd393e5cb24196fb5818c8c278a0f34) Fixed minimal library including reflection functionality<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a18e6db9f02696c66032bce7ef4c0eb0568a8048) Minor compression ratio tuning<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b49a4edd38395e209bedac2e0bfb7b9d5c4e980b) Fixed failing test case + coverage<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8f7111cacd236501b7e26791b9747b1974a2d9eb) Improved fromObject wrapper for google.protobuf.Any.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0e471a2516bde3cd3c27b2691afa0dcfbb01f042) Fixed failing tokenize test case<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5867f076d8510fa97e3bd6642bbe61960f7fd196) Removed debug build, made it an extension<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22f907c49adbbdf09b72bde5299271dbe0ee9cbe) Regenerated dist files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5bc3541d2da19e2857dc884f743d37c27e8e21f2) Even more documentation and typings for ext/descriptor<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/773e6347b57e4a5236b1ef0bb8d361e4b233caf7) ext/descriptor docs<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/773e6347b57e4a5236b1ef0bb8d361e4b233caf7) Decorators coverage<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9a23ded94729ceeea2f87cb7e8460eaaaf1c8269) ext/descriptor support for various standard options, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2d8ce6ec0abd261f9b261a44a0a258fdf57ecec3) ext/descriptor passes descriptor.proto test with no differences, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3a20968c6d676312e4f2a510f7e079e0e0819daf) Properly remove unnecessary (packed) options from JSON descriptors<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2a30df8bd5f20d91143a38c2232dafc3a6f3a7bd) Use typedefs in ext/descriptor (like everywhere else), see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1fc911cef01e081c04fb82ead685f49dde1403bb) Fixed obvious issues with ext/descriptor, does not throw anymore when throwing descriptor.proto itself at it, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6c37dbd14f39dad687f2f89f1558a875f7dcc882) Added still missing root traversal to ext/descriptor, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7ab136daa5eb2769b616b6b7522e45a4e33a59f6) Initial map fields support for ext/descriptor, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/708552bb84508364b6e6fdf73906aa69e83854e1) Added infrastructure for TypeScript support of extensions<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/23f26defa793b371c16b5f920fbacb3fb66bdf22) TypeScript generics improvements<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e49bef863c0fb10257ec1001a3c5561755f2ec6b) More ext/descriptor progress, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6b94336c1e6eec0f2eb1bd5dca73a7a8e71a2153) Just export the relevant namespace in ext/descriptor<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fbb99489ed0c095174feff8f53431d30fb6c34a0) Initial descriptor.proto extension for reflection interoperability, see [#757](https://github.com/dcodeIO/protobuf.js/issues/757)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/48e66d975bf7b4e6bdbb68ec24386c98b16c54c5) Moved custom wrappers to its own module instead, also makes the API easier to use manually, see [#677](https://github.com/dcodeIO/protobuf.js/issues/677)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0c6e639d08fdf9be12677bf678563ea631bafb2c) Added infrastructure for custom wrapping/unwrapping of special types, see [#677](https://github.com/dcodeIO/protobuf.js/issues/677)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0425b584f49841d87a8249fef30c78cc31c1c742) More decorator progress (MapField.d, optional Type.d)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a6f98b5e74f9e9142f9be3ba0683caeaff916c4) tsd-jsdoc now has limited generics support<br />
|
||||
|
||||
# [6.7.3](https://github.com/dcodeIO/protobuf.js/releases/tag/6.7.3)
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/57f1da64945f2dc5537c6eaa53e08e8fdd477b67) long, @types/long and @types/node are just dependencies, see [#753](https://github.com/dcodeIO/protobuf.js/issues/753)<br />
|
||||
|
||||
# [6.7.2](https://github.com/dcodeIO/protobuf.js/releases/tag/6.7.2)
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a7621be0a56585defc72d863f4e891e476905692) Split up NamespaceDescriptor to make nested plain namespaces a thing, see [#749](https://github.com/dcodeIO/protobuf.js/issues/749)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e980e72ae3d4697ef0426c8a51608d31f516a2c4) More README<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1f76749d0b9a780c7b6cb56be304f7327d74ebdb) Replaced 'runtime message' with 'message instance' for clarity<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e6b6dedb550edbd0e54e212799e42aae2f1a87f1) Rephrased the Usage section around the concept of valid messages<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0d8100ba87be768ebdec834ca2759693e0bf4325) Added toolset diagram to README<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3405ae8d1ea775c96c30d1ef5cde666c9c7341b3) Touched benchmark output metrics once more<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e36b228f4bb8b1cd835bf31f8605b759a7f1f501) Fixed failing browser test<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7b3bdb562ee7d30c1a557d7b7851d55de3091da4) Output more human friendly metrics from benchmark<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/59e447889057c4575f383630942fd308a35c12e6) Stripped down static bench code to what's necessary<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f88dad098282ece65f5d6e224ca38305a8431829) Revamped benchmark, now also covers Google's JS implementation<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/45356be81ba7796faee0d4d8ad324abdd9f301fb) Updated dependencies and dist files<br />
|
||||
|
||||
# [6.7.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.7.1)
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3d23eed6f7c79007969672f06c1a9ccd691e2411) Made .verify behave more like .encode, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bed514290c105c3b606f760f2abba80510721c77) With null/undefined eliminated by constructors and .create, document message fields as non-optional where applicable (ideally used with TS & strictNullChecks), see [#743](https://github.com/dcodeIO/protobuf.js/issues/743)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/007b2329842679ddf994df7ec0f9c70e73ee3caf) Renamed --strict-long/message to --force-long/message with backward compatible aliases, see [#741](https://github.com/dcodeIO/protobuf.js/issues/741)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6aae71f75e82ffd899869b0c952daf98991421b8) Keep $Properties with --strict-message but require actual instances within, see [#741](https://github.com/dcodeIO/protobuf.js/issues/741)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c812cef0eff26998f14c9d58d4486464ad7b2bbc) Added --strict-message option to pbjs to strictly reference message instances instead of $Properties, see [#741](https://github.com/dcodeIO/protobuf.js/issues/741)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/412407de9afb7ec3a999c4c9a3a1f388f971fce7) Restructured README<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1c4d9d7f024bfa096ddc24aabbdf39211ed8637a) Added more information on typings usage, see [#744](https://github.com/dcodeIO/protobuf.js/issues/744)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/602065e16862751c515c2f3391ee8b880e8140b1) Clarified typescript example in README, see [#744](https://github.com/dcodeIO/protobuf.js/issues/744)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/79d0ba2cc71a156910a9d937683af164df694f08) Clarified that the service API targets clients consuming a service, see [#742](https://github.com/dcodeIO/protobuf.js/issues/742)<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a66f76452ba050088efd1aaebf3c503a55e6287c) Omit copying of undefined or null in constructors and .create, see [#743](https://github.com/dcodeIO/protobuf.js/issues/743)<br />
|
||||
|
||||
# [6.7.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.7.0)
|
||||
|
||||
## Breaking
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c1bbf10e445c3495b23a354f9cbee951b4b20f0) Namespace#lookupEnum should actually look up the reflected enum and not just its values<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/44a8d3af5da578c2e6bbe0a1b948d469bbe27ca1) Decoder now throws if required fields are missing, see [#695](https://github.com/dcodeIO/protobuf.js/issues/695) / [#696](https://github.com/dcodeIO/protobuf.js/issues/696)<br />
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d1e3122e326480fdd44e96afd76ee72e9744b246) Added functionality to filter for multiple types at once in lookup(), used by lookupTypeOrEnum(), fixes [#740](https://github.com/dcodeIO/protobuf.js/issues/740)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8aa21268aa5e0f568cb39e99a83b99ccb4084381) Ensure that fields have been resolved when looking up js types in static target, see [#731](https://github.com/dcodeIO/protobuf.js/issues/731)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f755d36829b9f1effd7960fab3a86a141aeb9fea) Properly copy fields array before sorting in toObject, fixes [#729](https://github.com/dcodeIO/protobuf.js/issues/729)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a06691f5b87f7e90fed0115b78ce6febc4479206) Actually emit TS compatible enums in static target if not aliases, see [#720](https://github.com/dcodeIO/protobuf.js/issues/720)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b01bb58dec92ebf6950846d9b8d8e3df5442b15d) Hardened tokenize/parse, esp. comment parsing, see [#713](https://github.com/dcodeIO/protobuf.js/issues/713)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bc76ad732fc0689cb0a2aeeb91b06ec5331d7972) Exclude any fields part of some oneof when populating defaults in toObject, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/68cdb5f11fdbb950623be089f98e1356cb7b1ea3) Most of the parser is not case insensitive, see [#705](https://github.com/dcodeIO/protobuf.js/issues/705)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3e930b907a834a7da759478b8d3f52fef1da22d8) Retain options argument in Root#load when used with promises, see [#684](https://github.com/dcodeIO/protobuf.js/issues/684)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3c14ef42b3c8f2fef2d96d65d6e288211f86c9ef) Created a micromodule from (currently still bundled) float support<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7ecae9e9f2e1324ef72bf5073463e01deff50cd6) util.isset(obj, prop) can be used to test if a message property is considered to be set, see [#728](https://github.com/dcodeIO/protobuf.js/issues/728)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c04d4a5ab8f91899bd3e1b17fe4407370ef8abb7) Implemented stubs for long.js / node buffers to be used where either one isn't wanted, see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b9574ad02521a31ebd509cdaa269e7807da78d7c) Simplified reusing / replacing internal constructors<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1f97b7af05b49ef69bd6e9d54906d1b7583f42c4) Constructors/.create always initialize proper mutable objects/arrays, see [#700](https://github.com/dcodeIO/protobuf.js/issues/700)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/adb4bb001a894dd8d00bcfe03457497eb994f6ba) Verifiers return an error if multiple fields part of the same oneof are set, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fe93d436b430d01b563318bff591e0dd408c06a4) Added `oneofs: true` to ConversionOptions, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/228c882410d47a26576f839b15f1601e8aa7914d) Optional fields handle null just like undefined regardless of type see [#709](https://github.com/dcodeIO/protobuf.js/issues/709)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/da6af8138afa5343a47c12a8beedb99889c0dd51) Encoders no longer examine virtual oneof properties but encode whatever is present, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ac26a7aa60359a37dbddaad139c0134b592b3325) pbjs now generates multiple exports when using ES6 syntax, see [#686](https://github.com/dcodeIO/protobuf.js/issues/686)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c1ca65dc6987384af6f9fac2fbd7700fcf5765b2) Sequentially serialize fields ordered by id, as of the spec.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/26d9fadb21a85ca0b5609156c26453ae875e4933) decode throws specific ProtocolError with a reference to the so far decoded message if required fields are missing + example<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2b5577b238a452ae86aa395fb2ad3a3f45d755dc) Reader.create asserts that `buffer` is a valid buffer, see [#695](https://github.com/dcodeIO/protobuf.js/issues/695)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6f74d30f059e33a4678f28e7a50dc4878c54bed2) Exclude JSDoc on typedefs from generated d.ts files because typescript@next, see [#737](https://github.com/dcodeIO/protobuf.js/issues/737)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2ebb1b781812e77de914cd260e7ab69612ffd99e) Prepare static code with estraverse instead of regular expressions, see [#732](https://github.com/dcodeIO/protobuf.js/issues/732)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/29ce6cae0cacc0f1d87ca47e64be6a81325aaa55) Moved tsd-jsdoc to future cli package, see [#716](https://github.com/dcodeIO/protobuf.js/issues/716)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8de21e1a947ddb50a167147dd63ad29d37b6a891) $Properties are just a type that's satisfied, not implemented, by classes, see [#723](https://github.com/dcodeIO/protobuf.js/issues/723)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4bfe0c239b9c337f8fa64ea64f6a71baf5639b84) More progress on decoupling the CLI<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8a60174932d15198883ac3f07000ab4e7179a695) Fixed computed array indexes not being renamed in static code, see [#726](https://github.com/dcodeIO/protobuf.js/issues/726)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8d9981588d17709791846de63f1f3bfd09433b03) Check upfront if key-var is required in static decoders with maps, see [#726](https://github.com/dcodeIO/protobuf.js/issues/726)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/16adff0c7b67c69a2133b6aac375365c5f2bdbf7) Fixed handling of stdout if callback is specified, see [#724](https://github.com/dcodeIO/protobuf.js/issues/724)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6423a419fe45e648593833bf535ba1736b31ef63) Preparations for moving the CLI to its own package, see [#716](https://github.com/dcodeIO/protobuf.js/issues/716)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/afefa3de09620f50346bdcfa04d52952824c3c8d) Properly implement $Properties interface in JSDoc, see [#723](https://github.com/dcodeIO/protobuf.js/issues/723)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a1f23e09fb5635275bb7646dfafc70caef74c6b8) Recursively use $Properties inside of $Properties in static code, see [#717](https://github.com/dcodeIO/protobuf.js/issues/717)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c3f0a2124c661bb9ba35f92c21a98a4405d30b47) Added --strict-long option to pbjs to always emit 'Long' instead of 'number|Long' (only relevant with long.js), see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0bc4a14501f84f93afd6ce2933ad00749c82f4df) Statically emitted long type is 'Long' now instead of '$protobuf.Long', see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a75625d176b7478e0e506f05e2cee5e3d7a0d89a) Decoupled message properties as an interface in static code for TS intellisense support, see [#717](https://github.com/dcodeIO/protobuf.js/issues/717)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/23f14a61e8c2f68b06d1bb4ed20b938764c78860) Static code statically resolves types[..], see [#715](https://github.com/dcodeIO/protobuf.js/issues/715)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ef71e77726b6bf5978b948d598c18bf8b237ade4) Added type definitions for all possible JSON descriptors<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4bfe0c239b9c337f8fa64ea64f6a71baf5639b84) Explained the JSON structure in README and moved CLI specific information to the CLI package<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3ba3ad762f7486b4806ad1c45764e92a81ca24dd) Added information on how to use the stubs to README, see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a5dbba41341bf44876cd4226f08044f88148f37d) Added 'What is a valid message' section to README<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6f8f2c1fdf92e6f81363d77bc059820b2376fe32) Added a hint on using .create to initial example<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3ad28ec920e0fe8d0223db28804a7b3f8a6880c2) Even more usage for README<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5a1f861a0f6b582faae7a4cc5c6ca7e4418086da) Additional information on general usage (README)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/320dea5a1d1387c72759e10a17afd77dc48c3de0) Restructured README to Installation, Usage and Examples sections<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1c9055dd69f7696d2582942b307a1ac8ac0f5533) Added a longish section on the correct use of the toolset to README<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/99667c8e1ff0fd3dac83ce8c0cff5d0b1e347310) Added a few additional notes on core methods to README, see [#710](https://github.com/dcodeIO/protobuf.js/issues/710)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2130bc97e44567e766ea8efacb365383c909dbd4) Extended traverse-types example, see [#693](https://github.com/dcodeIO/protobuf.js/issues/693)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/13e4aa3ff274ab42f1302e16fd59d074c5587b5b) Better explain how .verify, .encode and .decode are connected<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7502dd2dfdaea111e5c1a902c524ad0a51ff9bd4) Documented that Type#encode respectively Message.encode do not implicitly .verify, see [#696](https://github.com/dcodeIO/protobuf.js/issues/696) [ci-skip]<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e7e123aa0b6c05eb4156a761739e37c008a3cbc1) Documented throwing behavior of Reader.create and Message.decode<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0fcde32306da77f02cb1ea81ed18a32cee01f17b) Added error handling notes to README, see [#696](https://github.com/dcodeIO/protobuf.js/issues/696)<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fef924e5f708f14dac5713aedc484535d36bfb47) Use @protobufjs/float<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fef924e5f708f14dac5713aedc484535d36bfb47) Rebuilt dist files for 6.7.0<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ca0dce2d7f34cd45e4c1cc753a97c58e05b3b9d2) Updated deps, ts fixes and regenerated dist files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2c2d4002d6776f3edde608bd813c37d798d87e6b) Manually merged gentests improvements, fixes [#733](https://github.com/dcodeIO/protobuf.js/issues/733)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e4a6b6f81fa492a63b12f0da0c381612deff1973) Make sure that util.Long is overridden by AMD loaders only if present, see [#730](https://github.com/dcodeIO/protobuf.js/issues/730)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fff1eb297a728ed6d334c591e7d796636859aa9a) Coverage for util.isset and service as a namespace<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8401a47d030214a54b5ee30426ebc7a9d9c3773d) Shortened !== undefined && !== null to equivalent != null in static code<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e1dd1bc2667de73bb65d876162131be2a4d9fef4) With stubs in place, 'number|Long' return values can be just 'Long' instead, see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/404ba8e03a63f708a70a72f0208e0ca9826fe20b) Just alias as the actual ideal type when using stubs, see [#718](https://github.com/dcodeIO/protobuf.js/issues/718)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/270cc94c7c4b8ad84d19498672bfc854b55130c9) General cleanup + regenerated dist/test files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/017161ce97ceef3b2d0ce648651a4636f187d78b) Simplified camel case regex, see [#714](https://github.com/dcodeIO/protobuf.js/issues/714)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d410fd20f35d2a35eb314783b17b6570a40a99e8) Regenerated dist files and changelog for 6.7.0<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/88ca8f0d1eb334646ca2625c78e63fdd57221408) Retain alias order in static code for what it's worth, see [#712](https://github.com/dcodeIO/protobuf.js/issues/712)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2a74fbf551e934b3212273e6a28ad65ac4436faf) Everything can be block- or line-style when parsing, see [#713](https://github.com/dcodeIO/protobuf.js/issues/713)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/47bb95a31784b935b9ced52aa773b9d66236105e) Determine necessary aliases depending on config, see [#712](https://github.com/dcodeIO/protobuf.js/issues/712)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/588ffd9b129869de0abcef1d69bfa18f2f25d8e1) Use more precise types for message-like plain objects<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/37b39c8d1a5307eea09aa24d7fd9233a8df5b7b6) Regenerated dist files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c94813f9a5f1eb114d7c6112f7e87cb116fe9da) Regenerated relevant files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d7493efe1a86a60f6cdcf7976523e69523d3f7a3) Moved field comparer to util<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fe917652f88df17d4dbaae1cd74f470385342be2) Updated tests to use new simplified encoder logic<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b69173b4e7b514c40bb4a85b54ca5465492a235b) Updated path to tsd-jsdoc template used by pbts, see [#707](https://github.com/dcodeIO/protobuf.js/issues/707)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5041fad9defdb0bc8131560e92f3b454d8e45273) Additional restructuring for moving configuration files out of the root folder<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c0b7c9fa6309d345c4ce8e06fd86f27528f4ea66) Added codegen support for constructor functions, see [#700](https://github.com/dcodeIO/protobuf.js/issues/700)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4573f9aabd7e8f883e530f4d0b055e5ec9b75219) Attempted to fix broken custom error test<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4b49f500fce156b164c757d8f17be2338f767c82) Trying out a more aggressive approach for custom error subclasses<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/95cd64ee514dc60d10daac5180726ff39594e8e8) Moved a few things out of the root folder<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/db1030ed257f9699a0bcf3bad0bbe8acccf5d766) Coverage for encoder compat. / protocolerror<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/948a4caf5092453fa091ac7a594ccd1cc5b503d2) Updated dist and generated test files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3ead13e83ecdc8715fbab916f7ccaf3fbfdf59ed) Added tslint<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/364e7d457ed4c11328e609f600a57b7bc4888554) Exclude dist/ from codeclimate checks<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6e81fcb05f25386e3997399e6596e9d9414f0286) Also lint cli utilities<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e7e123aa0b6c05eb4156a761739e37c008a3cbc1) Cache any regexp instance (perf)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d89c45f8af0293fb34e6f12b37ceca49083e1faa) Use code climate badges<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e70fbe3492c37f009dbaccf910c1e0f81e8f0f44) Updated travis to pipe to codeclimate, coverage<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a7ab1036906bb7638193a9e991cb62c86108880a) More precise linter configuration<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/58688c178257051ceb2dfea8a63eb6be7dcf1cf1) Added codeclimate<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4b21e00adc6fae42e6a88deaeb0b7c077c6ca50e) Moved cli deps placeholder creation to post install script<br />
|
||||
|
||||
# [6.6.5](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.5)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/478ee51194878f24be8607e42e5259952607bd44) sfixed64 is not zig-zag encoded, see [#692](https://github.com/dcodeIO/protobuf.js/issues/692)<br />
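
A minimal sketch of the distinction (sizes shown for illustration): `sfixed64` is a fixed-width 8-byte value on the wire, whereas zig-zag varint encoding belongs to `sint64`.

```js
const protobuf = require("protobufjs/minimal");

const fixed  = protobuf.Writer.create().sfixed64(-1).finish(); // 8 fixed little-endian bytes
const zigzag = protobuf.Writer.create().sint64(-1).finish();   // zig-zag varint, 1 byte
console.log(fixed.length, zigzag.length); // 8 1
```
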
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7a944538c89492abbed147915acea611f11c03a2) Added a placeholder to cli deps node_modules folder to make sure node can load from it<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/83142e420eb1167b2162063a092ae8d89c9dd4b2) Restructured a few failing tests<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/367d55523a3ae88f21d47aa96447ec3e943d4620) Traversal example + minimalistic documentation<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8eeffcbcd027c929e2a76accad588c61dfa2e37c) Added a custom getters/setters example for gRPC<br />
|
||||
|
||||
# [6.6.4](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.4)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/88eb7a603a21643d5012a374c7d246f4c27620f3) Made sure that LongBits ctor is always called with unsigned 32 bits + static codegen compat., fixes [#690](https://github.com/dcodeIO/protobuf.js/issues/690)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/50e82fa7759be035a67c7818a1e3ebe0d6f453b6) Properly handle multiple ../.. in path.normalize, see [#688](https://github.com/dcodeIO/protobuf.js/issues/688)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7c3506b3f0c5a08a887e97313828af0c21effc61) Post-merge, also tackles [#683](https://github.com/dcodeIO/protobuf.js/issues/683) (packed option for repeated enum values)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7f3f4600bcae6f2e4dadd5cdb055886193a539b7) Verify accepts non-null objects only, see [#685](https://github.com/dcodeIO/protobuf.js/issues/685)<br />
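
A small sketch of the new behavior, using an illustrative inline type (exact error strings may differ):

```js
const protobuf = require("protobufjs");
const AwesomeMessage = protobuf.parse("message AwesomeMessage { optional string awesomeField = 1; }")
    .root.lookupType("AwesomeMessage");

console.log(AwesomeMessage.verify(null));   // error string, e.g. "object expected"
console.log(AwesomeMessage.verify("nope")); // error string, e.g. "object expected"
console.log(AwesomeMessage.verify({}));     // null, i.e. a valid (empty) object
```
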
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d65c22936183d04014d6a8eb880ae0ec33aeba6d) allow_alias enum option was not being honored. This case is now handled and a test case was added<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2ddb76b6e93174787a68f68fb28d26b8ece7cc56) Added an experimental --sparse option to limit pbjs output to actually referenced types within main files<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/33d14c97600ed954193301aecbf8492076dd0179) Added explicit hint on Uint8Array to initial example, see [#670](https://github.com/dcodeIO/protobuf.js/issues/670)<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cbd4c622912688b47658fea00fd53603049b5104) Ranges and names support for reserved fields, see [#676](https://github.com/dcodeIO/protobuf.js/issues/676)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/487f8922d879955ba22f89b036f897b9753b0355) Updated dependencies / rebuilt dist files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/37536e5fa7a15fbc851040e09beb465bc22d9cf3) Use ?: instead of |undefined in .d.ts files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f8b415a2fc2d1b1eff19333600a010bcaaebf890) Mark optional fields as possibly being undefined<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2ddb76b6e93174787a68f68fb28d26b8ece7cc56) Added a few more common google types from google/api, see [#433](https://github.com/dcodeIO/protobuf.js/issues/433)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d246024f4c7d13ca970c91a757e2f47432a619df) Minor optimizations to dependencies, build process and tsd<br />
|
||||
|
||||
# [6.6.3](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.3)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0be01a14915e3e510038808fedbc67192a182d9b) Support node 4.2.0 to 4.4.7 buffers + travis case, see [#665](https://github.com/dcodeIO/protobuf.js/issues/665)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a0920b2c32e7963741693f5a773b89f4b262688) Added ES6 syntax flag to pbjs, see [#667](https://github.com/dcodeIO/protobuf.js/issues/667)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c365242bdc28a47f5c6ab91bae34c277d1044eb3) Reference Buffer for BufferReader/Writer, see [#668](https://github.com/dcodeIO/protobuf.js/issues/668)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/43976072d13bb760a0689b54cc35bdea6817ca0d) Slightly shortened README<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e64cf65b09047755899ec2330ca0fc2f4d7932c2) Additional notes on the distinction of different use cases / distributions, see [#666](https://github.com/dcodeIO/protobuf.js/issues/666)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/83758c99275c2bbd30f63ea1661284578f5c9d91) Extended README with additional information on JSON format<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fdc3102689e8a3e8345eee5ead07ba3c9c3fe80c) Added extended usage instructions for TypeScript and custom classes to README, see [#666](https://github.com/dcodeIO/protobuf.js/issues/666)<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3701488cca6bc56ce6b7ad93c7b80e16de2571a7) Updated dist files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/579068a45e285c7d2c69b359716dd6870352f46f) Updated test cases to use new buffer util<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0be01a14915e3e510038808fedbc67192a182d9b) Added fetch test cases + some test cleanup<br />
|
||||
|
||||
# [6.6.2](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.2)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3aea1bf3d4920dc01603fda25b86e6436ae45ec2) Properly replace short vars when beautifying static code, see [#663](https://github.com/dcodeIO/protobuf.js/issues/663)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b6cf228a82152f72f21b1b307983126395313470) Use custom prelude in order to exclude any module loader code from source (for webpack), see [#658](https://github.com/dcodeIO/protobuf.js/issues/658)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2b12fb7db9d4eaa3b76b7198539946e97db684c4) Make sure to check optional inner messages for null when encoding, see [#658](https://github.com/dcodeIO/protobuf.js/issues/658)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/276a594771329da8334984771cb536de7322d5b4) Initial attempt on a backwards compatible fetch implementation with binary support, see [#661](https://github.com/dcodeIO/protobuf.js/issues/661)<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2d81864fa5c4dac75913456d582e0bea9cf0dd80) Root#resolvePath skips files when returning null, see [#368](https://github.com/dcodeIO/protobuf.js/issues/368)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/aab3ec1a757aff0f11402c3fb943c003f092c1af) Changed the rpc service callback to pass the actual error instead of an 'error' string when response decoding fails<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9044178c052299670108f10621d6e9b3d56e8a40) Travis should exit with the respective error when running sauce tests<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/73721f12072d77263e72a3b27cd5cf9409db9f8b) Moved checks whether a test case is applicable to parent case<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3fcd88c3f9b1a084b06cab2d5881cb5bb895869d) Added eventemitter tests and updated micromodule dependencies (so far)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2db4305ca67d003d57aa14eb23f25eb6c3672034) Added lib/path tests and updated a few dependencies<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2b12fb7db9d4eaa3b76b7198539946e97db684c4) Moved micro modules to lib so they can have their own tests etc.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b6dfa9f0a4c899b5c217d60d1c2bb835e06b2122) Updated travis<br />
|
||||
|
||||
# [6.6.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.1)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/039ac77b062ee6ebf4ec84a5e6c6ece221e63401) Properly set up reflection when using light build<br />
|
||||
|
||||
# [6.6.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.0) ([release](https://github.com/dcodeIO/protobuf.js/releases/tag/6.6.0))
|
||||
|
||||
## Breaking
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cdfe6bfba27fa1a1d0e61887597ad4bb16d7e5ed) Inlined / refactored away .testJSON, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a483a529ef9345ed217a23394a136db0d9f7771) Refactored util.extend away<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/27b16351f3286468e539c2ab382de4b52667cf5e) Reflected and statically generated services use common utility, now work exactly the same<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/dca26badfb843a597f81e98738e2fda3f66c7341) fromObject now throws for entirely bogus values (repeated, map and inner message fields), fixes [#601](https://github.com/dcodeIO/protobuf.js/issues/601)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4bff9c356ef5c10b4aa34d1921a3b513e03dbb3d) Cleaned up library distributions, now is full / light / minimal with proper browserify support for each<br />
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/301f7762ef724229cd1df51e496eed8cfd2f10eb) Do not randomly remove slashes from comments, fixes [#656](https://github.com/dcodeIO/protobuf.js/issues/656)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ef7be352baaec26bdcdce01a71fbee47bbdeec15) Properly parse nested textformat options, also tackles [#655](https://github.com/dcodeIO/protobuf.js/issues/655)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b4f4f48f1949876ae92808b0a5ca5f2b29cc011c) Relieved the requirement to call .resolveAll() on roots in order to populate static code-compatible properties, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/56c8ec4196d461383c3e1f271da02553d877ae81) Added a (highly experimental) debug build as a starting point for [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c5d291f9bab045385c5938ba0f6cdf50a315461f) Full build depends on light build depends on minimal build, shares all relevant code<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/735da4315a98a6960f3b5089115e308548b91c07) Also reuse specified root in pbjs for JSON modules, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3a056244d3acf339722d56549469a8df018e682e) Reuse specified root name in pbjs to be able to split definitions over multiple files more easily, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/28ddf756ab83cc890761ef2bd84a0788d2ad040d) Improved pbjs/pbts examples, better covers reflection with definitions for static modules<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6f0b44aea6cf72d23042810f05a7cede85239eb3) Fixed centered formatting on npm<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/dd96dcdacb8eae94942f7016b8dc37a2569fe420) Various other minor improvements / assertions refactored away, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3317a76fb56b9b31bb07ad672d6bdda94b79b6c3) Fixed some common reflection deopt sites, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a483a529ef9345ed217a23394a136db0d9f7771) Reflection performance pass, see [#653](https://github.com/dcodeIO/protobuf.js/issues/653)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a483a529ef9345ed217a23394a136db0d9f7771) Added TS definitions to alternative builds' index files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a483a529ef9345ed217a23394a136db0d9f7771) Removed unnecessary prototype aliases, improves gzip ratio<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/641625fd64aca55b1163845e6787b58054ac36ec) Unified behaviour of and docs on Class constructor / Class.create<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7299929b37267af2100237d4f8b4ed8610b9f7e1) Statically generated services actually inherit from rpc.Service<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f4cf75e4e4192910b52dd5864a32ee138bd4e508) Do not try to run sauce tests for PRs<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/33da148e2b750ce06591c1c66ce4c46ccecc3c8f) Added utility to enable/disable debugging extensions to experimental debug build<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fdb1a729ae5f8ab762c51699bc4bb721102ef0c8) Fixed node 0.12 tests<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6bc5bb4a7649d6b91a5944a9ae20178d004c8856) Fixed coverage<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6f0b44aea6cf72d23042810f05a7cede85239eb3) Added a test case for [#652](https://github.com/dcodeIO/protobuf.js/issues/652)<br />
|
||||
|
||||
# [6.5.3](https://github.com/dcodeIO/protobuf.js/releases/tag/6.5.3)
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/799d0303bf289bb720f2b27af59e44c3197f3fb7) In fromObject, check if object is already a runtime message, see [#652](https://github.com/dcodeIO/protobuf.js/issues/652)<br />
|
||||
|
||||
# [6.5.2](https://github.com/dcodeIO/protobuf.js/releases/tag/6.5.2)
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8cff92fe3b7ddb1930371edb4937cd0db9216e52) Added coverage reporting<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cbaaae99b4e39a859664df0e6d20f0491169f489) Added a version scheme warning to all CLI tools so that it no longer needs to be spelled out in the README<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6877b3399f1a4c33568221bffb4e298b01b14439) Coverage progress, 100%<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/711a9eb55cb796ec1e51af7d56ef2ebbd5903063) Coverage progress<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e7526283ee4dd82231235afefbfad6af54ba8970) Attempted to fix badges once and for all<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5aa296c901c2b460ee3be4530ede394e2a45e0ea) Coverage progress<br />
|
||||
|
||||
# [6.5.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.5.1)
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9719fd2fa8fd97899c54712a238091e8fd1c57b2) Reuse module paths when looking up cli dependencies, see [#648](https://github.com/dcodeIO/protobuf.js/issues/648)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6302655d1304cf662f556be5d9fe7a016fcedc3c) Check actual module directories to determine if cli dependencies are present and bootstrap semver, see [#648](https://github.com/dcodeIO/protobuf.js/issues/648)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/dfc7c4323bf98fb26ddcfcfbb6896a6d6e8450a4) Added a note on semver-incompatibility, see [#649](https://github.com/dcodeIO/protobuf.js/issues/649)<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/49053ffa0ea8a4ba5ae048706dba1ab6f3bc803b) Coverage progress<br />
|
||||
|
||||
# [6.5.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.5.0) ([release](https://github.com/dcodeIO/protobuf.js/releases/tag/6.5.0))
|
||||
|
||||
## Breaking
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3946e0fefea415f52a16ea7a74109ff40eee9643) Initial upgrade of converters to real generated functions, see [#620](https://github.com/dcodeIO/protobuf.js/issues/620)<br />
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/08cda241a3e095f3123f8a991bfd80aa3eae9400) An enum's default value given as a string is now looked up via typeDefault instead of defaultValue, which is an array for repeated fields<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c7e14b1d684aaba2080195cc83900288c5019bbc) Use common utility for virtual oneof getters and setters in both reflection and static code, see [#644](https://github.com/dcodeIO/protobuf.js/issues/644)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/508984b7ff9529906be282375d36fdbada66b8e6) Properly use Type.toObject/Message.toObject within converters, see [#641](https://github.com/dcodeIO/protobuf.js/issues/641)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5bca18f2d32e8687986e23edade7c2aeb6b6bac1) Generate null/undefined assertion in fromObject if actually NOT an enum, see [#620](https://github.com/dcodeIO/protobuf.js/issues/620)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/508984b7ff9529906be282375d36fdbada66b8e6) Replace ALL occurrences of types[%d].values in static code, see [#641](https://github.com/dcodeIO/protobuf.js/issues/641)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9b090bb1673aeb9b8f1d7162316fce4d7a3348f0) Switched to own property-aware encoders for compatibility, see [#639](https://github.com/dcodeIO/protobuf.js/issues/639)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/340d6aa82ac17c4a761c681fa71d5a0955032c8b) Now also parses comments, sets them on reflected objects and re-uses them when generating static code, see [#640](https://github.com/dcodeIO/protobuf.js/issues/640)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3cb82628159db4d2aa721b63619b16aadc5f1981) Further improved generated static code style<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cda5c5452fa0797f1e4c375471aef96f844711f1) Removed scoping iifes from generated static code<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/def7b45fb9b5e01028cfa3bf2ecd8272575feb4d) Removed even more clutter from generated static code<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/dbd19fd9d3a57d033aad1d7173f7f66db8f8db3e) Removed various clutter from generated static code<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1cc8a2460c7e161c9bc58fa441ec88e752df409c) Made sure that static target's replacement regexes don't match fields<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d4272dbf5d0b2577af8efb74a94d246e2e0d728e) Also accept (trailing) triple-slash comments for compatibility with protoc-gen-doc, see [#640](https://github.com/dcodeIO/protobuf.js/issues/640)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0a3862b75fa60ef732e0cd36d623f025acc2fb45) Use semver to validate that CLI dependencies actually satisfy the required version, see [#637](https://github.com/dcodeIO/protobuf.js/issues/637)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9e360ea6a74d41307483e51f18769df7f5b047b9) Added a hint on documenting .proto files for static code<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d2a97bb818474645cf7ce1832952b2c3c739b234) Documented internally used codegen partials for what it's worth<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/079388ca65dfd581d74188a6ae49cfa01b103809) Updated converter documentation<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/168e448dba723d98be05c55dd24769dfe3f43d35) Bundler provides useful stuff to uglify and a global var without extra bloat<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/32e0529387ef97182ad0b9ae135fd8b883ed66b4) Cleaned and categorized tests, coverage progress<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3325e86930a3cb70358c689cb3016c1be991628f) Properly removed builtins from bundle<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2c94b641fc5700c8781ac0b9fe796debac8d6893) Call hasOwnProperty builtin as late as possible, decreasing the probability of having to call it at all (perf)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/818bcacde267be70a75e689f480a3caad6f80cf7) Slightly hardened codegen sprintf<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/818bcacde267be70a75e689f480a3caad6f80cf7) Significantly improved uint32 write performance<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b5daa272407cb31945fd38c34bbef7c9edd1db1c) Cleaned up test case data and removed unused files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c280a4a18c6d81c3468177b2ea58ae3bc4f25e73) Removed now useless trailing comment checks, see [#640](https://github.com/dcodeIO/protobuf.js/issues/640)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/44167db494c49d9e4b561a66ad9ce2d8ed865a21) Ensured that pbjs' beautify does not break regular expressions in generated verify functions<br />
|
||||
|
||||
# [6.4.6](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.6)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e11012ce047e8b231ba7d8cc896b8e3a88bcb902) Case-sensitively test for legacy group definitions, fixes [#638](https://github.com/dcodeIO/protobuf.js/issues/638)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7e57f4cdd284f886b936511b213a6468e4ddcdce) Properly parse text format options + simple test case, fixes [#636](https://github.com/dcodeIO/protobuf.js/issues/636)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fe4d97bbc4d33ce94352dde62ddcd44ead02d7ad) Added SVG logo, see [#629](https://github.com/dcodeIO/protobuf.js/issues/629)<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/57990f7ed8ad5c512c28ad040908cee23bbf2aa8) Also refactored Service and Type to inherit from NamespaceBase, see [#635](https://github.com/dcodeIO/protobuf.js/issues/635)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fe4d97bbc4d33ce94352dde62ddcd44ead02d7ad) Moved TS-compatible Namespace features to a virtual NamespaceBase class, compiles with strictNullChecks by default now, see [#635](https://github.com/dcodeIO/protobuf.js/issues/635)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fe4d97bbc4d33ce94352dde62ddcd44ead02d7ad) Minor codegen enhancements<br />
|
||||
|
||||
# [6.4.5](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.5)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1154ce0867306e810cf62a5b41bdb0b765aa8ff3) Properly handle empty/noop Writer#ldelim, fixes [#625](https://github.com/dcodeIO/protobuf.js/issues/625)<br />
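
Roughly, the fork/ldelim pattern used for length-delimited data now also handles an empty fork, writing just a zero length. A sketch (the key byte 10 stands for field 1, wire type 2):

```js
const protobuf = require("protobufjs/minimal");

const writer = protobuf.Writer.create();
writer.uint32(10)   // key: field 1, wire type 2 (length-delimited)
      .fork();      // start buffering the embedded payload
// nothing written here on purpose
writer.ldelim();    // flush the (empty) payload with its length prefix
console.log(writer.finish()); // Uint8Array [ 10, 0 ]
```
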
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f303049f92c53970619375653be46fbb4e3b7d78) Properly annotate map fields in pbjs, fixes [#624](https://github.com/dcodeIO/protobuf.js/issues/624)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4b786282a906387e071a5a28e4842a46df588c7d) Made sure that Writer#bytes is always able to handle plain arrays<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1e6a8d10f291a16631376dd85d5dd385937e6a55) Slightly restructured utility to better support static code default values<br />
|
||||
|
||||
# [6.4.4](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.4)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/26d68e36e438b590589e5beaec418c63b8f939cf) Dynamically resolve jsdoc when running pbts, fixes [#622](https://github.com/dcodeIO/protobuf.js/issues/622)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/69c04d7d374e70337352cec9b445301cd7fe60d6) Explain 6.4.2 vs 6.4.3 in changelog<br />
|
||||
|
||||
# [6.4.3](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.4)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c2c39fc7cec5634ecd1fbaebbe199bf097269097) Fixed invalid definition of Field#packed property, also introduced decoder.compat mode (packed fields, on by default)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/11fb1a66ae31af675d0d9ce0240cd8e920ae75e7) Always decode packed/non-packed based on wire format only, see [#602](https://github.com/dcodeIO/protobuf.js/issues/602)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c9a61e574f5a2b06f6b15b14c0c0ff56f8381d1f) Use full library for JSON modules and runtime dependency for static modules, fixes [#621](https://github.com/dcodeIO/protobuf.js/issues/621)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e88d13ca7ee971451b57d056f747215f37dfd3d7) Additional workarounds for on demand CLI dependencies, see [#618](https://github.com/dcodeIO/protobuf.js/issues/618)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/44f6357557ab3d881310024342bcc1e0d336a20c) Revised automatic setup of cli dependencies, see [#618](https://github.com/dcodeIO/protobuf.js/issues/618)<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e027a3c7855368837e477ce074ac65f191bf774a) Removed Android 4.0 test (no longer supported by sauce)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8ba3c5efd182bc80fc36f9d5fe5e2b615b358236) Removed some unused utility, slightly more efficient codegen, additional comments<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f22a34a071753bca416732ec4d01892263f543fb) Updated tests for new package.json layout<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f22a34a071753bca416732ec4d01892263f543fb) Added break/continue label support to codegen<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f2ffa0731aea7c431c59e452e0f74247d815a352) Updated dependencies, rebuilt dist files and changed logo to use an absolute url<br />
|
||||
|
||||
6.4.2 had been accidentally published as 6.4.3.
|
||||
|
||||
# [6.4.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.1)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9035d4872e32d6402c8e4d8c915d4f24d5192ea9) Added more default value checks to converter, fixes [#616](https://github.com/dcodeIO/protobuf.js/issues/616)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/62eef58aa3b002115ebded0fa58acc770cd4e4f4) Respect long defaults in converters<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e3170a160079a3a7a99997a2661cdf654cb69e24) Convert inner messages and undefined/null values more thoroughly, fixes [#615](https://github.com/dcodeIO/protobuf.js/issues/615)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b52089efcb9827537012bebe83d1a15738e214f4) Always use first defined enum value as field default, fixes [#613](https://github.com/dcodeIO/protobuf.js/issues/613)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/64f95f9fa1bbe42717d261aeec5c16d1a7aedcfb) Install correct 'tmp' dependency when running pbts without dev dependencies installed, fixes [#612](https://github.com/dcodeIO/protobuf.js/issues/612)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cba46c389ed56737184e5bc2bcce07243d52e5ce) Generate named constructors for runtime messages, see [#588](https://github.com/dcodeIO/protobuf.js/issues/588)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ee20b81f9451c56dc106177bbf9758840b99d0f8) pbjs/pbts no longer generate any volatile headers, see [#614](https://github.com/dcodeIO/protobuf.js/issues/614)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ec9d517d0b87ebe489f02097c2fc8005fae38904) Attempted to make broken shields less annoying<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5cd4c2f2a94bc3c0f2c580040bce28dd42eaccec) Updated README<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0643f93f5c0d96ed0ece5b47f54993ac3a827f1b) Some cleanup and added a logo<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/169638382de9efe35a1079c5f2045c33b858059a) use $protobuf.Long<br />
|
||||
|
||||
# [6.4.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.0) ([release](https://github.com/dcodeIO/protobuf.js/releases/tag/6.4.0))
|
||||
|
||||
## Breaking
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a017bf8a2dbdb7f9e7ce4c026bb6845174feb3b1) Dropped IE8 support<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/39bc1031bb502f8b677b3736dd283736ea4d92c1) Removed now unused util.longNeq which was used by early static code<br />
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5915ff972482e7db2a73629244ab8a93685b2e55) Do not swallow errors in loadSync, also accept negative enum values in Enum#add, fixes [#609](https://github.com/dcodeIO/protobuf.js/issues/609)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fde56c0de69b480343931264a01a1ead1e3156ec) Improved bytes field support, also fixes [#606](https://github.com/dcodeIO/protobuf.js/issues/606)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0c03f327115d57c4cd5eea3a9a1fad672ed6bd44) Fall back to browser Reader when passing an Uint8Array under node, fixes [#605](https://github.com/dcodeIO/protobuf.js/issues/605)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7eb3d456370d7d66b0856e32b2d2602abf598516) Respect optional properties when writing interfaces in tsd-jsdoc, fixes [#598](https://github.com/dcodeIO/protobuf.js/issues/598)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bcadffecb3a8b98fbbd34b45bae0e6af58f9c810) Instead of protobuf.parse.keepCase, fall back to protobuf.parse.defaults holding all possible defaults, see [#608](https://github.com/dcodeIO/protobuf.js/issues/608)<br />
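
A hedged sketch of the fallback (assuming `parse.defaults` keeps the shape described in this entry); per-call options still take precedence over the globals.

```js
const protobuf = require("protobufjs");

// Applies to every parse() call that does not pass its own options.
protobuf.parse.defaults.keepCase = true;

const { root } = protobuf.parse("message Test { optional string my_field = 1; }");
console.log(Object.keys(root.lookupType("Test").fields)); // [ 'my_field' ]
```
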
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a4d6a2af0d57a2e0cccf31e3462c8b2465239f8b) Added global ParseOptions#keepCase fallback as protobuf.parse.keepCase, see [#608](https://github.com/dcodeIO/protobuf.js/issues/608)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a017bf8a2dbdb7f9e7ce4c026bb6845174feb3b1) Converters use code generation and support custom implementations<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/28ce07d9812f5e1743afef95a94532d2c9488a84) Be more verbose when throwing invalid wire type errors, see [#602](https://github.com/dcodeIO/protobuf.js/issues/602)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/40074bb69c3ca4fcefe09d4cfe01f3a86844a7e8) Added an asJSON-option to always populate array fields, even if defaults=false, see [#597](https://github.com/dcodeIO/protobuf.js/issues/597)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a7d23240a278aac0bf01767b6096d692c09ae1ce) Attempt to improve TypeScript support by using explicit exports<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/cec253fb9a177ac810ec96f4f87186506091fa37) Copy-pasted typescript definitions to micro modules, see [#599](https://github.com/dcodeIO/protobuf.js/issues/599)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1f18453c7bfcce65c258fa98a3e3d4577d2e550f) Emit an error on resolveAll() if any extension fields cannot be resolved, see [#595](https://github.com/dcodeIO/protobuf.js/issues/595) + test case<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/804739dbb75359b0034db0097fe82081e3870a53) Removed 'not recommend' label for --keep-case, see [#608](https://github.com/dcodeIO/protobuf.js/issues/608)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9681854526f1813a6ef08becf130ef2fbc28b638) Added customizable linter configuration to pbjs<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9681854526f1813a6ef08becf130ef2fbc28b638) Added stdin support to pbjs and pbts<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/407223b5ceca3304bc65cb48888abfdc917d5800) Static code no longer uses IE8 support utility<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a017bf8a2dbdb7f9e7ce4c026bb6845174feb3b1) Generated static code now supports asJSON/from<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3c775535517b8385a1d3c1bf056f3da3b4266f8c) Added support for TypeScript enums to pbts<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0cda72a55a1f2567a5d981dc5d924e55b8070513) Added a few helpful comments to static code<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/24b293c297feff8bda5ee7a2f8f3f83d77c156d0) Slightly beautify statically generated code<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/65637ffce20099df97ffbcdce50faccc8e97c366) Do not wrap main definition as a module and export directly instead<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/65637ffce20099df97ffbcdce50faccc8e97c366) Generate prettier definitions with --no-comments<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/20d8a2dd93d3bbb6990594286f992e703fc4e334) Added variable arguments support to tsd-jsdoc<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8493dbd9a923693e943f710918937d83ae3c4572) Reference dependency imports as a module to prevent name collisions, see [#596](https://github.com/dcodeIO/protobuf.js/issues/596)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/39a2ea361c50d7f4aaa0408a0d55bb13823b906c) Removed now unnecessary comment lines in generated static code<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a4e41b55471d83a8bf265c6641c3c6e0eee82e31) Added notes on CSP-restricted environments to README, see [#593](https://github.com/dcodeIO/protobuf.js/issues/593)<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1a3effdad171ded0608e8da021ba8f9dd017f2ff) Added test case for asJSON with arrays=true, see [#597](https://github.com/dcodeIO/protobuf.js/issues/597)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/751a90f509b68a5f410d1f1844ccff2fc1fc056a) Added a tape adapter to assert message equality across browsers<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fde56c0de69b480343931264a01a1ead1e3156ec) Refactored some internal utility away<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/805291086f6212d1f108b3d8f36325cf1739c0bd) Reverted previous attempt on [#597](https://github.com/dcodeIO/protobuf.js/issues/597)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c5160217ea95996375460c5403dfe37b913d392e) Minor tsd-jsdoc refactor<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/961dd03061fc2c43ab3bf22b3f9f5165504c1002) Removed unused sandbox files<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f625eb8b0762f8f5d35bcd5fc445e52b92d8e77d) Updated package.json of micro modules to reference types, see [#599](https://github.com/dcodeIO/protobuf.js/issues/599)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/46ec8209b21cf9ff09ae8674e2a5bbc49fd4991b) Reference dependencies as imports in generated typescript definitions, see [#596](https://github.com/dcodeIO/protobuf.js/issues/596)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3bab132b871798c7c50c60a4c14c2effdffa372e) Allow null values on optional long fields, see [#590](https://github.com/dcodeIO/protobuf.js/issues/590)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/31da56c177f1e11ffe0072ad5f58a55e3f8008fd) Various jsdoc improvements and a workaround for d.ts generation, see [#592](https://github.com/dcodeIO/protobuf.js/issues/592)<br />
|
||||
|
||||
# [6.3.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.3.1)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/95ed6e9e8268711db24f44f0d7e58dd278ddac4c) Empty inner messages are always present on the wire + test case + removed now unused Writer#ldelim parameter, see [#585](https://github.com/dcodeIO/protobuf.js/issues/585)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e8a4d5373b1a00cc6eafa5b201b91d0e250cc00b) Expose tsd-jsdoc's comments option to pbts as --no-comments, see [#587](https://github.com/dcodeIO/protobuf.js/issues/587)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6fe099259b5985d873ba5bec88c049d7491a11cc) Increase child process max buffer when running jsdoc from pbts, see [#587](https://github.com/dcodeIO/protobuf.js/issues/587)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3d84ecdb4788d71b5d3928e74db78e8e54695f0a) pbjs now generates more convenient dot-notation property accessors<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1e0ebc064e4f2566cebf525d526d0b701447bd6a) And fixed IE8 again (should probably just drop IE8 for good)<br />
|
||||
|
||||
# [6.3.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.3.0)
|
||||
|
||||
## Breaking
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a97956b1322b6ee62d4fc9af885658cd5855e521) Moved camelCase/underScore away from util to where actually used<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c144e7386529b53235a4a5bdd8383bdb322f2825) Renamed asJSON option keys (enum to enums, long to longs) because enum is a reserved keyword<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5b9ade428dca2df6a13277522f2916e22092a98b) Moved JSON/Message conversion to its own source file and added Message/Type.from + test case, see [#575](https://github.com/dcodeIO/protobuf.js/issues/575)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0b0de2458a1ade1ccd4ceb789697be13290f856b) Relicensed the library and its components to BSD-3-Clause to match the official implementation (again)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22a64c641d4897965035cc80e92667bd243f182f) Dropped support for browser buffer entirely (is an Uint8Array anyway), ensures performance and makes things simpler<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/22a64c641d4897965035cc80e92667bd243f182f) Removed dead parts of the Reader API<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/964f65a9dd94ae0a18b8be3d9a9c1b0b1fdf6424) Refactored BufferReader/Writer to their own files and removed unnecessary operations (node always has FloatXXArray and browser buffer uses ieee anyway)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bfac0ea9afa3dbaf5caf79ddf0600c3c7772a538) Stripped out fallback encoder/decoder/verifier completely (even IE8 supports codegen), significantly reduces bundle size, can use static codegen elsewhere<br />
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c3023a2f51fc74547f6c6e53cf75feed60f3a25c) Actually concatenate mixed custom options when parsing<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0d66b839df0acec2aea0566d2c0bbcec46c3cd1d) Fixed a couple of issues with alternative browser builds<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/33706cdc201bc863774c4af6ac2c38ad96a276e6) Properly set long defaults on prototypes<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0ea2740f0774b4c5c349b9c303f3fb2c2743c37b) Fixed reference error in minimal runtime, see [#580](https://github.com/dcodeIO/protobuf.js/issues/580)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/741b6d8fde84d9574676a729a29a428d99f0a0a0) Non-repeated empty messages are always present on the wire, see [#581](https://github.com/dcodeIO/protobuf.js/issues/581)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7fac9d6a39bf42d316c1676082a2d0804bc55934) Properly check Buffer.prototype.set with node v4<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3ad8108eab57e2b061ee6f1fddf964abe3f4cbc7) Prevent NRE and properly annotate verify signature in tsd-jsdoc, fixed [#572](https://github.com/dcodeIO/protobuf.js/issues/572)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6c2415d599847cbdadc17dee3cdf369fc9facade) Fix directly using Buffer instead of util.Buffer<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/19e906c2a15acc6178b3bba6b19c2f021e681176) Added filter type to Namespace#lookup, fixes [#569](https://github.com/dcodeIO/protobuf.js/issues/569)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c9a66bf393d9d6927f35a9c18abf5d1c31db912) Fixed parsing enum inner options, see [#565](https://github.com/dcodeIO/protobuf.js/issues/565)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ea7ba8b83890084d61012cb5386dc11dadfb3908) Fixed release links in README files<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/442471363f99e67fa97044f234a47b3c9b929dfa) Added a noparse build for completeness<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bfee1cc3624d0fa21f9553c2f6ce2fcf7fcc09b7) Now compresses .gz files using zopfli to make them useful beyond being just a reference<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/aed134aa1cd7edd801de77c736cf5efe6fa61cb0) Updated non-bundled google types folder with missing descriptors and added wrappers to core<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0b0de2458a1ade1ccd4ceb789697be13290f856b) Replaced the ieee754 implementation for old browsers with a faster, use-case specific one + simple test case<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/99ad9cc08721b834a197d4bbb67fa152d7ad79aa) Added .create to statically generated types and uppercase nested elements to reflection namespaces, see [#576](https://github.com/dcodeIO/protobuf.js/issues/576)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/99ad9cc08721b834a197d4bbb67fa152d7ad79aa) Also added Namespace#getEnum for completeness, see [#576](https://github.com/dcodeIO/protobuf.js/issues/576)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ef43acff547c0cd84cfb7a892fe94504a586e491) Added Namespace#getEnum and changed #lookupEnum to the same behavior, see [#576](https://github.com/dcodeIO/protobuf.js/issues/576)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1fcfdfe21c1b321d975a8a96d133a452c9a9c0d8) Added a heap of coverage comments for usually unused code paths to open things up<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c234de7f0573ee30ed1ecb15aa82b74c0f994876) Added codegen test to determine if any ancient browsers don't actually support it<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fed2000e7e461efdb1c3a1a1aeefa8b255a7c20b) Added legacy groups support to pbjs, see [#568](https://github.com/dcodeIO/protobuf.js/issues/568)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/974a1321da3614832aa0a5b2e7c923f66e4ba8ae) Initial support for legacy groups + test case, see [#568](https://github.com/dcodeIO/protobuf.js/issues/568)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c9a66bf393d9d6927f35a9c18abf5d1c31db912) Added asJSON bytes as Buffer, see [#566](https://github.com/dcodeIO/protobuf.js/issues/566)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c60cd397e902ae6851c017f2c298520b8336cbee) Annotated callback types in pbjs-generated services, see [#582](https://github.com/dcodeIO/protobuf.js/issues/582)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3e7e4fc59e6d2d6c862410b4b427fbedccdb237b) Removed type/ns alias comment in static target to not confuse jsdoc unnecessarily<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/99ad9cc08721b834a197d4bbb67fa152d7ad79aa) Made pbjs use loadSync for deterministic outputs, see [#573](https://github.com/dcodeIO/protobuf.js/issues/573)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4d1f5facfcaaf5f2ab6a70b12443ff1b66e7b94e) Updated documentation on runtime and noparse builds<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c59647a7542cbc4292248787e5f32bb99a9b8d46) Fixed an issue with the changelog generator skipping some commits<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/24f2c03af9f13f5404259866fdc8fed33bfaae25) Added notes on how to use pbjs and pbts programmatically<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3544576116146b209246d71c7f7a9ed687950b26) Manually sorted old changelog entries<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d5812571f335bae68f924aa1098519683a9f3e44) Initial changelog generator, see [#574](https://github.com/dcodeIO/protobuf.js/issues/574)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ab3e236a967a032a98267a648f84d129fdb4d4a6) Added static/JSON module interchangeability to README<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7939a4bd8baca5f7e07530fc93f27911a6d91c6f) Updated README and bundler according to dynamic require calls<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/93e04f1db4a9ef3accff8d071c75be3d74c0cd4a) Added basic services test case<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b5a068f5b79b6f00c4b05d9ac458878650ffa09a) Just polyfill Buffer.from / .allocUnsafe for good<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4375a485789e14f7bf24bece819001154a03dca2) Added a test case to find out if all the fallbacks are just for IE8<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/deb2e82ed7eda41d065a09d120e91c0f7ecf1e6a) Commented out float assertions in float test including explanation<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d3ebd5745b024033fbc2410ecad4d4e02abd67db) Expose array implementation used with (older) browsers on util for tests<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b1b6a813c93da4c7459755186aa02ef2f3765c94) Updated test cases<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/99dc5faa7b39fdad8ebc102de4463f8deb7f48ff) Added assumptions to float test case<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/948ca2e3c5c62fedcd918d75539c261abf1a7347) Updated travis config to use C++11<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c59647a7542cbc4292248787e5f32bb99a9b8d46) Updated / added additional LICENSE files where appropriate<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/333f0221814be976874862dc83d0b216e07d4012) Integrated changelog into build process, now also has 'npm run make' for everything, see [#574](https://github.com/dcodeIO/protobuf.js/issues/574)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ab3e236a967a032a98267a648f84d129fdb4d4a6) Minor optimizations through providing type-hints<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ab3e236a967a032a98267a648f84d129fdb4d4a6) Reverted shortened switch statements in verifier<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ab3e236a967a032a98267a648f84d129fdb4d4a6) Enums can't be map key types<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8ef6975b0bd372b79e9b638f43940424824e7176) Use custom require (now a micromodule) for all optional modules, see [#571](https://github.com/dcodeIO/protobuf.js/issues/571)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e226f001e4e4633d64c52be4abc1915d7b7bd515) Support usage when size = 0<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/19e906c2a15acc6178b3bba6b19c2f021e681176) Reverted aliases frequently used in codegen for better gzip ratio<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/47b51ec95a540681cbed0bac1b2f02fc4cf0b73d) Shrank the bundle size a bit<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f8451f0058fdf7a1fac15ffc529e4e899c6b343c) Can finally run with --trace-deopt again without crashes<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c9a66bf393d9d6927f35a9c18abf5d1c31db912) Other minor optimizations<br />
|
||||
|
||||
# [6.2.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.2.1)
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1a6fdc9a11fb08506d09351f8e853384c2b8be25) Added ParseOptions to protobuf.parse and --keep-case for .proto sources to pbjs, see [#564](https://github.com/dcodeIO/protobuf.js/issues/564)<br />
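
For illustration, a minimal sketch of the per-call option (the `--keep-case` flag is the pbjs equivalent; the inline message is hypothetical):

```js
const protobuf = require("protobufjs");
const source = "message Test { optional string my_field = 1; }";

const camel = protobuf.parse(source).root.lookupType("Test");
const kept  = protobuf.parse(source, { keepCase: true }).root.lookupType("Test");

console.log(Object.keys(camel.fields)); // [ 'myField' ]  (default camel-casing)
console.log(Object.keys(kept.fields));  // [ 'my_field' ] (original name kept)
```
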
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fc383d0721d83f66b2d941f0d9361621839327e9) Better TypeScript definition support for @property-annotated objects<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4531d75cddee9a99adcac814d52613116ba789f3) Can't just inline longNeq but can be simplified<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8f25377cf99036794ba13b160a5060f312d1a7e7) Array abuse and varint optimization<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/90b201209a03e8022ada0ab9182f338fa0813651) Updated dependencies<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f1110b0993ec86e0a4aee1735bd75b901952cb36) Other minor improvements to short ifs<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c079c900e2d61c63d5508eafacbd00163d377482) Reader/Writer example<br />
|
||||
|
||||
# [6.2.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.2.0)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9b7b92a4c7f8caa460d687778dc0628a74cdde37) Fixed reserved names re, also ensure valid service method names, see [#559](https://github.com/dcodeIO/protobuf.js/issues/559)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a83425049c9a78c5607bc35e8089e08ce78a741e) Fix d.ts whitespace on empty lines, added tsd-jsdoc LICENSE<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5f9bede280aa998afb7898e8d2718b4a229e8e6f) Fix asJSON defaults option, make it work for repeated fields.<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b0aef62191b65cbb305ece84a6652d76f98da259) Inlined any Reader/Writer#tag calls, also fixes [#556](https://github.com/dcodeIO/protobuf.js/issues/556)<br />
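
In other words, generated code now writes the precomputed key directly instead of going through a tag() helper. A sketch (field number and wire type chosen for illustration):

```js
const protobuf = require("protobufjs/minimal");
const writer = protobuf.Writer.create();

const fieldNumber = 1;
const wireType = 2; // 2 = length-delimited
writer.uint32((fieldNumber << 3 | wireType) >>> 0) // same bytes a tag() call would emit
      .string("hello");
console.log(writer.finish()); // Uint8Array [ 10, 5, 104, 101, 108, 108, 111 ]
```
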
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4d091d41caad9e63cd64003a08210b78878e01dd) Fix building default dist files with explicit runtime=false<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/096dfb686f88db38ed2d8111ed7aac36f8ba658a) Apply asJSON recursively<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/19c269f1dce1b35fa190f264896d0865a54a4fff) Ensure working reflection class names with minified builds<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9c769504e0ffa6cbe0b6f8cdc14f1231bed7ee34) Lazily resolve (some) cyclic dependencies, see [#560](https://github.com/dcodeIO/protobuf.js/issues/560)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/da07d8bbbede4175cc45ca46d883210c1082e295) Added protobuf.roots to minimal runtime, see [#554](https://github.com/dcodeIO/protobuf.js/issues/554)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8f407a18607334185afcc85ee98dc1478322bd01) Repo now includes a restructured version of tsd-jsdoc with our changes incorporated for issues/prs, see [#550](https://github.com/dcodeIO/protobuf.js/issues/550)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1b5e4250415c6169eadb405561242f847d75044b) Updated pbjs arguments<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4750e3111b9fdb107d0fc811e99904fbcdbb6de1) Pipe tsd-jsdoc output (requires dcodeIO/tsd-jsdoc/master) and respect cwd, see [#550](https://github.com/dcodeIO/protobuf.js/issues/550)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/75f4b6cb6325a3fc7cd8fed3de5dbe0b6b29c748) tsd-jsdoc progress<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/766171e4c8b6650ea9c6bc3e76c9c96973c2f546) README<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c33835cb1fe1872d823e94b0fff024dc624323e8) Added GH issue template<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6f9ffb6307476d48f45dc4f936744b82982d386b) Path micromodule, dependencies<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0b9b1d8505743995c5328daab1f1e124debc63bd) Test case for [#556](https://github.com/dcodeIO/protobuf.js/issues/556)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/74b2c5c5d33a46c3751ebeadc9d934d4ccb8286c) Raw alloc benchmark<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fb74223b7273530d8baa53437ee96c65a387436d) Other minor optimizations<br />
|
||||
|
||||
# [6.1.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.1.1)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/baea920fa6bf5746e0a7888cdbb089cd5d94fc90) Properly encode/decode map kv pairs as repeated messages (codegen and fallback), see [#547](https://github.com/dcodeIO/protobuf.js/issues/547)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/28a1d26f28daf855c949614ef485237c6bf316e5) Make genVerifyKey actually generate conditions for 32bit values and bool, fixes [#546](https://github.com/dcodeIO/protobuf.js/issues/546)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3e9d8ea9a5cbb2e029b5c892714edd6926d2e5a7) Fix to generation of verify methods for bytes<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e7893675ccdf18f0fdaea8f9a054a6b5402b060e) Take special care of oneofs when encoding (i.e. when explicitly set to defaults), see [#542](https://github.com/dcodeIO/protobuf.js/issues/542)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/52cd8b5a891ec8e11611127c8cfa6b3a91ff78e3) Added Message#asJSON option for bytes conversion<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/01365ba9116ca1649b682635bb29814657c4133c) Added Namespace#lookupType and Namespace#lookupService (throw instead of returning null), see [#544](https://github.com/dcodeIO/protobuf.js/issues/544)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a54fbc918ef6bd627113f05049ff704e07bf33b4) Provide prebuilt browser versions of the static runtime<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3783af7ca9187a1d9b1bb278ca69e0188c7e4c66) Initial pbts CLI for generating TypeScript definitions, see [#550](https://github.com/dcodeIO/protobuf.js/issues/550)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b8bce03405196b1779727f246229fd9217b4303d) Refactored json/static-module targets to use common wrappers<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/691231fbc453a243f48a97bfb86794ab5718ef49) Refactor cli to support multiple built-in wrappers, added named roots instead of always using global.root and added additionally necessary eslint comments, see [#540](https://github.com/dcodeIO/protobuf.js/issues/540)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e3e77d0c7dc973d3a5948a49d123bdaf8a048030) Annotate namespaces generated by static target, see [#550](https://github.com/dcodeIO/protobuf.js/issues/550)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/aff21a71e6bd949647b1b7721ea4e1fe16bcd933) static target: Basic support for oneof fields, see [#542](https://github.com/dcodeIO/protobuf.js/issues/542)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b6b00aa7b0cd35e0e8f3c16b322788e9942668d4) Fix to reflection documentation<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ed86f3acbeb6145be5f24dcd05efb287b539e61b) README on minimal runtime / available downloads<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d31590b82d8bafe6657bf877d403f01a034ab4ba) Notes on descriptors vs static modules<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ce41d0ef21cee2d918bdc5c3b542d3b7638b6ead) A lot of minor optimizations to performance and gzip ratio<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ecbb4a52fbab445e63bf23b91539e853efaefa47) Minimized base64 tables<br />
|
||||
|
||||
# [6.1.0](https://github.com/dcodeIO/protobuf.js/releases/tag/6.1.0)
|
||||
|
||||
## Breaking
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a46cc4934b7e888ae80e06fd7fdf91e5bc7f54f5) Removed as-function overload for Reader/Writer, profiler stub, optimized version of Reader#int32<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7983ee0ba15dc5c1daad82a067616865051848c9) Refactored Prototype and inherits away, is now Class and Message for more intuitive documentation and type refs<br />
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/c3c70fe3a47fd4f7c85dc80e1af7d9403fe349cd) Fixed failing test case on node < 6<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/66be5983321dd06460382d045eb87ed72a186776) Fixed serialization order of sfixed64, fixes [#536](https://github.com/dcodeIO/protobuf.js/issues/536)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7def340833f9f1cc41f4835bd0d62e203b54d9eb) Fixed serialization order of fixed64, fallback to parseInt with no long lib, see [#534](https://github.com/dcodeIO/protobuf.js/issues/534)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/98a58d40ca7ee7afb1f76c5804e82619104644f6) Actually allow undefined as service method type, fixes [#528](https://github.com/dcodeIO/protobuf.js/issues/528)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/38d867fc50a4d7eb1ca07525c9e4c71b8782443e) Do not skip optional delimiter after aggregate options, fixes [#520](https://github.com/dcodeIO/protobuf.js/issues/520)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/67449db7c7416cbc59ad230c168cf6e6b6dba0c5) Verify empty base64 encoded strings for bytes fields, see [#535](https://github.com/dcodeIO/protobuf.js/issues/535)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ef0fcb6d525c5aab13a39b4f393adf03f751c8c9) Fixed misspelling: "role" should be "rule"<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/55db92e21a26c04f524aeecb2316968c000e744d) decodeDelimited always forks if writer is specified, see [#531](https://github.com/dcodeIO/protobuf.js/issues/531)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ebae1e18152617f11ac07827828f5740d4f2eb7e) Mimic spec-compliant behaviour in oneof getVirtual, see [#523](https://github.com/dcodeIO/protobuf.js/issues/523)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/a0398f5880c434ff88fd8d420ba07cc29c5d39d3) Initial base64 string support for bytes fields, see [#535](https://github.com/dcodeIO/protobuf.js/issues/535)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a6c00c3e1def5d35c7fcaa1bbb6ce4e0fe67544) Initial type-checking verifier, see [#526](https://github.com/dcodeIO/protobuf.js/issues/526), added to bench out of competition<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3aa984e063cd73e4687102b4abd8adc16582dbc4) Initial loadSync (node only), see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f1370ff5b0db2ebb73b975a3d7c7bd5b901cbfac) Initial RPC service implementation, see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/090d8eaf10704a811a73e1becd52f2307cbcad48) Added 'defaults' option to Prototype#asJSON, see [#521](https://github.com/dcodeIO/protobuf.js/issues/521)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7c28483d65cde148e61fe9993f1716960b39e049) Use Uint8Array pool in browsers, just like node does with buffers<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4157a0ec2e54c4d19794cb16edddcd8d4fbd3e76) Also validate map fields, see [#526](https://github.com/dcodeIO/protobuf.js/issues/526) (this really needs some tests)<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0ce099bf4f4666fd00403a2839e6da628b8328a9) Added json-module target to pbjs, renamed static to static-module, see [#522](https://github.com/dcodeIO/protobuf.js/issues/522)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1d99442fe65fcaa2f9e33cc0186ef1336057e0cf) updated internals and static target to use immutable objects on prototypes<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e6eaa91b9fe021b3356d4d7e42033a877bc45871) Added a couple of alternative signatures, protobuf.load returns promise or undefined, aliased Reader/Writer-as-function signature with Reader/Writer.create for typed dialects, see [#518](https://github.com/dcodeIO/protobuf.js/issues/518)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9df6a3d4a654c3e122f97d9a594574c7bbb412da) Added variations for Root#load, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/193e65c006a8df8e9b72e0f23ace14a94952ee36) Added benchmark and profile related information to README<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/228a2027de35238feb867cb0485c78c755c4d17d) Added service example to README, see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/1a8c720714bf867f1f0195b4690faefa4f65e66a) README on tests<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/014fb668dcf853874c67e3e0aeb7b488a149d35c) Update README/dist to reflect recent changes<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/11d844c010c5a22eff9d5824714fb67feca77b26) Minimal documentation for micromodules<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/47608dd8595b0df2b30dd18fef4b8207f73ed56a) Document all the callbacks, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3891ab07bbe20cf84701605aa62453a6dbdb6af2) Documented streaming-rpc example a bit<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5606cb1bc41bc90cb069de676650729186b38640) Removed the need for triple-slash references in .d.ts by providing a minimal Long interface, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527), see [#530](https://github.com/dcodeIO/protobuf.js/issues/530)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/adf3cc3d340f8b2a596c892c64457b15e42a771b) Transition to micromodules<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f3a9589b74af6a1bf175f2b1994badf703d7abc4) Refactored argument order of utf8 for plausibility<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/14c207ed6e05a61e756fa4192efb2fa219734dd6) Restructured reusable micromodules<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/b510ba258986271f07007aebc5dcfea7cfd90cf4) Can't use Uint8Array#set on node < 6 buffers<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/78952a50ceee8e196b4f156eb01f7f693b5b8aac) Test case for [#531](https://github.com/dcodeIO/protobuf.js/issues/531)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/954577c6b421f7d7f4905bcc32f57e4ebaf548da) Safer signaling for synchronous load, see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9ea3766ff1b8fb7ccad028f44efe27d3b019eeb7) Proper end of stream signaling to rpcImpl, see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/e4faf7fac9b34d4776f3c15dfef8d2ae54104567) Moved event emitter to util, also accepts listener context, see [#529](https://github.com/dcodeIO/protobuf.js/issues/529)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9bdec62793ce77c954774cc19106bde4132f24fc) Probably the worst form of hiding require programmatically, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4462d8b05d3aba37c865cf53e09b3199cf051a92) Attempt to hide require('fs') from webpack, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/7c3bf8d32cbf831b251730b3876c35c901926300) Trying out jsdoc variations, see [#527](https://github.com/dcodeIO/protobuf.js/issues/527)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/bb4059467287fefda8f966de575fd0f8f9690bd3) by the way, why not include the json->proto functionality into "util"?<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f1008e6ee53ee50358e19c10df8608e950be4be3) Update proto.js<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fc9014822d9cdeae8c6e454ccb66ee28f579826c) Automatic profile generation and processing<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/2a2f6dcab5beaaa98e55a005b3d02643c45504d6) Generalized buffer pool and moved it to util<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/53a16bf3ada4a60cc09757712e0046f3f2d9d094) Make shields visible on npm, yey<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/9004b9d0c5135a7f6df208ea658258bf2f9e6fc9) More shields, I love shields, and maybe a workaround for travis timing out when sauce takes forever<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/060a7916a2715a9e4cd4d05d7c331bec33e60b7e) Trying SauceLabs with higher concurrency<br />
|
||||
|
||||
# [6.0.2](https://github.com/dcodeIO/protobuf.js/releases/tag/6.0.2)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/23d664384900eb65e44910def45f04be996fbba1) Fix packable float/double see [#513](https://github.com/dcodeIO/protobuf.js/issues/513)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/54283d39c4c955b6a84f7f53d4940eec39e4df5e) Handle oneofs in prototype ctor, add non-ES5 fallbacks, test case<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/0ae66752362899b8407918a759b09938e82436e1) Be nice to AMD, allow reconfiguration of Reader/Writer interface<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/00f3574ef4ee8b237600e41839bf0066719c4469) Initial static codegen target for reference<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/81e36a7c14d89b487dfe7cfb2f8380fcdf0df392) pbjs static target services support<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4885b8239eb74c72e665787ea0ece3336e493d7f) pbjs static target progress, uses customizable wrapper template<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/ad5abe7bac7885ba4f68df7eeb800d2e3b81750b) Static pbjs target progress, now generates usable CommonJS code, see [#512](https://github.com/dcodeIO/protobuf.js/issues/512)<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d9634d218849fb49ff5dfb4597bbb2c2d43bbf08) TypeScript example<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/fce8276193a5a9fabad5e5fbeb2ccd4f0f3294a9) Adjectives, notes on browserify<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/23d664384900eb65e44910def45f04be996fbba1) Refactor runtime util into separate file, reader/writer uses runtime util<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/f91c432a498bebc0adecef1562061b392611f51a) Also optimize reader with what we have learned<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d83f799519fe69808c88e83d9ad66c645d15e963) More (shameless) writer over-optimization<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/8a2dbc610a06fe3a1a2695a3ab032d073b77760d) Trading package size for float speed<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/95c5538cfaf1daf6b4990f6aa7599779aaacf99f) Skip defining getters and setters on IE8 entirely, automate defining fallbacks<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/09865d069303e795e475c82afe2b2267abaa59ea) Unified proto/reflection/classes/static encoding API to always return a writer<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/98d6ae186a48416e4ff3030987caed285f40a4f7) plain js utf8 is faster for short strings<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/79fbbf48b8e4dc9c41dcbdef2b73c5f2608b0318) Improve TypeScript support, add a simple test script<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/96fa07adec8b0ae05e07c2c40383267f25f2fc92) Use long.js dependency in tests, reference types instead of paths in .d.ts see [#503](https://github.com/dcodeIO/protobuf.js/issues/503)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/5785dee15d07fbcd14025a96686707173bd649a0) Restructured encoder / decoder to better support static code gen<br />
|
||||
|
||||
# [6.0.1](https://github.com/dcodeIO/protobuf.js/releases/tag/6.0.1)
|
||||
|
||||
## Fixed
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/799c1c1a84b255d1831cc84c3d24e61b36fa2530) Add support for long strings, fixes [#509](https://github.com/dcodeIO/protobuf.js/issues/509)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6e5fdb67cb34f90932e95a51370e1652acc55b4c) expose zero on LongBits, fixes [#508](https://github.com/dcodeIO/protobuf.js/issues/508)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/aa922c07490f185c5f97cf28ebbd65200fc5e377) Fixed issues with Root.fromJSON/#addJSON, search global for Long<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/51fe45656b530efbba6dad92f92db2300aa18761) Properly exclude browserify's annoying _process, again, fixes [#502](https://github.com/dcodeIO/protobuf.js/issues/502)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/3c16e462a28c36abbc8a176eab9ac2e10ba68597) Remember loaded files earlier to prevent race conditions, fixes [#501](https://github.com/dcodeIO/protobuf.js/issues/501)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4012a00a0578185d92fb6e7d3babd059fee6d6ab) Allow negative enum ids even if super inefficient (encodes as 10 bytes), fixes [#499](https://github.com/dcodeIO/protobuf.js/issues/499), fixes [#500](https://github.com/dcodeIO/protobuf.js/issues/500)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/96dd8f1729ad72e29dbe08dd01bc0ba08446dbe6) set resolvedResponseType on resolve(), fixes [#497](https://github.com/dcodeIO/protobuf.js/issues/497)<br />
|
||||
|
||||
## New
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/d3ae961765e193ec11227d96d699463de346423f) Initial take on runtime services, see [#507](https://github.com/dcodeIO/protobuf.js/issues/507)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/90cd46b3576ddb2d0a6fc6ae55da512db4be3acc) Include dist/ in npm package for frontend use<br />
|
||||
|
||||
## CLI
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/4affa1b7c0544229fb5f0d3948df6d832f6feadb) pbjs proto target field options, language-level compliance with jspb test.proto<br />
|
||||
|
||||
## Docs
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/6a06e95222d741c47a51bcec85cd20317de7c0b0) always use Uint8Array in docs for tsd, see [#503](https://github.com/dcodeIO/protobuf.js/issues/503)<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/637698316e095fc35f62a304daaca22654974966) Notes on dist files<br />
|
||||
|
||||
## Other
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/29ff3f10e367d6a2ae15fb4254f4073541559c65) Update eslint env<br />
|
||||
[:hash:](https://github.com/dcodeIO/protobuf.js/commit/943be1749c7d37945c11d1ebffbed9112c528d9f) Browser field in package.json isn't required<br />
|
||||
39
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
This license applies to all parts of protobuf.js except those files
|
||||
either explicitly including or referencing a different license or
|
||||
located in a directory containing a different LICENSE file.
|
||||
|
||||
---
|
||||
|
||||
Copyright (c) 2016, Daniel Wirtz All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
* Neither the name of its author, nor the names of its contributors
|
||||
may be used to endorse or promote products derived from this software
|
||||
without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
---
|
||||
|
||||
Code generated by the command line utilities is owned by the owner
|
||||
of the input file used when generating it. This code is not
|
||||
standalone and requires a support library to be linked with it. This
|
||||
support library is itself covered by the above license.
|
||||
879
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/README.md
generated
vendored
Normal file
@@ -0,0 +1,879 @@
|
||||
<h1><p align="center"><img alt="protobuf.js" src="https://github.com/dcodeIO/protobuf.js/raw/master/pbjs.png" width="120" height="104" /></p></h1>
|
||||
<p align="center"><a href="https://npmjs.org/package/protobufjs"><img src="https://img.shields.io/npm/v/protobufjs.svg" alt=""></a> <a href="https://travis-ci.org/dcodeIO/protobuf.js"><img src="https://travis-ci.org/dcodeIO/protobuf.js.svg?branch=master" alt=""></a> <a href="https://npmjs.org/package/protobufjs"><img src="https://img.shields.io/npm/dm/protobufjs.svg" alt=""></a> <a href="https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=dcode%40dcode.io&item_name=Open%20Source%20Software%20Donation&item_number=dcodeIO%2Fprotobuf.js"><img alt="donate ❤" src="https://img.shields.io/badge/donate-❤-ff2244.svg"></a></p>
|
||||
|
||||
**Protocol Buffers** are a language-neutral, platform-neutral, extensible way of serializing structured data for use in communications protocols, data storage, and more, originally designed at Google ([see](https://developers.google.com/protocol-buffers/)).
|
||||
|
||||
**protobuf.js** is a pure JavaScript implementation with [TypeScript](https://www.typescriptlang.org) support for [node.js](https://nodejs.org) and the browser. It's easy to use, blazingly fast and works out of the box with [.proto](https://developers.google.com/protocol-buffers/docs/proto) files!
|
||||
|
||||
Contents
|
||||
--------
|
||||
|
||||
* [Installation](#installation)<br />
|
||||
How to include protobuf.js in your project.
|
||||
|
||||
* [Usage](#usage)<br />
|
||||
A brief introduction to using the toolset.
|
||||
|
||||
* [Valid Message](#valid-message)
|
||||
* [Toolset](#toolset)<br />
|
||||
|
||||
* [Examples](#examples)<br />
|
||||
A few examples to get you started.
|
||||
|
||||
* [Using .proto files](#using-proto-files)
|
||||
* [Using JSON descriptors](#using-json-descriptors)
|
||||
* [Using reflection only](#using-reflection-only)
|
||||
* [Using custom classes](#using-custom-classes)
|
||||
* [Using services](#using-services)
|
||||
* [Usage with TypeScript](#usage-with-typescript)<br />
|
||||
|
||||
* [Command line](#command-line)<br />
|
||||
How to use the command line utility.
|
||||
|
||||
* [pbjs for JavaScript](#pbjs-for-javascript)
|
||||
* [pbts for TypeScript](#pbts-for-typescript)
|
||||
* [Reflection vs. static code](#reflection-vs-static-code)
|
||||
* [Command line API](#command-line-api)<br />
|
||||
|
||||
* [Additional documentation](#additional-documentation)<br />
|
||||
A list of available documentation resources.
|
||||
|
||||
* [Performance](#performance)<br />
|
||||
A few internals and a benchmark on performance.
|
||||
|
||||
* [Compatibility](#compatibility)<br />
|
||||
Notes on compatibility regarding browsers and optional libraries.
|
||||
|
||||
* [Building](#building)<br />
|
||||
How to build the library and its components yourself.
|
||||
|
||||
Installation
|
||||
---------------
|
||||
|
||||
### node.js
|
||||
|
||||
```
|
||||
$> npm install protobufjs [--save --save-prefix=~]
|
||||
```
|
||||
|
||||
```js
|
||||
var protobuf = require("protobufjs");
|
||||
```
|
||||
|
||||
**Note** that this library's versioning scheme is not semver-compatible for historical reasons. For guaranteed backward compatibility, always depend on `~6.A.B` instead of `^6.A.B` (hence the `--save-prefix` above).
|
||||
|
||||
### Browsers
|
||||
|
||||
Development:
|
||||
|
||||
```
|
||||
<script src="//cdn.rawgit.com/dcodeIO/protobuf.js/6.X.X/dist/protobuf.js"></script>
|
||||
```
|
||||
|
||||
Production:
|
||||
|
||||
```
|
||||
<script src="//cdn.rawgit.com/dcodeIO/protobuf.js/6.X.X/dist/protobuf.min.js"></script>
|
||||
```
|
||||
|
||||
**Remember** to replace the version tag with the exact [release](https://github.com/dcodeIO/protobuf.js/tags) your project depends upon.
|
||||
|
||||
The library supports CommonJS and AMD loaders and also exports globally as `protobuf`.
|
||||
|
||||
### Distributions
|
||||
|
||||
Where bundle size is a factor, there are additional stripped-down versions of the [full library][dist-full] (~19kb gzipped) available that exclude certain functionality:
|
||||
|
||||
* When working with JSON descriptors (i.e. generated by [pbjs](#pbjs-for-javascript)) and/or reflection only, see the [light library][dist-light] (~16kb gzipped) that excludes the parser. CommonJS entry point is:
|
||||
|
||||
```js
|
||||
var protobuf = require("protobufjs/light");
|
||||
```
|
||||
|
||||
* When working with statically generated code only, see the [minimal library][dist-minimal] (~6.5kb gzipped) that also excludes reflection. CommonJS entry point is:
|
||||
|
||||
```js
|
||||
var protobuf = require("protobufjs/minimal");
|
||||
```
|
||||
|
||||
[dist-full]: https://github.com/dcodeIO/protobuf.js/tree/master/dist
|
||||
[dist-light]: https://github.com/dcodeIO/protobuf.js/tree/master/dist/light
|
||||
[dist-minimal]: https://github.com/dcodeIO/protobuf.js/tree/master/dist/minimal
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
Because JavaScript is a dynamically typed language, protobuf.js introduces the concept of a **valid message** in order to provide the best possible [performance](#performance) (and, as a side product, proper typings):
|
||||
|
||||
### Valid message
|
||||
|
||||
> A valid message is an object (1) not missing any required fields and (2) exclusively composed of JS types understood by the wire format writer.
|
||||
|
||||
There are two possible types of valid messages and the encoder is able to work with both of these for convenience:
|
||||
|
||||
* **Message instances** (explicit instances of message classes with default values on their prototype) always (have to) satisfy the requirements of a valid message by design and
|
||||
* **Plain JavaScript objects** that just so happen to be composed in a way satisfying the requirements of a valid message as well.
|
||||
|
||||
In a nutshell, the wire format writer understands the following types:
|
||||
|
||||
| Field type | Expected JS type (create, encode) | Conversion (fromObject)
|
||||
|------------|-----------------------------------|------------------------
|
||||
| s-/u-/int32<br />s-/fixed32 | `number` (32 bit integer) | <code>value &#124; 0</code> if signed<br />`value >>> 0` if unsigned
|
||||
| s-/u-/int64<br />s-/fixed64 | `Long`-like (optimal)<br />`number` (53 bit integer) | `Long.fromValue(value)` with long.js<br />`parseInt(value, 10)` otherwise
|
||||
| float<br />double | `number` | `Number(value)`
|
||||
| bool | `boolean` | `Boolean(value)`
|
||||
| string | `string` | `String(value)`
|
||||
| bytes | `Uint8Array` (optimal)<br />`Buffer` (optimal under node)<br />`Array.<number>` (8 bit integers) | `base64.decode(value)` if a `string`<br />`Object` with non-zero `.length` is assumed to be buffer-like
|
||||
| enum | `number` (32 bit integer) | Looks up the numeric id if a `string`
|
||||
| message | Valid message | `Message.fromObject(value)`
|
||||
|
||||
* Explicit `undefined` and `null` are considered as not set if the field is optional.
|
||||
* Repeated fields are `Array.<T>`.
|
||||
* Map fields are `Object.<string,T>` with the key being the string representation of the respective value or an 8 characters long binary hash string for `Long`-likes.
|
||||
* Types marked as *optimal* provide the best performance because no conversion step (i.e. number to low and high bits or base64 string to buffer) is required.
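
As a minimal sketch of how these rules play out in practice (assuming a hypothetical `ExampleMessage` with an `int64` field `id` and a `bytes` field `data`, obtained via `root.lookupType` as shown in the [Examples](#examples) section below), optimal types can be handed to `create`/`encode` as-is, while looser representations go through `fromObject` first:

```js
// Hypothetical type for illustration:
//   message ExampleMessage { int64 id = 1; bytes data = 2; }
var ExampleMessage = root.lookupType("ExampleMessage");

// Optimal types: Long-like for int64, Uint8Array for bytes - no conversion step needed
var valid = ExampleMessage.create({
    id: { low: 42, high: 0, unsigned: false }, // Long-like object
    data: new Uint8Array([1, 2, 3])
});

// Looser types: plain number for int64, base64 string for bytes - converted by fromObject
var converted = ExampleMessage.fromObject({
    id: 42,
    data: "AQID" // base64 for [1, 2, 3]
});
```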
|
||||
|
||||
### Toolset
|
||||
|
||||
With that in mind and again for performance reasons, each message class provides a distinct set of methods with each method doing just one thing. This avoids unnecessary assertions / redundant operations where performance is a concern but also forces a user to perform verification (of plain JavaScript objects that *might* just so happen to be a valid message) explicitly where necessary - for example when dealing with user input.
|
||||
|
||||
**Note** that `Message` below refers to any message class.
|
||||
|
||||
* **Message.verify**(message: `Object`): `null|string`<br />
|
||||
verifies that a **plain JavaScript object** satisfies the requirements of a valid message and thus can be encoded without issues. Instead of throwing, it returns the error message as a string, if any.
|
||||
|
||||
```js
|
||||
var payload = "invalid (not an object)";
|
||||
var err = AwesomeMessage.verify(payload);
|
||||
if (err)
|
||||
throw Error(err);
|
||||
```
|
||||
|
||||
* **Message.encode**(message: `Message|Object` [, writer: `Writer`]): `Writer`<br />
|
||||
encodes a **message instance** or valid **plain JavaScript object**. This method does not implicitly verify the message and it's up to the user to make sure that the payload is a valid message.
|
||||
|
||||
```js
|
||||
var buffer = AwesomeMessage.encode(message).finish();
|
||||
```
|
||||
|
||||
* **Message.encodeDelimited**(message: `Message|Object` [, writer: `Writer`]): `Writer`<br />
|
||||
works like `Message.encode` but additionally prepends the length of the message as a varint.
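
For example, reusing the `message` from above, this is a small sketch of producing a length-prefixed buffer (useful when writing several messages to one stream):

```js
var delimitedBuffer = AwesomeMessage.encodeDelimited(message).finish();
// delimitedBuffer = [length varint][message bytes]
```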
|
||||
|
||||
* **Message.decode**(reader: `Reader|Uint8Array`): `Message`<br />
|
||||
decodes a buffer to a **message instance**. If required fields are missing, it throws a `util.ProtocolError` with an `instance` property set to the so far decoded message. If the wire format is invalid, it throws an `Error`.
|
||||
|
||||
```js
|
||||
try {
|
||||
var decodedMessage = AwesomeMessage.decode(buffer);
|
||||
} catch (e) {
|
||||
if (e instanceof protobuf.util.ProtocolError) {
|
||||
// e.instance holds the so far decoded message with missing required fields
|
||||
} else {
|
||||
// wire format is invalid
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
* **Message.decodeDelimited**(reader: `Reader|Uint8Array`): `Message`<br />
|
||||
works like `Message.decode` but additionally reads the length of the message prepended as a varint.
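
Continuing the sketch above, the length-prefixed buffer can be read back like so:

```js
var redecodedMessage = AwesomeMessage.decodeDelimited(delimitedBuffer);
```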
|
||||
|
||||
* **Message.create**(properties: `Object`): `Message`<br />
|
||||
creates a new **message instance** from a set of properties that satisfy the requirements of a valid message. Where applicable, it is recommended to prefer `Message.create` over `Message.fromObject` because it doesn't perform possibly redundant conversion.
|
||||
|
||||
```js
|
||||
var message = AwesomeMessage.create({ awesomeField: "AwesomeString" });
|
||||
```
|
||||
|
||||
* **Message.fromObject**(object: `Object`): `Message`<br />
|
||||
converts any non-valid **plain JavaScript object** to a **message instance** using the conversion steps outlined within the table above.
|
||||
|
||||
```js
|
||||
var message = AwesomeMessage.fromObject({ awesomeField: 42 });
|
||||
// converts awesomeField to a string
|
||||
```
|
||||
|
||||
* **Message.toObject**(message: `Message` [, options: `ConversionOptions`]): `Object`<br />
|
||||
converts a **message instance** to an arbitrary **plain JavaScript object** for interoperability with other libraries or storage. The resulting plain JavaScript object *might* still satisfy the requirements of a valid message depending on the actual conversion options specified, but most of the time it does not.
|
||||
|
||||
```js
|
||||
var object = AwesomeMessage.toObject(message, {
|
||||
enums: String, // enums as string names
|
||||
longs: String, // longs as strings (requires long.js)
|
||||
bytes: String, // bytes as base64 encoded strings
|
||||
defaults: true, // includes default values
|
||||
arrays: true, // populates empty arrays (repeated fields) even if defaults=false
|
||||
objects: true, // populates empty objects (map fields) even if defaults=false
|
||||
oneofs: true // includes virtual oneof fields set to the present field's name
|
||||
});
|
||||
```
|
||||
|
||||
For reference, the following diagram aims to display relationships between the different methods and the concept of a valid message:
|
||||
|
||||
<p align="center"><img alt="Toolset Diagram" src="http://dcode.io/protobuf.js/toolset.svg" /></p>
|
||||
|
||||
> In other words: `verify` indicates that calling `create` or `encode` directly on the plain object will succeed and result in a valid message. `fromObject`, on the other hand, does conversion from a broader range of plain objects to create valid messages. ([ref](https://github.com/dcodeIO/protobuf.js/issues/748#issuecomment-291925749))
|
||||
|
||||
Examples
|
||||
--------
|
||||
|
||||
### Using .proto files
|
||||
|
||||
It is possible to load existing .proto files using the full library, which parses and compiles the definitions to ready-to-use (reflection-based) message classes:
|
||||
|
||||
```protobuf
|
||||
// awesome.proto
|
||||
syntax = "proto3";
|
||||
package awesomepackage;
|
||||
|
||||
message AwesomeMessage {
|
||||
string awesome_field = 1; // becomes awesomeField
|
||||
}
|
||||
```
|
||||
|
||||
```js
|
||||
protobuf.load("awesome.proto", function(err, root) {
|
||||
if (err)
|
||||
throw err;
|
||||
|
||||
// Obtain a message type
|
||||
var AwesomeMessage = root.lookupType("awesomepackage.AwesomeMessage");
|
||||
|
||||
// Exemplary payload
|
||||
var payload = { awesomeField: "AwesomeString" };
|
||||
|
||||
// Verify the payload if necessary (i.e. when possibly incomplete or invalid)
|
||||
var errMsg = AwesomeMessage.verify(payload);
|
||||
if (errMsg)
|
||||
throw Error(errMsg);
|
||||
|
||||
// Create a new message
|
||||
var message = AwesomeMessage.create(payload); // or use .fromObject if conversion is necessary
|
||||
|
||||
// Encode a message to an Uint8Array (browser) or Buffer (node)
|
||||
var buffer = AwesomeMessage.encode(message).finish();
|
||||
// ... do something with buffer
|
||||
|
||||
// Decode an Uint8Array (browser) or Buffer (node) to a message
|
||||
var message = AwesomeMessage.decode(buffer);
|
||||
// ... do something with message
|
||||
|
||||
// If the application uses length-delimited buffers, there is also encodeDelimited and decodeDelimited.
|
||||
|
||||
// Maybe convert the message back to a plain object
|
||||
var object = AwesomeMessage.toObject(message, {
|
||||
longs: String,
|
||||
enums: String,
|
||||
bytes: String,
|
||||
// see ConversionOptions
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
Additionally, promise syntax can be used by omitting the callback, if preferred:
|
||||
|
||||
```js
|
||||
protobuf.load("awesome.proto")
|
||||
.then(function(root) {
|
||||
...
|
||||
});
|
||||
```
|
||||
|
||||
### Using JSON descriptors
|
||||
|
||||
The library utilizes JSON descriptors that are equivalent to a .proto definition. For example, the following is identical to the .proto definition seen above:
|
||||
|
||||
```json
|
||||
// awesome.json
|
||||
{
|
||||
"nested": {
|
||||
"AwesomeMessage": {
|
||||
"fields": {
|
||||
"awesomeField": {
|
||||
"type": "string",
|
||||
"id": 1
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
JSON descriptors closely resemble the internal reflection structure:
|
||||
|
||||
| Type (T) | Extends | Type-specific properties
|
||||
|--------------------|--------------------|-------------------------
|
||||
| *ReflectionObject* | | options
|
||||
| *Namespace* | *ReflectionObject* | nested
|
||||
| Root | *Namespace* | **nested**
|
||||
| Type | *Namespace* | **fields**
|
||||
| Enum | *ReflectionObject* | **values**
|
||||
| Field | *ReflectionObject* | rule, **type**, **id**
|
||||
| MapField | Field | **keyType**
|
||||
| OneOf | *ReflectionObject* | **oneof** (array of field names)
|
||||
| Service | *Namespace* | **methods**
|
||||
| Method | *ReflectionObject* | type, **requestType**, **responseType**, requestStream, responseStream
|
||||
|
||||
* **Bold properties** are required. *Italic types* are abstract.
|
||||
* `T.fromJSON(name, json)` creates the respective reflection object from a JSON descriptor
|
||||
* `T#toJSON()` creates a JSON descriptor from the respective reflection object (its name is used as the key within the parent)
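
As a small sketch of this round trip, the `AwesomeMessage` descriptor from above can be turned into a reflected type and back (assuming the full or light library is loaded as `protobuf`):

```js
// JSON descriptor -> reflected type
var AwesomeMessageType = protobuf.Type.fromJSON("AwesomeMessage", {
    fields: {
        awesomeField: { type: "string", id: 1 }
    }
});

// reflected type -> JSON descriptor (its name is used as the key within the parent)
var descriptor = AwesomeMessageType.toJSON();
```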
|
||||
|
||||
Exclusively using JSON descriptors instead of .proto files enables the use of just the light library (the parser isn't required in this case).
|
||||
|
||||
A JSON descriptor can either be loaded the usual way:
|
||||
|
||||
```js
|
||||
protobuf.load("awesome.json", function(err, root) {
|
||||
if (err) throw err;
|
||||
|
||||
// Continue at "Obtain a message type" above
|
||||
});
|
||||
```
|
||||
|
||||
Or it can be loaded inline:
|
||||
|
||||
```js
|
||||
var jsonDescriptor = require("./awesome.json"); // exemplary for node
|
||||
|
||||
var root = protobuf.Root.fromJSON(jsonDescriptor);
|
||||
|
||||
// Continue at "Obtain a message type" above
|
||||
```
|
||||
|
||||
### Using reflection only
|
||||
|
||||
Both the full and the light library include full reflection support. One could, for example, define the .proto definitions seen in the examples above using just reflection:
|
||||
|
||||
```js
|
||||
...
|
||||
var Root = protobuf.Root,
|
||||
Type = protobuf.Type,
|
||||
Field = protobuf.Field;
|
||||
|
||||
var AwesomeMessage = new Type("AwesomeMessage").add(new Field("awesomeField", 1, "string"));
|
||||
|
||||
var root = new Root().define("awesomepackage").add(AwesomeMessage);
|
||||
|
||||
// Continue at "Create a new message" above
|
||||
...
|
||||
```
|
||||
|
||||
Detailed information on the reflection structure is available within the [API documentation](#additional-documentation).
|
||||
|
||||
### Using custom classes
|
||||
|
||||
Message classes can also be extended with custom functionality and it is also possible to register a custom constructor with a reflected message type:
|
||||
|
||||
```js
|
||||
...
|
||||
|
||||
// Define a custom constructor
|
||||
function AwesomeMessage(properties) {
|
||||
// custom initialization code
|
||||
...
|
||||
}
|
||||
|
||||
// Register the custom constructor with its reflected type (*)
|
||||
root.lookupType("awesomepackage.AwesomeMessage").ctor = AwesomeMessage;
|
||||
|
||||
// Define custom functionality
|
||||
AwesomeMessage.customStaticMethod = function() { ... };
|
||||
AwesomeMessage.prototype.customInstanceMethod = function() { ... };
|
||||
|
||||
// Continue at "Create a new message" above
|
||||
```
|
||||
|
||||
(*) Besides referencing its reflected type through `AwesomeMessage.$type` and `AwesomeMessage#$type`, the respective custom class is automatically populated with:
|
||||
|
||||
* `AwesomeMessage.create`
|
||||
* `AwesomeMessage.encode` and `AwesomeMessage.encodeDelimited`
|
||||
* `AwesomeMessage.decode` and `AwesomeMessage.decodeDelimited`
|
||||
* `AwesomeMessage.verify`
|
||||
* `AwesomeMessage.fromObject`, `AwesomeMessage.toObject`, `AwesomeMessage#toObject` and `AwesomeMessage#toJSON`
|
||||
|
||||
Afterwards, decoded messages of this type are `instanceof AwesomeMessage`.
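
For example, reusing a `buffer` produced by the encoding examples above:

```js
var msg = AwesomeMessage.decode(buffer);
console.log(msg instanceof AwesomeMessage); // true - the registered custom class is used
```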
|
||||
|
||||
Alternatively, it is also possible to reuse and extend the internal constructor if custom initialization code is not required:
|
||||
|
||||
```js
|
||||
...
|
||||
|
||||
// Reuse the internal constructor
|
||||
var AwesomeMessage = root.lookupType("awesomepackage.AwesomeMessage").ctor;
|
||||
|
||||
// Define custom functionality
|
||||
AwesomeMessage.customStaticMethod = function() { ... };
|
||||
AwesomeMessage.prototype.customInstanceMethod = function() { ... };
|
||||
|
||||
// Continue at "Create a new message" above
|
||||
```
|
||||
|
||||
### Using services
|
||||
|
||||
The library also supports consuming services but it doesn't make any assumptions about the actual transport channel. Instead, a user must provide a suitable RPC implementation, which is an asynchronous function that takes the reflected service method, the binary request and a node-style callback as its parameters:
|
||||
|
||||
```js
|
||||
function rpcImpl(method, requestData, callback) {
|
||||
// perform the request using an HTTP request or a WebSocket for example
|
||||
var responseData = ...;
|
||||
// and call the callback with the binary response afterwards:
|
||||
callback(null, responseData);
|
||||
}
|
||||
```
|
||||
|
||||
Example:
|
||||
|
||||
```protobuf
|
||||
// greeter.proto
|
||||
syntax = "proto3";
|
||||
|
||||
service Greeter {
|
||||
rpc SayHello (HelloRequest) returns (HelloReply) {}
|
||||
}
|
||||
|
||||
message HelloRequest {
|
||||
string name = 1;
|
||||
}
|
||||
|
||||
message HelloReply {
|
||||
string message = 1;
|
||||
}
|
||||
```
|
||||
|
||||
```js
|
||||
...
|
||||
var Greeter = root.lookup("Greeter");
|
||||
var greeter = Greeter.create(/* see above */ rpcImpl, /* request delimited? */ false, /* response delimited? */ false);
|
||||
|
||||
greeter.sayHello({ name: 'you' }, function(err, response) {
|
||||
console.log('Greeting:', response.message);
|
||||
});
|
||||
```
|
||||
|
||||
Services also support promises:
|
||||
|
||||
```js
|
||||
greeter.sayHello({ name: 'you' })
|
||||
.then(function(response) {
|
||||
console.log('Greeting:', response.message);
|
||||
});
|
||||
```
|
||||
|
||||
There is also an [example for streaming RPC](https://github.com/dcodeIO/protobuf.js/blob/master/examples/streaming-rpc.js).
|
||||
|
||||
Note that the service API is meant for clients. Implementing a server-side endpoint pretty much always requires transport-channel-specific code (e.g. HTTP, WebSocket), with the only common denominator being that it decodes and encodes messages.
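
A minimal sketch of what such an endpoint could look like (hypothetical transport glue; `HelloRequest` and `HelloReply` are assumed to have been obtained via `root.lookupType` as above, and only the decode/encode calls are protobuf.js-specific):

```js
// called by some transport layer with the raw binary request body
function handleSayHello(requestData, sendResponseData) {
    // decode the binary request into a HelloRequest message instance
    var request = HelloRequest.decode(requestData);

    // application logic
    var reply = HelloReply.create({ message: "Hello, " + request.name + "!" });

    // encode the reply and hand the raw bytes back to the transport
    sendResponseData(HelloReply.encode(reply).finish());
}
```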
|
||||
|
||||
### Usage with TypeScript
|
||||
|
||||
The library ships with its own [type definitions](https://github.com/dcodeIO/protobuf.js/blob/master/index.d.ts) and modern editors like [Visual Studio Code](https://code.visualstudio.com/) will automatically detect and use them for code completion.
|
||||
|
||||
The npm package depends on [@types/node](https://www.npmjs.com/package/@types/node) because of `Buffer` and [@types/long](https://www.npmjs.com/package/@types/long) because of `Long`. If you are not building for node and/or not using long.js, it should be safe to exclude them manually.
|
||||
|
||||
#### Using the JS API
|
||||
|
||||
The API shown above works pretty much the same with TypeScript. However, because everything is typed, accessing fields on instances of dynamically generated message classes requires either using bracket-notation (i.e. `message["awesomeField"]`) or explicit casts. Alternatively, it is possible to use a [typings file generated for its static counterpart](#pbts-for-typescript).
|
||||
|
||||
```ts
|
||||
import { load } from "protobufjs"; // respectively "./node_modules/protobufjs"
|
||||
|
||||
load("awesome.proto", function(err, root) {
|
||||
if (err)
|
||||
throw err;
|
||||
|
||||
// example code
|
||||
const AwesomeMessage = root.lookupType("awesomepackage.AwesomeMessage");
|
||||
|
||||
let message = AwesomeMessage.create({ awesomeField: "hello" });
|
||||
console.log(`message = ${JSON.stringify(message)}`);
|
||||
|
||||
let buffer = AwesomeMessage.encode(message).finish();
|
||||
console.log(`buffer = ${Array.prototype.toString.call(buffer)}`);
|
||||
|
||||
let decoded = AwesomeMessage.decode(buffer);
|
||||
console.log(`decoded = ${JSON.stringify(decoded)}`);
|
||||
});
|
||||
```
|
||||
|
||||
#### Using generated static code
|
||||
|
||||
If you generated static code to `bundle.js` using the CLI and its type definitions to `bundle.d.ts`, then you can just do:
|
||||
|
||||
```ts
|
||||
import { AwesomeMessage } from "./bundle.js";
|
||||
|
||||
// example code
|
||||
let message = AwesomeMessage.create({ awesomeField: "hello" });
|
||||
let buffer = AwesomeMessage.encode(message).finish();
|
||||
let decoded = AwesomeMessage.decode(buffer);
|
||||
```
|
||||
|
||||
#### Using decorators
|
||||
|
||||
The library also includes an early implementation of [decorators](https://www.typescriptlang.org/docs/handbook/decorators.html).
|
||||
|
||||
**Note** that decorators are an experimental feature in TypeScript and that declaration order is important depending on the JS target. For example, `@Field.d(2, AwesomeArrayMessage)` requires that `AwesomeArrayMessage` has been defined earlier when targeting `ES5`.
|
||||
|
||||
```ts
|
||||
import { Message, Type, Field, OneOf } from "protobufjs/light"; // respectively "./node_modules/protobufjs/light.js"
|
||||
|
||||
export class AwesomeSubMessage extends Message<AwesomeSubMessage> {
|
||||
|
||||
@Field.d(1, "string")
|
||||
public awesomeString: string;
|
||||
|
||||
}
|
||||
|
||||
export enum AwesomeEnum {
|
||||
ONE = 1,
|
||||
TWO = 2
|
||||
}
|
||||
|
||||
@Type.d("SuperAwesomeMessage")
|
||||
export class AwesomeMessage extends Message<AwesomeMessage> {
|
||||
|
||||
@Field.d(1, "string", "optional", "awesome default string")
|
||||
public awesomeField: string;
|
||||
|
||||
@Field.d(2, AwesomeSubMessage)
|
||||
public awesomeSubMessage: AwesomeSubMessage;
|
||||
|
||||
@Field.d(3, AwesomeEnum, "optional", AwesomeEnum.ONE)
|
||||
public awesomeEnum: AwesomeEnum;
|
||||
|
||||
@OneOf.d("awesomeSubMessage", "awesomeEnum")
|
||||
public which: string;
|
||||
|
||||
}
|
||||
|
||||
// example code
|
||||
let message = new AwesomeMessage({ awesomeField: "hello" });
|
||||
let buffer = AwesomeMessage.encode(message).finish();
|
||||
let decoded = AwesomeMessage.decode(buffer);
|
||||
```
|
||||
|
||||
Supported decorators are:
|
||||
|
||||
* **Type.d(typeName?: `string`)** *(optional)*<br />
|
||||
annotates a class as a protobuf message type. If `typeName` is not specified, the constructor's runtime function name is used for the reflected type.
|
||||
|
||||
* **Field.d<T>(fieldId: `number`, fieldType: `string | Constructor<T>`, fieldRule?: `"optional" | "required" | "repeated"`, defaultValue?: `T`)**<br />
|
||||
annotates a property as a protobuf field with the specified id and protobuf type.
|
||||
|
||||
* **MapField.d<T extends { [key: string]: any }>(fieldId: `number`, fieldKeyType: `string`, fieldValueType: `string | Constructor<{}>`)**<br />
|
||||
annotates a property as a protobuf map field with the specified id, protobuf key and value type.
|
||||
|
||||
* **OneOf.d<T extends string>(...fieldNames: `string[]`)**<br />
|
||||
annotates a property as a protobuf oneof covering the specified fields.
|
||||
|
||||
Other notes:
|
||||
|
||||
* Decorated types reside in `protobuf.roots["decorated"]` using a flat structure, so no duplicate names.
|
||||
* Enums are copied to a reflected enum with a generic name on decorator evaluation because referenced enum objects have no runtime name the decorator could use.
|
||||
* Default values must be specified as arguments to the decorator instead of using a property initializer for proper prototype behavior.
|
||||
* Property names on decorated classes must not be renamed on compile time (i.e. by a minifier) because decorators just receive the original field name as a string.
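
For instance, a decorated type can be looked up through the shared decorated root like any other reflected type (a small sketch; `"SuperAwesomeMessage"` is the type name registered by the decorator example above):

```js
var protobuf = require("protobufjs/light");

// the decorated root exists once a decorated class has been evaluated
var decoratedRoot = protobuf.roots["decorated"];
var ReflectedAwesomeMessage = decoratedRoot.lookupType("SuperAwesomeMessage");
```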
|
||||
|
||||
**ProTip!** Not as pretty, but you can [use decorators in plain JavaScript](https://github.com/dcodeIO/protobuf.js/blob/master/examples/js-decorators.js) as well.
|
||||
|
||||
Command line
|
||||
------------
|
||||
|
||||
**Note** that moving the CLI to [its own package](./cli) is a work in progress. At the moment, it's still part of the main package.
|
||||
|
||||
The command line interface (CLI) can be used to translate between file formats and to generate static code as well as TypeScript definitions.
|
||||
|
||||
### pbjs for JavaScript
|
||||
|
||||
```
|
||||
Translates between file formats and generates static code.
|
||||
|
||||
-t, --target Specifies the target format. Also accepts a path to require a custom target.
|
||||
|
||||
json JSON representation
|
||||
json-module JSON representation as a module
|
||||
proto2 Protocol Buffers, Version 2
|
||||
proto3 Protocol Buffers, Version 3
|
||||
static Static code without reflection (non-functional on its own)
|
||||
static-module Static code without reflection as a module
|
||||
|
||||
-p, --path Adds a directory to the include path.
|
||||
|
||||
-o, --out Saves to a file instead of writing to stdout.
|
||||
|
||||
--sparse Exports only those types referenced from a main file (experimental).
|
||||
|
||||
Module targets only:
|
||||
|
||||
-w, --wrap Specifies the wrapper to use. Also accepts a path to require a custom wrapper.
|
||||
|
||||
default Default wrapper supporting both CommonJS and AMD
|
||||
commonjs CommonJS wrapper
|
||||
amd AMD wrapper
|
||||
es6 ES6 wrapper (implies --es6)
|
||||
closure A closure adding to protobuf.roots where protobuf is a global
|
||||
|
||||
-r, --root Specifies an alternative protobuf.roots name.
|
||||
|
||||
-l, --lint Linter configuration. Defaults to protobuf.js-compatible rules:
|
||||
|
||||
eslint-disable block-scoped-var, no-redeclare, no-control-regex, no-prototype-builtins
|
||||
|
||||
--es6 Enables ES6 syntax (const/let instead of var)
|
||||
|
||||
Proto sources only:
|
||||
|
||||
--keep-case Keeps field casing instead of converting to camel case.
|
||||
|
||||
Static targets only:
|
||||
|
||||
--no-create Does not generate create functions used for reflection compatibility.
|
||||
--no-encode Does not generate encode functions.
|
||||
--no-decode Does not generate decode functions.
|
||||
--no-verify Does not generate verify functions.
|
||||
--no-convert Does not generate convert functions like from/toObject
|
||||
--no-delimited Does not generate delimited encode/decode functions.
|
||||
--no-beautify Does not beautify generated code.
|
||||
--no-comments Does not output any JSDoc comments.
|
||||
|
||||
--force-long Enforces the use of 'Long' for s-/u-/int64 and s-/fixed64 fields.
|
||||
--force-message Enforces the use of message instances instead of plain objects.
|
||||
|
||||
usage: pbjs [options] file1.proto file2.json ... (or pipe) other | pbjs [options] -
|
||||
```
|
||||
|
||||
For production environments it is recommended to bundle all your .proto files to a single .json file, which minimizes the number of network requests and avoids any parser overhead (hint: works with just the **light** library):
|
||||
|
||||
```
|
||||
$> pbjs -t json file1.proto file2.proto > bundle.json
|
||||
```
|
||||
|
||||
Now, either include this file in your final bundle:
|
||||
|
||||
```js
|
||||
var root = protobuf.Root.fromJSON(require("./bundle.json"));
|
||||
```
|
||||
|
||||
or load it the usual way:
|
||||
|
||||
```js
|
||||
protobuf.load("bundle.json", function(err, root) {
|
||||
...
|
||||
});
|
||||
```
|
||||
|
||||
Generated static code, on the other hand, works with just the **minimal** library. For example
|
||||
|
||||
```
|
||||
$> pbjs -t static-module -w commonjs -o compiled.js file1.proto file2.proto
|
||||
```
|
||||
|
||||
will generate static code for definitions within `file1.proto` and `file2.proto` to a CommonJS module `compiled.js`.
|
||||
|
||||
**ProTip!** Documenting your .proto files with `/** ... */`-blocks or (trailing) `/// ...` lines translates to generated static code.
|
||||
|
||||
|
||||
### pbts for TypeScript
|
||||
|
||||
```
|
||||
Generates TypeScript definitions from annotated JavaScript files.
|
||||
|
||||
-o, --out Saves to a file instead of writing to stdout.
|
||||
|
||||
-g, --global Name of the global object in browser environments, if any.
|
||||
|
||||
--no-comments Does not output any JSDoc comments.
|
||||
|
||||
Internal flags:
|
||||
|
||||
-n, --name Wraps everything in a module of the specified name.
|
||||
|
||||
-m, --main Whether building the main library without any imports.
|
||||
|
||||
usage: pbts [options] file1.js file2.js ... (or) other | pbts [options] -
|
||||
```
|
||||
|
||||
Picking up on the example above, the following not only generates static code to a CommonJS module `compiled.js` but also its respective TypeScript definitions to `compiled.d.ts`:
|
||||
|
||||
```
|
||||
$> pbjs -t static-module -w commonjs -o compiled.js file1.proto file2.proto
|
||||
$> pbts -o compiled.d.ts compiled.js
|
||||
```
|
||||
|
||||
Additionally, TypeScript definitions of static modules are compatible with their reflection-based counterparts (i.e. as exported by JSON modules), as long as the following conditions are met:
|
||||
|
||||
1. Instead of using `new SomeMessage(...)`, always use `SomeMessage.create(...)` because reflection objects do not provide a constructor.
|
||||
2. Types, services and enums must start with an uppercase letter to become available as properties of the reflected types as well (i.e. to be able to use `MyMessage.MyEnum` instead of `root.lookup("MyMessage.MyEnum")`).
|
||||
For example, the following generates a JSON module `bundle.js` and a `bundle.d.ts`, but no static code:

```
$> pbjs -t json-module -w commonjs -o bundle.js file1.proto file2.proto
$> pbjs -t static-module file1.proto file2.proto | pbts -o bundle.d.ts -
```

### Reflection vs. static code

While using .proto files directly requires the full library, and pure reflection/JSON requires the light library, pretty much all code except the relatively short descriptors is shared.

Static code, on the other hand, requires just the minimal library, but generates additional source code without any reflection features. This also implies that there is a break-even point: statically generated code becomes larger than descriptor-based code once the amount of generated code exceeds the size of the full (respectively light) library.

There is no significant difference performance-wise, as statically generated code is pretty much the same as the code generated at runtime, and both are largely interchangeable as seen in the previous section.

| Source | Library | Advantages | Tradeoffs
|--------|---------|------------|-----------
| .proto | full    | Easily editable<br />Interoperability with other libraries<br />No compile step | Some parsing and possibly network overhead
| JSON   | light   | Easily editable<br />No parsing overhead<br />Single bundle (no network overhead) | protobuf.js specific<br />Has a compile step
| static | minimal | Works where `eval` access is restricted<br />Fully documented<br />Small footprint for small protos | Can be hard to edit<br />No reflection<br />Has a compile step

### Command line API

Both utilities can be used programmatically by providing command line arguments and a callback to their respective `main` functions:

```js
var pbjs = require("protobufjs/cli/pbjs"); // or require("protobufjs/cli").pbjs / .pbts

pbjs.main([ "--target", "json-module", "path/to/myproto.proto" ], function(err, output) {
    if (err)
        throw err;
    // do something with output
});
```

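The same pattern applies to pbts. A minimal sketch, assuming the `compiled.js` module generated above (file names are placeholders):

```js
var pbts = require("protobufjs/cli").pbts;

pbts.main([ "--out", "compiled.d.ts", "compiled.js" ], function(err, output) {
    if (err)
        throw err;
    // do something with output, or rely on the file written via --out
});
```
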
Additional documentation
------------------------

#### Protocol Buffers
* [Google's Developer Guide](https://developers.google.com/protocol-buffers/docs/overview)

#### protobuf.js
* [API Documentation](http://dcode.io/protobuf.js)
* [CHANGELOG](https://github.com/dcodeIO/protobuf.js/blob/master/CHANGELOG.md)
* [Frequently asked questions](https://github.com/dcodeIO/protobuf.js/wiki) on our wiki

#### Community
* [Questions and answers](http://stackoverflow.com/search?tab=newest&q=protobuf.js) on StackOverflow

Performance
-----------
The package includes a benchmark that compares protobuf.js performance to native JSON (as far as this is possible) and [Google's JS implementation](https://github.com/google/protobuf/tree/master/js). On an i7-2600K running node 6.9.1 it yields:

```
benchmarking encoding performance ...

protobuf.js (reflect) x 541,707 ops/sec ±1.13% (87 runs sampled)
protobuf.js (static) x 548,134 ops/sec ±1.38% (89 runs sampled)
JSON (string) x 318,076 ops/sec ±0.63% (93 runs sampled)
JSON (buffer) x 179,165 ops/sec ±2.26% (91 runs sampled)
google-protobuf x 74,406 ops/sec ±0.85% (86 runs sampled)

protobuf.js (static) was fastest
protobuf.js (reflect) was 0.9% ops/sec slower (factor 1.0)
JSON (string) was 41.5% ops/sec slower (factor 1.7)
JSON (buffer) was 67.6% ops/sec slower (factor 3.1)
google-protobuf was 86.4% ops/sec slower (factor 7.3)

benchmarking decoding performance ...

protobuf.js (reflect) x 1,383,981 ops/sec ±0.88% (93 runs sampled)
protobuf.js (static) x 1,378,925 ops/sec ±0.81% (93 runs sampled)
JSON (string) x 302,444 ops/sec ±0.81% (93 runs sampled)
JSON (buffer) x 264,882 ops/sec ±0.81% (93 runs sampled)
google-protobuf x 179,180 ops/sec ±0.64% (94 runs sampled)

protobuf.js (reflect) was fastest
protobuf.js (static) was 0.3% ops/sec slower (factor 1.0)
JSON (string) was 78.1% ops/sec slower (factor 4.6)
JSON (buffer) was 80.8% ops/sec slower (factor 5.2)
google-protobuf was 87.0% ops/sec slower (factor 7.7)

benchmarking combined performance ...

protobuf.js (reflect) x 275,900 ops/sec ±0.78% (90 runs sampled)
protobuf.js (static) x 290,096 ops/sec ±0.96% (90 runs sampled)
JSON (string) x 129,381 ops/sec ±0.77% (90 runs sampled)
JSON (buffer) x 91,051 ops/sec ±0.94% (90 runs sampled)
google-protobuf x 42,050 ops/sec ±0.85% (91 runs sampled)

protobuf.js (static) was fastest
protobuf.js (reflect) was 4.7% ops/sec slower (factor 1.0)
JSON (string) was 55.3% ops/sec slower (factor 2.2)
JSON (buffer) was 68.6% ops/sec slower (factor 3.2)
google-protobuf was 85.5% ops/sec slower (factor 6.9)
```

These results are achieved by

* generating type-specific encoders, decoders, verifiers and converters at runtime
* configuring the reader/writer interface according to the environment
* using node-specific functionality where beneficial and, of course
* avoiding unnecessary operations through splitting up [the toolset](#toolset).

You can also run [the benchmark](https://github.com/dcodeIO/protobuf.js/blob/master/bench/index.js) ...

```
$> npm run bench
```

and [the profiler](https://github.com/dcodeIO/protobuf.js/blob/master/bench/prof.js) yourself (the latter requires a recent version of node):

```
$> npm run prof <encode|decode|encode-browser|decode-browser> [iterations=10000000]
```

Note that as of this writing, the benchmark suite performs significantly slower on node 7.2.0 compared to 6.9.1 for reasons that have not yet been pinned down.

Compatibility
-------------

* Works in all modern and not-so-modern browsers except IE8.
* Because the internals of this package do not rely on `google/protobuf/descriptor.proto`, options are parsed and presented literally.
* If typed arrays are not supported by the environment, plain arrays will be used instead.
* Support for pre-ES5 environments (except IE8) can be achieved by [using a polyfill](https://github.com/dcodeIO/protobuf.js/blob/master/scripts/polyfill.js).
* Support for [Content Security Policy](https://w3c.github.io/webappsec-csp/)-restricted environments (like Chrome extensions without [unsafe-eval](https://developer.chrome.com/extensions/contentSecurityPolicy#relaxing-eval)) can be achieved by generating and using static code instead.
* If a proper way to work with 64 bit values (uint64, int64 etc.) is required, just install [long.js](https://github.com/dcodeIO/long.js) alongside this library. All 64 bit numbers will then be returned as a `Long` instance instead of a possibly unsafe JavaScript number ([see](https://github.com/dcodeIO/long.js)); a sketch follows after this list.
* For descriptor.proto interoperability, see [ext/descriptor](https://github.com/dcodeIO/protobuf.js/tree/master/ext/descriptor)

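A minimal sketch of the long.js interplay, assuming a hypothetical message type `Example` with an int64 field `value` defined in `example.proto`:

```js
// Both module names are real; the .proto file and its contents are placeholders.
var protobuf = require("protobufjs");
var Long = require("long");

protobuf.load("example.proto", function(err, root) {
    if (err)
        throw err;
    var Example = root.lookupType("Example");
    // With long.js installed, int64 values round-trip as Long instances
    // instead of potentially unsafe JavaScript numbers.
    var message = Example.create({ value: Long.fromString("9007199254740993") });
    var decoded = Example.decode(Example.encode(message).finish());
    console.log(decoded.value.toString()); // "9007199254740993"
});
```
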
Building
--------

To build the library or its components yourself, clone it from GitHub and install the development dependencies:

```
$> git clone https://github.com/dcodeIO/protobuf.js.git
$> cd protobuf.js
$> npm install
```

Building the development and production versions, including their source maps, to `dist/`:

```
$> npm run build
```

Building the documentation to `docs/`:

```
$> npm run docs
```

Building the TypeScript definition to `index.d.ts`:

```
$> npm run types
```

### Browserify integration

By default, protobuf.js integrates into any browserify build-process without requiring any optional modules. Hence:

* If int64 support is required, explicitly require the `long` module somewhere in your project as it will be excluded otherwise. This assumes that a global `require` function is present that protobuf.js can call to obtain the long module.

  If there is no global `require` function present after bundling, it's also possible to assign the long module programmatically:

  ```js
  var Long = ...;

  protobuf.util.Long = Long;
  protobuf.configure();
  ```

* If you have any special requirements, there is [the bundler](https://github.com/dcodeIO/protobuf.js/blob/master/scripts/bundle.js) for reference.

**License:** [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause)

6
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/bin/pbjs
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env node
|
||||
var path = require("path"),
|
||||
cli = require(path.join(__dirname, "..", "cli", "pbjs.js"));
|
||||
var ret = cli.main(process.argv.slice(2));
|
||||
if (typeof ret === 'number')
|
||||
process.exit(ret);
|
||||
6
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/bin/pbts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env node
|
||||
var path = require("path"),
|
||||
cli = require(path.join(__dirname, "..", "cli", "pbts.js"));
|
||||
var ret = cli.main(process.argv.slice(2));
|
||||
if (typeof ret === 'number')
|
||||
process.exit(ret);
|
||||
33
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/LICENSE
generated
vendored
Normal file
33
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
Copyright (c) 2016, Daniel Wirtz All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
* Neither the name of its author, nor the names of its contributors
|
||||
may be used to endorse or promote products derived from this software
|
||||
without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
---
|
||||
|
||||
Code generated by the command line utilities is owned by the owner
|
||||
of the input file used when generating it. This code is not
|
||||
standalone and requires a support library to be linked with it. This
|
||||
support library is itself covered by the above license.
|
||||
11
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/README.md
generated
vendored
Normal file
11
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/README.md
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
protobufjs-cli
|
||||
==============
|
||||
[](https://www.npmjs.com/package/protobufjs-cli)
|
||||
|
||||
Command line interface (CLI) for [protobuf.js](https://github.com/dcodeIO/protobuf.js). Translates between file formats and generates static code as well as TypeScript definitions.
|
||||
|
||||
* [CLI Documentation](https://github.com/dcodeIO/protobuf.js#command-line)
|
||||
|
||||
**Note** that moving the CLI to its own package is a work in progress. At the moment, it's still part of the main package.
|
||||
|
||||
**License:** [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause)
|
||||
6
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/bin/pbjs
generated
vendored
Normal file
6
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/bin/pbjs
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env node
|
||||
var path = require("path"),
|
||||
cli = require(path.join(__dirname, "..", "pbjs.js"));
|
||||
var ret = cli.main(process.argv.slice(2));
|
||||
if (typeof ret === 'number')
|
||||
process.exit(ret);
|
||||
6
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/bin/pbts
generated
vendored
Normal file
6
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/bin/pbts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env node
|
||||
var path = require("path"),
|
||||
cli = require(path.join(__dirname, "..", "pbts.js"));
|
||||
var ret = cli.main(process.argv.slice(2));
|
||||
if (typeof ret === 'number')
|
||||
process.exit(ret);
|
||||
3
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/index.d.ts
generated
vendored
Normal file
3
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
import * as pbjs from "./pbjs.js";
|
||||
import * as pbts from "./pbts.js";
|
||||
export { pbjs, pbts };
|
||||
3
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/index.js
generated
vendored
Normal file
3
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/index.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
"use strict";
|
||||
exports.pbjs = require("./pbjs");
|
||||
exports.pbts = require("./pbts");
|
||||
18
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/lib/tsd-jsdoc.json
generated
vendored
Normal file
18
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/lib/tsd-jsdoc.json
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"tags": {
|
||||
"allowUnknownTags": false
|
||||
},
|
||||
"plugins": [
|
||||
"./tsd-jsdoc/plugin"
|
||||
],
|
||||
"opts": {
|
||||
"encoding" : "utf8",
|
||||
"recurse" : true,
|
||||
"lenient" : true,
|
||||
"template" : "./tsd-jsdoc",
|
||||
|
||||
"private" : false,
|
||||
"comments" : true,
|
||||
"destination" : false
|
||||
}
|
||||
}
|
||||
21
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/lib/tsd-jsdoc/LICENSE
generated
vendored
Normal file
21
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/lib/tsd-jsdoc/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License
|
||||
|
||||
Copyright (c) 2016 Chad Engler
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
23
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/lib/tsd-jsdoc/README.md
generated
vendored
Normal file
23
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/lib/tsd-jsdoc/README.md
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
protobuf.js fork of tsd-jsdoc
|
||||
=============================
|
||||
|
||||
This is a modified version of [tsd-jsdoc](https://github.com/englercj/tsd-jsdoc) v1.0.1 for use with protobuf.js, parked here so we can process issues and pull requests. The ultimate goal is to switch back to a recent version of tsd-jsdoc once it meets our needs.
|
||||
|
||||
Options
|
||||
-------
|
||||
|
||||
* **module: `string`**<br />
|
||||
Wraps everything in a module of the specified name.
|
||||
|
||||
* **private: `boolean`**<br />
|
||||
Includes private members when set to `true`.
|
||||
|
||||
* **comments: `boolean`**<br />
|
||||
Skips comments when explicitly set to `false`.
|
||||
|
||||
* **destination: `string|boolean`**<br />
|
||||
Saves to the specified destination file or to console when set to `false`.
|
||||
|
||||
Setting options on the command line
|
||||
-----------------------------------
|
||||
Providing `-q, --query <queryString>` on the command line will set or override existing options. Example: `-q module=protobufjs`
|
||||
21
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/lib/tsd-jsdoc/plugin.js
generated
vendored
Normal file
21
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/lib/tsd-jsdoc/plugin.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
"use strict";
|
||||
exports.defineTags = function(dictionary) {
|
||||
|
||||
dictionary.defineTag("template", {
|
||||
mustHaveValue: true,
|
||||
canHaveType: false,
|
||||
canHaveName: false,
|
||||
onTagged: function(doclet, tag) {
|
||||
(doclet.templates || (doclet.templates = [])).push(tag.text);
|
||||
}
|
||||
});
|
||||
|
||||
dictionary.defineTag("tstype", {
|
||||
mustHaveValue: true,
|
||||
canHaveType: false,
|
||||
canHaveName: false,
|
||||
onTagged: function(doclet, tag) {
|
||||
doclet.tsType = tag.text;
|
||||
}
|
||||
});
|
||||
};
|
||||
693
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/lib/tsd-jsdoc/publish.js
generated
vendored
Normal file
693
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/lib/tsd-jsdoc/publish.js
generated
vendored
Normal file
@@ -0,0 +1,693 @@
|
||||
"use strict";
|
||||
|
||||
var fs = require("fs");
|
||||
|
||||
// output stream
|
||||
var out = null;
|
||||
|
||||
// documentation data
|
||||
var data = null;
|
||||
|
||||
// already handled objects, by name
|
||||
var seen = {};
|
||||
|
||||
// indentation level
|
||||
var indent = 0;
|
||||
|
||||
// whether indent has been written for the current line yet
|
||||
var indentWritten = false;
|
||||
|
||||
// provided options
|
||||
var options = {};
|
||||
|
||||
// queued interfaces
|
||||
var queuedInterfaces = [];
|
||||
|
||||
// whether writing the first line
|
||||
var firstLine = true;
|
||||
|
||||
// JSDoc hook
|
||||
exports.publish = function publish(taffy, opts) {
|
||||
options = opts || {};
|
||||
|
||||
// query overrides options
|
||||
if (options.query)
|
||||
Object.keys(options.query).forEach(function(key) {
|
||||
if (key !== "query")
|
||||
switch (options[key] = options.query[key]) {
|
||||
case "true":
|
||||
options[key] = true;
|
||||
break;
|
||||
case "false":
|
||||
options[key] = false;
|
||||
break;
|
||||
case "null":
|
||||
options[key] = null;
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
// remove undocumented
|
||||
taffy({ undocumented: true }).remove();
|
||||
taffy({ ignore: true }).remove();
|
||||
taffy({ inherited: true }).remove();
|
||||
|
||||
// remove private
|
||||
if (!options.private)
|
||||
taffy({ access: "private" }).remove();
|
||||
|
||||
// setup output
|
||||
out = options.destination
|
||||
? fs.createWriteStream(options.destination)
|
||||
: process.stdout;
|
||||
|
||||
try {
|
||||
// setup environment
|
||||
data = taffy().get();
|
||||
indent = 0;
|
||||
indentWritten = false;
|
||||
firstLine = true;
|
||||
|
||||
// wrap everything in a module if configured
|
||||
if (options.module) {
|
||||
writeln("export = ", options.module, ";");
|
||||
writeln();
|
||||
writeln("declare namespace ", options.module, " {");
|
||||
writeln();
|
||||
++indent;
|
||||
}
|
||||
|
||||
// handle all
|
||||
getChildrenOf(undefined).forEach(function(child) {
|
||||
handleElement(child, null);
|
||||
});
|
||||
|
||||
// process queued
|
||||
while (queuedInterfaces.length) {
|
||||
var element = queuedInterfaces.shift();
|
||||
begin(element);
|
||||
writeInterface(element);
|
||||
writeln(";");
|
||||
}
|
||||
|
||||
// end wrap
|
||||
if (options.module) {
|
||||
--indent;
|
||||
writeln("}");
|
||||
}
|
||||
|
||||
// close file output
|
||||
if (out !== process.stdout)
|
||||
out.end();
|
||||
|
||||
} finally {
|
||||
// gc environment objects
|
||||
out = data = null;
|
||||
seen = options = {};
|
||||
queuedInterfaces = [];
|
||||
}
|
||||
};
|
||||
|
||||
//
|
||||
// Utility
|
||||
//
|
||||
|
||||
// writes one or multiple strings
|
||||
function write() {
|
||||
var s = Array.prototype.slice.call(arguments).join("");
|
||||
if (!indentWritten) {
|
||||
for (var i = 0; i < indent; ++i)
|
||||
s = " " + s;
|
||||
indentWritten = true;
|
||||
}
|
||||
out.write(s);
|
||||
firstLine = false;
|
||||
}
|
||||
|
||||
// writes zero or multiple strings, followed by a new line
|
||||
function writeln() {
|
||||
var s = Array.prototype.slice.call(arguments).join("");
|
||||
if (s.length)
|
||||
write(s, "\n");
|
||||
else if (!firstLine)
|
||||
out.write("\n");
|
||||
indentWritten = false;
|
||||
}
|
||||
|
||||
var keepTags = [
|
||||
"param",
|
||||
"returns",
|
||||
"throws",
|
||||
"see"
|
||||
];
|
||||
|
||||
// parses a comment into text and tags
|
||||
function parseComment(comment) {
|
||||
var lines = comment.replace(/^ *\/\*\* *|^ *\*\/| *\*\/ *$|^ *\* */mg, "").trim().split(/\r?\n|\r/g); // property.description has just "\r" ?!
|
||||
var desc;
|
||||
var text = [];
|
||||
var tags = null;
|
||||
for (var i = 0; i < lines.length; ++i) {
|
||||
var match = /^@(\w+)\b/.exec(lines[i]);
|
||||
if (match) {
|
||||
if (!tags) {
|
||||
tags = [];
|
||||
desc = text;
|
||||
}
|
||||
text = [];
|
||||
tags.push({ name: match[1], text: text });
|
||||
lines[i] = lines[i].substring(match[1].length + 1).trim();
|
||||
}
|
||||
if (lines[i].length || text.length)
|
||||
text.push(lines[i]);
|
||||
}
|
||||
return {
|
||||
text: desc || text,
|
||||
tags: tags || []
|
||||
};
|
||||
}
|
||||
|
||||
// writes a comment
|
||||
function writeComment(comment, otherwiseNewline) {
|
||||
if (!comment || options.comments === false) {
|
||||
if (otherwiseNewline)
|
||||
writeln();
|
||||
return;
|
||||
}
|
||||
if (typeof comment !== "object")
|
||||
comment = parseComment(comment);
|
||||
comment.tags = comment.tags.filter(function(tag) {
|
||||
return keepTags.indexOf(tag.name) > -1 && (tag.name !== "returns" || tag.text[0] !== "{undefined}");
|
||||
});
|
||||
writeln();
|
||||
if (!comment.tags.length && comment.text.length < 2) {
|
||||
writeln("/** " + comment.text[0] + " */");
|
||||
return;
|
||||
}
|
||||
writeln("/**");
|
||||
comment.text.forEach(function(line) {
|
||||
if (line.length)
|
||||
writeln(" * ", line);
|
||||
else
|
||||
writeln(" *");
|
||||
});
|
||||
comment.tags.forEach(function(tag) {
|
||||
var started = false;
|
||||
if (tag.text.length) {
|
||||
tag.text.forEach(function(line, i) {
|
||||
if (i > 0)
|
||||
write(" * ");
|
||||
else if (tag.name !== "throws")
|
||||
line = line.replace(/^\{[^\s]*} ?/, "");
|
||||
if (!line.length)
|
||||
return;
|
||||
if (!started) {
|
||||
write(" * @", tag.name, " ");
|
||||
started = true;
|
||||
}
|
||||
writeln(line);
|
||||
});
|
||||
}
|
||||
});
|
||||
writeln(" */");
|
||||
}
|
||||
|
||||
// recursively replaces all occurrences of re's match
|
||||
function replaceRecursive(name, re, fn) {
|
||||
var found;
|
||||
|
||||
function replacer() {
|
||||
found = true;
|
||||
return fn.apply(null, arguments);
|
||||
}
|
||||
|
||||
do {
|
||||
found = false;
|
||||
name = name.replace(re, replacer);
|
||||
} while (found);
|
||||
return name;
|
||||
}
|
||||
|
||||
// tests if an element is considered to be a class or class-like
|
||||
function isClassLike(element) {
|
||||
return isClass(element) || isInterface(element);
|
||||
}
|
||||
|
||||
// tests if an element is considered to be a class
|
||||
function isClass(element) {
|
||||
return element && element.kind === "class";
|
||||
}
|
||||
|
||||
// tests if an element is considered to be an interface
|
||||
function isInterface(element) {
|
||||
return element && (element.kind === "interface" || element.kind === "mixin");
|
||||
}
|
||||
|
||||
// tests if an element is considered to be a namespace
|
||||
function isNamespace(element) {
|
||||
return element && (element.kind === "namespace" || element.kind === "module");
|
||||
}
|
||||
|
||||
// gets all children of the specified parent
|
||||
function getChildrenOf(parent) {
|
||||
var memberof = parent ? parent.longname : undefined;
|
||||
return data.filter(function(element) {
|
||||
return element.memberof === memberof;
|
||||
});
|
||||
}
|
||||
|
||||
// gets the literal type of an element
|
||||
function getTypeOf(element) {
|
||||
if (element.tsType)
|
||||
return element.tsType.replace(/\r?\n|\r/g, "\n");
|
||||
var name = "any";
|
||||
var type = element.type;
|
||||
if (type && type.names && type.names.length) {
|
||||
if (type.names.length === 1)
|
||||
name = element.type.names[0].trim();
|
||||
else
|
||||
name = "(" + element.type.names.join("|") + ")";
|
||||
} else
|
||||
return name;
|
||||
|
||||
// Replace catchalls with any
|
||||
name = name.replace(/\*|\bmixed\b/g, "any");
|
||||
|
||||
// Ensure upper case Object for map expressions below
|
||||
name = name.replace(/\bobject\b/g, "Object");
|
||||
|
||||
// Correct Something.<Something> to Something<Something>
|
||||
name = replaceRecursive(name, /\b(?!Object|Array)([\w$]+)\.<([^>]*)>/gi, function($0, $1, $2) {
|
||||
return $1 + "<" + $2 + ">";
|
||||
});
|
||||
|
||||
// Replace Array.<string> with string[]
|
||||
name = replaceRecursive(name, /\bArray\.?<([^>]*)>/gi, function($0, $1) {
|
||||
return $1 + "[]";
|
||||
});
|
||||
|
||||
// Replace Object.<string,number> with { [k: string]: number }
|
||||
name = replaceRecursive(name, /\bObject\.?<([^,]*), *([^>]*)>/gi, function($0, $1, $2) {
|
||||
return "{ [k: " + $1 + "]: " + $2 + " }";
|
||||
});
|
||||
|
||||
// Replace functions (there are no signatures) with Function
|
||||
name = name.replace(/\bfunction(?:\(\))?\b/g, "Function");
|
||||
|
||||
// Convert plain Object back to just object
|
||||
name = name.replace(/\b(Object\b(?!\.))/g, function($0, $1) {
|
||||
return $1.toLowerCase();
|
||||
});
|
||||
|
||||
return name;
|
||||
}
|
||||
|
||||
// begins writing the definition of the specified element
|
||||
function begin(element, is_interface) {
|
||||
if (!seen[element.longname]) {
|
||||
if (isClass(element)) {
|
||||
var comment = parseComment(element.comment);
|
||||
var classdesc = comment.tags.find(function(tag) { return tag.name === "classdesc"; });
|
||||
if (classdesc) {
|
||||
comment.text = classdesc.text;
|
||||
comment.tags = [];
|
||||
}
|
||||
writeComment(comment, true);
|
||||
} else
|
||||
writeComment(element.comment, is_interface || isClassLike(element) || isNamespace(element) || element.isEnum || element.scope === "global");
|
||||
seen[element.longname] = element;
|
||||
} else
|
||||
writeln();
|
||||
if (element.scope !== "global" || options.module)
|
||||
return;
|
||||
write("export ");
|
||||
}
|
||||
|
||||
// writes the function signature describing element
|
||||
function writeFunctionSignature(element, isConstructor, isTypeDef) {
|
||||
write("(");
|
||||
|
||||
var params = {};
|
||||
|
||||
// this type
|
||||
if (element.this)
|
||||
params["this"] = {
|
||||
type: element.this.replace(/^{|}$/g, ""),
|
||||
optional: false
|
||||
};
|
||||
|
||||
// parameter types
|
||||
if (element.params)
|
||||
element.params.forEach(function(param) {
|
||||
var path = param.name.split(/\./g);
|
||||
if (path.length === 1)
|
||||
params[param.name] = {
|
||||
type: getTypeOf(param),
|
||||
variable: param.variable === true,
|
||||
optional: param.optional === true,
|
||||
defaultValue: param.defaultvalue // Not used yet (TODO)
|
||||
};
|
||||
else // Property syntax (TODO)
|
||||
params[path[0]].type = "{ [k: string]: any }";
|
||||
});
|
||||
|
||||
var paramNames = Object.keys(params);
|
||||
paramNames.forEach(function(name, i) {
|
||||
var param = params[name];
|
||||
var type = param.type;
|
||||
if (param.variable) {
|
||||
name = "..." + name;
|
||||
type = param.type.charAt(0) === "(" ? "any[]" : param.type + "[]";
|
||||
}
|
||||
write(name, !param.variable && param.optional ? "?: " : ": ", type);
|
||||
if (i < paramNames.length - 1)
|
||||
write(", ");
|
||||
});
|
||||
|
||||
write(")");
|
||||
|
||||
// return type
|
||||
if (!isConstructor) {
|
||||
write(isTypeDef ? " => " : ": ");
|
||||
var typeName;
|
||||
if (element.returns && element.returns.length && (typeName = getTypeOf(element.returns[0])) !== "undefined")
|
||||
write(typeName);
|
||||
else
|
||||
write("void");
|
||||
}
|
||||
}
|
||||
|
||||
// writes (a typedef as) an interface
|
||||
function writeInterface(element) {
|
||||
write("interface ", element.name);
|
||||
writeInterfaceBody(element);
|
||||
writeln();
|
||||
}
|
||||
|
||||
function writeInterfaceBody(element) {
|
||||
writeln("{");
|
||||
++indent;
|
||||
if (element.tsType)
|
||||
writeln(element.tsType.replace(/\r?\n|\r/g, "\n"));
|
||||
else if (element.properties && element.properties.length)
|
||||
element.properties.forEach(writeProperty);
|
||||
--indent;
|
||||
write("}");
|
||||
}
|
||||
|
||||
function writeProperty(property, declare) {
|
||||
writeComment(property.description);
|
||||
if (declare)
|
||||
write("let ");
|
||||
write(property.name);
|
||||
if (property.optional)
|
||||
write("?");
|
||||
writeln(": ", getTypeOf(property), ";");
|
||||
}
|
||||
|
||||
//
|
||||
// Handlers
|
||||
//
|
||||
|
||||
// handles a single element of any understood type
|
||||
function handleElement(element, parent) {
|
||||
if (element.scope === "inner")
|
||||
return false;
|
||||
|
||||
if (element.optional !== true && element.type && element.type.names && element.type.names.length) {
|
||||
for (var i = 0; i < element.type.names.length; i++) {
|
||||
if (element.type.names[i].toLowerCase() === "undefined") {
|
||||
// This element is actually optional. Set optional to true and
|
||||
// remove the 'undefined' type
|
||||
element.optional = true;
|
||||
element.type.names.splice(i, 1);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (seen[element.longname])
|
||||
return true;
|
||||
if (isClassLike(element))
|
||||
handleClass(element, parent);
|
||||
else switch (element.kind) {
|
||||
case "module":
|
||||
case "namespace":
|
||||
handleNamespace(element, parent);
|
||||
break;
|
||||
case "constant":
|
||||
case "member":
|
||||
handleMember(element, parent);
|
||||
break;
|
||||
case "function":
|
||||
handleFunction(element, parent);
|
||||
break;
|
||||
case "typedef":
|
||||
handleTypeDef(element, parent);
|
||||
break;
|
||||
case "package":
|
||||
break;
|
||||
}
|
||||
seen[element.longname] = element;
|
||||
return true;
|
||||
}
|
||||
|
||||
// handles (just) a namespace
|
||||
function handleNamespace(element/*, parent*/) {
|
||||
var children = getChildrenOf(element);
|
||||
if (!children.length)
|
||||
return;
|
||||
var first = true;
|
||||
if (element.properties)
|
||||
element.properties.forEach(function(property) {
|
||||
if (!/^[$\w]+$/.test(property.name)) // incompatible in namespace
|
||||
return;
|
||||
if (first) {
|
||||
begin(element);
|
||||
writeln("namespace ", element.name, " {");
|
||||
++indent;
|
||||
first = false;
|
||||
}
|
||||
writeProperty(property, true);
|
||||
});
|
||||
children.forEach(function(child) {
|
||||
if (child.scope === "inner" || seen[child.longname])
|
||||
return;
|
||||
if (first) {
|
||||
begin(element);
|
||||
writeln("namespace ", element.name, " {");
|
||||
++indent;
|
||||
first = false;
|
||||
}
|
||||
handleElement(child, element);
|
||||
});
|
||||
if (!first) {
|
||||
--indent;
|
||||
writeln("}");
|
||||
}
|
||||
}
|
||||
|
||||
// a filter function to remove any module references
|
||||
function notAModuleReference(ref) {
|
||||
return ref.indexOf("module:") === -1;
|
||||
}
|
||||
|
||||
// handles a class or class-like
|
||||
function handleClass(element, parent) {
|
||||
var is_interface = isInterface(element);
|
||||
begin(element, is_interface);
|
||||
if (is_interface)
|
||||
write("interface ");
|
||||
else {
|
||||
if (element.virtual)
|
||||
write("abstract ");
|
||||
write("class ");
|
||||
}
|
||||
write(element.name);
|
||||
if (element.templates && element.templates.length)
|
||||
write("<", element.templates.join(", "), ">");
|
||||
write(" ");
|
||||
|
||||
// extended classes
|
||||
if (element.augments) {
|
||||
var augments = element.augments.filter(notAModuleReference);
|
||||
if (augments.length)
|
||||
write("extends ", augments[0], " ");
|
||||
}
|
||||
|
||||
// implemented interfaces
|
||||
var impls = [];
|
||||
if (element.implements)
|
||||
Array.prototype.push.apply(impls, element.implements);
|
||||
if (element.mixes)
|
||||
Array.prototype.push.apply(impls, element.mixes);
|
||||
impls = impls.filter(notAModuleReference);
|
||||
if (impls.length)
|
||||
write("implements ", impls.join(", "), " ");
|
||||
|
||||
writeln("{");
|
||||
++indent;
|
||||
|
||||
if (element.tsType)
|
||||
writeln(element.tsType.replace(/\r?\n|\r/g, "\n"));
|
||||
|
||||
// constructor
|
||||
if (!is_interface && !element.virtual)
|
||||
handleFunction(element, parent, true);
|
||||
|
||||
// properties
|
||||
if (is_interface && element.properties)
|
||||
element.properties.forEach(function(property) {
|
||||
writeProperty(property);
|
||||
});
|
||||
|
||||
// class-compatible members
|
||||
var incompatible = [];
|
||||
getChildrenOf(element).forEach(function(child) {
|
||||
if (isClassLike(child) || child.kind === "module" || child.kind === "typedef" || child.isEnum) {
|
||||
incompatible.push(child);
|
||||
return;
|
||||
}
|
||||
handleElement(child, element);
|
||||
});
|
||||
|
||||
--indent;
|
||||
writeln("}");
|
||||
|
||||
// class-incompatible members
|
||||
if (incompatible.length) {
|
||||
writeln();
|
||||
if (element.scope === "global" && !options.module)
|
||||
write("export ");
|
||||
writeln("namespace ", element.name, " {");
|
||||
++indent;
|
||||
incompatible.forEach(function(child) {
|
||||
handleElement(child, element);
|
||||
});
|
||||
--indent;
|
||||
writeln("}");
|
||||
}
|
||||
}
|
||||
|
||||
// handles a namespace or class member
|
||||
function handleMember(element, parent) {
|
||||
begin(element);
|
||||
|
||||
if (element.isEnum) {
|
||||
var stringEnum = false;
|
||||
element.properties.forEach(function(property) {
|
||||
if (isNaN(property.defaultvalue)) {
|
||||
stringEnum = true;
|
||||
}
|
||||
});
|
||||
if (stringEnum) {
|
||||
writeln("type ", element.name, " =");
|
||||
++indent;
|
||||
element.properties.forEach(function(property, i) {
|
||||
write(i === 0 ? "" : "| ", JSON.stringify(property.defaultvalue));
|
||||
});
|
||||
--indent;
|
||||
writeln(";");
|
||||
} else {
|
||||
writeln("enum ", element.name, " {");
|
||||
++indent;
|
||||
element.properties.forEach(function(property, i) {
|
||||
write(property.name);
|
||||
if (property.defaultvalue !== undefined)
|
||||
write(" = ", JSON.stringify(property.defaultvalue));
|
||||
if (i < element.properties.length - 1)
|
||||
writeln(",");
|
||||
else
|
||||
writeln();
|
||||
});
|
||||
--indent;
|
||||
writeln("}");
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
var inClass = isClassLike(parent);
|
||||
if (inClass) {
|
||||
write(element.access || "public", " ");
|
||||
if (element.scope === "static")
|
||||
write("static ");
|
||||
if (element.readonly)
|
||||
write("readonly ");
|
||||
} else
|
||||
write(element.kind === "constant" ? "const " : "let ");
|
||||
|
||||
write(element.name);
|
||||
if (element.optional)
|
||||
write("?");
|
||||
write(": ");
|
||||
|
||||
if (element.type && element.type.names && /^Object\b/i.test(element.type.names[0]) && element.properties) {
|
||||
writeln("{");
|
||||
++indent;
|
||||
element.properties.forEach(function(property, i) {
|
||||
writeln(JSON.stringify(property.name), ": ", getTypeOf(property), i < element.properties.length - 1 ? "," : "");
|
||||
});
|
||||
--indent;
|
||||
writeln("};");
|
||||
} else
|
||||
writeln(getTypeOf(element), ";");
|
||||
}
|
||||
}
|
||||
|
||||
// handles a function or method
|
||||
function handleFunction(element, parent, isConstructor) {
|
||||
var insideClass = true;
|
||||
if (isConstructor) {
|
||||
writeComment(element.comment);
|
||||
write("constructor");
|
||||
} else {
|
||||
begin(element);
|
||||
insideClass = isClassLike(parent);
|
||||
if (insideClass) {
|
||||
write(element.access || "public", " ");
|
||||
if (element.scope === "static")
|
||||
write("static ");
|
||||
} else
|
||||
write("function ");
|
||||
write(element.name);
|
||||
if (element.templates && element.templates.length)
|
||||
write("<", element.templates.join(", "), ">");
|
||||
}
|
||||
writeFunctionSignature(element, isConstructor, false);
|
||||
writeln(";");
|
||||
if (!insideClass)
|
||||
handleNamespace(element);
|
||||
}
|
||||
|
||||
// handles a type definition (not a real type)
|
||||
function handleTypeDef(element, parent) {
|
||||
if (isInterface(element)) {
|
||||
if (isClassLike(parent))
|
||||
queuedInterfaces.push(element);
|
||||
else {
|
||||
begin(element);
|
||||
writeInterface(element);
|
||||
}
|
||||
} else {
|
||||
writeComment(element.comment, true);
|
||||
write("type ", element.name);
|
||||
if (element.templates && element.templates.length)
|
||||
write("<", element.templates.join(", "), ">");
|
||||
write(" = ");
|
||||
if (element.tsType)
|
||||
write(element.tsType.replace(/\r?\n|\r/g, "\n"));
|
||||
else {
|
||||
var type = getTypeOf(element);
|
||||
if (element.type && element.type.names.length === 1 && element.type.names[0] === "function")
|
||||
writeFunctionSignature(element, false, true);
|
||||
else if (type === "object") {
|
||||
if (element.properties && element.properties.length)
|
||||
writeInterfaceBody(element);
|
||||
else
|
||||
write("{}");
|
||||
} else
|
||||
write(type);
|
||||
}
|
||||
writeln(";");
|
||||
}
|
||||
}
|
||||
25
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/node_modules/os-tmpdir/index.js
generated
vendored
Normal file
25
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/node_modules/os-tmpdir/index.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
'use strict';
|
||||
var isWindows = process.platform === 'win32';
|
||||
var trailingSlashRe = isWindows ? /[^:]\\$/ : /.\/$/;
|
||||
|
||||
// https://github.com/nodejs/node/blob/3e7a14381497a3b73dda68d05b5130563cdab420/lib/os.js#L25-L43
|
||||
module.exports = function () {
|
||||
var path;
|
||||
|
||||
if (isWindows) {
|
||||
path = process.env.TEMP ||
|
||||
process.env.TMP ||
|
||||
(process.env.SystemRoot || process.env.windir) + '\\temp';
|
||||
} else {
|
||||
path = process.env.TMPDIR ||
|
||||
process.env.TMP ||
|
||||
process.env.TEMP ||
|
||||
'/tmp';
|
||||
}
|
||||
|
||||
if (trailingSlashRe.test(path)) {
|
||||
path = path.slice(0, -1);
|
||||
}
|
||||
|
||||
return path;
|
||||
};
|
||||
21
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/node_modules/os-tmpdir/license
generated
vendored
Normal file
21
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/node_modules/os-tmpdir/license
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
32
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/node_modules/os-tmpdir/readme.md
generated
vendored
Normal file
32
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/node_modules/os-tmpdir/readme.md
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
# os-tmpdir [](https://travis-ci.org/sindresorhus/os-tmpdir)
|
||||
|
||||
> Node.js [`os.tmpdir()`](https://nodejs.org/api/os.html#os_os_tmpdir) [ponyfill](https://ponyfill.com)
|
||||
|
||||
Use this instead of `require('os').tmpdir()` to get a consistent behavior on different Node.js versions (even 0.8).
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install --save os-tmpdir
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const osTmpdir = require('os-tmpdir');
|
||||
|
||||
osTmpdir();
|
||||
//=> '/var/folders/m3/5574nnhn0yj488ccryqr7tc80000gn/T'
|
||||
```
|
||||
|
||||
|
||||
## API
|
||||
|
||||
See the [`os.tmpdir()` docs](https://nodejs.org/api/os.html#os_os_tmpdir).
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](https://sindresorhus.com)
|
||||
21
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/node_modules/tmp/LICENSE
generated
vendored
Normal file
21
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/node_modules/tmp/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 KARASZI István
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
314
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/node_modules/tmp/README.md
generated
vendored
Normal file
314
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/node_modules/tmp/README.md
generated
vendored
Normal file
@@ -0,0 +1,314 @@
|
||||
# Tmp
|
||||
|
||||
A simple temporary file and directory creator for [node.js.][1]
|
||||
|
||||
[](https://travis-ci.org/raszi/node-tmp)
|
||||
[](https://david-dm.org/raszi/node-tmp)
|
||||
[](https://badge.fury.io/js/tmp)
|
||||
[](https://raszi.github.io/node-tmp/)
|
||||
[](https://snyk.io/test/npm/tmp)
|
||||
|
||||
## About
|
||||
|
||||
This is a [widely used library][2] to create temporary files and directories
|
||||
in a [node.js][1] environment.
|
||||
|
||||
Tmp offers both an asynchronous and a synchronous API. For all API calls, all
|
||||
the parameters are optional. There also exists a promisified version of the
|
||||
API, see (5) under references below.
|
||||
|
||||
Tmp uses crypto for determining random file names, or, when using templates,
a six letter random identifier. If your system does not have enough entropy
left, Tmp falls back to pseudo-random numbers.
|
||||
|
||||
You can set whether you want to remove the temporary file on process exit or
|
||||
not, and the destination directory can also be set.
|
||||
|
||||
## How to install
|
||||
|
||||
```bash
|
||||
npm install tmp
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
Please also check [API docs][4].
|
||||
|
||||
### Asynchronous file creation
|
||||
|
||||
Simple temporary file creation, the file will be closed and unlinked on process exit.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
tmp.file(function _tempFileCreated(err, path, fd, cleanupCallback) {
|
||||
if (err) throw err;
|
||||
|
||||
console.log('File: ', path);
|
||||
console.log('Filedescriptor: ', fd);
|
||||
|
||||
// If we don't need the file anymore we could manually call the cleanupCallback
|
||||
// But that is not necessary if we didn't pass the keep option because the library
|
||||
// will clean after itself.
|
||||
cleanupCallback();
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous file creation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
var tmpobj = tmp.fileSync();
|
||||
console.log('File: ', tmpobj.name);
|
||||
console.log('Filedescriptor: ', tmpobj.fd);
|
||||
|
||||
// If we don't need the file anymore we could manually call the removeCallback
|
||||
// But that is not necessary if we didn't pass the keep option because the library
|
||||
// will clean after itself.
|
||||
tmpobj.removeCallback();
|
||||
```
|
||||
|
||||
Note that this might throw an exception if either the maximum number of retries
for creating a temporary name is exceeded, or if you do not have permission
to write to the directory where the temporary file should be created.
|
||||
|
||||
### Asynchronous directory creation
|
||||
|
||||
Simple temporary directory creation, it will be removed on process exit.
|
||||
|
||||
If the directory still contains items on process exit, then it won't be removed.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
tmp.dir(function _tempDirCreated(err, path, cleanupCallback) {
|
||||
if (err) throw err;
|
||||
|
||||
console.log('Dir: ', path);
|
||||
|
||||
// Manual cleanup
|
||||
cleanupCallback();
|
||||
});
|
||||
```
|
||||
|
||||
If you want to cleanup the directory even when there are entries in it, then
|
||||
you can pass the `unsafeCleanup` option when creating it.
|
||||
|
||||
### Synchronous directory creation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
var tmpobj = tmp.dirSync();
|
||||
console.log('Dir: ', tmpobj.name);
|
||||
// Manual cleanup
|
||||
tmpobj.removeCallback();
|
||||
```
|
||||
|
||||
Note that this might throw an exception if either the maximum number of retries
for creating a temporary name is exceeded, or if you do not have permission
to write to the directory where the temporary directory should be created.
|
||||
|
||||
### Asynchronous filename generation
|
||||
|
||||
It is possible with this library to generate a unique filename in the specified
|
||||
directory.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
tmp.tmpName(function _tempNameGenerated(err, path) {
|
||||
if (err) throw err;
|
||||
|
||||
console.log('Created temporary filename: ', path);
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous filename generation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
var name = tmp.tmpNameSync();
|
||||
console.log('Created temporary filename: ', name);
|
||||
```
|
||||
|
||||
## Advanced usage
|
||||
|
||||
### Asynchronous file creation
|
||||
|
||||
Creates a file with mode `0644`, prefix will be `prefix-` and postfix will be `.txt`.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
tmp.file({ mode: 0644, prefix: 'prefix-', postfix: '.txt' }, function _tempFileCreated(err, path, fd) {
|
||||
if (err) throw err;
|
||||
|
||||
console.log('File: ', path);
|
||||
console.log('Filedescriptor: ', fd);
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous file creation
|
||||
|
||||
A synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
var tmpobj = tmp.fileSync({ mode: 0644, prefix: 'prefix-', postfix: '.txt' });
|
||||
console.log('File: ', tmpobj.name);
|
||||
console.log('Filedescriptor: ', tmpobj.fd);
|
||||
```
|
||||
|
||||
### Controlling the Descriptor
|
||||
|
||||
As a side effect of creating a unique file `tmp` gets a file descriptor that is
|
||||
returned to the user as the `fd` parameter. The descriptor may be used by the
|
||||
application and is closed when the `removeCallback` is invoked.
|
||||
|
||||
In some use cases the application does not need the descriptor, needs to close it
|
||||
without removing the file, or needs to remove the file without closing the
|
||||
descriptor. Two options control how the descriptor is managed:
|
||||
|
||||
* `discardDescriptor` - if `true` causes `tmp` to close the descriptor after the file
|
||||
is created. In this case the `fd` parameter is undefined.
|
||||
* `detachDescriptor` - if `true` causes `tmp` to return the descriptor in the `fd`
|
||||
parameter, but it is the application's responsibility to close it when it is no
|
||||
longer needed.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
tmp.file({ discardDescriptor: true }, function _tempFileCreated(err, path, fd, cleanupCallback) {
|
||||
if (err) throw err;
|
||||
// fd will be undefined, allowing application to use fs.createReadStream(path)
|
||||
// without holding an unused descriptor open.
|
||||
});
|
||||
```
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
tmp.file({ detachDescriptor: true }, function _tempFileCreated(err, path, fd, cleanupCallback) {
|
||||
if (err) throw err;
|
||||
|
||||
cleanupCallback();
|
||||
// Application can store data through fd here; the space used will automatically
|
||||
// be reclaimed by the operating system when the descriptor is closed or program
|
||||
// terminates.
|
||||
});
|
||||
```
|
||||
|
||||
### Asynchronous directory creation
|
||||
|
||||
Creates a directory with mode `0750`, prefix will be `myTmpDir_`.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
tmp.dir({ mode: 0750, prefix: 'myTmpDir_' }, function _tempDirCreated(err, path) {
|
||||
if (err) throw err;
|
||||
|
||||
console.log('Dir: ', path);
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous directory creation
|
||||
|
||||
Again, a synchronous version of the above.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
var tmpobj = tmp.dirSync({ mode: 0750, prefix: 'myTmpDir_' });
|
||||
console.log('Dir: ', tmpobj.name);
|
||||
```
|
||||
|
||||
### mkstemp like, asynchronously
|
||||
|
||||
Creates a new temporary directory with mode `0700` and filename like `/tmp/tmp-nk2J1u`.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
tmp.dir({ template: '/tmp/tmp-XXXXXX' }, function _tempDirCreated(err, path) {
|
||||
if (err) throw err;
|
||||
|
||||
console.log('Dir: ', path);
|
||||
});
|
||||
```
|
||||
|
||||
### mkstemp like, synchronously
|
||||
|
||||
This will behave similarly to the asynchronous version.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
var tmpobj = tmp.dirSync({ template: '/tmp/tmp-XXXXXX' });
|
||||
console.log('Dir: ', tmpobj.name);
|
||||
```
|
||||
|
||||
### Asynchronous filename generation
|
||||
|
||||
The `tmpName()` function accepts the `prefix`, `postfix`, `dir`, etc. parameters also:
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
tmp.tmpName({ template: '/tmp/tmp-XXXXXX' }, function _tempNameGenerated(err, path) {
|
||||
if (err) throw err;
|
||||
|
||||
console.log('Created temporary filename: ', path);
|
||||
});
|
||||
```
|
||||
|
||||
### Synchronous filename generation
|
||||
|
||||
The `tmpNameSync()` function works similarly to `tmpName()`.
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
var tmpname = tmp.tmpNameSync({ template: '/tmp/tmp-XXXXXX' });
|
||||
console.log('Created temporary filename: ', tmpname);
|
||||
```
|
||||
|
||||
## Graceful cleanup
|
||||
|
||||
One may want to cleanup the temporary files even when an uncaught exception
|
||||
occurs. To enforce this, you can call the `setGracefulCleanup()` method:
|
||||
|
||||
```javascript
|
||||
var tmp = require('tmp');
|
||||
|
||||
tmp.setGracefulCleanup();
|
||||
```
|
||||
|
||||
## Options
|
||||
|
||||
All options are optional :) See the combined example after the list below.
|
||||
|
||||
* `mode`: the file mode to create with, it fallbacks to `0600` on file creation and `0700` on directory creation
|
||||
* `prefix`: the optional prefix, fallbacks to `tmp-` if not provided
|
||||
* `postfix`: the optional postfix, fallbacks to `.tmp` on file creation
|
||||
* `template`: [`mkstemp`][3] like filename template, no default
|
||||
* `dir`: the optional temporary directory, fallbacks to system default (guesses from environment)
|
||||
* `tries`: how many times should the function try to get a unique filename before giving up, default `3`
|
||||
* `keep`: signals that the temporary file or directory should not be deleted on exit, default is `false`, means delete
|
||||
* Please keep in mind that it is recommended in this case to call the provided `cleanupCallback` function manually.
|
||||
* `unsafeCleanup`: recursively removes the created temporary directory, even when it's not empty. default is `false`
|
||||
|
||||
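As a combined sketch of several of these options (the prefix, postfix and file contents below are placeholders):

```javascript
var tmp = require('tmp');

// keep: true prevents automatic deletion on exit, so clean up manually;
// tries: 5 raises the number of attempts at finding a unique name.
tmp.file({ prefix: 'report-', postfix: '.json', keep: true, tries: 5 }, function _tempFileCreated(err, path, fd, cleanupCallback) {
  if (err) throw err;

  console.log('File: ', path);

  // Because keep is set, remove the file explicitly once it is no longer needed.
  cleanupCallback();
});
```
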
[1]: http://nodejs.org/
|
||||
[2]: https://www.npmjs.com/browse/depended/tmp
|
||||
[3]: http://www.kernel.org/doc/man-pages/online/pages/man3/mkstemp.3.html
|
||||
[4]: https://raszi.github.io/node-tmp/
|
||||
[5]: https://github.com/benjamingr/tmp-promise
|
||||
611
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/node_modules/tmp/lib/tmp.js
generated
vendored
Normal file
611
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/node_modules/tmp/lib/tmp.js
generated
vendored
Normal file
@@ -0,0 +1,611 @@
|
||||
/*!
|
||||
* Tmp
|
||||
*
|
||||
* Copyright (c) 2011-2017 KARASZI Istvan <github@spam.raszi.hu>
|
||||
*
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
/*
|
||||
* Module dependencies.
|
||||
*/
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const crypto = require('crypto');
|
||||
const osTmpDir = require('os-tmpdir');
|
||||
const _c = process.binding('constants');
|
||||
|
||||
/*
|
||||
* The working inner variables.
|
||||
*/
|
||||
const
|
||||
/**
|
||||
* The temporary directory.
|
||||
* @type {string}
|
||||
*/
|
||||
tmpDir = osTmpDir(),
|
||||
|
||||
// the random characters to choose from
|
||||
RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz',
|
||||
|
||||
TEMPLATE_PATTERN = /XXXXXX/,
|
||||
|
||||
DEFAULT_TRIES = 3,
|
||||
|
||||
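// flags for creating files: create it, fail if the path already exists, and open it
// read/write; the `|| _c.fs.*` fallbacks cover Node versions that moved the constants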
CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR),
|
||||
|
||||
EBADF = _c.EBADF || _c.os.errno.EBADF,
|
||||
ENOENT = _c.ENOENT || _c.os.errno.ENOENT,
|
||||
|
||||
DIR_MODE = 448 /* 0o700 */,
|
||||
FILE_MODE = 384 /* 0o600 */,
|
||||
|
||||
// this will hold the objects that need to be removed on exit
|
||||
_removeObjects = [];
|
||||
|
||||
var
|
||||
_gracefulCleanup = false,
|
||||
_uncaughtException = false;
|
||||
|
||||
/**
|
||||
* Random name generator based on crypto.
|
||||
* Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript
|
||||
*
|
||||
* @param {number} howMany
|
||||
* @returns {string} the generated random name
|
||||
* @private
|
||||
*/
|
||||
function _randomChars(howMany) {
|
||||
var
|
||||
value = [],
|
||||
rnd = null;
|
||||
|
||||
// make sure that we do not fail because we ran out of entropy
|
||||
try {
|
||||
rnd = crypto.randomBytes(howMany);
|
||||
} catch (e) {
|
||||
rnd = crypto.pseudoRandomBytes(howMany);
|
||||
}
|
||||
|
||||
for (var i = 0; i < howMany; i++) {
|
||||
value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]);
|
||||
}
|
||||
|
||||
return value.join('');
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether the `obj` parameter is defined or not.
|
||||
*
|
||||
* @param {Object} obj
|
||||
* @returns {boolean} true if the object is undefined
|
||||
* @private
|
||||
*/
|
||||
function _isUndefined(obj) {
|
||||
return typeof obj === 'undefined';
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the function arguments.
|
||||
*
|
||||
* This function helps to have optional arguments.
|
||||
*
|
||||
* @param {(Options|Function)} options
|
||||
* @param {Function} callback
|
||||
* @returns {Array} parsed arguments
|
||||
* @private
|
||||
*/
|
||||
function _parseArguments(options, callback) {
|
||||
if (typeof options == 'function') {
|
||||
return [callback || {}, options];
|
||||
}
|
||||
|
||||
if (_isUndefined(options)) {
|
||||
return [{}, callback];
|
||||
}
|
||||
|
||||
return [options, callback];
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a new temporary name.
|
||||
*
|
||||
* @param {Object} opts
|
||||
* @returns {string} the new random name according to opts
|
||||
* @private
|
||||
*/
|
||||
function _generateTmpName(opts) {
|
||||
if (opts.name) {
|
||||
return path.join(opts.dir || tmpDir, opts.name);
|
||||
}
|
||||
|
||||
// mkstemp-like template
|
||||
if (opts.template) {
|
||||
return opts.template.replace(TEMPLATE_PATTERN, _randomChars(6));
|
||||
}
|
||||
|
||||
// prefix and postfix
|
||||
const name = [
|
||||
opts.prefix || 'tmp-',
|
||||
process.pid,
|
||||
_randomChars(12),
|
||||
opts.postfix || ''
|
||||
].join('');
|
||||
|
||||
return path.join(opts.dir || tmpDir, name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a temporary file name.
|
||||
*
|
||||
* @param {(Options|tmpNameCallback)} options options or callback
|
||||
* @param {?tmpNameCallback} callback the callback function
|
||||
*/
|
||||
function tmpName(options, callback) {
|
||||
var
|
||||
args = _parseArguments(options, callback),
|
||||
opts = args[0],
|
||||
cb = args[1],
|
||||
tries = opts.name ? 1 : opts.tries || DEFAULT_TRIES;
|
||||
|
||||
if (isNaN(tries) || tries < 0)
|
||||
return cb(new Error('Invalid tries'));
|
||||
|
||||
if (opts.template && !opts.template.match(TEMPLATE_PATTERN))
|
||||
return cb(new Error('Invalid template provided'));
|
||||
|
||||
(function _getUniqueName() {
|
||||
const name = _generateTmpName(opts);
|
||||
|
||||
// check whether the path exists then retry if needed
|
||||
fs.stat(name, function (err) {
|
||||
if (!err) {
|
||||
if (tries-- > 0) return _getUniqueName();
|
||||
|
||||
return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name));
|
||||
}
|
||||
|
||||
cb(null, name);
|
||||
});
|
||||
}());
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronous version of tmpName.
|
||||
*
|
||||
* @param {Object} options
|
||||
* @returns {string} the generated random name
|
||||
* @throws {Error} if the options are invalid or could not generate a filename
|
||||
*/
|
||||
function tmpNameSync(options) {
|
||||
var
|
||||
args = _parseArguments(options),
|
||||
opts = args[0],
|
||||
tries = opts.name ? 1 : opts.tries || DEFAULT_TRIES;
|
||||
|
||||
if (isNaN(tries) || tries < 0)
|
||||
throw new Error('Invalid tries');
|
||||
|
||||
if (opts.template && !opts.template.match(TEMPLATE_PATTERN))
|
||||
throw new Error('Invalid template provided');
|
||||
|
||||
do {
|
||||
const name = _generateTmpName(opts);
|
||||
try {
|
||||
fs.statSync(name);
|
||||
} catch (e) {
|
||||
return name;
|
||||
}
|
||||
} while (tries-- > 0);
|
||||
|
||||
throw new Error('Could not get a unique tmp filename, max tries reached');
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates and opens a temporary file.
|
||||
*
|
||||
* @param {(Options|fileCallback)} options the config options or the callback function
|
||||
* @param {?fileCallback} callback
|
||||
*/
|
||||
function file(options, callback) {
|
||||
var
|
||||
args = _parseArguments(options, callback),
|
||||
opts = args[0],
|
||||
cb = args[1];
|
||||
|
||||
opts.postfix = (_isUndefined(opts.postfix)) ? '.tmp' : opts.postfix;
|
||||
|
||||
// gets a temporary filename
|
||||
tmpName(opts, function _tmpNameCreated(err, name) {
|
||||
if (err) return cb(err);
|
||||
|
||||
// create and open the file
|
||||
fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) {
|
||||
if (err) return cb(err);
|
||||
|
||||
if (opts.discardDescriptor) {
|
||||
return fs.close(fd, function _discardCallback(err) {
|
||||
if (err) {
|
||||
// Low probability, and the file exists, so this could be
|
||||
// ignored. If it isn't we certainly need to unlink the
|
||||
// file, and if that fails too its error is more
|
||||
// important.
|
||||
try {
|
||||
fs.unlinkSync(name);
|
||||
} catch (e) {
|
||||
if (!isENOENT(e)) {
|
||||
err = e;
|
||||
}
|
||||
}
|
||||
return cb(err);
|
||||
}
|
||||
cb(null, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts));
|
||||
});
|
||||
}
|
||||
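// with detachDescriptor the caller takes ownership of the open fd, so the remove
// callback gets -1 and will only unlink the file without trying to close it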
if (opts.detachDescriptor) {
|
||||
return cb(null, name, fd, _prepareTmpFileRemoveCallback(name, -1, opts));
|
||||
}
|
||||
cb(null, name, fd, _prepareTmpFileRemoveCallback(name, fd, opts));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronous version of file.
|
||||
*
|
||||
* @param {Options} options
|
||||
* @returns {FileSyncObject} object consists of name, fd and removeCallback
|
||||
* @throws {Error} if cannot create a file
|
||||
*/
|
||||
function fileSync(options) {
|
||||
var
|
||||
args = _parseArguments(options),
|
||||
opts = args[0];
|
||||
|
||||
opts.postfix = opts.postfix || '.tmp';
|
||||
|
||||
const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
|
||||
const name = tmpNameSync(opts);
|
||||
var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE);
|
||||
if (opts.discardDescriptor) {
|
||||
fs.closeSync(fd);
|
||||
fd = undefined;
|
||||
}
|
||||
|
||||
return {
|
||||
name: name,
|
||||
fd: fd,
|
||||
removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts)
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes files and folders in a directory recursively.
|
||||
*
|
||||
* @param {string} root
|
||||
* @private
|
||||
*/
|
||||
function _rmdirRecursiveSync(root) {
|
||||
const dirs = [root];
|
||||
|
||||
do {
|
||||
var
|
||||
dir = dirs.pop(),
|
||||
deferred = false,
|
||||
files = fs.readdirSync(dir);
|
||||
|
||||
for (var i = 0, length = files.length; i < length; i++) {
|
||||
var
|
||||
file = path.join(dir, files[i]),
|
||||
stat = fs.lstatSync(file); // lstat so we don't recurse into symlinked directories
|
||||
|
||||
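// a nested directory defers removal of the current one: push the current directory
// back onto the stack and revisit it once its children have been removed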
if (stat.isDirectory()) {
|
||||
if (!deferred) {
|
||||
deferred = true;
|
||||
dirs.push(dir);
|
||||
}
|
||||
dirs.push(file);
|
||||
} else {
|
||||
fs.unlinkSync(file);
|
||||
}
|
||||
}
|
||||
|
||||
if (!deferred) {
|
||||
fs.rmdirSync(dir);
|
||||
}
|
||||
} while (dirs.length !== 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a temporary directory.
|
||||
*
|
||||
* @param {(Options|dirCallback)} options the options or the callback function
|
||||
* @param {?dirCallback} callback
|
||||
*/
|
||||
function dir(options, callback) {
|
||||
var
|
||||
args = _parseArguments(options, callback),
|
||||
opts = args[0],
|
||||
cb = args[1];
|
||||
|
||||
// gets a temporary filename
|
||||
tmpName(opts, function _tmpNameCreated(err, name) {
|
||||
if (err) return cb(err);
|
||||
|
||||
// create the directory
|
||||
fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) {
|
||||
if (err) return cb(err);
|
||||
|
||||
cb(null, name, _prepareTmpDirRemoveCallback(name, opts));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronous version of dir.
|
||||
*
|
||||
* @param {Options} options
|
||||
* @returns {DirSyncObject} object consists of name and removeCallback
|
||||
* @throws {Error} if it cannot create a directory
|
||||
*/
|
||||
function dirSync(options) {
|
||||
var
|
||||
args = _parseArguments(options),
|
||||
opts = args[0];
|
||||
|
||||
const name = tmpNameSync(opts);
|
||||
fs.mkdirSync(name, opts.mode || DIR_MODE);
|
||||
|
||||
return {
|
||||
name: name,
|
||||
removeCallback: _prepareTmpDirRemoveCallback(name, opts)
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepares the callback for removal of the temporary file.
|
||||
*
|
||||
* @param {string} name the path of the file
|
||||
* @param {number} fd file descriptor
|
||||
* @param {Object} opts
|
||||
* @returns {fileCallback}
|
||||
* @private
|
||||
*/
|
||||
function _prepareTmpFileRemoveCallback(name, fd, opts) {
|
||||
const removeCallback = _prepareRemoveCallback(function _removeCallback(fdPath) {
|
||||
try {
|
||||
if (0 <= fdPath[0]) {
|
||||
fs.closeSync(fdPath[0]);
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
// under some node/windows related circumstances, a temporary file
|
||||
// may not have been created as expected or the file was already closed
|
||||
// by the user, in which case we will simply ignore the error
|
||||
if (!isEBADF(e) && !isENOENT(e)) {
|
||||
// reraise any unanticipated error
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
try {
|
||||
fs.unlinkSync(fdPath[1]);
|
||||
}
|
||||
catch (e) {
|
||||
if (!isENOENT(e)) {
|
||||
// reraise any unanticipated error
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}, [fd, name]);
|
||||
|
||||
if (!opts.keep) {
|
||||
_removeObjects.unshift(removeCallback);
|
||||
}
|
||||
|
||||
return removeCallback;
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepares the callback for removal of the temporary directory.
|
||||
*
|
||||
* @param {string} name
|
||||
* @param {Object} opts
|
||||
* @returns {Function} the callback
|
||||
* @private
|
||||
*/
|
||||
function _prepareTmpDirRemoveCallback(name, opts) {
|
||||
const removeFunction = opts.unsafeCleanup ? _rmdirRecursiveSync : fs.rmdirSync.bind(fs);
|
||||
const removeCallback = _prepareRemoveCallback(removeFunction, name);
|
||||
|
||||
if (!opts.keep) {
|
||||
_removeObjects.unshift(removeCallback);
|
||||
}
|
||||
|
||||
return removeCallback;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a guarded function wrapping the removeFunction call.
|
||||
*
|
||||
* @param {Function} removeFunction
|
||||
* @param {Object} arg
|
||||
* @returns {Function}
|
||||
* @private
|
||||
*/
|
||||
function _prepareRemoveCallback(removeFunction, arg) {
|
||||
var called = false;
|
||||
|
||||
return function _cleanupCallback(next) {
|
||||
if (!called) {
|
||||
const index = _removeObjects.indexOf(_cleanupCallback);
|
||||
if (index >= 0) {
|
||||
_removeObjects.splice(index, 1);
|
||||
}
|
||||
|
||||
called = true;
|
||||
removeFunction(arg);
|
||||
}
|
||||
|
||||
if (next) next(null);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* The garbage collector.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
function _garbageCollector() {
|
||||
if (_uncaughtException && !_gracefulCleanup) {
|
||||
return;
|
||||
}
|
||||
|
||||
// the function being called removes itself from _removeObjects,
|
||||
// loop until _removeObjects is empty
|
||||
while (_removeObjects.length) {
|
||||
try {
|
||||
_removeObjects[0].call(null);
|
||||
} catch (e) {
|
||||
// already removed?
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper for testing against EBADF to compensate for changes made to Node 7.x under Windows.
|
||||
*/
|
||||
function isEBADF(error) {
|
||||
return isExpectedError(error, -EBADF, 'EBADF');
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper for testing against ENOENT to compensate for changes made to Node 7.x under Windows.
|
||||
*/
|
||||
function isENOENT(error) {
|
||||
return isExpectedError(error, -ENOENT, 'ENOENT');
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to determine whether the expected error code matches the actual code and errno,
|
||||
* which will differ between the supported node versions.
|
||||
*
|
||||
* - Node >= 7.0:
|
||||
* error.code {String}
|
||||
* error.errno {String|Number} any numerical value will be negated
|
||||
*
|
||||
* - Node >= 6.0 < 7.0:
|
||||
* error.code {String}
|
||||
* error.errno {Number} negated
|
||||
*
|
||||
* - Node >= 4.0 < 6.0: introduces SystemError
|
||||
* error.code {String}
|
||||
* error.errno {Number} negated
|
||||
*
|
||||
* - Node >= 0.10 < 4.0:
|
||||
* error.code {Number} negated
|
||||
* error.errno n/a
|
||||
*/
|
||||
function isExpectedError(error, code, errno) {
|
||||
return error.code == code || error.code == errno;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the graceful cleanup.
|
||||
*
|
||||
* Also removes the created files and directories when an uncaught exception occurs.
|
||||
*/
|
||||
function setGracefulCleanup() {
|
||||
_gracefulCleanup = true;
|
||||
}
|
||||
|
||||
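// Node releases before 0.9.5 additionally hook 'uncaughtException' to flag the abnormal
// exit and run the collector before rethrowing; newer releases rely solely on the 'exit'
// handler below, which receives the exit code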
const version = process.versions.node.split('.').map(function (value) {
|
||||
return parseInt(value, 10);
|
||||
});
|
||||
|
||||
if (version[0] === 0 && (version[1] < 9 || version[1] === 9 && version[2] < 5)) {
|
||||
process.addListener('uncaughtException', function _uncaughtExceptionThrown(err) {
|
||||
_uncaughtException = true;
|
||||
_garbageCollector();
|
||||
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
process.addListener('exit', function _exit(code) {
|
||||
if (code) _uncaughtException = true;
|
||||
_garbageCollector();
|
||||
});
|
||||
|
||||
/**
|
||||
* Configuration options.
|
||||
*
|
||||
* @typedef {Object} Options
|
||||
* @property {?number} tries the number of tries before giving up on name generation
|
||||
* @property {?string} template the "mkstemp" like filename template
|
||||
* @property {?string} name fixed name to use instead of a generated one
|
||||
* @property {?string} dir the tmp directory to use
|
||||
* @property {?string} prefix prefix for the generated name
|
||||
* @property {?string} postfix postfix for the generated name
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} FileSyncObject
|
||||
* @property {string} name the name of the file
|
||||
* @property {number} fd the file descriptor
|
||||
* @property {fileCallback} removeCallback the callback function to remove the file
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} DirSyncObject
|
||||
* @property {string} name the name of the directory
|
||||
* @property {fileCallback} removeCallback the callback function to remove the directory
|
||||
*/
|
||||
|
||||
/**
|
||||
* @callback tmpNameCallback
|
||||
* @param {?Error} err the error object if anything goes wrong
|
||||
* @param {string} name the temporary file name
|
||||
*/
|
||||
|
||||
/**
|
||||
* @callback fileCallback
|
||||
* @param {?Error} err the error object if anything goes wrong
|
||||
* @param {string} name the temporary file name
|
||||
* @param {number} fd the file descriptor
|
||||
* @param {cleanupCallback} fn the cleanup callback function
|
||||
*/
|
||||
|
||||
/**
|
||||
* @callback dirCallback
|
||||
* @param {?Error} err the error object if anything goes wrong
|
||||
* @param {string} name the temporary file name
|
||||
* @param {cleanupCallback} fn the cleanup callback function
|
||||
*/
|
||||
|
||||
/**
|
||||
* Removes the created temporary file or directory.
|
||||
*
|
||||
* @callback cleanupCallback
|
||||
* @param {simpleCallback} [next] function to call after entry was removed
|
||||
*/
|
||||
|
||||
/**
|
||||
* Callback function for function composition.
|
||||
* @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57}
|
||||
*
|
||||
* @callback simpleCallback
|
||||
*/
|
||||
|
||||
// exporting all the needed methods
|
||||
module.exports.tmpdir = tmpDir;
|
||||
|
||||
module.exports.dir = dir;
|
||||
module.exports.dirSync = dirSync;
|
||||
|
||||
module.exports.file = file;
|
||||
module.exports.fileSync = fileSync;
|
||||
|
||||
module.exports.tmpName = tmpName;
|
||||
module.exports.tmpNameSync = tmpNameSync;
|
||||
|
||||
module.exports.setGracefulCleanup = setGracefulCleanup;
|
||||
20
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/package-lock.json
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"version": "6.7.0",
|
||||
"lockfileVersion": 1,
|
||||
"requires": true,
|
||||
"dependencies": {
|
||||
"os-tmpdir": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
|
||||
"integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ="
|
||||
},
|
||||
"tmp": {
|
||||
"version": "0.0.33",
|
||||
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
|
||||
"integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
|
||||
"requires": {
|
||||
"os-tmpdir": "1.0.2"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
1
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/package.json
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version": "6.7.0"}
|
||||
32
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/package.standalone.json
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"name": "protobufjs-cli",
|
||||
"description": "Translates between file formats and generates static code as well as TypeScript definitions.",
|
||||
"version": "6.7.0",
|
||||
"author": "Daniel Wirtz <dcode+protobufjs@dcode.io>",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dcodeIO/protobuf.js.git"
|
||||
},
|
||||
"license": "BSD-3-Clause",
|
||||
"main": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"bin": {
|
||||
"pbjs": "bin/pbjs",
|
||||
"pbts": "bin/pbts"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"protobufjs": "~6.7.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"chalk": "^1.1.3",
|
||||
"escodegen": "^1.8.1",
|
||||
"espree": "^3.1.3",
|
||||
"estraverse": "^4.2.0",
|
||||
"glob": "^7.1.1",
|
||||
"jsdoc": "^3.4.2",
|
||||
"minimist": "^1.2.0",
|
||||
"semver": "^5.3.0",
|
||||
"tmp": "0.0.31",
|
||||
"uglify-js": "^2.8.15"
|
||||
}
|
||||
}
|
||||
9
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/pbjs.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
type pbjsCallback = (err: Error|null, output?: string) => void;
|
||||
|
||||
/**
|
||||
* Runs pbjs programmatically.
|
||||
* @param {string[]} args Command line arguments
|
||||
* @param {function(?Error, string=)} [callback] Optional completion callback
|
||||
* @returns {number|undefined} Exit code, if known
|
||||
*/
|
||||
export function main(args: string[], callback?: pbjsCallback): number|undefined;
|
||||
329
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/pbjs.js
generated
vendored
Normal file
@@ -0,0 +1,329 @@
|
||||
"use strict";
|
||||
var path = require("path"),
|
||||
fs = require("fs"),
|
||||
pkg = require("./package.json"),
|
||||
util = require("./util");
|
||||
|
||||
util.setup();
|
||||
|
||||
var protobuf = require(util.pathToProtobufJs),
|
||||
minimist = require("minimist"),
|
||||
chalk = require("chalk"),
|
||||
glob = require("glob");
|
||||
|
||||
var targets = util.requireAll("./targets");
|
||||
|
||||
/**
|
||||
* Runs pbjs programmatically.
|
||||
* @param {string[]} args Command line arguments
|
||||
* @param {function(?Error, string=)} [callback] Optional completion callback
|
||||
* @returns {number|undefined} Exit code, if known
|
||||
*/
|
||||
exports.main = function main(args, callback) {
|
||||
var lintDefault = "eslint-disable " + [
|
||||
"block-scoped-var",
|
||||
"id-length",
|
||||
"no-control-regex",
|
||||
"no-magic-numbers",
|
||||
"no-prototype-builtins",
|
||||
"no-redeclare",
|
||||
"no-shadow",
|
||||
"no-var",
|
||||
"sort-vars"
|
||||
].join(", ");
|
||||
var argv = minimist(args, {
|
||||
alias: {
|
||||
target: "t",
|
||||
out: "o",
|
||||
path: "p",
|
||||
wrap: "w",
|
||||
root: "r",
|
||||
lint: "l",
|
||||
// backward compatibility:
|
||||
"force-long": "strict-long",
|
||||
"force-message": "strict-message"
|
||||
},
|
||||
string: [ "target", "out", "path", "wrap", "dependency", "root", "lint" ],
|
||||
boolean: [ "create", "encode", "decode", "verify", "convert", "delimited", "beautify", "comments", "es6", "sparse", "keep-case", "force-long", "force-number", "force-enum-string", "force-message" ],
|
||||
default: {
|
||||
target: "json",
|
||||
create: true,
|
||||
encode: true,
|
||||
decode: true,
|
||||
verify: true,
|
||||
convert: true,
|
||||
delimited: true,
|
||||
beautify: true,
|
||||
comments: true,
|
||||
es6: null,
|
||||
lint: lintDefault,
|
||||
"keep-case": false,
|
||||
"force-long": false,
|
||||
"force-number": false,
|
||||
"force-enum-string": false,
|
||||
"force-message": false
|
||||
}
|
||||
});
|
||||
|
||||
var target = targets[argv.target],
|
||||
files = argv._,
|
||||
paths = typeof argv.path === "string" ? [ argv.path ] : argv.path || [];
|
||||
|
||||
// alias hyphen args in camel case
|
||||
Object.keys(argv).forEach(function(key) {
|
||||
var camelKey = key.replace(/-([a-z])/g, function($0, $1) { return $1.toUpperCase(); });
|
||||
if (camelKey !== key)
|
||||
argv[camelKey] = argv[key];
|
||||
});
|
||||
|
||||
// protobuf.js package directory contains additional, otherwise non-bundled google types
|
||||
paths.push(path.relative(process.cwd(), path.join(__dirname, "..")) || ".");
|
||||
|
||||
if (!files.length) {
|
||||
var descs = Object.keys(targets).filter(function(key) { return !targets[key].private; }).map(function(key) {
|
||||
return " " + util.pad(key, 14, true) + targets[key].description;
|
||||
});
|
||||
if (callback)
|
||||
callback(Error("usage")); // eslint-disable-line callback-return
|
||||
else
|
||||
process.stderr.write([
|
||||
"protobuf.js v" + pkg.version + " CLI for JavaScript",
|
||||
"",
|
||||
chalk.bold.white("Translates between file formats and generates static code."),
|
||||
"",
|
||||
" -t, --target Specifies the target format. Also accepts a path to require a custom target.",
|
||||
"",
|
||||
descs.join("\n"),
|
||||
"",
|
||||
" -p, --path Adds a directory to the include path.",
|
||||
"",
|
||||
" -o, --out Saves to a file instead of writing to stdout.",
|
||||
"",
|
||||
" --sparse Exports only those types referenced from a main file (experimental).",
|
||||
"",
|
||||
chalk.bold.gray(" Module targets only:"),
|
||||
"",
|
||||
" -w, --wrap Specifies the wrapper to use. Also accepts a path to require a custom wrapper.",
|
||||
"",
|
||||
" default Default wrapper supporting both CommonJS and AMD",
|
||||
" commonjs CommonJS wrapper",
|
||||
" amd AMD wrapper",
|
||||
" es6 ES6 wrapper (implies --es6)",
|
||||
" closure A closure adding to protobuf.roots where protobuf is a global",
|
||||
"",
|
||||
" --dependency Specifies which version of protobuf to require. Accepts any valid module id",
|
||||
"",
|
||||
" -r, --root Specifies an alternative protobuf.roots name.",
|
||||
"",
|
||||
" -l, --lint Linter configuration. Defaults to protobuf.js-compatible rules:",
|
||||
"",
|
||||
" " + lintDefault,
|
||||
"",
|
||||
" --es6 Enables ES6 syntax (const/let instead of var)",
|
||||
"",
|
||||
chalk.bold.gray(" Proto sources only:"),
|
||||
"",
|
||||
" --keep-case Keeps field casing instead of converting to camel case.",
|
||||
"",
|
||||
chalk.bold.gray(" Static targets only:"),
|
||||
"",
|
||||
" --no-create Does not generate create functions used for reflection compatibility.",
|
||||
" --no-encode Does not generate encode functions.",
|
||||
" --no-decode Does not generate decode functions.",
|
||||
" --no-verify Does not generate verify functions.",
|
||||
" --no-convert Does not generate convert functions like from/toObject",
|
||||
" --no-delimited Does not generate delimited encode/decode functions.",
|
||||
" --no-beautify Does not beautify generated code.",
|
||||
" --no-comments Does not output any JSDoc comments.",
|
||||
"",
|
||||
" --force-long Enforces the use of 'Long' for s-/u-/int64 and s-/fixed64 fields.",
|
||||
" --force-number Enforces the use of 'number' for s-/u-/int64 and s-/fixed64 fields.",
|
||||
" --force-message Enforces the use of message instances instead of plain objects.",
|
||||
"",
|
||||
"usage: " + chalk.bold.green("pbjs") + " [options] file1.proto file2.json ..." + chalk.gray(" (or pipe) ") + "other | " + chalk.bold.green("pbjs") + " [options] -",
|
||||
""
|
||||
].join("\n"));
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (typeof argv["strict-long"] === "boolean")
|
||||
argv["force-long"] = argv["strict-long"];
|
||||
|
||||
// Resolve glob expressions
|
||||
for (var i = 0; i < files.length;) {
|
||||
if (glob.hasMagic(files[i])) {
|
||||
var matches = glob.sync(files[i]);
|
||||
Array.prototype.splice.apply(files, [i, 1].concat(matches));
|
||||
i += matches.length;
|
||||
} else
|
||||
++i;
|
||||
}
|
||||
|
||||
// Require custom target
|
||||
if (!target)
|
||||
target = require(path.resolve(process.cwd(), argv.target));
|
||||
|
||||
var root = new protobuf.Root();
|
||||
|
||||
var mainFiles = [];
|
||||
|
||||
// Search include paths when resolving imports
|
||||
root.resolvePath = function pbjsResolvePath(origin, target) {
|
||||
var normOrigin = protobuf.util.path.normalize(origin),
|
||||
normTarget = protobuf.util.path.normalize(target);
|
||||
if (!normOrigin)
|
||||
mainFiles.push(normTarget);
|
||||
|
||||
var resolved = protobuf.util.path.resolve(normOrigin, normTarget, true);
|
||||
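// map well-known google/protobuf/* imports to the definitions bundled with protobuf.js
// when available, rather than requiring them to exist on disk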
var idx = resolved.lastIndexOf("google/protobuf/");
|
||||
if (idx > -1) {
|
||||
var altname = resolved.substring(idx);
|
||||
if (altname in protobuf.common)
|
||||
resolved = altname;
|
||||
}
|
||||
|
||||
if (fs.existsSync(resolved))
|
||||
return resolved;
|
||||
|
||||
for (var i = 0; i < paths.length; ++i) {
|
||||
var iresolved = protobuf.util.path.resolve(paths[i] + "/", target);
|
||||
if (fs.existsSync(iresolved))
|
||||
return iresolved;
|
||||
}
|
||||
|
||||
return resolved;
|
||||
};
|
||||
|
||||
// Use es6 syntax if not explicitly specified on the command line and the es6 wrapper is used
|
||||
if (argv.wrap === "es6" || argv.es6) {
|
||||
argv.wrap = "es6";
|
||||
argv.es6 = true;
|
||||
}
|
||||
|
||||
var parseOptions = {
|
||||
"keepCase": argv["keep-case"] || false
|
||||
};
|
||||
|
||||
// Read from stdin
|
||||
if (files.length === 1 && files[0] === "-") {
|
||||
var data = [];
|
||||
process.stdin.on("data", function(chunk) {
|
||||
data.push(chunk);
|
||||
});
|
||||
process.stdin.on("end", function() {
|
||||
var source = Buffer.concat(data).toString("utf8");
|
||||
try {
|
||||
if (source.charAt(0) !== "{") {
|
||||
protobuf.parse.filename = "-";
|
||||
protobuf.parse(source, root, parseOptions);
|
||||
} else {
|
||||
var json = JSON.parse(source);
|
||||
root.setOptions(json.options).addJSON(json);
|
||||
}
|
||||
callTarget();
|
||||
} catch (err) {
|
||||
if (callback) {
|
||||
callback(err);
|
||||
return;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
});
|
||||
|
||||
// Load from disk
|
||||
} else {
|
||||
try {
|
||||
root.loadSync(files, parseOptions).resolveAll(); // sync is deterministic while async is not
|
||||
if (argv.sparse)
|
||||
sparsify(root);
|
||||
callTarget();
|
||||
} catch (err) {
|
||||
if (callback) {
|
||||
callback(err);
|
||||
return undefined;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
function markReferenced(tobj) {
|
||||
tobj.referenced = true;
|
||||
// also mark a type's fields and oneofs
|
||||
if (tobj.fieldsArray)
|
||||
tobj.fieldsArray.forEach(function(fobj) {
|
||||
fobj.referenced = true;
|
||||
});
|
||||
if (tobj.oneofsArray)
|
||||
tobj.oneofsArray.forEach(function(oobj) {
|
||||
oobj.referenced = true;
|
||||
});
|
||||
// also mark an extension field's extended type, but not its (other) fields
|
||||
if (tobj.extensionField)
|
||||
tobj.extensionField.parent.referenced = true;
|
||||
}
|
||||
|
||||
function sparsify(root) {
|
||||
|
||||
// 1. mark directly or indirectly referenced objects
|
||||
util.traverse(root, function(obj) {
|
||||
if (!obj.filename)
|
||||
return;
|
||||
if (mainFiles.indexOf(obj.filename) > -1)
|
||||
util.traverseResolved(obj, markReferenced);
|
||||
});
|
||||
|
||||
// 2. empty unreferenced objects
|
||||
util.traverse(root, function(obj) {
|
||||
var parent = obj.parent;
|
||||
if (!parent || obj.referenced) // root or referenced
|
||||
return;
|
||||
// remove unreferenced namespaces
|
||||
if (obj instanceof protobuf.Namespace) {
|
||||
var hasReferenced = false;
|
||||
util.traverse(obj, function(iobj) {
|
||||
if (iobj.referenced)
|
||||
hasReferenced = true;
|
||||
});
|
||||
if (hasReferenced) { // replace with plain namespace if a namespace subclass
|
||||
if (obj instanceof protobuf.Type || obj instanceof protobuf.Service) {
|
||||
var robj = new protobuf.Namespace(obj.name, obj.options);
|
||||
robj.nested = obj.nested;
|
||||
parent.add(robj);
|
||||
}
|
||||
} else // remove completely if nothing inside is referenced
|
||||
parent.remove(obj);
|
||||
|
||||
// remove everything else unreferenced
|
||||
} else if (!(obj instanceof protobuf.Namespace))
|
||||
parent.remove(obj);
|
||||
});
|
||||
|
||||
// 3. validate that everything is fine
|
||||
root.resolveAll();
|
||||
}
|
||||
|
||||
function callTarget() {
|
||||
target(root, argv, function targetCallback(err, output) {
|
||||
if (err) {
|
||||
if (callback)
|
||||
return callback(err);
|
||||
throw err;
|
||||
}
|
||||
try {
|
||||
if (argv.out)
|
||||
fs.writeFileSync(argv.out, output, { encoding: "utf8" });
|
||||
else if (!callback)
|
||||
process.stdout.write(output, "utf8");
|
||||
return callback
|
||||
? callback(null, output)
|
||||
: undefined;
|
||||
} catch (err) {
|
||||
if (callback)
|
||||
return callback(err);
|
||||
throw err;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return undefined;
|
||||
};
|
||||
9
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/pbts.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
type pbtsCallback = (err: Error|null, output?: string) => void;
|
||||
|
||||
/**
|
||||
* Runs pbts programmatically.
|
||||
* @param {string[]} args Command line arguments
|
||||
* @param {function(?Error, string=)} [callback] Optional completion callback
|
||||
* @returns {number|undefined} Exit code, if known
|
||||
*/
|
||||
export function main(args: string[], callback?: pbtsCallback): number|undefined;
|
||||
197
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/pbts.js
generated
vendored
Normal file
@@ -0,0 +1,197 @@
|
||||
"use strict";
|
||||
var child_process = require("child_process"),
|
||||
path = require("path"),
|
||||
fs = require("fs"),
|
||||
pkg = require("./package.json"),
|
||||
util = require("./util");
|
||||
|
||||
util.setup();
|
||||
|
||||
var minimist = require("minimist"),
|
||||
chalk = require("chalk"),
|
||||
glob = require("glob"),
|
||||
tmp = require("tmp");
|
||||
|
||||
/**
|
||||
* Runs pbts programmatically.
|
||||
* @param {string[]} args Command line arguments
|
||||
* @param {function(?Error, string=)} [callback] Optional completion callback
|
||||
* @returns {number|undefined} Exit code, if known
|
||||
*/
|
||||
exports.main = function(args, callback) {
|
||||
var argv = minimist(args, {
|
||||
alias: {
|
||||
name: "n",
|
||||
out : "o",
|
||||
main: "m",
|
||||
global: "g",
|
||||
import: "i"
|
||||
},
|
||||
string: [ "name", "out", "global", "import" ],
|
||||
boolean: [ "comments", "main" ],
|
||||
default: {
|
||||
comments: true,
|
||||
main: false
|
||||
}
|
||||
});
|
||||
|
||||
var files = argv._;
|
||||
|
||||
if (!files.length) {
|
||||
if (callback)
|
||||
callback(Error("usage")); // eslint-disable-line callback-return
|
||||
else
|
||||
process.stderr.write([
|
||||
"protobuf.js v" + pkg.version + " CLI for TypeScript",
|
||||
"",
|
||||
chalk.bold.white("Generates TypeScript definitions from annotated JavaScript files."),
|
||||
"",
|
||||
" -o, --out Saves to a file instead of writing to stdout.",
|
||||
"",
|
||||
" -g, --global Name of the global object in browser environments, if any.",
|
||||
"",
|
||||
" -i, --import Comma delimited list of imports. Local names will equal camelCase of the basename.",
|
||||
"",
|
||||
" --no-comments Does not output any JSDoc comments.",
|
||||
"",
|
||||
chalk.bold.gray(" Internal flags:"),
|
||||
"",
|
||||
" -n, --name Wraps everything in a module of the specified name.",
|
||||
"",
|
||||
" -m, --main Whether building the main library without any imports.",
|
||||
"",
|
||||
"usage: " + chalk.bold.green("pbts") + " [options] file1.js file2.js ..." + chalk.bold.gray(" (or) ") + "other | " + chalk.bold.green("pbts") + " [options] -",
|
||||
""
|
||||
].join("\n"));
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Resolve glob expressions
|
||||
for (var i = 0; i < files.length;) {
|
||||
if (glob.hasMagic(files[i])) {
|
||||
var matches = glob.sync(files[i]);
|
||||
Array.prototype.splice.apply(files, [i, 1].concat(matches));
|
||||
i += matches.length;
|
||||
} else
|
||||
++i;
|
||||
}
|
||||
|
||||
var cleanup = [];
|
||||
|
||||
// Read from stdin (to a temporary file)
|
||||
if (files.length === 1 && files[0] === "-") {
|
||||
var data = [];
|
||||
process.stdin.on("data", function(chunk) {
|
||||
data.push(chunk);
|
||||
});
|
||||
process.stdin.on("end", function() {
|
||||
files[0] = tmp.tmpNameSync() + ".js";
|
||||
fs.writeFileSync(files[0], Buffer.concat(data));
|
||||
cleanup.push(files[0]);
|
||||
callJsdoc();
|
||||
});
|
||||
|
||||
// Load from disk
|
||||
} else {
|
||||
callJsdoc();
|
||||
}
|
||||
|
||||
function callJsdoc() {
|
||||
|
||||
// There is no proper API for jsdoc, so this executes the CLI and pipes the output
|
||||
var basedir = path.join(__dirname, ".");
|
||||
var moduleName = argv.name || "null";
|
||||
var nodePath = process.execPath;
|
||||
var cmd = "\"" + nodePath + "\" \"" + require.resolve("jsdoc/jsdoc.js") + "\" -c \"" + path.join(basedir, "lib", "tsd-jsdoc.json") + "\" -q \"module=" + encodeURIComponent(moduleName) + "&comments=" + Boolean(argv.comments) + "\" " + files.map(function(file) { return "\"" + file + "\""; }).join(" ");
|
||||
var child = child_process.exec(cmd, {
|
||||
cwd: process.cwd(),
|
||||
argv0: "node",
|
||||
stdio: "pipe",
|
||||
maxBuffer: 1 << 24 // 16mb
|
||||
});
|
||||
var out = [];
|
||||
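// finish() must wait for both the child's 'close' event and the end of its stdout
// stream, in whichever order they arrive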
var ended = false;
|
||||
var closed = false;
|
||||
child.stdout.on("data", function(data) {
|
||||
out.push(data);
|
||||
});
|
||||
child.stdout.on("end", function() {
|
||||
if (closed) finish();
|
||||
else ended = true;
|
||||
});
|
||||
child.stderr.pipe(process.stderr);
|
||||
child.on("close", function(code) {
|
||||
// clean up temporary files, no matter what
|
||||
try { cleanup.forEach(fs.unlinkSync); } catch(e) {/**/} cleanup = [];
|
||||
|
||||
if (code) {
|
||||
out = out.join("").replace(/\s*JSDoc \d+\.\d+\.\d+ [^$]+/, "");
|
||||
process.stderr.write(out);
|
||||
var err = Error("code " + code);
|
||||
if (callback)
|
||||
return callback(err);
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (ended) return finish();
|
||||
closed = true;
|
||||
return undefined;
|
||||
});
|
||||
|
||||
function getImportName(importItem) {
|
||||
return path.basename(importItem, ".js").replace(/([-_~.+]\w)/g, function(match) {
|
||||
return match[1].toUpperCase();
|
||||
});
|
||||
}
|
||||
|
||||
function finish() {
|
||||
var output = [];
|
||||
if (argv.main)
|
||||
output.push(
|
||||
"// DO NOT EDIT! This is a generated file. Edit the JSDoc in src/*.js instead and run 'npm run types'.",
|
||||
""
|
||||
);
|
||||
if (argv.global)
|
||||
output.push(
|
||||
"export as namespace " + argv.global + ";",
|
||||
""
|
||||
);
|
||||
|
||||
if (!argv.main) {
|
||||
// Ensure we have a usable array of imports
|
||||
var importArray = typeof argv.import === "string" ? argv.import.split(",") : argv.import || [];
|
||||
|
||||
// Build an object of imports and paths
|
||||
var imports = {
|
||||
$protobuf: "protobufjs"
|
||||
};
|
||||
importArray.forEach(function(importItem) {
|
||||
imports[getImportName(importItem)] = importItem;
|
||||
});
|
||||
|
||||
// Write out the imports
|
||||
Object.keys(imports).forEach(function(key) {
|
||||
output.push("import * as " + key + " from \"" + imports[key] + "\";");
|
||||
});
|
||||
}
|
||||
|
||||
output = output.join("\n") + "\n" + out.join("");
|
||||
|
||||
try {
|
||||
if (argv.out)
|
||||
fs.writeFileSync(argv.out, output, { encoding: "utf8" });
|
||||
else if (!callback)
|
||||
process.stdout.write(output, "utf8");
|
||||
return callback
|
||||
? callback(null, output)
|
||||
: undefined;
|
||||
} catch (err) {
|
||||
if (callback)
|
||||
return callback(err);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
};
|
||||
38
express-server/node_modules/@google-cloud/firestore/node_modules/protobufjs/cli/targets/json-module.js
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
"use strict";
|
||||
module.exports = json_module;
|
||||
|
||||
var util = require("../util");
|
||||
|
||||
var protobuf = require("../..");
|
||||
|
||||
json_module.description = "JSON representation as a module";
|
||||
|
||||
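// drops the quotes around JSON keys that are valid identifiers (safeProp yields dot
// notation for those), keeping the generated module readable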
function jsonSafeProp(json) {
|
||||
return json.replace(/^( +)"(\w+)":/mg, function($0, $1, $2) {
|
||||
return protobuf.util.safeProp($2).charAt(0) === "."
|
||||
? $1 + $2 + ":"
|
||||
: $0;
|
||||
});
|
||||
}
|
||||
|
||||
function json_module(root, options, callback) {
|
||||
try {
|
||||
var rootProp = protobuf.util.safeProp(options.root || "default");
|
||||
var output = [
|
||||
(options.es6 ? "const" : "var") + " $root = ($protobuf.roots" + rootProp + " || ($protobuf.roots" + rootProp + " = new $protobuf.Root()))\n"
|
||||
];
|
||||
if (root.options) {
|
||||
var optionsJson = jsonSafeProp(JSON.stringify(root.options, null, 2));
|
||||
output.push(".setOptions(" + optionsJson + ")\n");
|
||||
}
|
||||
var json = jsonSafeProp(JSON.stringify(root.nested, null, 2).trim());
|
||||
output.push(".addJSON(" + json + ");");
|
||||
output = util.wrap(output.join(""), protobuf.util.merge({ dependency: "protobufjs/light" }, options));
|
||||
process.nextTick(function() {
|
||||
callback(null, output);
|
||||
});
|
||||
} catch (e) {
|
||||
return callback(e);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||