Firebase Update
26 express-server/node_modules/@google-cloud/storage/CONTRIBUTORS generated vendored Normal file
@@ -0,0 +1,26 @@
# The names of individuals who have contributed to this project.
#
# Names are formatted as:
#   name <email>
#
Ace Nassri <anassri@google.com>
Alexander Borovykh <immaculate.pine@gmail.com>
Alexander Fenster <github@fenster.name>
Calvin Metcalf <calvin.metcalf@gmail.com>
Colin Ihrig <cjihrig@gmail.com>
Cristian Almstrand <almstrand@users.noreply.github.com>
Dave Gramlich <callmehiphop@gmail.com>
Dominic Valenciana <kiricon@live.com>
Eric Uldall <ericuldall@gmail.com>
Ernest Landrito <landrito@google.com>
Frank Natividad <frankyn@users.noreply.github.com>
Jason Dobry <jason.dobry@gmail.com>
Jason Dobry <jmdobry@users.noreply.github.com>
Justin Sprigg <justin.sprigg@gmail.com>
Luke Sneeringer <luke@sneeringer.com>
Stephen <stephenplusplus@users.noreply.github.com>
Stephen Sawchuk <sawchuk@gmail.com>
Stephen Sawchuk <stephenplusplus@users.noreply.github.com>
Tyler Johnson <mail@tyler-johnson.ca>
Zach Bjornson <bjornson@stanford.edu>
greenkeeper[bot] <greenkeeper[bot]@users.noreply.github.com>
202 express-server/node_modules/@google-cloud/storage/LICENSE generated vendored Normal file
@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
134 express-server/node_modules/@google-cloud/storage/README.md generated vendored Normal file
@@ -0,0 +1,134 @@
<img src="https://avatars2.githubusercontent.com/u/2810941?v=3&s=96" alt="Google Cloud Platform logo" title="Google Cloud Platform" align="right" height="96" width="96"/>

# [Google Cloud Storage: Node.js Client](https://github.com/googleapis/nodejs-storage)

[](https://cloud.google.com/terms/launch-stages)
[](https://circleci.com/gh/googleapis/nodejs-storage)
[](https://ci.appveyor.com/project/googleapis/nodejs-storage)
[](https://codecov.io/gh/googleapis/nodejs-storage)

> Node.js idiomatic client for [Cloud Storage][product-docs].

[Cloud Storage](https://cloud.google.com/storage/docs) allows world-wide storage and retrieval of any amount of data at any time. You can use Google Cloud Storage for a range of scenarios including serving website content, storing data for archival and disaster recovery, or distributing large data objects to users via direct download.

* [Cloud Storage Node.js Client API Reference][client-docs]
* [github.com/googleapis/nodejs-storage](https://github.com/googleapis/nodejs-storage)
* [Cloud Storage Documentation][product-docs]

Read more about the client libraries for Cloud APIs, including the older
Google APIs Client Libraries, in [Client Libraries Explained][explained].

[explained]: https://cloud.google.com/apis/docs/client-libraries-explained

**Table of contents:**

* [Quickstart](#quickstart)
  * [Before you begin](#before-you-begin)
  * [Installing the client library](#installing-the-client-library)
  * [Using the client library](#using-the-client-library)
* [Samples](#samples)
* [Versioning](#versioning)
* [Contributing](#contributing)
* [License](#license)

## Quickstart

### Before you begin

1. Select or create a Cloud Platform project.

    [Go to the projects page][projects]

1. Enable billing for your project.

    [Enable billing][billing]

1. Enable the Google Cloud Storage API.

    [Enable the API][enable_api]

1. [Set up authentication with a service account][auth] so you can access the
   API from your local workstation.

[projects]: https://console.cloud.google.com/project
[billing]: https://support.google.com/cloud/answer/6293499#enable-billing
[enable_api]: https://console.cloud.google.com/flows/enableapi?apiid=storage-api.googleapis.com
[auth]: https://cloud.google.com/docs/authentication/getting-started

### Installing the client library

    npm install --save @google-cloud/storage

### Using the client library

```javascript
// Imports the Google Cloud client library
const Storage = require('@google-cloud/storage');

// Your Google Cloud Platform project ID
const projectId = 'YOUR_PROJECT_ID';

// Creates a client
const storage = new Storage({
  projectId: projectId,
});

// The name for the new bucket
const bucketName = 'my-new-bucket';

// Creates the new bucket
storage
  .createBucket(bucketName)
  .then(() => {
    console.log(`Bucket ${bucketName} created.`);
  })
  .catch(err => {
    console.error('ERROR:', err);
  });
```

## Samples

Samples are in the [`samples/`](https://github.com/googleapis/nodejs-storage/tree/master/samples) directory. The samples' `README.md`
has instructions for running the samples.

| Sample | Source Code | Try it |
| --------------------------- | --------------------------------- | ------ |
| ACL (Access Control Lists) | [source code](https://github.com/googleapis/nodejs-storage/blob/master/samples/acl.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/acl.js,samples/README.md) |
| Buckets | [source code](https://github.com/googleapis/nodejs-storage/blob/master/samples/buckets.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/buckets.js,samples/README.md) |
| Encryption | [source code](https://github.com/googleapis/nodejs-storage/blob/master/samples/encryption.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/encryption.js,samples/README.md) |
| Files | [source code](https://github.com/googleapis/nodejs-storage/blob/master/samples/files.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/files.js,samples/README.md) |
| Notifications | [source code](https://github.com/googleapis/nodejs-storage/blob/master/samples/notifications.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/notifications.js,samples/README.md) |
| Requester Pays | [source code](https://github.com/googleapis/nodejs-storage/blob/master/samples/requesterPays.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-storage&page=editor&open_in_editor=samples/requesterPays.js,samples/README.md) |

The [Cloud Storage Node.js Client API Reference][client-docs] documentation
also contains samples.

## Versioning

This library follows [Semantic Versioning](http://semver.org/).

This library is considered to be **General Availability (GA)**. This means it
is stable; the code surface will not change in backwards-incompatible ways
unless absolutely necessary (e.g. because of critical security issues) or with
an extensive deprecation period. Issues and requests against **GA** libraries
are addressed with the highest priority.

More Information: [Google Cloud Platform Launch Stages][launch_stages]

[launch_stages]: https://cloud.google.com/terms/launch-stages

## Contributing

Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/nodejs-storage/blob/master/.github/CONTRIBUTING.md).

## License

Apache Version 2.0

See [LICENSE](https://github.com/googleapis/nodejs-storage/blob/master/LICENSE)

[client-docs]: https://cloud.google.com/nodejs/docs/reference/storage/latest/
[product-docs]: https://cloud.google.com/storage/docs
[shell_img]: //gstatic.com/cloudssh/images/open-btn.png
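The vendored quickstart above only creates a bucket. As a point of reference, a minimal sketch (not part of the committed README) of uploading a local file with the same client follows; it assumes Application Default Credentials, and the project ID, bucket name, and file path are placeholders:

```javascript
// Illustrative sketch only; assumes the vendored @google-cloud/storage package
// above and a bucket that already exists. All names are placeholders.
const Storage = require('@google-cloud/storage');

const storage = new Storage({projectId: 'YOUR_PROJECT_ID'});
const bucketName = 'my-new-bucket';

// Upload a local file into the bucket, then list the bucket's objects.
storage
  .bucket(bucketName)
  .upload('./local-report.csv')
  .then(() => storage.bucket(bucketName).getFiles())
  .then(([files]) => {
    files.forEach(file => console.log(file.name));
  })
  .catch(err => console.error('ERROR:', err));
```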
15 express-server/node_modules/@google-cloud/storage/node_modules/.bin/mime generated vendored Normal file
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../mime/cli.js" "$@"
  ret=$?
else
  node "$basedir/../mime/cli.js" "$@"
  ret=$?
fi
exit $ret
7 express-server/node_modules/@google-cloud/storage/node_modules/.bin/mime.cmd generated vendored Normal file
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\mime\cli.js" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\mime\cli.js" %*
)
1 express-server/node_modules/@google-cloud/storage/node_modules/isarray/.npmignore generated vendored Normal file
@@ -0,0 +1 @@
node_modules
4 express-server/node_modules/@google-cloud/storage/node_modules/isarray/.travis.yml generated vendored Normal file
@@ -0,0 +1,4 @@
language: node_js
node_js:
  - "0.8"
  - "0.10"
6 express-server/node_modules/@google-cloud/storage/node_modules/isarray/Makefile generated vendored Normal file
@@ -0,0 +1,6 @@

test:
	@node_modules/.bin/tape test.js

.PHONY: test
60 express-server/node_modules/@google-cloud/storage/node_modules/isarray/README.md generated vendored Normal file
@@ -0,0 +1,60 @@
# isarray

`Array#isArray` for older browsers.

[](http://travis-ci.org/juliangruber/isarray)
[](https://www.npmjs.org/package/isarray)

[
](https://ci.testling.com/juliangruber/isarray)

## Usage

```js
var isArray = require('isarray');

console.log(isArray([])); // => true
console.log(isArray({})); // => false
```

## Installation

With [npm](http://npmjs.org) do

```bash
$ npm install isarray
```

Then bundle for the browser with
[browserify](https://github.com/substack/browserify).

With [component](http://component.io) do

```bash
$ component install juliangruber/isarray
```

## License

(MIT)

Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
19 express-server/node_modules/@google-cloud/storage/node_modules/isarray/component.json generated vendored Normal file
@@ -0,0 +1,19 @@
{
  "name" : "isarray",
  "description" : "Array#isArray for older browsers",
  "version" : "0.0.1",
  "repository" : "juliangruber/isarray",
  "homepage": "https://github.com/juliangruber/isarray",
  "main" : "index.js",
  "scripts" : [
    "index.js"
  ],
  "dependencies" : {},
  "keywords": ["browser","isarray","array"],
  "author": {
    "name": "Julian Gruber",
    "email": "mail@juliangruber.com",
    "url": "http://juliangruber.com"
  },
  "license": "MIT"
}
5 express-server/node_modules/@google-cloud/storage/node_modules/isarray/index.js generated vendored Normal file
@@ -0,0 +1,5 @@
var toString = {}.toString;

module.exports = Array.isArray || function (arr) {
  return toString.call(arr) == '[object Array]';
};
73 express-server/node_modules/@google-cloud/storage/node_modules/isarray/package.json generated vendored Normal file
@@ -0,0 +1,73 @@
{
  "_from": "isarray@~1.0.0",
  "_id": "isarray@1.0.0",
  "_inBundle": false,
  "_integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=",
  "_location": "/@google-cloud/storage/isarray",
  "_phantomChildren": {},
  "_requested": {
    "type": "range",
    "registry": true,
    "raw": "isarray@~1.0.0",
    "name": "isarray",
    "escapedName": "isarray",
    "rawSpec": "~1.0.0",
    "saveSpec": null,
    "fetchSpec": "~1.0.0"
  },
  "_requiredBy": [
    "/@google-cloud/storage/readable-stream"
  ],
  "_resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
  "_shasum": "bb935d48582cba168c06834957a54a3e07124f11",
  "_spec": "isarray@~1.0.0",
  "_where": "D:\\Desktop\\Git\\Firebase\\SmartShopperFirebase\\node_modules\\@google-cloud\\storage\\node_modules\\readable-stream",
  "author": {
    "name": "Julian Gruber",
    "email": "mail@juliangruber.com",
    "url": "http://juliangruber.com"
  },
  "bugs": {
    "url": "https://github.com/juliangruber/isarray/issues"
  },
  "bundleDependencies": false,
  "dependencies": {},
  "deprecated": false,
  "description": "Array#isArray for older browsers",
  "devDependencies": {
    "tape": "~2.13.4"
  },
  "homepage": "https://github.com/juliangruber/isarray",
  "keywords": [
    "browser",
    "isarray",
    "array"
  ],
  "license": "MIT",
  "main": "index.js",
  "name": "isarray",
  "repository": {
    "type": "git",
    "url": "git://github.com/juliangruber/isarray.git"
  },
  "scripts": {
    "test": "tape test.js"
  },
  "testling": {
    "files": "test.js",
    "browsers": [
      "ie/8..latest",
      "firefox/17..latest",
      "firefox/nightly",
      "chrome/22..latest",
      "chrome/canary",
      "opera/12..latest",
      "opera/next",
      "safari/5.1..latest",
      "ipad/6.0..latest",
      "iphone/6.0..latest",
      "android-browser/4.2..latest"
    ]
  },
  "version": "1.0.0"
}
20 express-server/node_modules/@google-cloud/storage/node_modules/isarray/test.js generated vendored Normal file
@@ -0,0 +1,20 @@
var isArray = require('./');
var test = require('tape');

test('is array', function(t){
  t.ok(isArray([]));
  t.notOk(isArray({}));
  t.notOk(isArray(null));
  t.notOk(isArray(false));

  var obj = {};
  obj[0] = true;
  t.notOk(isArray(obj));

  var arr = [];
  arr.foo = 'bar';
  t.ok(isArray(arr));

  t.end();
});
51 express-server/node_modules/@google-cloud/storage/node_modules/mime/.eslintrc.json generated vendored Normal file
@@ -0,0 +1,51 @@
{
  "root": true,
  "parserOptions": {
    "ecmaVersion": 6
  },
  "env": {
    "browser": true,
    "commonjs": true,
    "node": true,
    "mocha": true
  },
  "extends": ["eslint:recommended"],
  "rules": {
    "array-bracket-spacing": ["warn", "never"],
    "arrow-body-style": ["warn", "as-needed"],
    "arrow-parens": ["warn", "as-needed"],
    "arrow-spacing": "warn",
    "brace-style": ["warn", "1tbs"],
    "camelcase": "warn",
    "comma-spacing": ["warn", {"after": true}],
    "dot-notation": "warn",
    "eqeqeq": ["warn", "smart"],
    "indent": ["warn", 2, {
      "SwitchCase": 1,
      "FunctionDeclaration": {"parameters": 1},
      "MemberExpression": 1,
      "CallExpression": {"arguments": 1}
    }],
    "key-spacing": ["warn", {"beforeColon": false, "afterColon": true, "mode": "minimum"}],
    "keyword-spacing": "warn",
    "no-console": "off",
    "no-empty": "off",
    "no-multi-spaces": "warn",
    "no-redeclare": "off",
    "no-restricted-globals": ["warn", "Promise"],
    "no-trailing-spaces": "warn",
    "no-undef": "error",
    "no-unused-vars": ["warn", {"args": "none"}],
    "one-var": ["warn", "never"],
    "padded-blocks": ["warn", "never"],
    "object-curly-spacing": ["warn", "never"],
    "quotes": ["warn", "single"],
    "react/prop-types": "off",
    "react/jsx-no-bind": "off",
    "semi": ["warn", "always"],
    "space-before-blocks": ["warn", "always"],
    "space-before-function-paren": ["warn", "never"],
    "space-in-parens": ["warn", "never"],
    "strict": ["warn", "global"]
  }
}
6 express-server/node_modules/@google-cloud/storage/node_modules/mime/.github/ISSUE_TEMPLATE.md generated vendored Normal file
@@ -0,0 +1,6 @@
### If you have an issue with a specific extension or type

Locate the definition for your extension/type in the [db.json file](https://github.com/jshttp/mime-db/blob/master/db.json) in the `mime-db` project. Does it look right?

[ ] No. [File a `mime-db` issue](https://github.com/jshttp/mime-db/issues/new).
[ ] Yes: Go ahead and submit your issue/PR here and I'll look into it.
6 express-server/node_modules/@google-cloud/storage/node_modules/mime/.github/PULL_REQUEST_TEMPLATE.md generated vendored Normal file
@@ -0,0 +1,6 @@
### If you have an issue with a specific extension or type

Locate the definition for your extension/type in the [db.json file](https://github.com/jshttp/mime-db/blob/master/db.json) in the `mime-db` project. Does it look right?

[ ] No. [File a `mime-db` issue](https://github.com/jshttp/mime-db/issues/new).
[ ] Yes: Go ahead and submit your issue/PR here and I'll look into it.
5 express-server/node_modules/@google-cloud/storage/node_modules/mime/.travis.yml generated vendored Normal file
@@ -0,0 +1,5 @@
language: node_js
node_js:
  - "4"
  - "6"
  - "8"
236 express-server/node_modules/@google-cloud/storage/node_modules/mime/CHANGELOG.md generated vendored Normal file
@@ -0,0 +1,236 @@
|
||||
# Change Log
|
||||
|
||||
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
|
||||
|
||||
<a name="2.4.0"></a>
|
||||
# [2.4.0](https://github.com/broofa/node-mime/compare/v2.3.1...v2.4.0) (2018-11-26)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Bind exported methods ([9d2a7b8](https://github.com/broofa/node-mime/commit/9d2a7b8))
|
||||
* update to mime-db@1.37.0 ([49e6e41](https://github.com/broofa/node-mime/commit/49e6e41))
|
||||
|
||||
|
||||
|
||||
<a name="2.3.1"></a>
|
||||
## [2.3.1](https://github.com/broofa/node-mime/compare/v2.3.0...v2.3.1) (2018-04-11)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* fix [#198](https://github.com/broofa/node-mime/issues/198) ([25ca180](https://github.com/broofa/node-mime/commit/25ca180))
|
||||
|
||||
|
||||
|
||||
<a name="2.3.0"></a>
|
||||
# [2.3.0](https://github.com/broofa/node-mime/compare/v2.2.2...v2.3.0) (2018-04-11)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* fix [#192](https://github.com/broofa/node-mime/issues/192) ([5c35df6](https://github.com/broofa/node-mime/commit/5c35df6))
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add travis-ci testing ([d64160f](https://github.com/broofa/node-mime/commit/d64160f))
|
||||
|
||||
|
||||
|
||||
<a name="2.2.2"></a>
|
||||
## [2.2.2](https://github.com/broofa/node-mime/compare/v2.2.1...v2.2.2) (2018-03-30)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* update types files to mime-db@1.32.0 ([85aac16](https://github.com/broofa/node-mime/commit/85aac16))
|
||||
|
||||
|
||||
|
||||
<a name="2.2.1"></a>
|
||||
## [2.2.1](https://github.com/broofa/node-mime/compare/v2.2.0...v2.2.1) (2018-03-30)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Retain type->extension mappings for non-default types. Fixes [#180](https://github.com/broofa/node-mime/issues/180) ([b5c83fb](https://github.com/broofa/node-mime/commit/b5c83fb))
|
||||
|
||||
|
||||
|
||||
<a name="2.2.0"></a>
|
||||
# [2.2.0](https://github.com/broofa/node-mime/compare/v2.1.0...v2.2.0) (2018-01-04)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Retain type->extension mappings for non-default types. Fixes [#180](https://github.com/broofa/node-mime/issues/180) ([10f82ac](https://github.com/broofa/node-mime/commit/10f82ac))
|
||||
|
||||
|
||||
|
||||
<a name="2.1.0"></a>
|
||||
# [2.1.0](https://github.com/broofa/node-mime/compare/v2.0.5...v2.1.0) (2017-12-22)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Upgrade to mime-db@1.32.0. Fixes [#185](https://github.com/broofa/node-mime/issues/185) ([3f775ba](https://github.com/broofa/node-mime/commit/3f775ba))
|
||||
|
||||
|
||||
|
||||
<a name="2.0.5"></a>
|
||||
## [2.0.5](https://github.com/broofa/node-mime/compare/v2.0.1...v2.0.5) (2017-12-22)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* ES5 support (back to node v0.4) ([f14ccb6](https://github.com/broofa/node-mime/commit/f14ccb6))
|
||||
|
||||
|
||||
|
||||
# Changelog
|
||||
|
||||
## v2.0.4 (24/11/2017)
|
||||
- [**closed**] Switch to mime-score module for resolving extension contention issues. [#182](https://github.com/broofa/node-mime/issues/182)
|
||||
- [**closed**] Update mime-db to 1.31.0 in v1.x branch [#181](https://github.com/broofa/node-mime/issues/181)
|
||||
|
||||
---
|
||||
|
||||
## v1.5.0 (22/11/2017)
|
||||
- [**closed**] need ES5 version ready in npm package [#179](https://github.com/broofa/node-mime/issues/179)
|
||||
- [**closed**] mime-db no trace of iWork - pages / numbers / etc. [#178](https://github.com/broofa/node-mime/issues/178)
|
||||
- [**closed**] How it works in brownser ? [#176](https://github.com/broofa/node-mime/issues/176)
|
||||
- [**closed**] Missing `./Mime` [#175](https://github.com/broofa/node-mime/issues/175)
|
||||
- [**closed**] Vulnerable Regular Expression [#167](https://github.com/broofa/node-mime/issues/167)
|
||||
|
||||
---
|
||||
|
||||
## v2.0.3 (25/09/2017)
|
||||
*No changelog for this release.*
|
||||
|
||||
---
|
||||
|
||||
## v1.4.1 (25/09/2017)
|
||||
- [**closed**] Issue when bundling with webpack [#172](https://github.com/broofa/node-mime/issues/172)
|
||||
|
||||
---
|
||||
|
||||
## v2.0.2 (15/09/2017)
|
||||
- [**V2**] fs.readFileSync is not a function [#165](https://github.com/broofa/node-mime/issues/165)
|
||||
- [**closed**] The extension for video/quicktime should map to .mov, not .qt [#164](https://github.com/broofa/node-mime/issues/164)
|
||||
- [**V2**] [v2 Feedback request] Mime class API [#163](https://github.com/broofa/node-mime/issues/163)
|
||||
- [**V2**] [v2 Feedback request] Resolving conflicts over extensions [#162](https://github.com/broofa/node-mime/issues/162)
|
||||
- [**V2**] Allow callers to load module with official, full, or no defined types. [#161](https://github.com/broofa/node-mime/issues/161)
|
||||
- [**V2**] Use "facets" to resolve extension conflicts [#160](https://github.com/broofa/node-mime/issues/160)
|
||||
- [**V2**] Remove fs and path dependencies [#152](https://github.com/broofa/node-mime/issues/152)
|
||||
- [**V2**] Default content-type should not be application/octet-stream [#139](https://github.com/broofa/node-mime/issues/139)
|
||||
- [**V2**] reset mime-types [#124](https://github.com/broofa/node-mime/issues/124)
|
||||
- [**V2**] Extensionless paths should return null or false [#113](https://github.com/broofa/node-mime/issues/113)
|
||||
|
||||
---
|
||||
|
||||
## v2.0.1 (14/09/2017)
|
||||
- [**closed**] Changelog for v2.0 does not mention breaking changes [#171](https://github.com/broofa/node-mime/issues/171)
|
||||
- [**closed**] MIME breaking with 'class' declaration as it is without 'use strict mode' [#170](https://github.com/broofa/node-mime/issues/170)
|
||||
|
||||
---
|
||||
|
||||
## v2.0.0 (12/09/2017)
|
||||
- [**closed**] woff and woff2 [#168](https://github.com/broofa/node-mime/issues/168)
|
||||
|
||||
---
|
||||
|
||||
## v1.4.0 (28/08/2017)
|
||||
- [**closed**] support for ac3 voc files [#159](https://github.com/broofa/node-mime/issues/159)
|
||||
- [**closed**] Help understanding change from application/xml to text/xml [#158](https://github.com/broofa/node-mime/issues/158)
|
||||
- [**closed**] no longer able to override mimetype [#157](https://github.com/broofa/node-mime/issues/157)
|
||||
- [**closed**] application/vnd.adobe.photoshop [#147](https://github.com/broofa/node-mime/issues/147)
|
||||
- [**closed**] Directories should appear as something other than application/octet-stream [#135](https://github.com/broofa/node-mime/issues/135)
|
||||
- [**closed**] requested features [#131](https://github.com/broofa/node-mime/issues/131)
|
||||
- [**closed**] Make types.json loading optional? [#129](https://github.com/broofa/node-mime/issues/129)
|
||||
- [**closed**] Cannot find module './types.json' [#120](https://github.com/broofa/node-mime/issues/120)
|
||||
- [**V2**] .wav files show up as "audio/x-wav" instead of "audio/x-wave" [#118](https://github.com/broofa/node-mime/issues/118)
|
||||
- [**closed**] Don't be a pain in the ass for node community [#108](https://github.com/broofa/node-mime/issues/108)
|
||||
- [**closed**] don't make default_type global [#78](https://github.com/broofa/node-mime/issues/78)
|
||||
- [**closed**] mime.extension() fails if the content-type is parameterized [#74](https://github.com/broofa/node-mime/issues/74)
|
||||
|
||||
---
|
||||
|
||||
## v1.3.6 (11/05/2017)
|
||||
- [**closed**] .md should be text/markdown as of March 2016 [#154](https://github.com/broofa/node-mime/issues/154)
|
||||
- [**closed**] Error while installing mime [#153](https://github.com/broofa/node-mime/issues/153)
|
||||
- [**closed**] application/manifest+json [#149](https://github.com/broofa/node-mime/issues/149)
|
||||
- [**closed**] Dynamic adaptive streaming over HTTP (DASH) file extension typo [#141](https://github.com/broofa/node-mime/issues/141)
|
||||
- [**closed**] charsets image/png undefined [#140](https://github.com/broofa/node-mime/issues/140)
|
||||
- [**closed**] Mime-db dependency out of date [#130](https://github.com/broofa/node-mime/issues/130)
|
||||
- [**closed**] how to support plist? [#126](https://github.com/broofa/node-mime/issues/126)
|
||||
- [**closed**] how does .types file format look like? [#123](https://github.com/broofa/node-mime/issues/123)
|
||||
- [**closed**] Feature: support for expanding MIME patterns [#121](https://github.com/broofa/node-mime/issues/121)
|
||||
- [**closed**] DEBUG_MIME doesn't work [#117](https://github.com/broofa/node-mime/issues/117)
|
||||
|
||||
---
|
||||
|
||||
## v1.3.4 (06/02/2015)
|
||||
*No changelog for this release.*
|
||||
|
||||
---
|
||||
|
||||
## v1.3.3 (06/02/2015)
|
||||
*No changelog for this release.*
|
||||
|
||||
---
|
||||
|
||||
## v1.3.1 (05/02/2015)
|
||||
- [**closed**] Consider adding support for Handlebars .hbs file ending [#111](https://github.com/broofa/node-mime/issues/111)
|
||||
- [**closed**] Consider adding support for hjson. [#110](https://github.com/broofa/node-mime/issues/110)
|
||||
- [**closed**] Add mime type for Opus audio files [#94](https://github.com/broofa/node-mime/issues/94)
|
||||
- [**closed**] Consider making the `Requesting New Types` information more visible [#77](https://github.com/broofa/node-mime/issues/77)
|
||||
|
||||
---
|
||||
|
||||
## v1.3.0 (05/02/2015)
|
||||
- [**closed**] Add common name? [#114](https://github.com/broofa/node-mime/issues/114)
|
||||
- [**closed**] application/x-yaml [#104](https://github.com/broofa/node-mime/issues/104)
|
||||
- [**closed**] Add mime type for WOFF file format 2.0 [#102](https://github.com/broofa/node-mime/issues/102)
|
||||
- [**closed**] application/x-msi for .msi [#99](https://github.com/broofa/node-mime/issues/99)
|
||||
- [**closed**] Add mimetype for gettext translation files [#98](https://github.com/broofa/node-mime/issues/98)
|
||||
- [**closed**] collaborators [#88](https://github.com/broofa/node-mime/issues/88)
|
||||
- [**closed**] getting errot in installation of mime module...any1 can help? [#87](https://github.com/broofa/node-mime/issues/87)
|
||||
- [**closed**] should application/json's charset be utf8? [#86](https://github.com/broofa/node-mime/issues/86)
|
||||
- [**closed**] Add "license" and "licenses" to package.json [#81](https://github.com/broofa/node-mime/issues/81)
|
||||
- [**closed**] lookup with extension-less file on Windows returns wrong type [#68](https://github.com/broofa/node-mime/issues/68)
|
||||
|
||||
---
|
||||
|
||||
## v1.2.11 (15/08/2013)
|
||||
- [**closed**] Update mime.types [#65](https://github.com/broofa/node-mime/issues/65)
|
||||
- [**closed**] Publish a new version [#63](https://github.com/broofa/node-mime/issues/63)
|
||||
- [**closed**] README should state upfront that "application/octet-stream" is default for unknown extension [#55](https://github.com/broofa/node-mime/issues/55)
|
||||
- [**closed**] Suggested improvement to the charset API [#52](https://github.com/broofa/node-mime/issues/52)
|
||||
|
||||
---
|
||||
|
||||
## v1.2.10 (25/07/2013)
|
||||
- [**closed**] Mime type for woff files should be application/font-woff and not application/x-font-woff [#62](https://github.com/broofa/node-mime/issues/62)
|
||||
- [**closed**] node.types in conflict with mime.types [#51](https://github.com/broofa/node-mime/issues/51)
|
||||
|
||||
---
|
||||
|
||||
## v1.2.9 (17/01/2013)
|
||||
- [**closed**] Please update "mime" NPM [#49](https://github.com/broofa/node-mime/issues/49)
|
||||
- [**closed**] Please add semicolon [#46](https://github.com/broofa/node-mime/issues/46)
|
||||
- [**closed**] parse full mime types [#43](https://github.com/broofa/node-mime/issues/43)
|
||||
|
||||
---
|
||||
|
||||
## v1.2.8 (10/01/2013)
|
||||
- [**closed**] /js directory mime is application/javascript. Is it correct? [#47](https://github.com/broofa/node-mime/issues/47)
|
||||
- [**closed**] Add mime types for lua code. [#45](https://github.com/broofa/node-mime/issues/45)
|
||||
|
||||
---
|
||||
|
||||
## v1.2.7 (19/10/2012)
|
||||
- [**closed**] cannot install 1.2.7 via npm [#41](https://github.com/broofa/node-mime/issues/41)
|
||||
- [**closed**] Transfer ownership to @broofa [#36](https://github.com/broofa/node-mime/issues/36)
|
||||
- [**closed**] it's wrong to set charset to UTF-8 for text [#30](https://github.com/broofa/node-mime/issues/30)
|
||||
- [**closed**] Allow multiple instances of MIME types container [#27](https://github.com/broofa/node-mime/issues/27)
|
5 express-server/node_modules/@google-cloud/storage/node_modules/mime/CONTRIBUTING.md generated vendored Normal file
@@ -0,0 +1,5 @@
1. Commit messages should have a [Conventional Commit](https://conventionalcommits.org/) prefix.
2. If you're editing the `types/*` files, just stop. These are auto-generated from [mime-db](https://github.com/jshttp/mime-db). Go talk to those folks.
3. README edits should be made to [src/README_md.js](src/README_md.js).

Thanks for helping out with this project. You rock!
21 express-server/node_modules/@google-cloud/storage/node_modules/mime/LICENSE generated vendored Normal file
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2010 Benjamin Thomas, Robert Kieffer

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
93 express-server/node_modules/@google-cloud/storage/node_modules/mime/Mime.js generated vendored Normal file
@@ -0,0 +1,93 @@
'use strict';

/**
 * @param typeMap [Object] Map of MIME type -> Array[extensions]
 * @param ...
 */
function Mime() {
  this._types = Object.create(null);
  this._extensions = Object.create(null);

  for (var i = 0; i < arguments.length; i++) {
    this.define(arguments[i]);
  }

  this.define = this.define.bind(this);
  this.getType = this.getType.bind(this);
  this.getExtension = this.getExtension.bind(this);
}

/**
 * Define mimetype -> extension mappings. Each key is a mime-type that maps
 * to an array of extensions associated with the type. The first extension is
 * used as the default extension for the type.
 *
 * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']});
 *
 * If a type declares an extension that has already been defined, an error will
 * be thrown. To suppress this error and force the extension to be associated
 * with the new type, pass `force`=true. Alternatively, you may prefix the
 * extension with "*" to map the type to extension, without mapping the
 * extension to the type.
 *
 * e.g. mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']});
 *
 * @param map (Object) type definitions
 * @param force (Boolean) if true, force overriding of existing definitions
 */
Mime.prototype.define = function(typeMap, force) {
  for (var type in typeMap) {
    var extensions = typeMap[type];
    for (var i = 0; i < extensions.length; i++) {
      var ext = extensions[i];

      // '*' prefix = not the preferred type for this extension.  So fixup the
      // extension, and skip it.
      if (ext[0] == '*') {
        continue;
      }

      if (!force && (ext in this._types)) {
        throw new Error(
          'Attempt to change mapping for "' + ext +
          '" extension from "' + this._types[ext] + '" to "' + type +
          '". Pass `force=true` to allow this, otherwise remove "' + ext +
          '" from the list of extensions for "' + type + '".'
        );
      }

      this._types[ext] = type;
    }

    // Use first extension as default
    if (force || !this._extensions[type]) {
      var ext = extensions[0];
      this._extensions[type] = (ext[0] != '*') ? ext : ext.substr(1);
    }
  }
};

/**
 * Lookup a mime type based on extension
 */
Mime.prototype.getType = function(path) {
  path = String(path);
  var last = path.replace(/^.*[/\\]/, '').toLowerCase();
  var ext = last.replace(/^.*\./, '').toLowerCase();

  var hasPath = last.length < path.length;
  var hasDot = ext.length < last.length - 1;

  return (hasDot || !hasPath) && this._types[ext] || null;
};

/**
 * Return file extension associated with a mime type
 */
Mime.prototype.getExtension = function(type) {
  type = /^\s*([^;\s]*)/.test(type) && RegExp.$1;
  return type && this._extensions[type.toLowerCase()] || null;
};

module.exports = Mime;
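To make the `define()` semantics documented above concrete, here is a small sketch (not part of the vendored file) that exercises the class directly; `text/x-demo`, `application/x-demo`, and the extensions are hypothetical examples, not real registered types:

```javascript
// Illustrative sketch based on the Mime class above; all type names are made up.
const Mime = require('mime/Mime');

const mime = new Mime({'text/x-demo': ['demo', 'dmo']});

mime.getType('notes.demo');        // 'text/x-demo'  (any listed extension maps to the type)
mime.getExtension('text/x-demo');  // 'demo'         (first extension is the default)

// A '*'-prefixed extension maps type -> extension only, so the existing
// 'demo' -> 'text/x-demo' mapping is left untouched instead of throwing.
mime.define({'application/x-demo': ['*demo', 'xdmo']});
mime.getType('notes.demo');               // still 'text/x-demo'
mime.getType('a.xdmo');                   // 'application/x-demo'
mime.getExtension('application/x-demo');  // 'demo' (the '*' is stripped for the default extension)
```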
190
express-server/node_modules/@google-cloud/storage/node_modules/mime/README.md
generated
vendored
Normal file
190
express-server/node_modules/@google-cloud/storage/node_modules/mime/README.md
generated
vendored
Normal file
@ -0,0 +1,190 @@
|
||||
<!--
|
||||
-- This file is auto-generated from src/README_js.md. Changes should be made there.
|
||||
-->
|
||||
# Mime
|
||||
|
||||
A comprehensive, compact MIME type module.
|
||||
|
||||
[](https://travis-ci.org/broofa/node-mime)
|
||||
|
||||
## Version 2 Notes
|
||||
|
||||
Version 2 is a breaking change from 1.x as the semver implies. Specifically:
|
||||
|
||||
* `lookup()` renamed to `getType()`
|
||||
* `extension()` renamed to `getExtension()`
|
||||
* `charset()` and `load()` methods have been removed
|
||||
|
||||
If you prefer the legacy version of this module please `npm install mime@^1`. Version 1 docs may be found [here](https://github.com/broofa/node-mime/tree/v1.4.0).
|
||||
|
||||
## Install
|
||||
|
||||
### NPM
|
||||
```
|
||||
npm install mime
|
||||
```
|
||||
|
||||
### Browser
|
||||
|
||||
It is recommended that you use a bundler such as
|
||||
[webpack](https://webpack.github.io/) or [browserify](http://browserify.org/) to
|
||||
package your code. However, browser-ready versions are available via wzrd.in.
|
||||
E.g. For the full version:
|
||||
|
||||
<script src="https://wzrd.in/standalone/mime@latest"></script>
|
||||
<script>
|
||||
mime.getType(...); // etc.
|
||||
<script>
|
||||
|
||||
Or, for the `mime/lite` version:
|
||||
|
||||
<script src="https://wzrd.in/standalone/mime%2flite@latest"></script>
|
||||
<script>
|
||||
mimelite.getType(...); // (Note `mimelite` here)
|
||||
<script>
|
||||
|
||||
## Quick Start
|
||||
|
||||
For the full version (800+ MIME types, 1,000+ extensions):
|
||||
|
||||
```javascript
|
||||
const mime = require('mime');
|
||||
|
||||
mime.getType('txt'); // ⇨ 'text/plain'
|
||||
mime.getExtension('text/plain'); // ⇨ 'txt'
|
||||
|
||||
```
|
||||
|
||||
See [Mime API](#mime-api) below for API details.
|
||||
|
||||
## Lite Version
|
||||
|
||||
There is also a "lite" version of this module that omits vendor-specific
|
||||
(`*/vnd.*`) and experimental (`*/x-*`) types. It weighs in at ~2.5KB, compared
|
||||
to 8KB for the full version. To load the lite version:
|
||||
|
||||
```javascript
|
||||
const mime = require('mime/lite');
|
||||
```
|
||||
|
||||
## Mime .vs. mime-types .vs. mime-db modules
|
||||
|
||||
For those of you wondering about the difference between these [popular] NPM modules,
|
||||
here's a brief rundown ...
|
||||
|
||||
[`mime-db`](https://github.com/jshttp/mime-db) is "the source of
|
||||
truth" for MIME type information. It is not an API. Rather, it is a canonical
|
||||
dataset of mime type definitions pulled from IANA, Apache, NGINX, and custom mappings
|
||||
submitted by the Node.js community.
|
||||
|
||||
[`mime-types`](https://github.com/jshttp/mime-types) is a thin
|
||||
wrapper around mime-db that provides an API drop-in compatible(ish) with `mime @ < v1.3.6` API.
|
||||
|
||||
`mime` is, as of v2, a self-contained module bundled with a pre-optimized version
|
||||
of the `mime-db` dataset. It provides a simplified API with the following characteristics:
|
||||
|
||||
* Intelligently resolved type conflicts (See [mime-score](https://github.com/broofa/mime-score) for details)
|
||||
* Method naming consistent with industry best-practices
|
||||
* Compact footprint. E.g. The minified+compressed sizes of the various modules:
|
||||
|
||||
Module | Size
|
||||
--- | ---
|
||||
`mime-db` | 18 KB
|
||||
`mime-types` | same as mime-db
|
||||
`mime` | 8 KB
|
||||
`mime/lite` | 2 KB
|
||||
|
||||
## Mime API
|
||||
|
||||
Both `require('mime')` and `require('mime/lite')` return instances of the MIME
|
||||
class, documented below.
|
||||
|
||||
### new Mime(typeMap, ... more maps)
|
||||
|
||||
Most users of this module will not need to create Mime instances directly.
|
||||
However if you would like to create custom mappings, you may do so as follows
|
||||
...
|
||||
|
||||
```javascript
|
||||
// Require Mime class
|
||||
const Mime = require('mime/Mime');
|
||||
|
||||
// Define mime type -> extensions map
|
||||
const typeMap = {
|
||||
'text/abc': ['abc', 'alpha', 'bet'],
|
||||
'text/def': ['leppard']
|
||||
};
|
||||
|
||||
// Create and use Mime instance
|
||||
const myMime = new Mime(typeMap);
|
||||
myMime.getType('abc'); // ⇨ 'text/abc'
|
||||
myMime.getExtension('text/def'); // ⇨ 'leppard'
|
||||
|
||||
```
|
||||
|
||||
If more than one map argument is provided, each map is `define()`ed (see below), in order.
|
||||
|
||||
### mime.getType(pathOrExtension)
|
||||
|
||||
Get mime type for the given path or extension. E.g.
|
||||
|
||||
```javascript
|
||||
mime.getType('js'); // ⇨ 'application/javascript'
|
||||
mime.getType('json'); // ⇨ 'application/json'
|
||||
|
||||
mime.getType('txt'); // ⇨ 'text/plain'
|
||||
mime.getType('dir/text.txt'); // ⇨ 'text/plain'
|
||||
mime.getType('dir\\text.txt'); // ⇨ 'text/plain'
|
||||
mime.getType('.text.txt'); // ⇨ 'text/plain'
|
||||
mime.getType('.txt'); // ⇨ 'text/plain'
|
||||
|
||||
```
|
||||
|
||||
`null` is returned in cases where an extension is not detected or recognized
|
||||
|
||||
```javascript
|
||||
mime.getType('foo/txt'); // ⇨ null
|
||||
mime.getType('bogus_type'); // ⇨ null
|
||||
|
||||
```
|
||||
|
||||
### mime.getExtension(type)
|
||||
Get extension for the given mime type. Charset options (often included in
|
||||
Content-Type headers) are ignored.
|
||||
|
||||
```javascript
|
||||
mime.getExtension('text/plain'); // ⇨ 'txt'
|
||||
mime.getExtension('application/json'); // ⇨ 'json'
|
||||
mime.getExtension('text/html; charset=utf8'); // ⇨ 'html'
|
||||
|
||||
```
|
||||
|
||||
### mime.define(typeMap[, force = false])
|
||||
|
||||
Define [more] type mappings.
|
||||
|
||||
`typeMap` is a map of type -> extensions, as documented in `new Mime`, above.
|
||||
|
||||
By default, this method will throw an error if you try to map a type to an
extension that is already assigned to another type. Passing `true` for the
`force` argument will suppress this behavior (overriding any previous mapping).
|
||||
|
||||
```javascript
|
||||
mime.define({'text/x-abc': ['abc', 'abcd']});
|
||||
|
||||
mime.getType('abcd'); // ⇨ 'text/x-abc'
|
||||
mime.getExtension('text/x-abc') // ⇨ 'abc'
|
||||
|
||||
```
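
For instance, remapping an extension that is already taken only succeeds with `force` (a sketch continuing the example above; `text/x-other` is a made-up type):

```javascript
// mime.define({'text/x-other': ['abcd']});     // throws: 'abcd' is already mapped
mime.define({'text/x-other': ['abcd']}, true);  // ok, remaps 'abcd'

mime.getType('abcd'); // 'text/x-other'
```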
|
||||
|
||||
## Command Line
|
||||
|
||||
mime [path_or_extension]
|
||||
|
||||
E.g.
|
||||
|
||||
> mime scripts/jquery.js
|
||||
application/javascript
|
||||
|
||||
----
|
||||
Markdown generated from [src/README_js.md](src/README_js.md) by [runmd](https://github.com/broofa/runmd)
|
10
express-server/node_modules/@google-cloud/storage/node_modules/mime/cli.js
generated
vendored
Normal file
10
express-server/node_modules/@google-cloud/storage/node_modules/mime/cli.js
generated
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
'use strict';
|
||||
|
||||
var mime = require('.');
|
||||
var file = process.argv[2];
|
||||
var type = mime.getType(file);
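// Note: getType() returns null for unrecognized paths/extensions,
// in which case this script prints the string "null"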
|
||||
|
||||
process.stdout.write(type + '\n');
|
||||
|
4
express-server/node_modules/@google-cloud/storage/node_modules/mime/index.js
generated
vendored
Normal file
4
express-server/node_modules/@google-cloud/storage/node_modules/mime/index.js
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
'use strict';
|
||||
|
||||
var Mime = require('./Mime');
|
||||
module.exports = new Mime(require('./types/standard'), require('./types/other'));
|
4
express-server/node_modules/@google-cloud/storage/node_modules/mime/lite.js
generated
vendored
Normal file
4
express-server/node_modules/@google-cloud/storage/node_modules/mime/lite.js
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
'use strict';
|
||||
|
||||
var Mime = require('./Mime');
|
||||
module.exports = new Mime(require('./types/standard'));
|
72
express-server/node_modules/@google-cloud/storage/node_modules/mime/package.json
generated
vendored
Normal file
72
express-server/node_modules/@google-cloud/storage/node_modules/mime/package.json
generated
vendored
Normal file
@ -0,0 +1,72 @@
|
||||
{
|
||||
"_from": "mime@^2.2.0",
|
||||
"_id": "mime@2.4.0",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-ikBcWwyqXQSHKtciCcctu9YfPbFYZ4+gbHEmE0Q8jzcTYQg5dHCr3g2wwAZjPoJfQVXZq6KXAjpXOTf5/cjT7w==",
|
||||
"_location": "/@google-cloud/storage/mime",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "range",
|
||||
"registry": true,
|
||||
"raw": "mime@^2.2.0",
|
||||
"name": "mime",
|
||||
"escapedName": "mime",
|
||||
"rawSpec": "^2.2.0",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "^2.2.0"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/@google-cloud/storage"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/mime/-/mime-2.4.0.tgz",
|
||||
"_shasum": "e051fd881358585f3279df333fe694da0bcffdd6",
|
||||
"_spec": "mime@^2.2.0",
|
||||
"_where": "D:\\Desktop\\Git\\Firebase\\SmartShopperFirebase\\node_modules\\@google-cloud\\storage",
|
||||
"author": {
|
||||
"name": "Robert Kieffer",
|
||||
"email": "robert@broofa.com",
|
||||
"url": "http://github.com/broofa"
|
||||
},
|
||||
"bin": {
|
||||
"mime": "cli.js"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/broofa/node-mime/issues"
|
||||
},
|
||||
"bundleDependencies": false,
|
||||
"contributors": [],
|
||||
"dependencies": {},
|
||||
"deprecated": false,
|
||||
"description": "A comprehensive library for mime-type mapping",
|
||||
"devDependencies": {
|
||||
"chalk": "1.1.3",
|
||||
"eslint": "^5.9.0",
|
||||
"mime-db": "^1.37.0",
|
||||
"mime-score": "1.0.1",
|
||||
"mime-types": "2.1.15",
|
||||
"mocha": "5.2.0",
|
||||
"runmd": "1.0.1",
|
||||
"standard-version": "^4.4.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4.0.0"
|
||||
},
|
||||
"homepage": "https://github.com/broofa/node-mime#readme",
|
||||
"keywords": [
|
||||
"util",
|
||||
"mime"
|
||||
],
|
||||
"license": "MIT",
|
||||
"name": "mime",
|
||||
"repository": {
|
||||
"url": "git+https://github.com/broofa/node-mime.git",
|
||||
"type": "git"
|
||||
},
|
||||
"scripts": {
|
||||
"md": "runmd --watch --output README.md src/README_js.md",
|
||||
"prepare": "node src/build.js && runmd --output README.md src/README_js.md",
|
||||
"release": "standard-version",
|
||||
"test": "mocha src/test.js"
|
||||
},
|
||||
"version": "2.4.0"
|
||||
}
|
181
express-server/node_modules/@google-cloud/storage/node_modules/mime/src/README_js.md
generated
vendored
Normal file
181
express-server/node_modules/@google-cloud/storage/node_modules/mime/src/README_js.md
generated
vendored
Normal file
@ -0,0 +1,181 @@
|
||||
```javascript --hide
|
||||
runmd.onRequire = path => path.replace(/^mime/, '..');
|
||||
```
|
||||
# Mime
|
||||
|
||||
A comprehensive, compact MIME type module.
|
||||
|
||||
[Build Status](https://travis-ci.org/broofa/node-mime)
|
||||
|
||||
## Version 2 Notes
|
||||
|
||||
Version 2 is a breaking change from 1.x as the semver implies. Specifically:
|
||||
|
||||
* `lookup()` renamed to `getType()`
|
||||
* `extension()` renamed to `getExtension()`
|
||||
* `charset()` and `load()` methods have been removed
|
||||
|
||||
If you prefer the legacy version of this module, please `npm install mime@^1`. Version 1 docs may be found [here](https://github.com/broofa/node-mime/tree/v1.4.0).
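
A minimal before/after sketch of the renamed methods:

```javascript
const mime = require('mime');

// v1: mime.lookup('file.txt')
mime.getType('file.txt');        // 'text/plain'

// v1: mime.extension('text/plain')
mime.getExtension('text/plain'); // 'txt'
```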
|
||||
|
||||
## Install
|
||||
|
||||
### NPM
|
||||
```
|
||||
npm install mime
|
||||
```
|
||||
|
||||
### Browser
|
||||
|
||||
It is recommended that you use a bundler such as
|
||||
[webpack](https://webpack.github.io/) or [browserify](http://browserify.org/) to
|
||||
package your code. However, browser-ready versions are available via wzrd.in.
E.g., for the full version:
|
||||
|
||||
<script src="https://wzrd.in/standalone/mime@latest"></script>
|
||||
<script>
|
||||
mime.getType(...); // etc.
|
||||
<script>
|
||||
|
||||
Or, for the `mime/lite` version:
|
||||
|
||||
<script src="https://wzrd.in/standalone/mime%2flite@latest"></script>
|
||||
<script>
|
||||
mimelite.getType(...); // (Note `mimelite` here)
|
||||
<script>
|
||||
|
||||
## Quick Start
|
||||
|
||||
For the full version (800+ MIME types, 1,000+ extensions):
|
||||
|
||||
```javascript --run default
|
||||
const mime = require('mime');
|
||||
|
||||
mime.getType('txt'); // RESULT
|
||||
mime.getExtension('text/plain'); // RESULT
|
||||
```
|
||||
|
||||
See [Mime API](#mime-api) below for API details.
|
||||
|
||||
## Lite Version
|
||||
|
||||
There is also a "lite" version of this module that omits vendor-specific
|
||||
(`*/vnd.*`) and experimental (`*/x-*`) types. It weighs in at ~2.5KB, compared
|
||||
to 8KB for the full version. To load the lite version:
|
||||
|
||||
```javascript
|
||||
const mime = require('mime/lite');
|
||||
```
|
||||
|
||||
## Mime vs. mime-types vs. mime-db modules

For those wondering how these popular npm modules differ, here's a brief rundown:
|
||||
|
||||
[`mime-db`](https://github.com/jshttp/mime-db) is "the source of
|
||||
truth" for MIME type information. It is not an API. Rather, it is a canonical
|
||||
dataset of mime type definitions pulled from IANA, Apache, NGINX, and custom mappings
|
||||
submitted by the Node.js community.
|
||||
|
||||
[`mime-types`](https://github.com/jshttp/mime-types) is a thin
wrapper around mime-db that provides an API mostly drop-in compatible with the `mime @ < v1.3.6` API.
|
||||
|
||||
`mime` is, as of v2, a self-contained module bundled with a pre-optimized version
|
||||
of the `mime-db` dataset. It provides a simplified API with the following characteristics:
|
||||
|
||||
* Intelligently resolved type conflicts (See [mime-score](https://github.com/broofa/mime-score) for details)
|
||||
* Method naming consistent with industry best-practices
|
||||
* Compact footprint. E.g. The minified+compressed sizes of the various modules:
|
||||
|
||||
Module | Size
|
||||
--- | ---
|
||||
`mime-db` | 18 KB
|
||||
`mime-types` | same as mime-db
|
||||
`mime` | 8 KB
|
||||
`mime/lite` | 2 KB
|
||||
|
||||
## Mime API
|
||||
|
||||
Both `require('mime')` and `require('mime/lite')` return instances of the MIME
|
||||
class, documented below.
|
||||
|
||||
### new Mime(typeMap, ... more maps)
|
||||
|
||||
Most users of this module will not need to create Mime instances directly.
|
||||
However if you would like to create custom mappings, you may do so as follows
|
||||
...
|
||||
|
||||
```javascript --run default
|
||||
// Require Mime class
|
||||
const Mime = require('mime/Mime');
|
||||
|
||||
// Define mime type -> extensions map
|
||||
const typeMap = {
|
||||
'text/abc': ['abc', 'alpha', 'bet'],
|
||||
'text/def': ['leppard']
|
||||
};
|
||||
|
||||
// Create and use Mime instance
|
||||
const myMime = new Mime(typeMap);
|
||||
myMime.getType('abc'); // RESULT
|
||||
myMime.getExtension('text/def'); // RESULT
|
||||
```
|
||||
|
||||
If more than one map argument is provided, each map is `define()`ed (see below), in order.
|
||||
|
||||
### mime.getType(pathOrExtension)
|
||||
|
||||
Get mime type for the given path or extension. E.g.
|
||||
|
||||
```javascript --run default
|
||||
mime.getType('js'); // RESULT
|
||||
mime.getType('json'); // RESULT
|
||||
|
||||
mime.getType('txt'); // RESULT
|
||||
mime.getType('dir/text.txt'); // RESULT
|
||||
mime.getType('dir\\text.txt'); // RESULT
|
||||
mime.getType('.text.txt'); // RESULT
|
||||
mime.getType('.txt'); // RESULT
|
||||
```
|
||||
|
||||
`null` is returned in cases where an extension is not detected or recognized
|
||||
|
||||
```javascript --run default
|
||||
mime.getType('foo/txt'); // RESULT
|
||||
mime.getType('bogus_type'); // RESULT
|
||||
```
|
||||
|
||||
### mime.getExtension(type)
|
||||
Get extension for the given mime type. Charset options (often included in
|
||||
Content-Type headers) are ignored.
|
||||
|
||||
```javascript --run default
|
||||
mime.getExtension('text/plain'); // RESULT
|
||||
mime.getExtension('application/json'); // RESULT
|
||||
mime.getExtension('text/html; charset=utf8'); // RESULT
|
||||
```
|
||||
|
||||
### mime.define(typeMap[, force = false])
|
||||
|
||||
Define [more] type mappings.
|
||||
|
||||
`typeMap` is a map of type -> extensions, as documented in `new Mime`, above.
|
||||
|
||||
By default this method will throw an error if you try to map a type to an
|
||||
extension that is already assigned to another type. Passing `true` for the
|
||||
`force` argument will suppress this behavior (overriding any previous mapping).
|
||||
|
||||
```javascript --run default
|
||||
mime.define({'text/x-abc': ['abc', 'abcd']});
|
||||
|
||||
mime.getType('abcd'); // RESULT
|
||||
mime.getExtension('text/x-abc') // RESULT
|
||||
```
|
||||
|
||||
## Command Line
|
||||
|
||||
mime [path_or_extension]
|
||||
|
||||
E.g.
|
||||
|
||||
> mime scripts/jquery.js
|
||||
application/javascript
|
71
express-server/node_modules/@google-cloud/storage/node_modules/mime/src/build.js
generated
vendored
Normal file
71
express-server/node_modules/@google-cloud/storage/node_modules/mime/src/build.js
generated
vendored
Normal file
@ -0,0 +1,71 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
'use strict';
|
||||
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var mimeScore = require('mime-score');
|
||||
|
||||
var db = require('mime-db');
|
||||
var chalk = require('chalk');
|
||||
|
||||
var STANDARD_FACET_SCORE = 900;
|
||||
|
||||
var byExtension = {};
|
||||
|
||||
// Clear out any conflict extensions in mime-db
|
||||
for (var type in db) {
|
||||
var entry = db[type];
|
||||
entry.type = type;
|
||||
if (!entry.extensions) continue;
|
||||
|
||||
entry.extensions.forEach(function(ext) {
|
||||
var drop;
|
||||
var keep = entry;
|
||||
if (ext in byExtension) {
|
||||
var e0 = entry;
|
||||
var e1 = byExtension[ext];
|
||||
|
||||
e0.pri = mimeScore(e0.type, e0.source);
|
||||
e1.pri = mimeScore(e1.type, e1.source);
|
||||
|
||||
drop = e0.pri < e1.pri ? e0 : e1;
|
||||
keep = e0.pri >= e1.pri ? e0 : e1;
|
||||
|
||||
// Prefix lower-priority extensions with '*'
|
||||
drop.extensions = drop.extensions.map(function(e) {return e == ext ? '*' + e : e});
|
||||
|
||||
console.log(
|
||||
ext + ': Preferring ' + chalk.green(keep.type) + ' (' + keep.pri +
|
||||
') over ' + chalk.red(drop.type) + ' (' + drop.pri + ')' + ' for ' + ext
|
||||
);
|
||||
}
|
||||
|
||||
// Cache the highest-ranking type for this extension
|
||||
if (keep == entry) byExtension[ext] = entry;
|
||||
});
|
||||
}
|
||||
|
||||
function writeTypesFile(types, path) {
|
||||
fs.writeFileSync(path, JSON.stringify(types));
|
||||
}
|
||||
|
||||
// Segregate into standard and non-standard types based on facet per
|
||||
// https://tools.ietf.org/html/rfc6838#section-3.1
|
||||
var standard = {};
|
||||
var other = {};
|
||||
|
||||
Object.keys(db).sort().forEach(function(k) {
|
||||
var entry = db[k];
|
||||
|
||||
if (entry.extensions) {
|
||||
if (mimeScore(entry.type, entry.source) >= STANDARD_FACET_SCORE) {
|
||||
standard[entry.type] = entry.extensions;
|
||||
} else {
|
||||
other[entry.type] = entry.extensions;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
writeTypesFile(standard, path.join(__dirname, '../types', 'standard.json'));
|
||||
writeTypesFile(other, path.join(__dirname, '../types', 'other.json'));
|
257
express-server/node_modules/@google-cloud/storage/node_modules/mime/src/test.js
generated
vendored
Normal file
257
express-server/node_modules/@google-cloud/storage/node_modules/mime/src/test.js
generated
vendored
Normal file
@ -0,0 +1,257 @@
|
||||
'use strict';
|
||||
|
||||
var mime = require('..');
|
||||
var mimeTypes = require('../node_modules/mime-types');
|
||||
var assert = require('assert');
|
||||
var chalk = require('chalk');
|
||||
|
||||
describe('class Mime', function() {
|
||||
it('mime and mime/lite coexist', function() {
|
||||
assert.doesNotThrow(function() {
|
||||
require('../lite');
|
||||
});
|
||||
});
|
||||
|
||||
it('new constructor()', function() {
|
||||
var Mime = require('../Mime');
|
||||
|
||||
var mime = new Mime(
|
||||
{'text/a': ['a', 'a1']},
|
||||
{'text/b': ['b', 'b1']}
|
||||
);
|
||||
|
||||
assert.deepEqual(mime._types, {
|
||||
a: 'text/a',
|
||||
a1: 'text/a',
|
||||
b: 'text/b',
|
||||
b1: 'text/b',
|
||||
});
|
||||
|
||||
assert.deepEqual(mime._extensions, {
|
||||
'text/a': 'a',
|
||||
'text/b': 'b',
|
||||
});
|
||||
});
|
||||
|
||||
it('define()', function() {
|
||||
var Mime = require('../Mime');
|
||||
|
||||
var mime = new Mime({'text/a': ['a']}, {'text/b': ['b']});
|
||||
|
||||
assert.throws(function() {
|
||||
mime.define({'text/c': ['b']});
|
||||
});
|
||||
|
||||
assert.doesNotThrow(function() {
|
||||
mime.define({'text/c': ['b']}, true);
|
||||
});
|
||||
|
||||
assert.deepEqual(mime._types, {
|
||||
a: 'text/a',
|
||||
b: 'text/c',
|
||||
});
|
||||
|
||||
assert.deepEqual(mime._extensions, {
|
||||
'text/a': 'a',
|
||||
'text/b': 'b',
|
||||
'text/c': 'b',
|
||||
});
|
||||
});
|
||||
|
||||
it('define() *\'ed types', function() {
|
||||
var Mime = require('../Mime');
|
||||
|
||||
var mime = new Mime(
|
||||
{'text/a': ['*b']},
|
||||
{'text/b': ['b']}
|
||||
);
|
||||
|
||||
assert.deepEqual(mime._types, {
|
||||
b: 'text/b',
|
||||
});
|
||||
|
||||
assert.deepEqual(mime._extensions, {
|
||||
'text/a': 'b',
|
||||
'text/b': 'b',
|
||||
});
|
||||
});
|
||||
|
||||
it('getType()', function() {
|
||||
// Upper/lower case
|
||||
assert.equal(mime.getType('text.txt'), 'text/plain');
|
||||
assert.equal(mime.getType('TEXT.TXT'), 'text/plain');
|
||||
|
||||
// Bare extension
|
||||
assert.equal(mime.getType('txt'), 'text/plain');
|
||||
assert.equal(mime.getType('.txt'), 'text/plain');
|
||||
assert.strictEqual(mime.getType('.bogus'), null);
|
||||
assert.strictEqual(mime.getType('bogus'), null);
|
||||
|
||||
// Non-sensical
|
||||
assert.strictEqual(mime.getType(null), null);
|
||||
assert.strictEqual(mime.getType(undefined), null);
|
||||
assert.strictEqual(mime.getType(42), null);
|
||||
assert.strictEqual(mime.getType({}), null);
|
||||
|
||||
// File paths
|
||||
assert.equal(mime.getType('dir/text.txt'), 'text/plain');
|
||||
assert.equal(mime.getType('dir\\text.txt'), 'text/plain');
|
||||
assert.equal(mime.getType('.text.txt'), 'text/plain');
|
||||
assert.equal(mime.getType('.txt'), 'text/plain');
|
||||
assert.equal(mime.getType('txt'), 'text/plain');
|
||||
assert.equal(mime.getType('/path/to/page.html'), 'text/html');
|
||||
assert.equal(mime.getType('c:\\path\\to\\page.html'), 'text/html');
|
||||
assert.equal(mime.getType('page.html'), 'text/html');
|
||||
assert.equal(mime.getType('path/to/page.html'), 'text/html');
|
||||
assert.equal(mime.getType('path\\to\\page.html'), 'text/html');
|
||||
assert.strictEqual(mime.getType('/txt'), null);
|
||||
assert.strictEqual(mime.getType('\\txt'), null);
|
||||
assert.strictEqual(mime.getType('text.nope'), null);
|
||||
assert.strictEqual(mime.getType('/path/to/file.bogus'), null);
|
||||
assert.strictEqual(mime.getType('/path/to/json'), null);
|
||||
assert.strictEqual(mime.getType('/path/to/.json'), null);
|
||||
assert.strictEqual(mime.getType('/path/to/.config.json'), 'application/json');
|
||||
assert.strictEqual(mime.getType('.config.json'), 'application/json');
|
||||
});
|
||||
|
||||
it('getExtension()', function() {
|
||||
assert.equal(mime.getExtension('text/html'), 'html');
|
||||
assert.equal(mime.getExtension(' text/html'), 'html');
|
||||
assert.equal(mime.getExtension('text/html '), 'html');
|
||||
assert.strictEqual(mime.getExtension('application/x-bogus'), null);
|
||||
assert.strictEqual(mime.getExtension('bogus'), null);
|
||||
assert.strictEqual(mime.getExtension(null), null);
|
||||
assert.strictEqual(mime.getExtension(undefined), null);
|
||||
assert.strictEqual(mime.getExtension(42), null);
|
||||
assert.strictEqual(mime.getExtension({}), null);
|
||||
});
|
||||
});
|
||||
|
||||
describe('DB', function() {
|
||||
var diffs = [];
|
||||
|
||||
after(function() {
|
||||
if (diffs.length) {
|
||||
console.log('\n[INFO] The following inconsistencies with MDN (https://goo.gl/lHrFU6) and/or mime-types (https://github.com/jshttp/mime-types) are expected:');
|
||||
diffs.forEach(function(d) {
|
||||
console.warn(
|
||||
' ' + d[0]+ '[' + chalk.blue(d[1]) + '] = ' + chalk.red(d[2]) +
|
||||
', mime[' + d[1] + '] = ' + chalk.green(d[3])
|
||||
);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
it('Consistency', function() {
|
||||
for (var ext in this.types) {
|
||||
assert.equal(ext, this.extensions[this.types[ext]], `${ext} does not have consistent ext->type->ext mapping`);
|
||||
}
|
||||
});
|
||||
|
||||
it('MDN types', function() {
|
||||
// MDN types listed at https://goo.gl/lHrFU6
|
||||
var MDN = {
|
||||
'aac': 'audio/aac',
|
||||
'abw': 'application/x-abiword',
|
||||
'arc': 'application/octet-stream',
|
||||
'avi': 'video/x-msvideo',
|
||||
'azw': 'application/vnd.amazon.ebook',
|
||||
'bin': 'application/octet-stream',
|
||||
'bz': 'application/x-bzip',
|
||||
'bz2': 'application/x-bzip2',
|
||||
'csh': 'application/x-csh',
|
||||
'css': 'text/css',
|
||||
'csv': 'text/csv',
|
||||
'doc': 'application/msword',
|
||||
'epub': 'application/epub+zip',
|
||||
'gif': 'image/gif',
|
||||
'html': 'text/html',
|
||||
'ico': 'image/x-icon',
|
||||
'ics': 'text/calendar',
|
||||
'jar': 'application/java-archive',
|
||||
'jpg': 'image/jpeg',
|
||||
'js': 'application/javascript',
|
||||
'json': 'application/json',
|
||||
'midi': 'audio/midi',
|
||||
'mpeg': 'video/mpeg',
|
||||
'mpkg': 'application/vnd.apple.installer+xml',
|
||||
'odp': 'application/vnd.oasis.opendocument.presentation',
|
||||
'ods': 'application/vnd.oasis.opendocument.spreadsheet',
|
||||
'odt': 'application/vnd.oasis.opendocument.text',
|
||||
'oga': 'audio/ogg',
|
||||
'ogv': 'video/ogg',
|
||||
'ogx': 'application/ogg',
|
||||
'png': 'image/png',
|
||||
'pdf': 'application/pdf',
|
||||
'ppt': 'application/vnd.ms-powerpoint',
|
||||
'rar': 'application/x-rar-compressed',
|
||||
'rtf': 'application/rtf',
|
||||
'sh': 'application/x-sh',
|
||||
'svg': 'image/svg+xml',
|
||||
'swf': 'application/x-shockwave-flash',
|
||||
'tar': 'application/x-tar',
|
||||
'tiff': 'image/tiff',
|
||||
'ttf': 'font/ttf',
|
||||
'vsd': 'application/vnd.visio',
|
||||
'wav': 'audio/x-wav',
|
||||
'weba': 'audio/webm',
|
||||
'webm': 'video/webm',
|
||||
'webp': 'image/webp',
|
||||
'woff': 'font/woff',
|
||||
'woff2': 'font/woff2',
|
||||
'xhtml': 'application/xhtml+xml',
|
||||
'xls': 'application/vnd.ms-excel',
|
||||
'xml': 'application/xml',
|
||||
'xul': 'application/vnd.mozilla.xul+xml',
|
||||
'zip': 'application/zip',
|
||||
'3gp': 'video/3gpp',
|
||||
'3g2': 'video/3gpp2',
|
||||
'7z': 'application/x-7z-compressed',
|
||||
};
|
||||
|
||||
for (var ext in MDN) {
|
||||
var expected = MDN[ext];
|
||||
var actual = mime.getType(ext);
|
||||
if (actual !== expected) diffs.push(['MDN', ext, expected, actual]);
|
||||
}
|
||||
|
||||
for (var ext in mimeTypes.types) {
|
||||
var expected = mimeTypes.types[ext];
|
||||
var actual = mime.getType(ext);
|
||||
if (actual !== expected) diffs.push(['mime-types', ext, expected, actual]);
|
||||
}
|
||||
});
|
||||
|
||||
it('Specific types', function() {
|
||||
// Assortment of types we sanity check for good measure
|
||||
assert.equal(mime.getType('html'), 'text/html');
|
||||
assert.equal(mime.getType('js'), 'application/javascript');
|
||||
assert.equal(mime.getType('json'), 'application/json');
|
||||
assert.equal(mime.getType('rtf'), 'application/rtf');
|
||||
assert.equal(mime.getType('txt'), 'text/plain');
|
||||
assert.equal(mime.getType('xml'), 'application/xml');
|
||||
|
||||
assert.equal(mime.getType('wasm'), 'application/wasm');
|
||||
});
|
||||
|
||||
it('Specific extensions', function() {
|
||||
assert.equal(mime.getExtension('text/html;charset=UTF-8'), 'html');
|
||||
assert.equal(mime.getExtension('text/HTML; charset=UTF-8'), 'html');
|
||||
assert.equal(mime.getExtension('text/html; charset=UTF-8'), 'html');
|
||||
assert.equal(mime.getExtension('text/html; charset=UTF-8 '), 'html');
|
||||
assert.equal(mime.getExtension('text/html ; charset=UTF-8'), 'html');
|
||||
assert.equal(mime.getExtension(mime._types.text), 'txt');
|
||||
assert.equal(mime.getExtension(mime._types.htm), 'html');
|
||||
assert.equal(mime.getExtension('application/octet-stream'), 'bin');
|
||||
assert.equal(mime.getExtension('application/octet-stream '), 'bin');
|
||||
assert.equal(mime.getExtension(' text/html; charset=UTF-8'), 'html');
|
||||
assert.equal(mime.getExtension('text/html; charset=UTF-8 '), 'html');
|
||||
assert.equal(mime.getExtension('text/html; charset=UTF-8'), 'html');
|
||||
assert.equal(mime.getExtension('text/html ; charset=UTF-8'), 'html');
|
||||
assert.equal(mime.getExtension('text/html;charset=UTF-8'), 'html');
|
||||
assert.equal(mime.getExtension('text/Html;charset=UTF-8'), 'html');
|
||||
assert.equal(mime.getExtension('unrecognized'), null);
|
||||
|
||||
assert.equal(mime.getExtension('text/xml'), 'xml'); // See #180
|
||||
});
|
||||
});
|
1
express-server/node_modules/@google-cloud/storage/node_modules/mime/types/other.json
generated
vendored
Normal file
1
express-server/node_modules/@google-cloud/storage/node_modules/mime/types/other.json
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
express-server/node_modules/@google-cloud/storage/node_modules/mime/types/standard.json
generated
vendored
Normal file
1
express-server/node_modules/@google-cloud/storage/node_modules/mime/types/standard.json
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
55
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/.travis.yml
generated
vendored
Normal file
55
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/.travis.yml
generated
vendored
Normal file
@ -0,0 +1,55 @@
|
||||
sudo: false
|
||||
language: node_js
|
||||
before_install:
|
||||
- npm install -g npm@2
|
||||
- test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g
|
||||
notifications:
|
||||
email: false
|
||||
matrix:
|
||||
fast_finish: true
|
||||
include:
|
||||
- node_js: '0.8'
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: '0.10'
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: '0.11'
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: '0.12'
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: 1
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: 2
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: 3
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: 4
|
||||
env: TASK=test
|
||||
- node_js: 5
|
||||
env: TASK=test
|
||||
- node_js: 6
|
||||
env: TASK=test
|
||||
- node_js: 7
|
||||
env: TASK=test
|
||||
- node_js: 8
|
||||
env: TASK=test
|
||||
- node_js: 9
|
||||
env: TASK=test
|
||||
script: "npm run $TASK"
|
||||
env:
|
||||
global:
|
||||
- secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc=
|
||||
- secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI=
|
38
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/CONTRIBUTING.md
generated
vendored
Normal file
38
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/CONTRIBUTING.md
generated
vendored
Normal file
@ -0,0 +1,38 @@
|
||||
# Developer's Certificate of Origin 1.1
|
||||
|
||||
By making a contribution to this project, I certify that:
|
||||
|
||||
* (a) The contribution was created in whole or in part by me and I
|
||||
have the right to submit it under the open source license
|
||||
indicated in the file; or
|
||||
|
||||
* (b) The contribution is based upon previous work that, to the best
|
||||
of my knowledge, is covered under an appropriate open source
|
||||
license and I have the right under that license to submit that
|
||||
work with modifications, whether created in whole or in part
|
||||
by me, under the same open source license (unless I am
|
||||
permitted to submit under a different license), as indicated
|
||||
in the file; or
|
||||
|
||||
* (c) The contribution was provided directly to me by some other
|
||||
person who certified (a), (b) or (c) and I have not modified
|
||||
it.
|
||||
|
||||
* (d) I understand and agree that this project and the contribution
|
||||
are public and that a record of the contribution (including all
|
||||
personal information I submit with it, including my sign-off) is
|
||||
maintained indefinitely and may be redistributed consistent with
|
||||
this project or the open source license(s) involved.
|
||||
|
||||
## Moderation Policy
|
||||
|
||||
The [Node.js Moderation Policy] applies to this WG.
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
The [Node.js Code of Conduct][] applies to this WG.
|
||||
|
||||
[Node.js Code of Conduct]:
|
||||
https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md
|
||||
[Node.js Moderation Policy]:
|
||||
https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md
|
136
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/GOVERNANCE.md
generated
vendored
Normal file
136
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/GOVERNANCE.md
generated
vendored
Normal file
@ -0,0 +1,136 @@
|
||||
### Streams Working Group
|
||||
|
||||
Node.js Streams is jointly governed by a Working Group (WG)
that is responsible for high-level guidance of the project.
|
||||
|
||||
The WG has final authority over this project including:
|
||||
|
||||
* Technical direction
|
||||
* Project governance and process (including this policy)
|
||||
* Contribution policy
|
||||
* GitHub repository hosting
|
||||
* Conduct guidelines
|
||||
* Maintaining the list of additional Collaborators
|
||||
|
||||
For the current list of WG members, see the project
|
||||
[README.md](./README.md#current-project-team-members).
|
||||
|
||||
### Collaborators
|
||||
|
||||
The readable-stream GitHub repository is
|
||||
maintained by the WG and additional Collaborators who are added by the
|
||||
WG on an ongoing basis.
|
||||
|
||||
Individuals making significant and valuable contributions are made
|
||||
Collaborators and given commit-access to the project. These
|
||||
individuals are identified by the WG and their addition as
|
||||
Collaborators is discussed during the WG meeting.
|
||||
|
||||
_Note:_ If you make a significant contribution and are not considered
for commit access, log an issue or contact a WG member directly and it
will be brought up in the next WG meeting.
|
||||
|
||||
Modifications of the contents of the readable-stream repository are
|
||||
made on
|
||||
a collaborative basis. Anybody with a GitHub account may propose a
|
||||
modification via pull request and it will be considered by the project
|
||||
Collaborators. All pull requests must be reviewed and accepted by a
|
||||
Collaborator with sufficient expertise who is able to take full
|
||||
responsibility for the change. In the case of pull requests proposed
|
||||
by an existing Collaborator, an additional Collaborator is required
|
||||
for sign-off. Consensus should be sought if additional Collaborators
|
||||
participate and there is disagreement around a particular
|
||||
modification. See _Consensus Seeking Process_ below for further detail
|
||||
on the consensus model used for governance.
|
||||
|
||||
Collaborators may opt to elevate significant or controversial
|
||||
modifications, or modifications that have not found consensus to the
|
||||
WG for discussion by assigning the ***WG-agenda*** tag to a pull
|
||||
request or issue. The WG should serve as the final arbiter where
|
||||
required.
|
||||
|
||||
For the current list of Collaborators, see the project
|
||||
[README.md](./README.md#members).
|
||||
|
||||
### WG Membership
|
||||
|
||||
WG seats are not time-limited. There is no fixed size of the WG.
|
||||
However, the expected target is between 6 and 12, to ensure adequate
|
||||
coverage of important areas of expertise, balanced with the ability to
|
||||
make decisions efficiently.
|
||||
|
||||
There is no specific set of requirements or qualifications for WG
|
||||
membership beyond these rules.
|
||||
|
||||
The WG may add additional members to the WG by unanimous consensus.
|
||||
|
||||
A WG member may be removed from the WG by voluntary resignation, or by
|
||||
unanimous consensus of all other WG members.
|
||||
|
||||
Changes to WG membership should be posted in the agenda, and may be
|
||||
suggested as any other agenda item (see "WG Meetings" below).
|
||||
|
||||
If an addition or removal is proposed during a meeting, and the full
|
||||
WG is not in attendance to participate, then the addition or removal
|
||||
is added to the agenda for the subsequent meeting. This is to ensure
|
||||
that all members are given the opportunity to participate in all
|
||||
membership decisions. If a WG member is unable to attend a meeting
|
||||
where a planned membership decision is being made, then their consent
|
||||
is assumed.
|
||||
|
||||
No more than 1/3 of the WG members may be affiliated with the same
|
||||
employer. If removal or resignation of a WG member, or a change of
|
||||
employment by a WG member, creates a situation where more than 1/3 of
|
||||
the WG membership shares an employer, then the situation must be
|
||||
immediately remedied by the resignation or removal of one or more WG
|
||||
members affiliated with the over-represented employer(s).
|
||||
|
||||
### WG Meetings
|
||||
|
||||
The WG meets occasionally on a Google Hangout On Air. A designated moderator
|
||||
approved by the WG runs the meeting. Each meeting should be
|
||||
published to YouTube.
|
||||
|
||||
Items are added to the WG agenda that are considered contentious or
|
||||
are modifications of governance, contribution policy, WG membership,
|
||||
or release process.
|
||||
|
||||
The intention of the agenda is not to approve or review all patches;
|
||||
that should happen continuously on GitHub and be handled by the larger
|
||||
group of Collaborators.
|
||||
|
||||
Any community member or contributor can ask that something be added to
|
||||
the next meeting's agenda by logging a GitHub Issue. Any Collaborator,
|
||||
WG member or the moderator can add the item to the agenda by adding
|
||||
the ***WG-agenda*** tag to the issue.
|
||||
|
||||
Prior to each WG meeting the moderator will share the Agenda with
|
||||
members of the WG. WG members can add any items they like to the
|
||||
agenda at the beginning of each meeting. The moderator and the WG
|
||||
cannot veto or remove items.
|
||||
|
||||
The WG may invite persons or representatives from certain projects to
|
||||
participate in a non-voting capacity.
|
||||
|
||||
The moderator is responsible for summarizing the discussion of each
|
||||
agenda item and sends it as a pull request after the meeting.
|
||||
|
||||
### Consensus Seeking Process
|
||||
|
||||
The WG follows a
|
||||
[Consensus
|
||||
Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making)
|
||||
decision-making model.
|
||||
|
||||
When an agenda item has appeared to reach a consensus the moderator
|
||||
will ask "Does anyone object?" as a final call for dissent from the
|
||||
consensus.
|
||||
|
||||
If an agenda item cannot reach a consensus a WG member can call for
|
||||
either a closing vote or a vote to table the issue to the next
|
||||
meeting. The call for a vote must be seconded by a majority of the WG
|
||||
or else the discussion will continue. Simple majority wins.
|
||||
|
||||
Note that changes to WG membership require a majority consensus. See
|
||||
"WG Membership" above.
|
47
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/LICENSE
generated
vendored
Normal file
47
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/LICENSE
generated
vendored
Normal file
@ -0,0 +1,47 @@
|
||||
Node.js is licensed for use as follows:
|
||||
|
||||
"""
|
||||
Copyright Node.js contributors. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
"""
|
||||
|
||||
This license applies to parts of Node.js originating from the
|
||||
https://github.com/joyent/node repository:
|
||||
|
||||
"""
|
||||
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
"""
|
58
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/README.md
generated
vendored
Normal file
58
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/README.md
generated
vendored
Normal file
@ -0,0 +1,58 @@
|
||||
# readable-stream
|
||||
|
||||
***Node-core v8.11.1 streams for userland*** [Build Status](https://travis-ci.org/nodejs/readable-stream)

[NPM](https://nodei.co/npm/readable-stream/)

[Sauce Test Status](https://saucelabs.com/u/readable-stream)
|
||||
|
||||
```bash
|
||||
npm install --save readable-stream
|
||||
```
|
||||
|
||||
***Node-core streams for userland***
|
||||
|
||||
This package is a mirror of the Streams2 and Streams3 implementations in
|
||||
Node-core.
|
||||
|
||||
Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.11.1/docs/api/stream.html).
|
||||
|
||||
If you want to guarantee a stable streams base, regardless of what version of
Node you (or the users of your libraries) are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core. For background, see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
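
A minimal sketch of depending on the userland module rather than core:

```javascript
// Pull the stream classes from readable-stream instead of Node-core 'stream'
const Readable = require('readable-stream').Readable;

const source = new Readable({ read() {} });
source.push('hello');
source.push(null);
source.on('data', (chunk) => console.log(chunk.toString())); // 'hello'
```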
|
||||
|
||||
As of version 2.0.0 **readable-stream** uses semantic versioning.
|
||||
|
||||
# Streams Working Group
|
||||
|
||||
`readable-stream` is maintained by the Streams Working Group, which
|
||||
oversees the development and maintenance of the Streams API within
|
||||
Node.js. The responsibilities of the Streams Working Group include:
|
||||
|
||||
* Addressing stream issues on the Node.js issue tracker.
|
||||
* Authoring and editing stream documentation within the Node.js project.
|
||||
* Reviewing changes to stream subclasses within the Node.js project.
|
||||
* Redirecting changes to streams from the Node.js project to this
|
||||
project.
|
||||
* Assisting in the implementation of stream providers within Node.js.
|
||||
* Recommending versions of `readable-stream` to be included in Node.js.
|
||||
* Messaging about the future of streams to give the community advance
|
||||
notice of changes.
|
||||
|
||||
<a name="members"></a>
|
||||
## Team Members
|
||||
|
||||
* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com>
|
||||
- Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B
|
||||
* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com>
|
||||
- Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
|
||||
* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org>
|
||||
- Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D
|
||||
* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com>
|
||||
* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com>
|
||||
* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me>
|
||||
* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com>
|
||||
- Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
|
||||
* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com>
|
60
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
generated
vendored
Normal file
60
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
generated
vendored
Normal file
@ -0,0 +1,60 @@
|
||||
# streams WG Meeting 2015-01-30
|
||||
|
||||
## Links
|
||||
|
||||
* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg
|
||||
* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106
|
||||
* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/
|
||||
|
||||
## Agenda
|
||||
|
||||
Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting.
|
||||
|
||||
* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105)
|
||||
* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101)
|
||||
* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102)
|
||||
* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99)
|
||||
|
||||
## Minutes
|
||||
|
||||
### adopt a charter
|
||||
|
||||
* group: +1's all around
|
||||
|
||||
### What versioning scheme should be adopted?
|
||||
* group: +1’s 3.0.0
|
||||
* domenic+group: pulling in patches from other sources where appropriate
|
||||
* mikeal: version independently, suggesting versions for io.js
|
||||
* mikeal+domenic: work with TC to notify in advance of changes
|
||||
simpler stream creation
|
||||
|
||||
### streamline creation of streams
|
||||
* sam: streamline creation of streams
|
||||
* domenic: nice simple solution posted
|
||||
but, we lose the opportunity to change the model
|
||||
may not be backwards incompatible (double check keys)
|
||||
|
||||
**action item:** domenic will check
|
||||
|
||||
### remove implicit flowing of streams on(‘data’)
|
||||
* add isFlowing / isPaused
|
||||
* mikeal: worrying that we’re documenting polyfill methods – confuses users
|
||||
* domenic: more reflective API is probably good, with warning labels for users
|
||||
* new section for mad scientists (reflective stream access)
|
||||
* calvin: name the “third state”
|
||||
* mikeal: maybe borrow the name from whatwg?
|
||||
* domenic: we’re missing the “third state”
|
||||
* consensus: kind of difficult to name the third state
|
||||
* mikeal: figure out differences in states / compat
|
||||
* mathias: always flow on data – eliminates third state
|
||||
* explore what it breaks
|
||||
|
||||
**action items:**
|
||||
* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream)
|
||||
* ask rod/build for infrastructure
|
||||
* **chris**: explore the “flow on data” approach
|
||||
* add isPaused/isFlowing
|
||||
* add new docs section
|
||||
* move isPaused to that section
|
||||
|
||||
|
1
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/duplex-browser.js
generated
vendored
Normal file
1
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/duplex-browser.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
module.exports = require('./lib/_stream_duplex.js');
|
1
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/duplex.js
generated
vendored
Normal file
1
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/duplex.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
module.exports = require('./readable').Duplex
|
131
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/_stream_duplex.js
generated
vendored
Normal file
131
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/_stream_duplex.js
generated
vendored
Normal file
@ -0,0 +1,131 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a duplex stream is just a stream that is both readable and writable.
|
||||
// Since JS doesn't have multiple prototypal inheritance, this class
|
||||
// prototypally inherits from Readable, and then parasitically from
|
||||
// Writable.
|
||||
|
||||
'use strict';
|
||||
|
||||
/*<replacement>*/
|
||||
|
||||
var pna = require('process-nextick-args');
|
||||
/*</replacement>*/
|
||||
|
||||
/*<replacement>*/
|
||||
var objectKeys = Object.keys || function (obj) {
|
||||
var keys = [];
|
||||
for (var key in obj) {
|
||||
keys.push(key);
|
||||
}return keys;
|
||||
};
|
||||
/*</replacement>*/
|
||||
|
||||
module.exports = Duplex;
|
||||
|
||||
/*<replacement>*/
|
||||
var util = require('core-util-is');
|
||||
util.inherits = require('inherits');
|
||||
/*</replacement>*/
|
||||
|
||||
var Readable = require('./_stream_readable');
|
||||
var Writable = require('./_stream_writable');
|
||||
|
||||
util.inherits(Duplex, Readable);
|
||||
|
||||
{
|
||||
// avoid scope creep, the keys array can then be collected
|
||||
var keys = objectKeys(Writable.prototype);
|
||||
for (var v = 0; v < keys.length; v++) {
|
||||
var method = keys[v];
|
||||
if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
|
||||
}
|
||||
}
|
||||
|
||||
function Duplex(options) {
|
||||
if (!(this instanceof Duplex)) return new Duplex(options);
|
||||
|
||||
Readable.call(this, options);
|
||||
Writable.call(this, options);
|
||||
|
||||
if (options && options.readable === false) this.readable = false;
|
||||
|
||||
if (options && options.writable === false) this.writable = false;
|
||||
|
||||
this.allowHalfOpen = true;
|
||||
if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;
|
||||
|
||||
this.once('end', onend);
|
||||
}
|
||||
|
||||
Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function () {
|
||||
return this._writableState.highWaterMark;
|
||||
}
|
||||
});
|
||||
|
||||
// the no-half-open enforcer
|
||||
function onend() {
|
||||
// if we allow half-open state, or if the writable side ended,
|
||||
// then we're ok.
|
||||
if (this.allowHalfOpen || this._writableState.ended) return;
|
||||
|
||||
// no more data can be written.
|
||||
// But allow more writes to happen in this tick.
|
||||
pna.nextTick(onEndNT, this);
|
||||
}
|
||||
|
||||
function onEndNT(self) {
|
||||
self.end();
|
||||
}
|
||||
|
||||
Object.defineProperty(Duplex.prototype, 'destroyed', {
|
||||
get: function () {
|
||||
if (this._readableState === undefined || this._writableState === undefined) {
|
||||
return false;
|
||||
}
|
||||
return this._readableState.destroyed && this._writableState.destroyed;
|
||||
},
|
||||
set: function (value) {
|
||||
// we ignore the value if the stream
|
||||
// has not been initialized yet
|
||||
if (this._readableState === undefined || this._writableState === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
// backward compatibility, the user is explicitly
|
||||
// managing destroyed
|
||||
this._readableState.destroyed = value;
|
||||
this._writableState.destroyed = value;
|
||||
}
|
||||
});
|
||||
|
||||
Duplex.prototype._destroy = function (err, cb) {
|
||||
this.push(null);
|
||||
this.end();
|
||||
|
||||
pna.nextTick(cb, err);
|
||||
};
|
47
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/_stream_passthrough.js
generated
vendored
Normal file
47
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/_stream_passthrough.js
generated
vendored
Normal file
@ -0,0 +1,47 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a passthrough stream.
|
||||
// basically just the most minimal sort of Transform stream.
|
||||
// Every written chunk gets output as-is.
|
||||
|
||||
'use strict';
|
||||
|
||||
module.exports = PassThrough;
|
||||
|
||||
var Transform = require('./_stream_transform');
|
||||
|
||||
/*<replacement>*/
|
||||
var util = require('core-util-is');
|
||||
util.inherits = require('inherits');
|
||||
/*</replacement>*/
|
||||
|
||||
util.inherits(PassThrough, Transform);
|
||||
|
||||
function PassThrough(options) {
|
||||
if (!(this instanceof PassThrough)) return new PassThrough(options);
|
||||
|
||||
Transform.call(this, options);
|
||||
}
|
||||
|
||||
PassThrough.prototype._transform = function (chunk, encoding, cb) {
|
||||
cb(null, chunk);
|
||||
};
|
1019
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/_stream_readable.js
generated
vendored
Normal file
1019
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/_stream_readable.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
214
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/_stream_transform.js
generated
vendored
Normal file
214
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/_stream_transform.js
generated
vendored
Normal file
@ -0,0 +1,214 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a transform stream is a readable/writable stream where you do
|
||||
// something with the data. Sometimes it's called a "filter",
|
||||
// but that's not a great name for it, since that implies a thing where
|
||||
// some bits pass through, and others are simply ignored. (That would
|
||||
// be a valid example of a transform, of course.)
|
||||
//
|
||||
// While the output is causally related to the input, it's not a
|
||||
// necessarily symmetric or synchronous transformation. For example,
|
||||
// a zlib stream might take multiple plain-text writes(), and then
|
||||
// emit a single compressed chunk some time in the future.
|
||||
//
|
||||
// Here's how this works:
|
||||
//
|
||||
// The Transform stream has all the aspects of the readable and writable
|
||||
// stream classes. When you write(chunk), that calls _write(chunk,cb)
|
||||
// internally, and returns false if there's a lot of pending writes
|
||||
// buffered up. When you call read(), that calls _read(n) until
|
||||
// there's enough pending readable data buffered up.
|
||||
//
|
||||
// In a transform stream, the written data is placed in a buffer. When
|
||||
// _read(n) is called, it transforms the queued up data, calling the
|
||||
// buffered _write cb's as it consumes chunks. If consuming a single
|
||||
// written chunk would result in multiple output chunks, then the first
|
||||
// outputted bit calls the readcb, and subsequent chunks just go into
|
||||
// the read buffer, and will cause it to emit 'readable' if necessary.
|
||||
//
|
||||
// This way, back-pressure is actually determined by the reading side,
|
||||
// since _read has to be called to start processing a new chunk. However,
|
||||
// a pathological inflate type of transform can cause excessive buffering
|
||||
// here. For example, imagine a stream where every byte of input is
|
||||
// interpreted as an integer from 0-255, and then results in that many
|
||||
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
|
||||
// 1kb of data being output. In this case, you could write a very small
|
||||
// amount of input, and end up with a very large amount of output. In
|
||||
// such a pathological inflating mechanism, there'd be no way to tell
|
||||
// the system to stop doing the transform. A single 4MB write could
|
||||
// cause the system to run out of memory.
|
||||
//
|
||||
// However, even in such a pathological case, only a single written chunk
|
||||
// would be consumed, and then the rest would wait (un-transformed) until
|
||||
// the results of the previous transformed chunk were consumed.
|
||||
|
||||
'use strict';
|
||||
|
||||
module.exports = Transform;
|
||||
|
||||
var Duplex = require('./_stream_duplex');
|
||||
|
||||
/*<replacement>*/
|
||||
var util = require('core-util-is');
|
||||
util.inherits = require('inherits');
|
||||
/*</replacement>*/
|
||||
|
||||
util.inherits(Transform, Duplex);
|
||||
|
||||
function afterTransform(er, data) {
|
||||
var ts = this._transformState;
|
||||
ts.transforming = false;
|
||||
|
||||
var cb = ts.writecb;
|
||||
|
||||
if (!cb) {
|
||||
return this.emit('error', new Error('write callback called multiple times'));
|
||||
}
|
||||
|
||||
ts.writechunk = null;
|
||||
ts.writecb = null;
|
||||
|
||||
if (data != null) // single equals check for both `null` and `undefined`
|
||||
this.push(data);
|
||||
|
||||
cb(er);
|
||||
|
||||
var rs = this._readableState;
|
||||
rs.reading = false;
|
||||
if (rs.needReadable || rs.length < rs.highWaterMark) {
|
||||
this._read(rs.highWaterMark);
|
||||
}
|
||||
}
|
||||
|
||||
function Transform(options) {
|
||||
if (!(this instanceof Transform)) return new Transform(options);
|
||||
|
||||
Duplex.call(this, options);
|
||||
|
||||
this._transformState = {
|
||||
afterTransform: afterTransform.bind(this),
|
||||
needTransform: false,
|
||||
transforming: false,
|
||||
writecb: null,
|
||||
writechunk: null,
|
||||
writeencoding: null
|
||||
};
|
||||
|
||||
// start out asking for a readable event once data is transformed.
|
||||
this._readableState.needReadable = true;
|
||||
|
||||
// we have implemented the _read method, and done the other things
|
||||
// that Readable wants before the first _read call, so unset the
|
||||
// sync guard flag.
|
||||
this._readableState.sync = false;
|
||||
|
||||
if (options) {
|
||||
if (typeof options.transform === 'function') this._transform = options.transform;
|
||||
|
||||
if (typeof options.flush === 'function') this._flush = options.flush;
|
||||
}
|
||||
|
||||
// When the writable side finishes, then flush out anything remaining.
|
||||
this.on('prefinish', prefinish);
|
||||
}
|
||||
|
||||
function prefinish() {
|
||||
var _this = this;
|
||||
|
||||
if (typeof this._flush === 'function') {
|
||||
this._flush(function (er, data) {
|
||||
done(_this, er, data);
|
||||
});
|
||||
} else {
|
||||
done(this, null, null);
|
||||
}
|
||||
}
|
||||
|
||||
Transform.prototype.push = function (chunk, encoding) {
|
||||
this._transformState.needTransform = false;
|
||||
return Duplex.prototype.push.call(this, chunk, encoding);
|
||||
};
|
||||
|
||||
// This is the part where you do stuff!
|
||||
// override this function in implementation classes.
|
||||
// 'chunk' is an input chunk.
|
||||
//
|
||||
// Call `push(newChunk)` to pass along transformed output
|
||||
// to the readable side. You may call 'push' zero or more times.
|
||||
//
|
||||
// Call `cb(err)` when you are done with this chunk. If you pass
|
||||
// an error, then that'll put the hurt on the whole operation. If you
|
||||
// never call cb(), then you'll never get another chunk.
|
||||
Transform.prototype._transform = function (chunk, encoding, cb) {
|
||||
throw new Error('_transform() is not implemented');
|
||||
};
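// Illustrative sketch (not part of the vendored module): rather than
// overriding _transform on a subclass, the constructor above also accepts
// `transform` and `flush` implementations via options. A hypothetical
// line-counting pass-through:
function makeLineCounterSketch() {
  var count = 0;
  return new Transform({
    transform: function (chunk, encoding, cb) {
      // count newlines in the chunk, then forward it unchanged
      count += String(chunk).split('\n').length - 1;
      cb(null, chunk);
    },
    flush: function (cb) {
      // once the writable side ends, emit the final tally
      this.push('lines: ' + count + '\n');
      cb();
    }
  });
}
// Usage (hypothetical): source.pipe(makeLineCounterSketch()).pipe(process.stdout);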
|
||||
|
||||
Transform.prototype._write = function (chunk, encoding, cb) {
|
||||
var ts = this._transformState;
|
||||
ts.writecb = cb;
|
||||
ts.writechunk = chunk;
|
||||
ts.writeencoding = encoding;
|
||||
if (!ts.transforming) {
|
||||
var rs = this._readableState;
|
||||
if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
|
||||
}
|
||||
};
|
||||
|
||||
// Doesn't matter what the args are here.
|
||||
// _transform does all the work.
|
||||
// That we got here means that the readable side wants more data.
|
||||
Transform.prototype._read = function (n) {
|
||||
var ts = this._transformState;
|
||||
|
||||
if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
|
||||
ts.transforming = true;
|
||||
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
|
||||
} else {
|
||||
// mark that we need a transform, so that any data that comes in
|
||||
// will get processed, now that we've asked for it.
|
||||
ts.needTransform = true;
|
||||
}
|
||||
};
|
||||
|
||||
Transform.prototype._destroy = function (err, cb) {
|
||||
var _this2 = this;
|
||||
|
||||
Duplex.prototype._destroy.call(this, err, function (err2) {
|
||||
cb(err2);
|
||||
_this2.emit('close');
|
||||
});
|
||||
};
|
||||
|
||||
function done(stream, er, data) {
|
||||
if (er) return stream.emit('error', er);
|
||||
|
||||
if (data != null) // single equals check for both `null` and `undefined`
|
||||
stream.push(data);
|
||||
|
||||
// if there's nothing in the write buffer, then that means
|
||||
// that nothing more will ever be provided
|
||||
if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');
|
||||
|
||||
if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');
|
||||
|
||||
return stream.push(null);
|
||||
}
|
687
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/_stream_writable.js
generated
vendored
Normal file
687
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/_stream_writable.js
generated
vendored
Normal file
@ -0,0 +1,687 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// A bit simpler than readable streams.
|
||||
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
|
||||
// the drain event emission and buffering.
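//
// Illustrative sketch (not part of the vendored module, kept inside a comment
// so the 'use strict' directive below stays the first statement): a minimal
// Writable that collects everything written to it, built on Node's core
// 'stream' module, which is interchangeable with the Writable exported here.
//
//   var CoreWritable = require('stream').Writable;
//
//   var chunks = [];
//   var sink = new CoreWritable({
//     write: function (chunk, encoding, cb) {
//       // stash the (already decoded) Buffer, then ack so the queue drains
//       chunks.push(chunk);
//       cb();
//     }
//   });
//
//   sink.on('finish', function () {
//     console.log(Buffer.concat(chunks).toString('utf8'));
//   });
//   sink.write('hello, ');
//   sink.end('world');  // prints "hello, world" once 'finish' fires
//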
|
||||
|
||||
'use strict';
|
||||
|
||||
/*<replacement>*/
|
||||
|
||||
var pna = require('process-nextick-args');
|
||||
/*</replacement>*/
|
||||
|
||||
module.exports = Writable;
|
||||
|
||||
/* <replacement> */
|
||||
function WriteReq(chunk, encoding, cb) {
|
||||
this.chunk = chunk;
|
||||
this.encoding = encoding;
|
||||
this.callback = cb;
|
||||
this.next = null;
|
||||
}
|
||||
|
||||
// It looks like a linked list, but it is not:
// there will only ever be 2 of these per stream
|
||||
function CorkedRequest(state) {
|
||||
var _this = this;
|
||||
|
||||
this.next = null;
|
||||
this.entry = null;
|
||||
this.finish = function () {
|
||||
onCorkedFinish(_this, state);
|
||||
};
|
||||
}
|
||||
/* </replacement> */
|
||||
|
||||
/*<replacement>*/
|
||||
var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick;
|
||||
/*</replacement>*/
|
||||
|
||||
/*<replacement>*/
|
||||
var Duplex;
|
||||
/*</replacement>*/
|
||||
|
||||
Writable.WritableState = WritableState;
|
||||
|
||||
/*<replacement>*/
|
||||
var util = require('core-util-is');
|
||||
util.inherits = require('inherits');
|
||||
/*</replacement>*/
|
||||
|
||||
/*<replacement>*/
|
||||
var internalUtil = {
|
||||
deprecate: require('util-deprecate')
|
||||
};
|
||||
/*</replacement>*/
|
||||
|
||||
/*<replacement>*/
|
||||
var Stream = require('./internal/streams/stream');
|
||||
/*</replacement>*/
|
||||
|
||||
/*<replacement>*/
|
||||
|
||||
var Buffer = require('safe-buffer').Buffer;
|
||||
var OurUint8Array = global.Uint8Array || function () {};
|
||||
function _uint8ArrayToBuffer(chunk) {
|
||||
return Buffer.from(chunk);
|
||||
}
|
||||
function _isUint8Array(obj) {
|
||||
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
|
||||
}
|
||||
|
||||
/*</replacement>*/
|
||||
|
||||
var destroyImpl = require('./internal/streams/destroy');
|
||||
|
||||
util.inherits(Writable, Stream);
|
||||
|
||||
function nop() {}
|
||||
|
||||
function WritableState(options, stream) {
|
||||
Duplex = Duplex || require('./_stream_duplex');
|
||||
|
||||
options = options || {};
|
||||
|
||||
// Duplex streams are both readable and writable, but share
|
||||
// the same options object.
|
||||
// However, some cases require setting options to different
|
||||
// values for the readable and the writable sides of the duplex stream.
|
||||
// These options can be provided separately as readableXXX and writableXXX.
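// For example (illustrative, not part of this module):
//   new Duplex({ readableHighWaterMark: 16 * 1024, writableHighWaterMark: 64 * 1024 })
// gives the readable and writable sides different buffering thresholds.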
|
||||
var isDuplex = stream instanceof Duplex;
|
||||
|
||||
// object stream flag to indicate whether or not this stream
|
||||
// contains buffers or objects.
|
||||
this.objectMode = !!options.objectMode;
|
||||
|
||||
if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
|
||||
|
||||
// the point at which write() starts returning false
|
||||
// Note: 0 is a valid value, means that we always return false if
|
||||
// the entire buffer is not flushed immediately on write()
|
||||
var hwm = options.highWaterMark;
|
||||
var writableHwm = options.writableHighWaterMark;
|
||||
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
|
||||
|
||||
if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm;
|
||||
|
||||
// cast to ints.
|
||||
this.highWaterMark = Math.floor(this.highWaterMark);
|
||||
|
||||
// if _final has been called
|
||||
this.finalCalled = false;
|
||||
|
||||
// drain event flag.
|
||||
this.needDrain = false;
|
||||
// at the start of calling end()
|
||||
this.ending = false;
|
||||
// when end() has been called, and returned
|
||||
this.ended = false;
|
||||
// when 'finish' is emitted
|
||||
this.finished = false;
|
||||
|
||||
// has it been destroyed
|
||||
this.destroyed = false;
|
||||
|
||||
// should we decode strings into buffers before passing to _write?
|
||||
// this is here so that some node-core streams can optimize string
|
||||
// handling at a lower level.
|
||||
var noDecode = options.decodeStrings === false;
|
||||
this.decodeStrings = !noDecode;
|
||||
|
||||
// Crypto is kind of old and crusty. Historically, its default string
|
||||
// encoding is 'binary' so we have to make this configurable.
|
||||
// Everything else in the universe uses 'utf8', though.
|
||||
this.defaultEncoding = options.defaultEncoding || 'utf8';
|
||||
|
||||
// not an actual buffer we keep track of, but a measurement
|
||||
// of how much we're waiting to get pushed to some underlying
|
||||
// socket or file.
|
||||
this.length = 0;
|
||||
|
||||
// a flag to see when we're in the middle of a write.
|
||||
this.writing = false;
|
||||
|
||||
// when true all writes will be buffered until .uncork() call
|
||||
this.corked = 0;
|
||||
|
||||
// a flag to be able to tell if the onwrite cb is called immediately,
|
||||
// or on a later tick. We set this to true at first, because any
|
||||
// actions that shouldn't happen until "later" should generally also
|
||||
// not happen before the first write call.
|
||||
this.sync = true;
|
||||
|
||||
// a flag to know if we're processing previously buffered items, which
|
||||
// may call the _write() callback in the same tick, so that we don't
|
||||
// end up in an overlapped onwrite situation.
|
||||
this.bufferProcessing = false;
|
||||
|
||||
// the callback that's passed to _write(chunk,cb)
|
||||
this.onwrite = function (er) {
|
||||
onwrite(stream, er);
|
||||
};
|
||||
|
||||
// the callback that the user supplies to write(chunk,encoding,cb)
|
||||
this.writecb = null;
|
||||
|
||||
// the amount that is being written when _write is called.
|
||||
this.writelen = 0;
|
||||
|
||||
this.bufferedRequest = null;
|
||||
this.lastBufferedRequest = null;
|
||||
|
||||
// number of pending user-supplied write callbacks
|
||||
// this must be 0 before 'finish' can be emitted
|
||||
this.pendingcb = 0;
|
||||
|
||||
// emit prefinish if the only thing we're waiting for is _write cbs
|
||||
// This is relevant for synchronous Transform streams
|
||||
this.prefinished = false;
|
||||
|
||||
// True if the error was already emitted and should not be thrown again
|
||||
this.errorEmitted = false;
|
||||
|
||||
// count buffered requests
|
||||
this.bufferedRequestCount = 0;
|
||||
|
||||
// allocate the first CorkedRequest, there is always
|
||||
// one allocated and free to use, and we maintain at most two
|
||||
this.corkedRequestsFree = new CorkedRequest(this);
|
||||
}
|
||||
|
||||
WritableState.prototype.getBuffer = function getBuffer() {
|
||||
var current = this.bufferedRequest;
|
||||
var out = [];
|
||||
while (current) {
|
||||
out.push(current);
|
||||
current = current.next;
|
||||
}
|
||||
return out;
|
||||
};
|
||||
|
||||
(function () {
|
||||
try {
|
||||
Object.defineProperty(WritableState.prototype, 'buffer', {
|
||||
get: internalUtil.deprecate(function () {
|
||||
return this.getBuffer();
|
||||
}, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
|
||||
});
|
||||
} catch (_) {}
|
||||
})();
|
||||
|
||||
// Test _writableState for inheritance to account for Duplex streams,
|
||||
// whose prototype chain only points to Readable.
|
||||
var realHasInstance;
|
||||
if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
|
||||
realHasInstance = Function.prototype[Symbol.hasInstance];
|
||||
Object.defineProperty(Writable, Symbol.hasInstance, {
|
||||
value: function (object) {
|
||||
if (realHasInstance.call(this, object)) return true;
|
||||
if (this !== Writable) return false;
|
||||
|
||||
return object && object._writableState instanceof WritableState;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
realHasInstance = function (object) {
|
||||
return object instanceof this;
|
||||
};
|
||||
}
|
||||
|
||||
function Writable(options) {
|
||||
Duplex = Duplex || require('./_stream_duplex');
|
||||
|
||||
// Writable ctor is applied to Duplexes, too.
|
||||
// `realHasInstance` is necessary because using plain `instanceof`
|
||||
// would return false, as no `_writableState` property is attached.
|
||||
|
||||
// Trying to use the custom `instanceof` for Writable here will also break the
|
||||
// Node.js LazyTransform implementation, which has a non-trivial getter for
|
||||
// `_writableState` that would lead to infinite recursion.
|
||||
if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {
|
||||
return new Writable(options);
|
||||
}
|
||||
|
||||
this._writableState = new WritableState(options, this);
|
||||
|
||||
// legacy.
|
||||
this.writable = true;
|
||||
|
||||
if (options) {
|
||||
if (typeof options.write === 'function') this._write = options.write;
|
||||
|
||||
if (typeof options.writev === 'function') this._writev = options.writev;
|
||||
|
||||
if (typeof options.destroy === 'function') this._destroy = options.destroy;
|
||||
|
||||
if (typeof options.final === 'function') this._final = options.final;
|
||||
}
|
||||
|
||||
Stream.call(this);
|
||||
}
|
||||
|
||||
// Otherwise people can pipe Writable streams, which is just wrong.
|
||||
Writable.prototype.pipe = function () {
|
||||
this.emit('error', new Error('Cannot pipe, not readable'));
|
||||
};
|
||||
|
||||
function writeAfterEnd(stream, cb) {
|
||||
var er = new Error('write after end');
|
||||
// TODO: defer error events consistently everywhere, not just the cb
|
||||
stream.emit('error', er);
|
||||
pna.nextTick(cb, er);
|
||||
}
|
||||
|
||||
// Checks that a user-supplied chunk is valid, especially for the particular
|
||||
// mode the stream is in. Currently this means that `null` is never accepted
|
||||
// and undefined/non-string values are only allowed in object mode.
|
||||
function validChunk(stream, state, chunk, cb) {
|
||||
var valid = true;
|
||||
var er = false;
|
||||
|
||||
if (chunk === null) {
|
||||
er = new TypeError('May not write null values to stream');
|
||||
} else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
|
||||
er = new TypeError('Invalid non-string/buffer chunk');
|
||||
}
|
||||
if (er) {
|
||||
stream.emit('error', er);
|
||||
pna.nextTick(cb, er);
|
||||
valid = false;
|
||||
}
|
||||
return valid;
|
||||
}
|
||||
|
||||
Writable.prototype.write = function (chunk, encoding, cb) {
|
||||
var state = this._writableState;
|
||||
var ret = false;
|
||||
var isBuf = !state.objectMode && _isUint8Array(chunk);
|
||||
|
||||
if (isBuf && !Buffer.isBuffer(chunk)) {
|
||||
chunk = _uint8ArrayToBuffer(chunk);
|
||||
}
|
||||
|
||||
if (typeof encoding === 'function') {
|
||||
cb = encoding;
|
||||
encoding = null;
|
||||
}
|
||||
|
||||
if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
|
||||
|
||||
if (typeof cb !== 'function') cb = nop;
|
||||
|
||||
if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
|
||||
state.pendingcb++;
|
||||
ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
|
||||
}
|
||||
|
||||
return ret;
|
||||
};
|
||||
|
||||
Writable.prototype.cork = function () {
|
||||
var state = this._writableState;
|
||||
|
||||
state.corked++;
|
||||
};
|
||||
|
||||
Writable.prototype.uncork = function () {
|
||||
var state = this._writableState;
|
||||
|
||||
if (state.corked) {
|
||||
state.corked--;
|
||||
|
||||
if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
|
||||
}
|
||||
};
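// Illustrative sketch (not part of the vendored module): cork() batches many
// small writes so a _writev-capable stream can flush them in a single call;
// the helper below is hypothetical.
function corkedWriteSketch(stream, rows) {
  stream.cork();
  for (var i = 0; i < rows.length; i++) {
    // while corked, these writes are queued instead of reaching _write/_writev
    stream.write(rows[i]);
  }
  // uncork on the next tick so every write issued in this tick coalesces
  process.nextTick(function () {
    stream.uncork();
  });
}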
|
||||
|
||||
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
|
||||
// node::ParseEncoding() requires lower case.
|
||||
if (typeof encoding === 'string') encoding = encoding.toLowerCase();
|
||||
if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);
|
||||
this._writableState.defaultEncoding = encoding;
|
||||
return this;
|
||||
};
|
||||
|
||||
function decodeChunk(state, chunk, encoding) {
|
||||
if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
|
||||
chunk = Buffer.from(chunk, encoding);
|
||||
}
|
||||
return chunk;
|
||||
}
|
||||
|
||||
Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function () {
|
||||
return this._writableState.highWaterMark;
|
||||
}
|
||||
});
|
||||
|
||||
// if we're already writing something, then just put this
|
||||
// in the queue, and wait our turn. Otherwise, call _write
|
||||
// If we return false, then we need a drain event, so set that flag.
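// Illustrative sketch (not part of the vendored module): a producer honoring
// that contract. When write() returns false it stops and waits for 'drain'
// before pushing more chunks; the helper name is hypothetical.
function writeAllSketch(stream, chunks, done) {
  var i = 0;
  (function next() {
    while (i < chunks.length) {
      if (!stream.write(chunks[i++])) {
        // the buffer reached highWaterMark; resume once it has drained
        stream.once('drain', next);
        return;
      }
    }
    done();
  })();
}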
|
||||
function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
|
||||
if (!isBuf) {
|
||||
var newChunk = decodeChunk(state, chunk, encoding);
|
||||
if (chunk !== newChunk) {
|
||||
isBuf = true;
|
||||
encoding = 'buffer';
|
||||
chunk = newChunk;
|
||||
}
|
||||
}
|
||||
var len = state.objectMode ? 1 : chunk.length;
|
||||
|
||||
state.length += len;
|
||||
|
||||
var ret = state.length < state.highWaterMark;
|
||||
// we must ensure that previous needDrain will not be reset to false.
|
||||
if (!ret) state.needDrain = true;
|
||||
|
||||
if (state.writing || state.corked) {
|
||||
var last = state.lastBufferedRequest;
|
||||
state.lastBufferedRequest = {
|
||||
chunk: chunk,
|
||||
encoding: encoding,
|
||||
isBuf: isBuf,
|
||||
callback: cb,
|
||||
next: null
|
||||
};
|
||||
if (last) {
|
||||
last.next = state.lastBufferedRequest;
|
||||
} else {
|
||||
state.bufferedRequest = state.lastBufferedRequest;
|
||||
}
|
||||
state.bufferedRequestCount += 1;
|
||||
} else {
|
||||
doWrite(stream, state, false, len, chunk, encoding, cb);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
|
||||
state.writelen = len;
|
||||
state.writecb = cb;
|
||||
state.writing = true;
|
||||
state.sync = true;
|
||||
if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
|
||||
state.sync = false;
|
||||
}
|
||||
|
||||
function onwriteError(stream, state, sync, er, cb) {
|
||||
--state.pendingcb;
|
||||
|
||||
if (sync) {
|
||||
// defer the callback if we are being called synchronously
|
||||
// to avoid piling up things on the stack
|
||||
pna.nextTick(cb, er);
|
||||
// this can emit finish, and it will always happen
|
||||
// after error
|
||||
pna.nextTick(finishMaybe, stream, state);
|
||||
stream._writableState.errorEmitted = true;
|
||||
stream.emit('error', er);
|
||||
} else {
|
||||
// if it is async, the caller expects the callback to be
// invoked before the error is emitted
|
||||
cb(er);
|
||||
stream._writableState.errorEmitted = true;
|
||||
stream.emit('error', er);
|
||||
// this can emit finish, but finish must
|
||||
// always follow error
|
||||
finishMaybe(stream, state);
|
||||
}
|
||||
}
|
||||
|
||||
function onwriteStateUpdate(state) {
|
||||
state.writing = false;
|
||||
state.writecb = null;
|
||||
state.length -= state.writelen;
|
||||
state.writelen = 0;
|
||||
}
|
||||
|
||||
function onwrite(stream, er) {
|
||||
var state = stream._writableState;
|
||||
var sync = state.sync;
|
||||
var cb = state.writecb;
|
||||
|
||||
onwriteStateUpdate(state);
|
||||
|
||||
if (er) onwriteError(stream, state, sync, er, cb);else {
|
||||
// Check if we're actually ready to finish, but don't emit yet
|
||||
var finished = needFinish(state);
|
||||
|
||||
if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
|
||||
clearBuffer(stream, state);
|
||||
}
|
||||
|
||||
if (sync) {
|
||||
/*<replacement>*/
|
||||
asyncWrite(afterWrite, stream, state, finished, cb);
|
||||
/*</replacement>*/
|
||||
} else {
|
||||
afterWrite(stream, state, finished, cb);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function afterWrite(stream, state, finished, cb) {
|
||||
if (!finished) onwriteDrain(stream, state);
|
||||
state.pendingcb--;
|
||||
cb();
|
||||
finishMaybe(stream, state);
|
||||
}
|
||||
|
||||
// Must force callback to be called on nextTick, so that we don't
|
||||
// emit 'drain' before the write() consumer gets the 'false' return
|
||||
// value, and has a chance to attach a 'drain' listener.
|
||||
function onwriteDrain(stream, state) {
|
||||
if (state.length === 0 && state.needDrain) {
|
||||
state.needDrain = false;
|
||||
stream.emit('drain');
|
||||
}
|
||||
}
|
||||
|
||||
// if there's something in the buffer waiting, then process it
|
||||
function clearBuffer(stream, state) {
|
||||
state.bufferProcessing = true;
|
||||
var entry = state.bufferedRequest;
|
||||
|
||||
if (stream._writev && entry && entry.next) {
|
||||
// Fast case, write everything using _writev()
|
||||
var l = state.bufferedRequestCount;
|
||||
var buffer = new Array(l);
|
||||
var holder = state.corkedRequestsFree;
|
||||
holder.entry = entry;
|
||||
|
||||
var count = 0;
|
||||
var allBuffers = true;
|
||||
while (entry) {
|
||||
buffer[count] = entry;
|
||||
if (!entry.isBuf) allBuffers = false;
|
||||
entry = entry.next;
|
||||
count += 1;
|
||||
}
|
||||
buffer.allBuffers = allBuffers;
|
||||
|
||||
doWrite(stream, state, true, state.length, buffer, '', holder.finish);
|
||||
|
||||
// doWrite is almost always async, defer these to save a bit of time
|
||||
// as the hot path ends with doWrite
|
||||
state.pendingcb++;
|
||||
state.lastBufferedRequest = null;
|
||||
if (holder.next) {
|
||||
state.corkedRequestsFree = holder.next;
|
||||
holder.next = null;
|
||||
} else {
|
||||
state.corkedRequestsFree = new CorkedRequest(state);
|
||||
}
|
||||
state.bufferedRequestCount = 0;
|
||||
} else {
|
||||
// Slow case, write chunks one-by-one
|
||||
while (entry) {
|
||||
var chunk = entry.chunk;
|
||||
var encoding = entry.encoding;
|
||||
var cb = entry.callback;
|
||||
var len = state.objectMode ? 1 : chunk.length;
|
||||
|
||||
doWrite(stream, state, false, len, chunk, encoding, cb);
|
||||
entry = entry.next;
|
||||
state.bufferedRequestCount--;
|
||||
// if we didn't call the onwrite immediately, then
|
||||
// it means that we need to wait until it does.
|
||||
// also, that means that the chunk and cb are currently
|
||||
// being processed, so move the buffer counter past them.
|
||||
if (state.writing) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (entry === null) state.lastBufferedRequest = null;
|
||||
}
|
||||
|
||||
state.bufferedRequest = entry;
|
||||
state.bufferProcessing = false;
|
||||
}
|
||||
|
||||
Writable.prototype._write = function (chunk, encoding, cb) {
|
||||
cb(new Error('_write() is not implemented'));
|
||||
};
|
||||
|
||||
Writable.prototype._writev = null;
|
||||
|
||||
Writable.prototype.end = function (chunk, encoding, cb) {
|
||||
var state = this._writableState;
|
||||
|
||||
if (typeof chunk === 'function') {
|
||||
cb = chunk;
|
||||
chunk = null;
|
||||
encoding = null;
|
||||
} else if (typeof encoding === 'function') {
|
||||
cb = encoding;
|
||||
encoding = null;
|
||||
}
|
||||
|
||||
if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
|
||||
|
||||
// .end() fully uncorks
|
||||
if (state.corked) {
|
||||
state.corked = 1;
|
||||
this.uncork();
|
||||
}
|
||||
|
||||
// ignore unnecessary end() calls.
|
||||
if (!state.ending && !state.finished) endWritable(this, state, cb);
|
||||
};
|
||||
|
||||
function needFinish(state) {
|
||||
return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
|
||||
}
|
||||
function callFinal(stream, state) {
|
||||
stream._final(function (err) {
|
||||
state.pendingcb--;
|
||||
if (err) {
|
||||
stream.emit('error', err);
|
||||
}
|
||||
state.prefinished = true;
|
||||
stream.emit('prefinish');
|
||||
finishMaybe(stream, state);
|
||||
});
|
||||
}
|
||||
function prefinish(stream, state) {
|
||||
if (!state.prefinished && !state.finalCalled) {
|
||||
if (typeof stream._final === 'function') {
|
||||
state.pendingcb++;
|
||||
state.finalCalled = true;
|
||||
pna.nextTick(callFinal, stream, state);
|
||||
} else {
|
||||
state.prefinished = true;
|
||||
stream.emit('prefinish');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function finishMaybe(stream, state) {
|
||||
var need = needFinish(state);
|
||||
if (need) {
|
||||
prefinish(stream, state);
|
||||
if (state.pendingcb === 0) {
|
||||
state.finished = true;
|
||||
stream.emit('finish');
|
||||
}
|
||||
}
|
||||
return need;
|
||||
}
|
||||
|
||||
function endWritable(stream, state, cb) {
|
||||
state.ending = true;
|
||||
finishMaybe(stream, state);
|
||||
if (cb) {
|
||||
if (state.finished) pna.nextTick(cb);else stream.once('finish', cb);
|
||||
}
|
||||
state.ended = true;
|
||||
stream.writable = false;
|
||||
}
|
||||
|
||||
function onCorkedFinish(corkReq, state, err) {
|
||||
var entry = corkReq.entry;
|
||||
corkReq.entry = null;
|
||||
while (entry) {
|
||||
var cb = entry.callback;
|
||||
state.pendingcb--;
|
||||
cb(err);
|
||||
entry = entry.next;
|
||||
}
|
||||
if (state.corkedRequestsFree) {
|
||||
state.corkedRequestsFree.next = corkReq;
|
||||
} else {
|
||||
state.corkedRequestsFree = corkReq;
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperty(Writable.prototype, 'destroyed', {
|
||||
get: function () {
|
||||
if (this._writableState === undefined) {
|
||||
return false;
|
||||
}
|
||||
return this._writableState.destroyed;
|
||||
},
|
||||
set: function (value) {
|
||||
// we ignore the value if the stream
|
||||
// has not been initialized yet
|
||||
if (!this._writableState) {
|
||||
return;
|
||||
}
|
||||
|
||||
// backward compatibility, the user is explicitly
|
||||
// managing destroyed
|
||||
this._writableState.destroyed = value;
|
||||
}
|
||||
});
|
||||
|
||||
Writable.prototype.destroy = destroyImpl.destroy;
|
||||
Writable.prototype._undestroy = destroyImpl.undestroy;
|
||||
Writable.prototype._destroy = function (err, cb) {
|
||||
this.end();
|
||||
cb(err);
|
||||
};
|
79
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/internal/streams/BufferList.js
generated
vendored
Normal file
79
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/internal/streams/BufferList.js
generated
vendored
Normal file
@ -0,0 +1,79 @@
|
||||
'use strict';
|
||||
|
||||
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
|
||||
|
||||
var Buffer = require('safe-buffer').Buffer;
|
||||
var util = require('util');
|
||||
|
||||
function copyBuffer(src, target, offset) {
|
||||
src.copy(target, offset);
|
||||
}
|
||||
|
||||
module.exports = function () {
|
||||
function BufferList() {
|
||||
_classCallCheck(this, BufferList);
|
||||
|
||||
this.head = null;
|
||||
this.tail = null;
|
||||
this.length = 0;
|
||||
}
|
||||
|
||||
BufferList.prototype.push = function push(v) {
|
||||
var entry = { data: v, next: null };
|
||||
if (this.length > 0) this.tail.next = entry;else this.head = entry;
|
||||
this.tail = entry;
|
||||
++this.length;
|
||||
};
|
||||
|
||||
BufferList.prototype.unshift = function unshift(v) {
|
||||
var entry = { data: v, next: this.head };
|
||||
if (this.length === 0) this.tail = entry;
|
||||
this.head = entry;
|
||||
++this.length;
|
||||
};
|
||||
|
||||
BufferList.prototype.shift = function shift() {
|
||||
if (this.length === 0) return;
|
||||
var ret = this.head.data;
|
||||
if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
|
||||
--this.length;
|
||||
return ret;
|
||||
};
|
||||
|
||||
BufferList.prototype.clear = function clear() {
|
||||
this.head = this.tail = null;
|
||||
this.length = 0;
|
||||
};
|
||||
|
||||
BufferList.prototype.join = function join(s) {
|
||||
if (this.length === 0) return '';
|
||||
var p = this.head;
|
||||
var ret = '' + p.data;
|
||||
while (p = p.next) {
ret += s + p.data;
}
return ret;
|
||||
};
|
||||
|
||||
BufferList.prototype.concat = function concat(n) {
|
||||
if (this.length === 0) return Buffer.alloc(0);
|
||||
if (this.length === 1) return this.head.data;
|
||||
var ret = Buffer.allocUnsafe(n >>> 0);
|
||||
var p = this.head;
|
||||
var i = 0;
|
||||
while (p) {
|
||||
copyBuffer(p.data, ret, i);
|
||||
i += p.data.length;
|
||||
p = p.next;
|
||||
}
|
||||
return ret;
|
||||
};
|
||||
|
||||
return BufferList;
|
||||
}();
|
||||
|
||||
if (util && util.inspect && util.inspect.custom) {
|
||||
module.exports.prototype[util.inspect.custom] = function () {
|
||||
var obj = util.inspect({ length: this.length });
|
||||
return this.constructor.name + ' ' + obj;
|
||||
};
|
||||
}
|
74
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/internal/streams/destroy.js
generated
vendored
Normal file
74
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/internal/streams/destroy.js
generated
vendored
Normal file
@ -0,0 +1,74 @@
|
||||
'use strict';
|
||||
|
||||
/*<replacement>*/
|
||||
|
||||
var pna = require('process-nextick-args');
|
||||
/*</replacement>*/
|
||||
|
||||
// undocumented cb() API, needed for core, not for public API
|
||||
function destroy(err, cb) {
|
||||
var _this = this;
|
||||
|
||||
var readableDestroyed = this._readableState && this._readableState.destroyed;
|
||||
var writableDestroyed = this._writableState && this._writableState.destroyed;
|
||||
|
||||
if (readableDestroyed || writableDestroyed) {
|
||||
if (cb) {
|
||||
cb(err);
|
||||
} else if (err && (!this._writableState || !this._writableState.errorEmitted)) {
|
||||
pna.nextTick(emitErrorNT, this, err);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
// we set destroyed to true before firing error callbacks in order
|
||||
// to make it re-entrance safe in case destroy() is called within callbacks
|
||||
|
||||
if (this._readableState) {
|
||||
this._readableState.destroyed = true;
|
||||
}
|
||||
|
||||
// if this is a duplex stream mark the writable part as destroyed as well
|
||||
if (this._writableState) {
|
||||
this._writableState.destroyed = true;
|
||||
}
|
||||
|
||||
this._destroy(err || null, function (err) {
|
||||
if (!cb && err) {
|
||||
pna.nextTick(emitErrorNT, _this, err);
|
||||
if (_this._writableState) {
|
||||
_this._writableState.errorEmitted = true;
|
||||
}
|
||||
} else if (cb) {
|
||||
cb(err);
|
||||
}
|
||||
});
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
function undestroy() {
|
||||
if (this._readableState) {
|
||||
this._readableState.destroyed = false;
|
||||
this._readableState.reading = false;
|
||||
this._readableState.ended = false;
|
||||
this._readableState.endEmitted = false;
|
||||
}
|
||||
|
||||
if (this._writableState) {
|
||||
this._writableState.destroyed = false;
|
||||
this._writableState.ended = false;
|
||||
this._writableState.ending = false;
|
||||
this._writableState.finished = false;
|
||||
this._writableState.errorEmitted = false;
|
||||
}
|
||||
}
|
||||
|
||||
function emitErrorNT(self, err) {
|
||||
self.emit('error', err);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
destroy: destroy,
|
||||
undestroy: undestroy
|
||||
};
|
1
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/internal/streams/stream-browser.js
generated
vendored
Normal file
1
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/internal/streams/stream-browser.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
module.exports = require('events').EventEmitter;
|
1
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/internal/streams/stream.js
generated
vendored
Normal file
1
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/lib/internal/streams/stream.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
module.exports = require('stream');
|
81
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/package.json
generated
vendored
Normal file
81
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/package.json
generated
vendored
Normal file
@ -0,0 +1,81 @@
|
||||
{
|
||||
"_from": "readable-stream@~2.3.6",
|
||||
"_id": "readable-stream@2.3.6",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==",
|
||||
"_location": "/@google-cloud/storage/readable-stream",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "range",
|
||||
"registry": true,
|
||||
"raw": "readable-stream@~2.3.6",
|
||||
"name": "readable-stream",
|
||||
"escapedName": "readable-stream",
|
||||
"rawSpec": "~2.3.6",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "~2.3.6"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/@google-cloud/storage/through2"
|
||||
],
|
||||
"_resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz",
|
||||
"_shasum": "b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf",
|
||||
"_spec": "readable-stream@~2.3.6",
|
||||
"_where": "D:\\Desktop\\Git\\Firebase\\SmartShopperFirebase\\node_modules\\@google-cloud\\storage\\node_modules\\through2",
|
||||
"browser": {
|
||||
"util": false,
|
||||
"./readable.js": "./readable-browser.js",
|
||||
"./writable.js": "./writable-browser.js",
|
||||
"./duplex.js": "./duplex-browser.js",
|
||||
"./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/nodejs/readable-stream/issues"
|
||||
},
|
||||
"bundleDependencies": false,
|
||||
"dependencies": {
|
||||
"core-util-is": "~1.0.0",
|
||||
"inherits": "~2.0.3",
|
||||
"isarray": "~1.0.0",
|
||||
"process-nextick-args": "~2.0.0",
|
||||
"safe-buffer": "~5.1.1",
|
||||
"string_decoder": "~1.1.1",
|
||||
"util-deprecate": "~1.0.1"
|
||||
},
|
||||
"deprecated": false,
|
||||
"description": "Streams3, a user-land copy of the stream library from Node.js",
|
||||
"devDependencies": {
|
||||
"assert": "^1.4.0",
|
||||
"babel-polyfill": "^6.9.1",
|
||||
"buffer": "^4.9.0",
|
||||
"lolex": "^2.3.2",
|
||||
"nyc": "^6.4.0",
|
||||
"tap": "^0.7.0",
|
||||
"tape": "^4.8.0"
|
||||
},
|
||||
"homepage": "https://github.com/nodejs/readable-stream#readme",
|
||||
"keywords": [
|
||||
"readable",
|
||||
"stream",
|
||||
"pipe"
|
||||
],
|
||||
"license": "MIT",
|
||||
"main": "readable.js",
|
||||
"name": "readable-stream",
|
||||
"nyc": {
|
||||
"include": [
|
||||
"lib/**.js"
|
||||
]
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/nodejs/readable-stream.git"
|
||||
},
|
||||
"scripts": {
|
||||
"ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js",
|
||||
"cover": "nyc npm test",
|
||||
"report": "nyc report --reporter=lcov",
|
||||
"test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js"
|
||||
},
|
||||
"version": "2.3.6"
|
||||
}
|
1
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/passthrough.js
generated
vendored
Normal file
1
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/passthrough.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
module.exports = require('./readable').PassThrough
|
7
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/readable-browser.js
generated
vendored
Normal file
7
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/readable-browser.js
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
exports = module.exports = require('./lib/_stream_readable.js');
|
||||
exports.Stream = exports;
|
||||
exports.Readable = exports;
|
||||
exports.Writable = require('./lib/_stream_writable.js');
|
||||
exports.Duplex = require('./lib/_stream_duplex.js');
|
||||
exports.Transform = require('./lib/_stream_transform.js');
|
||||
exports.PassThrough = require('./lib/_stream_passthrough.js');
|
19
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/readable.js
generated
vendored
Normal file
19
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/readable.js
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
var Stream = require('stream');
|
||||
if (process.env.READABLE_STREAM === 'disable' && Stream) {
|
||||
module.exports = Stream;
|
||||
exports = module.exports = Stream.Readable;
|
||||
exports.Readable = Stream.Readable;
|
||||
exports.Writable = Stream.Writable;
|
||||
exports.Duplex = Stream.Duplex;
|
||||
exports.Transform = Stream.Transform;
|
||||
exports.PassThrough = Stream.PassThrough;
|
||||
exports.Stream = Stream;
|
||||
} else {
|
||||
exports = module.exports = require('./lib/_stream_readable.js');
|
||||
exports.Stream = Stream || exports;
|
||||
exports.Readable = exports;
|
||||
exports.Writable = require('./lib/_stream_writable.js');
|
||||
exports.Duplex = require('./lib/_stream_duplex.js');
|
||||
exports.Transform = require('./lib/_stream_transform.js');
|
||||
exports.PassThrough = require('./lib/_stream_passthrough.js');
|
||||
}
|
1
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/transform.js
generated
vendored
Normal file
1
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/transform.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
module.exports = require('./readable').Transform
|
1
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/writable-browser.js
generated
vendored
Normal file
1
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/writable-browser.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
module.exports = require('./lib/_stream_writable.js');
|
8
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/writable.js
generated
vendored
Normal file
8
express-server/node_modules/@google-cloud/storage/node_modules/readable-stream/writable.js
generated
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
var Stream = require("stream")
|
||||
var Writable = require("./lib/_stream_writable.js")
|
||||
|
||||
if (process.env.READABLE_STREAM === 'disable') {
|
||||
module.exports = Stream && Stream.Writable || Writable
|
||||
} else {
|
||||
module.exports = Writable
|
||||
}
|
50
express-server/node_modules/@google-cloud/storage/node_modules/string_decoder/.travis.yml
generated
vendored
Normal file
50
express-server/node_modules/@google-cloud/storage/node_modules/string_decoder/.travis.yml
generated
vendored
Normal file
@ -0,0 +1,50 @@
|
||||
sudo: false
|
||||
language: node_js
|
||||
before_install:
|
||||
- npm install -g npm@2
|
||||
- test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g
|
||||
notifications:
|
||||
email: false
|
||||
matrix:
|
||||
fast_finish: true
|
||||
include:
|
||||
- node_js: '0.8'
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: '0.10'
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: '0.11'
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: '0.12'
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: 1
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: 2
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: 3
|
||||
env:
|
||||
- TASK=test
|
||||
- NPM_LEGACY=true
|
||||
- node_js: 4
|
||||
env: TASK=test
|
||||
- node_js: 5
|
||||
env: TASK=test
|
||||
- node_js: 6
|
||||
env: TASK=test
|
||||
- node_js: 7
|
||||
env: TASK=test
|
||||
- node_js: 8
|
||||
env: TASK=test
|
||||
- node_js: 9
|
||||
env: TASK=test
|
48
express-server/node_modules/@google-cloud/storage/node_modules/string_decoder/LICENSE
generated
vendored
Normal file
48
express-server/node_modules/@google-cloud/storage/node_modules/string_decoder/LICENSE
generated
vendored
Normal file
@ -0,0 +1,48 @@
|
||||
Node.js is licensed for use as follows:
|
||||
|
||||
"""
|
||||
Copyright Node.js contributors. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
"""
|
||||
|
||||
This license applies to parts of Node.js originating from the
|
||||
https://github.com/joyent/node repository:
|
||||
|
||||
"""
|
||||
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
"""
|
||||
|
47
express-server/node_modules/@google-cloud/storage/node_modules/string_decoder/README.md
generated
vendored
Normal file
47
express-server/node_modules/@google-cloud/storage/node_modules/string_decoder/README.md
generated
vendored
Normal file
@ -0,0 +1,47 @@
|
||||
# string_decoder
|
||||
|
||||
***Node-core v8.9.4 string_decoder for userland***
|
||||
|
||||
|
||||
[npm: string_decoder](https://nodei.co/npm/string_decoder/)
|
||||
|
||||
|
||||
```bash
|
||||
npm install --save string_decoder
|
||||
```
|
||||
|
||||
***Node-core string_decoder for userland***
|
||||
|
||||
This package is a mirror of the string_decoder implementation in Node-core.
|
||||
|
||||
Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/).
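
A minimal usage sketch (the byte values are the UTF-8 encoding of `€`, shown here for illustration):

```js
var StringDecoder = require('string_decoder').StringDecoder;

var decoder = new StringDecoder('utf8');
// '€' is three bytes (0xE2 0x82 0xAC); split across two writes, the partial
// character is buffered until it is complete.
console.log(decoder.write(Buffer.from([0xE2, 0x82]))); // ''
console.log(decoder.write(Buffer.from([0xAC])));       // '€'
console.log(decoder.end());                            // ''
```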
|
||||
|
||||
As of version 1.0.0 **string_decoder** uses semantic versioning.
|
||||
|
||||
## Previous versions
|
||||
|
||||
Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10.
|
||||
|
||||
## Update
|
||||
|
||||
The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version.
|
||||
|
||||
## Streams Working Group
|
||||
|
||||
`string_decoder` is maintained by the Streams Working Group, which
|
||||
oversees the development and maintenance of the Streams API within
|
||||
Node.js. The responsibilities of the Streams Working Group include:
|
||||
|
||||
* Addressing stream issues on the Node.js issue tracker.
|
||||
* Authoring and editing stream documentation within the Node.js project.
|
||||
* Reviewing changes to stream subclasses within the Node.js project.
|
||||
* Redirecting changes to streams from the Node.js project to this
|
||||
project.
|
||||
* Assisting in the implementation of stream providers within Node.js.
|
||||
* Recommending versions of `readable-stream` to be included in Node.js.
|
||||
* Messaging about the future of streams to give the community advance
|
||||
notice of changes.
|
||||
|
||||
See [readable-stream](https://github.com/nodejs/readable-stream) for
|
||||
more details.
|
296
express-server/node_modules/@google-cloud/storage/node_modules/string_decoder/lib/string_decoder.js
generated
vendored
Normal file
296
express-server/node_modules/@google-cloud/storage/node_modules/string_decoder/lib/string_decoder.js
generated
vendored
Normal file
@ -0,0 +1,296 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
'use strict';
|
||||
|
||||
/*<replacement>*/
|
||||
|
||||
var Buffer = require('safe-buffer').Buffer;
|
||||
/*</replacement>*/
|
||||
|
||||
var isEncoding = Buffer.isEncoding || function (encoding) {
|
||||
encoding = '' + encoding;
|
||||
switch (encoding && encoding.toLowerCase()) {
|
||||
case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
function _normalizeEncoding(enc) {
|
||||
if (!enc) return 'utf8';
|
||||
var retried;
|
||||
while (true) {
|
||||
switch (enc) {
|
||||
case 'utf8':
|
||||
case 'utf-8':
|
||||
return 'utf8';
|
||||
case 'ucs2':
|
||||
case 'ucs-2':
|
||||
case 'utf16le':
|
||||
case 'utf-16le':
|
||||
return 'utf16le';
|
||||
case 'latin1':
|
||||
case 'binary':
|
||||
return 'latin1';
|
||||
case 'base64':
|
||||
case 'ascii':
|
||||
case 'hex':
|
||||
return enc;
|
||||
default:
|
||||
if (retried) return; // undefined
|
||||
enc = ('' + enc).toLowerCase();
|
||||
retried = true;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Do not cache `Buffer.isEncoding` when checking encoding names as some
|
||||
// modules monkey-patch it to support additional encodings
|
||||
function normalizeEncoding(enc) {
|
||||
var nenc = _normalizeEncoding(enc);
|
||||
if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);
|
||||
return nenc || enc;
|
||||
}
|
||||
|
||||
// StringDecoder provides an interface for efficiently splitting a series of
|
||||
// buffers into a series of JS strings without breaking apart multi-byte
|
||||
// characters.
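// For example (illustrative, not part of this module), the three UTF-8 bytes
// of '\u20ac' (0xE2 0x82 0xAC) split across two writes are buffered until the
// character is complete:
//
//   var d = new StringDecoder('utf8');
//   d.write(Buffer.from([0xE2, 0x82]));  // returns ''
//   d.write(Buffer.from([0xAC]));        // returns '\u20ac'
//   d.end();                             // returns ''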
|
||||
exports.StringDecoder = StringDecoder;
|
||||
function StringDecoder(encoding) {
|
||||
this.encoding = normalizeEncoding(encoding);
|
||||
var nb;
|
||||
switch (this.encoding) {
|
||||
case 'utf16le':
|
||||
this.text = utf16Text;
|
||||
this.end = utf16End;
|
||||
nb = 4;
|
||||
break;
|
||||
case 'utf8':
|
||||
this.fillLast = utf8FillLast;
|
||||
nb = 4;
|
||||
break;
|
||||
case 'base64':
|
||||
this.text = base64Text;
|
||||
this.end = base64End;
|
||||
nb = 3;
|
||||
break;
|
||||
default:
|
||||
this.write = simpleWrite;
|
||||
this.end = simpleEnd;
|
||||
return;
|
||||
}
|
||||
this.lastNeed = 0;
|
||||
this.lastTotal = 0;
|
||||
this.lastChar = Buffer.allocUnsafe(nb);
|
||||
}
|
||||
|
||||
StringDecoder.prototype.write = function (buf) {
|
||||
if (buf.length === 0) return '';
|
||||
var r;
|
||||
var i;
|
||||
if (this.lastNeed) {
|
||||
r = this.fillLast(buf);
|
||||
if (r === undefined) return '';
|
||||
i = this.lastNeed;
|
||||
this.lastNeed = 0;
|
||||
} else {
|
||||
i = 0;
|
||||
}
|
||||
if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);
|
||||
return r || '';
|
||||
};
|
||||
|
||||
StringDecoder.prototype.end = utf8End;
|
||||
|
||||
// Returns only complete characters in a Buffer
|
||||
StringDecoder.prototype.text = utf8Text;
|
||||
|
||||
// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
|
||||
StringDecoder.prototype.fillLast = function (buf) {
|
||||
if (this.lastNeed <= buf.length) {
|
||||
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
|
||||
return this.lastChar.toString(this.encoding, 0, this.lastTotal);
|
||||
}
|
||||
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
|
||||
this.lastNeed -= buf.length;
|
||||
};
|
||||
|
||||
// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
|
||||
// continuation byte. If an invalid byte is detected, -2 is returned.
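// Return value by leading bit pattern, matching the checks below:
//   0xxxxxxx -> 0   (single-byte / ASCII)
//   110xxxxx -> 2   (start of a 2-byte sequence)
//   1110xxxx -> 3   (start of a 3-byte sequence)
//   11110xxx -> 4   (start of a 4-byte sequence)
//   10xxxxxx -> -1  (continuation byte)
//   11111xxx -> -2  (invalid)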
|
||||
function utf8CheckByte(byte) {
|
||||
if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;
|
||||
return byte >> 6 === 0x02 ? -1 : -2;
|
||||
}
|
||||
|
||||
// Checks at most 3 bytes at the end of a Buffer in order to detect an
|
||||
// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
|
||||
// needed to complete the UTF-8 character (if applicable) are returned.
|
||||
function utf8CheckIncomplete(self, buf, i) {
|
||||
var j = buf.length - 1;
|
||||
if (j < i) return 0;
|
||||
var nb = utf8CheckByte(buf[j]);
|
||||
if (nb >= 0) {
|
||||
if (nb > 0) self.lastNeed = nb - 1;
|
||||
return nb;
|
||||
}
|
||||
if (--j < i || nb === -2) return 0;
|
||||
nb = utf8CheckByte(buf[j]);
|
||||
if (nb >= 0) {
|
||||
if (nb > 0) self.lastNeed = nb - 2;
|
||||
return nb;
|
||||
}
|
||||
if (--j < i || nb === -2) return 0;
|
||||
nb = utf8CheckByte(buf[j]);
|
||||
if (nb >= 0) {
|
||||
if (nb > 0) {
|
||||
if (nb === 2) nb = 0;else self.lastNeed = nb - 3;
|
||||
}
|
||||
return nb;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Validates as many continuation bytes for a multi-byte UTF-8 character as
|
||||
// needed or are available. If we see a non-continuation byte where we expect
|
||||
// one, we "replace" the validated continuation bytes we've seen so far with
|
||||
// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding
|
||||
// behavior. The continuation byte check is included three times in the case
|
||||
// where all of the continuation bytes for a character exist in the same buffer.
|
||||
// It is also done this way as a slight performance increase instead of using a
|
||||
// loop.
|
||||
function utf8CheckExtraBytes(self, buf, p) {
|
||||
if ((buf[0] & 0xC0) !== 0x80) {
|
||||
self.lastNeed = 0;
|
||||
return '\ufffd';
|
||||
}
|
||||
if (self.lastNeed > 1 && buf.length > 1) {
|
||||
if ((buf[1] & 0xC0) !== 0x80) {
|
||||
self.lastNeed = 1;
|
||||
return '\ufffd';
|
||||
}
|
||||
if (self.lastNeed > 2 && buf.length > 2) {
|
||||
if ((buf[2] & 0xC0) !== 0x80) {
|
||||
self.lastNeed = 2;
|
||||
return '\ufffd';
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
|
||||
function utf8FillLast(buf) {
|
||||
var p = this.lastTotal - this.lastNeed;
|
||||
var r = utf8CheckExtraBytes(this, buf, p);
|
||||
if (r !== undefined) return r;
|
||||
if (this.lastNeed <= buf.length) {
|
||||
buf.copy(this.lastChar, p, 0, this.lastNeed);
|
||||
return this.lastChar.toString(this.encoding, 0, this.lastTotal);
|
||||
}
|
||||
buf.copy(this.lastChar, p, 0, buf.length);
|
||||
this.lastNeed -= buf.length;
|
||||
}
|
||||
|
||||
// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
|
||||
// partial character, the character's bytes are buffered until the required
|
||||
// number of bytes are available.
|
||||
function utf8Text(buf, i) {
|
||||
var total = utf8CheckIncomplete(this, buf, i);
|
||||
if (!this.lastNeed) return buf.toString('utf8', i);
|
||||
this.lastTotal = total;
|
||||
var end = buf.length - (total - this.lastNeed);
|
||||
buf.copy(this.lastChar, 0, end);
|
||||
return buf.toString('utf8', i, end);
|
||||
}
|
||||
|
||||
// For UTF-8, a replacement character is added when ending on a partial
|
||||
// character.
|
||||
function utf8End(buf) {
|
||||
var r = buf && buf.length ? this.write(buf) : '';
|
||||
if (this.lastNeed) return r + '\ufffd';
|
||||
return r;
|
||||
}
|
||||
|
||||
// UTF-16LE typically needs two bytes per character, but even if we have an even
|
||||
// number of bytes available, we need to check if we end on a leading/high
|
||||
// surrogate. In that case, we need to wait for the next two bytes in order to
|
||||
// decode the last character properly.
|
||||
function utf16Text(buf, i) {
|
||||
if ((buf.length - i) % 2 === 0) {
|
||||
var r = buf.toString('utf16le', i);
|
||||
if (r) {
|
||||
var c = r.charCodeAt(r.length - 1);
|
||||
if (c >= 0xD800 && c <= 0xDBFF) {
|
||||
this.lastNeed = 2;
|
||||
this.lastTotal = 4;
|
||||
this.lastChar[0] = buf[buf.length - 2];
|
||||
this.lastChar[1] = buf[buf.length - 1];
|
||||
return r.slice(0, -1);
|
||||
}
|
||||
}
|
||||
return r;
|
||||
}
|
||||
this.lastNeed = 1;
|
||||
this.lastTotal = 2;
|
||||
this.lastChar[0] = buf[buf.length - 1];
|
||||
return buf.toString('utf16le', i, buf.length - 1);
|
||||
}
|
||||
|
||||
// For UTF-16LE we do not explicitly append special replacement characters if we
|
||||
// end on a partial character; we simply let V8 handle that.
|
||||
function utf16End(buf) {
|
||||
var r = buf && buf.length ? this.write(buf) : '';
|
||||
if (this.lastNeed) {
|
||||
var end = this.lastTotal - this.lastNeed;
|
||||
return r + this.lastChar.toString('utf16le', 0, end);
|
||||
}
|
||||
return r;
|
||||
}
|
||||
|
||||
function base64Text(buf, i) {
|
||||
var n = (buf.length - i) % 3;
|
||||
if (n === 0) return buf.toString('base64', i);
|
||||
this.lastNeed = 3 - n;
|
||||
this.lastTotal = 3;
|
||||
if (n === 1) {
|
||||
this.lastChar[0] = buf[buf.length - 1];
|
||||
} else {
|
||||
this.lastChar[0] = buf[buf.length - 2];
|
||||
this.lastChar[1] = buf[buf.length - 1];
|
||||
}
|
||||
return buf.toString('base64', i, buf.length - n);
|
||||
}
|
||||
|
||||
function base64End(buf) {
|
||||
var r = buf && buf.length ? this.write(buf) : '';
|
||||
if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
|
||||
return r;
|
||||
}
|
||||
|
||||
// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
|
||||
function simpleWrite(buf) {
|
||||
return buf.toString(this.encoding);
|
||||
}
|
||||
|
||||
function simpleEnd(buf) {
|
||||
return buf && buf.length ? this.write(buf) : '';
|
||||
}
|
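A quick usage sketch (not part of the vendored file) of the buffering behaviour implemented above: multi-byte UTF-8 sequences and UTF-16LE surrogate pairs that are split across chunks are held back until they can be decoded whole, and `end()` falls back to the replacement character when the stream stops mid-character.

```js
const { StringDecoder } = require('string_decoder');

// '€' is the three-byte UTF-8 sequence 0xE2 0x82 0xAC.
const utf8 = new StringDecoder('utf8');
console.log(utf8.write(Buffer.from([0xE2])));              // '' (byte buffered)
console.log(utf8.write(Buffer.from([0x82])));              // '' (still buffered)
console.log(utf8.write(Buffer.from([0xAC])));              // '€'

// Ending mid-character yields U+FFFD, per utf8End above.
console.log(new StringDecoder('utf8').end(Buffer.from([0xE2, 0x82])));  // '�'

// UTF-16LE: a chunk ending on a high surrogate is buffered too (see utf16Text).
const utf16 = new StringDecoder('utf16le');
console.log(utf16.write(Buffer.from([0x3D, 0xD8])));       // '' (high surrogate held)
console.log(utf16.write(Buffer.from([0x00, 0xDE])));       // '😀'
```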
59
express-server/node_modules/@google-cloud/storage/node_modules/string_decoder/package.json
generated
vendored
Normal file
59
express-server/node_modules/@google-cloud/storage/node_modules/string_decoder/package.json
generated
vendored
Normal file
@ -0,0 +1,59 @@
|
||||
{
|
||||
"_from": "string_decoder@~1.1.1",
|
||||
"_id": "string_decoder@1.1.1",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
|
||||
"_location": "/@google-cloud/storage/string_decoder",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "range",
|
||||
"registry": true,
|
||||
"raw": "string_decoder@~1.1.1",
|
||||
"name": "string_decoder",
|
||||
"escapedName": "string_decoder",
|
||||
"rawSpec": "~1.1.1",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "~1.1.1"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/@google-cloud/storage/readable-stream"
|
||||
],
|
||||
"_resolved": "http://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
|
||||
"_shasum": "9cf1611ba62685d7030ae9e4ba34149c3af03fc8",
|
||||
"_spec": "string_decoder@~1.1.1",
|
||||
"_where": "D:\\Desktop\\Git\\Firebase\\SmartShopperFirebase\\node_modules\\@google-cloud\\storage\\node_modules\\readable-stream",
|
||||
"bugs": {
|
||||
"url": "https://github.com/nodejs/string_decoder/issues"
|
||||
},
|
||||
"bundleDependencies": false,
|
||||
"dependencies": {
|
||||
"safe-buffer": "~5.1.0"
|
||||
},
|
||||
"deprecated": false,
|
||||
"description": "The string_decoder module from Node core",
|
||||
"devDependencies": {
|
||||
"babel-polyfill": "^6.23.0",
|
||||
"core-util-is": "^1.0.2",
|
||||
"inherits": "^2.0.3",
|
||||
"tap": "~0.4.8"
|
||||
},
|
||||
"homepage": "https://github.com/nodejs/string_decoder",
|
||||
"keywords": [
|
||||
"string",
|
||||
"decoder",
|
||||
"browser",
|
||||
"browserify"
|
||||
],
|
||||
"license": "MIT",
|
||||
"main": "lib/string_decoder.js",
|
||||
"name": "string_decoder",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/nodejs/string_decoder.git"
|
||||
},
|
||||
"scripts": {
|
||||
"ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js",
|
||||
"test": "tap test/parallel/*.js && node test/verify-dependencies"
|
||||
},
|
||||
"version": "1.1.1"
|
||||
}
|
9
express-server/node_modules/@google-cloud/storage/node_modules/through2/LICENSE.md
generated
vendored
Normal file
9
express-server/node_modules/@google-cloud/storage/node_modules/through2/LICENSE.md
generated
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
# The MIT License (MIT)
|
||||
|
||||
**Copyright (c) Rod Vagg (the "Original Author") and additional contributors**
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
134
express-server/node_modules/@google-cloud/storage/node_modules/through2/README.md
generated
vendored
Normal file
134
express-server/node_modules/@google-cloud/storage/node_modules/through2/README.md
generated
vendored
Normal file
@ -0,0 +1,134 @@
|
||||
# through2
|
||||
|
||||
[NPM](https://nodei.co/npm/through2/)
|
||||
|
||||
**A tiny wrapper around Node streams.Transform (Streams2/3) to avoid explicit subclassing noise**
|
||||
|
||||
Inspired by [Dominic Tarr](https://github.com/dominictarr)'s [through](https://github.com/dominictarr/through) in that it's so much easier to make a stream out of a function than it is to set up the prototype chain properly: `through(function (chunk) { ... })`.
|
||||
|
||||
Note: As of 2.x.x this module uses **Streams3** instead of Streams2. To continue using a Streams2 version, use `npm install through2@0` to fetch the latest 0.x.x release. For more information about Streams2 vs. Streams3 and related recommendations, see the article **[Why I don't use Node's core 'stream' module](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html)**.
|
||||
|
||||
```js
|
||||
fs.createReadStream('ex.txt')
|
||||
.pipe(through2(function (chunk, enc, callback) {
|
||||
for (var i = 0; i < chunk.length; i++)
|
||||
if (chunk[i] == 97)
|
||||
chunk[i] = 122 // swap 'a' for 'z'
|
||||
|
||||
this.push(chunk)
|
||||
|
||||
callback()
|
||||
}))
|
||||
.pipe(fs.createWriteStream('out.txt'))
|
||||
.on('finish', () => doSomethingSpecial())
|
||||
```
|
||||
|
||||
Or object streams:
|
||||
|
||||
```js
|
||||
var all = []
|
||||
|
||||
fs.createReadStream('data.csv')
|
||||
.pipe(csv2())
|
||||
.pipe(through2.obj(function (chunk, enc, callback) {
|
||||
var data = {
|
||||
name : chunk[0]
|
||||
, address : chunk[3]
|
||||
, phone : chunk[10]
|
||||
}
|
||||
this.push(data)
|
||||
|
||||
callback()
|
||||
}))
|
||||
.on('data', (data) => {
|
||||
all.push(data)
|
||||
})
|
||||
.on('end', () => {
|
||||
doSomethingSpecial(all)
|
||||
})
|
||||
```
|
||||
|
||||
Note that `through2.obj(fn)` is a convenience wrapper around `through2({ objectMode: true }, fn)`.
|
||||
|
||||
## API
|
||||
|
||||
<b><code>through2([ options, ] [ transformFunction ] [, flushFunction ])</code></b>
|
||||
|
||||
Consult the **[stream.Transform](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_transform)** documentation for the exact rules of the `transformFunction` (i.e. `this._transform`) and the optional `flushFunction` (i.e. `this._flush`).
|
||||
|
||||
### options
|
||||
|
||||
The options argument is optional and is passed straight through to `stream.Transform`. So you can use `objectMode:true` if you are processing non-binary streams (or just use `through2.obj()`).
|
||||
|
||||
The `options` argument is first, unlike standard convention, because if I'm passing in an anonymous function then I'd prefer for the options argument to not get lost at the end of the call:
|
||||
|
||||
```js
|
||||
fs.createReadStream('/tmp/important.dat')
|
||||
.pipe(through2({ objectMode: true, allowHalfOpen: false },
|
||||
(chunk, enc, cb) => {
|
||||
cb(null, 'wut?') // note we can use the second argument on the callback
|
||||
// to provide data as an alternative to this.push('wut?')
|
||||
}
|
||||
))
|
||||
.pipe(fs.createWriteStream('/tmp/wut.txt'))
|
||||
```
|
||||
|
||||
### transformFunction
|
||||
|
||||
The `transformFunction` must have the following signature: `function (chunk, encoding, callback) {}`. A minimal implementation should call the `callback` function to indicate that the transformation is done, even if that transformation means discarding the chunk.
|
||||
|
||||
To queue a new chunk, call `this.push(chunk)`—this can be called as many times as required before the `callback()` if you have multiple pieces to send on.
|
||||
|
||||
Alternatively, you may use `callback(err, chunk)` as shorthand for emitting a single chunk or an error.
|
||||
|
||||
If you **do not provide a `transformFunction`** then you will get a simple pass-through stream.
|
||||
|
||||
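A minimal sketch of both behaviours just described, assuming `through2` has been required (the examples in this README leave the `require` implicit):

```js
var through2 = require('through2')

process.stdin
  .pipe(through2())                                  // no transformFunction: pass-through
  .pipe(through2(function (chunk, enc, callback) {
    callback(null, chunk)                            // shorthand for this.push(chunk); callback()
  }))
  .pipe(process.stdout)
```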
### flushFunction
|
||||
|
||||
The optional `flushFunction`, provided as the last argument (2nd or 3rd, depending on whether you've supplied options), is called just prior to the stream ending. It can be used to finish up any processing that may be in progress.
|
||||
|
||||
```js
|
||||
fs.createReadStream('/tmp/important.dat')
|
||||
.pipe(through2(
|
||||
(chunk, enc, cb) => cb(null, chunk), // transform is a noop
|
||||
function (cb) { // flush function
|
||||
this.push('tacking on an extra buffer to the end');
|
||||
cb();
|
||||
}
|
||||
))
|
||||
.pipe(fs.createWriteStream('/tmp/wut.txt'));
|
||||
```
|
||||
|
||||
<b><code>through2.ctor([ options, ] transformFunction[, flushFunction ])</code></b>
|
||||
|
||||
Instead of returning a `stream.Transform` instance, `through2.ctor()` returns a **constructor** for a custom Transform. This is useful when you want to use the same transform logic in multiple instances.
|
||||
|
||||
```js
|
||||
var FToC = through2.ctor({objectMode: true}, function (record, encoding, callback) {
|
||||
if (record.temp != null && record.unit == "F") {
|
||||
record.temp = ( ( record.temp - 32 ) * 5 ) / 9
|
||||
record.unit = "C"
|
||||
}
|
||||
this.push(record)
|
||||
callback()
|
||||
})
|
||||
|
||||
// Create instances of FToC like so:
|
||||
var converter = new FToC()
|
||||
// Or:
|
||||
var converter = FToC()
|
||||
// Or specify/override options when you instantiate, if you prefer:
|
||||
var converter = FToC({objectMode: true})
|
||||
```
|
||||
|
||||
## See Also
|
||||
|
||||
- [through2-map](https://github.com/brycebaril/through2-map) - Array.prototype.map analog for streams.
|
||||
- [through2-filter](https://github.com/brycebaril/through2-filter) - Array.prototype.filter analog for streams.
|
||||
- [through2-reduce](https://github.com/brycebaril/through2-reduce) - Array.prototype.reduce analog for streams.
|
||||
- [through2-spy](https://github.com/brycebaril/through2-spy) - Wrapper for simple stream.PassThrough spies.
|
||||
- the [mississippi stream utility collection](https://github.com/maxogden/mississippi) includes `through2` as well as many more useful stream modules similar to this one
|
||||
|
||||
## License
|
||||
|
||||
**through2** is Copyright (c) Rod Vagg [@rvagg](https://twitter.com/rvagg) and additional contributors and licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE file for more details.
|
66
express-server/node_modules/@google-cloud/storage/node_modules/through2/package.json
generated
vendored
Normal file
66
express-server/node_modules/@google-cloud/storage/node_modules/through2/package.json
generated
vendored
Normal file
@ -0,0 +1,66 @@
|
||||
{
|
||||
"_from": "through2@^2.0.0",
|
||||
"_id": "through2@2.0.5",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==",
|
||||
"_location": "/@google-cloud/storage/through2",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "range",
|
||||
"registry": true,
|
||||
"raw": "through2@^2.0.0",
|
||||
"name": "through2",
|
||||
"escapedName": "through2",
|
||||
"rawSpec": "^2.0.0",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "^2.0.0"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/@google-cloud/storage"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz",
|
||||
"_shasum": "01c1e39eb31d07cb7d03a96a70823260b23132cd",
|
||||
"_spec": "through2@^2.0.0",
|
||||
"_where": "D:\\Desktop\\Git\\Firebase\\SmartShopperFirebase\\node_modules\\@google-cloud\\storage",
|
||||
"author": {
|
||||
"name": "Rod Vagg",
|
||||
"email": "r@va.gg",
|
||||
"url": "https://github.com/rvagg"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/rvagg/through2/issues"
|
||||
},
|
||||
"bundleDependencies": false,
|
||||
"dependencies": {
|
||||
"readable-stream": "~2.3.6",
|
||||
"xtend": "~4.0.1"
|
||||
},
|
||||
"deprecated": false,
|
||||
"description": "A tiny wrapper around Node streams2 Transform to avoid explicit subclassing noise",
|
||||
"devDependencies": {
|
||||
"bl": "~2.0.1",
|
||||
"faucet": "0.0.1",
|
||||
"nyc": "~13.1.0",
|
||||
"safe-buffer": "~5.1.2",
|
||||
"stream-spigot": "~3.0.6",
|
||||
"tape": "~4.9.1"
|
||||
},
|
||||
"homepage": "https://github.com/rvagg/through2#readme",
|
||||
"keywords": [
|
||||
"stream",
|
||||
"streams2",
|
||||
"through",
|
||||
"transform"
|
||||
],
|
||||
"license": "MIT",
|
||||
"main": "through2.js",
|
||||
"name": "through2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/rvagg/through2.git"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "node test/test.js | faucet"
|
||||
},
|
||||
"version": "2.0.5"
|
||||
}
|
96
express-server/node_modules/@google-cloud/storage/node_modules/through2/through2.js
generated
vendored
Normal file
96
express-server/node_modules/@google-cloud/storage/node_modules/through2/through2.js
generated
vendored
Normal file
@ -0,0 +1,96 @@
|
||||
var Transform = require('readable-stream').Transform
|
||||
, inherits = require('util').inherits
|
||||
, xtend = require('xtend')
|
||||
|
||||
function DestroyableTransform(opts) {
|
||||
Transform.call(this, opts)
|
||||
this._destroyed = false
|
||||
}
|
||||
|
||||
inherits(DestroyableTransform, Transform)
|
||||
|
||||
DestroyableTransform.prototype.destroy = function(err) {
|
||||
if (this._destroyed) return
|
||||
this._destroyed = true
|
||||
|
||||
var self = this
|
||||
process.nextTick(function() {
|
||||
if (err)
|
||||
self.emit('error', err)
|
||||
self.emit('close')
|
||||
})
|
||||
}
|
||||
|
||||
// a noop _transform function
|
||||
function noop (chunk, enc, callback) {
|
||||
callback(null, chunk)
|
||||
}
|
||||
|
||||
|
||||
// create a new export function, used by both the main export and
|
||||
// the .ctor export, contains common logic for dealing with arguments
|
||||
function through2 (construct) {
|
||||
return function (options, transform, flush) {
|
||||
if (typeof options == 'function') {
|
||||
flush = transform
|
||||
transform = options
|
||||
options = {}
|
||||
}
|
||||
|
||||
if (typeof transform != 'function')
|
||||
transform = noop
|
||||
|
||||
if (typeof flush != 'function')
|
||||
flush = null
|
||||
|
||||
return construct(options, transform, flush)
|
||||
}
|
||||
}
|
||||
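// Illustrative note, not part of the upstream file: the factory above normalises
// the supported call shapes so that each of the following reaches construct()
// with a full (options, transform, flush) triple:
//
//   through2(function (chunk, enc, cb) { cb(null, chunk) })            // options = {}, flush = null
//   through2({ objectMode: true }, transform)                          // flush = null
//   through2({ highWaterMark: 1 }, transform, function (cb) { cb() })  // all three supplied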
|
||||
|
||||
// main export, just make me a transform stream!
|
||||
module.exports = through2(function (options, transform, flush) {
|
||||
var t2 = new DestroyableTransform(options)
|
||||
|
||||
t2._transform = transform
|
||||
|
||||
if (flush)
|
||||
t2._flush = flush
|
||||
|
||||
return t2
|
||||
})
|
||||
|
||||
|
||||
// make me a reusable prototype that I can `new`, or implicitly `new`
|
||||
// with a constructor call
|
||||
module.exports.ctor = through2(function (options, transform, flush) {
|
||||
function Through2 (override) {
|
||||
if (!(this instanceof Through2))
|
||||
return new Through2(override)
|
||||
|
||||
this.options = xtend(options, override)
|
||||
|
||||
DestroyableTransform.call(this, this.options)
|
||||
}
|
||||
|
||||
inherits(Through2, DestroyableTransform)
|
||||
|
||||
Through2.prototype._transform = transform
|
||||
|
||||
if (flush)
|
||||
Through2.prototype._flush = flush
|
||||
|
||||
return Through2
|
||||
})
|
||||
|
||||
|
||||
module.exports.obj = through2(function (options, transform, flush) {
|
||||
var t2 = new DestroyableTransform(xtend({ objectMode: true, highWaterMark: 16 }, options))
|
||||
|
||||
t2._transform = transform
|
||||
|
||||
if (flush)
|
||||
t2._flush = flush
|
||||
|
||||
return t2
|
||||
})
|
216
express-server/node_modules/@google-cloud/storage/package.json
generated
vendored
Normal file
216
express-server/node_modules/@google-cloud/storage/package.json
generated
vendored
Normal file
@ -0,0 +1,216 @@
|
||||
{
|
||||
"_from": "@google-cloud/storage@^1.6.0",
|
||||
"_id": "@google-cloud/storage@1.7.0",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-QaAxzCkbhspwajoaEnT0GcnQcpjPRcBrHYuQsXtD05BtOJgVnHCLXSsfUiRdU0nVpK+Thp7+sTkQ0fvk5PanKg==",
|
||||
"_location": "/@google-cloud/storage",
|
||||
"_phantomChildren": {
|
||||
"core-util-is": "1.0.2",
|
||||
"inherits": "2.0.3",
|
||||
"process-nextick-args": "2.0.0",
|
||||
"safe-buffer": "5.1.2",
|
||||
"util-deprecate": "1.0.2",
|
||||
"xtend": "4.0.1"
|
||||
},
|
||||
"_requested": {
|
||||
"type": "range",
|
||||
"registry": true,
|
||||
"raw": "@google-cloud/storage@^1.6.0",
|
||||
"name": "@google-cloud/storage",
|
||||
"escapedName": "@google-cloud%2fstorage",
|
||||
"scope": "@google-cloud",
|
||||
"rawSpec": "^1.6.0",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "^1.6.0"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/firebase-admin"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-1.7.0.tgz",
|
||||
"_shasum": "07bff573d92d5c294db6a04af246688875a8f74b",
|
||||
"_spec": "@google-cloud/storage@^1.6.0",
|
||||
"_where": "D:\\Desktop\\Git\\Firebase\\SmartShopperFirebase\\node_modules\\firebase-admin",
|
||||
"author": {
|
||||
"name": "Google Inc."
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/googleapis/nodejs-storage/issues"
|
||||
},
|
||||
"bundleDependencies": false,
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Ace Nassri",
|
||||
"email": "anassri@google.com"
|
||||
},
|
||||
{
|
||||
"name": "Alexander Borovykh",
|
||||
"email": "immaculate.pine@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Alexander Fenster",
|
||||
"email": "github@fenster.name"
|
||||
},
|
||||
{
|
||||
"name": "Calvin Metcalf",
|
||||
"email": "calvin.metcalf@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Colin Ihrig",
|
||||
"email": "cjihrig@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Cristian Almstrand",
|
||||
"email": "almstrand@users.noreply.github.com"
|
||||
},
|
||||
{
|
||||
"name": "Dave Gramlich",
|
||||
"email": "callmehiphop@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Dominic Valenciana",
|
||||
"email": "kiricon@live.com"
|
||||
},
|
||||
{
|
||||
"name": "Eric Uldall",
|
||||
"email": "ericuldall@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Ernest Landrito",
|
||||
"email": "landrito@google.com"
|
||||
},
|
||||
{
|
||||
"name": "Frank Natividad",
|
||||
"email": "frankyn@users.noreply.github.com"
|
||||
},
|
||||
{
|
||||
"name": "Jason Dobry",
|
||||
"email": "jason.dobry@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Jason Dobry",
|
||||
"email": "jmdobry@users.noreply.github.com"
|
||||
},
|
||||
{
|
||||
"name": "Justin Sprigg",
|
||||
"email": "justin.sprigg@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Luke Sneeringer",
|
||||
"email": "luke@sneeringer.com"
|
||||
},
|
||||
{
|
||||
"name": "Stephen",
|
||||
"email": "stephenplusplus@users.noreply.github.com"
|
||||
},
|
||||
{
|
||||
"name": "Stephen Sawchuk",
|
||||
"email": "sawchuk@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Stephen Sawchuk",
|
||||
"email": "stephenplusplus@users.noreply.github.com"
|
||||
},
|
||||
{
|
||||
"name": "Tyler Johnson",
|
||||
"email": "mail@tyler-johnson.ca"
|
||||
},
|
||||
{
|
||||
"name": "Zach Bjornson",
|
||||
"email": "bjornson@stanford.edu"
|
||||
},
|
||||
{
|
||||
"name": "greenkeeper[bot]",
|
||||
"email": "greenkeeper[bot]@users.noreply.github.com"
|
||||
}
|
||||
],
|
||||
"dependencies": {
|
||||
"@google-cloud/common": "^0.17.0",
|
||||
"arrify": "^1.0.0",
|
||||
"async": "^2.0.1",
|
||||
"compressible": "^2.0.12",
|
||||
"concat-stream": "^1.5.0",
|
||||
"create-error-class": "^3.0.2",
|
||||
"duplexify": "^3.5.0",
|
||||
"extend": "^3.0.0",
|
||||
"gcs-resumable-upload": "^0.10.2",
|
||||
"hash-stream-validation": "^0.2.1",
|
||||
"is": "^3.0.1",
|
||||
"mime": "^2.2.0",
|
||||
"mime-types": "^2.0.8",
|
||||
"once": "^1.3.1",
|
||||
"pumpify": "^1.5.1",
|
||||
"request": "^2.85.0",
|
||||
"safe-buffer": "^5.1.1",
|
||||
"snakeize": "^0.1.0",
|
||||
"stream-events": "^1.0.1",
|
||||
"through2": "^2.0.0",
|
||||
"xdg-basedir": "^3.0.0"
|
||||
},
|
||||
"deprecated": false,
|
||||
"description": "Cloud Storage Client Library for Node.js",
|
||||
"devDependencies": {
|
||||
"@google-cloud/nodejs-repo-tools": "^2.2.3",
|
||||
"@google-cloud/pubsub": "*",
|
||||
"codecov": "^3.0.0",
|
||||
"eslint": "^4.7.1",
|
||||
"eslint-config-prettier": "^2.5.0",
|
||||
"eslint-plugin-node": "^6.0.0",
|
||||
"eslint-plugin-prettier": "^2.3.1",
|
||||
"ink-docstrap": "https://github.com/docstrap/docstrap/tarball/master",
|
||||
"intelli-espower-loader": "^1.0.1",
|
||||
"jsdoc": "^3.5.4",
|
||||
"mocha": "^5.0.0",
|
||||
"normalize-newline": "^3.0.0",
|
||||
"nyc": "^11.1.0",
|
||||
"power-assert": "^1.4.4",
|
||||
"prettier": "^1.7.0",
|
||||
"prop-assign": "^1.0.0",
|
||||
"propprop": "^0.3.0",
|
||||
"proxyquire": "^2.0.0",
|
||||
"semistandard": "^12.0.0",
|
||||
"tmp": "^0.0.33",
|
||||
"uuid": "^3.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
},
|
||||
"files": [
|
||||
"src",
|
||||
"AUTHORS",
|
||||
"CONTRIBUTORS",
|
||||
"COPYING"
|
||||
],
|
||||
"homepage": "https://github.com/googleapis/nodejs-storage#readme",
|
||||
"keywords": [
|
||||
"google apis client",
|
||||
"google api client",
|
||||
"google apis",
|
||||
"google api",
|
||||
"google",
|
||||
"google cloud platform",
|
||||
"google cloud",
|
||||
"cloud",
|
||||
"google storage",
|
||||
"storage"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"main": "./src/index.js",
|
||||
"name": "@google-cloud/storage",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/googleapis/nodejs-storage.git"
|
||||
},
|
||||
"scripts": {
|
||||
"all-test": "npm test && npm run system-test && npm run samples-test",
|
||||
"cover": "nyc --reporter=lcov mocha --require intelli-espower-loader test/*.js && nyc report",
|
||||
"docs": "repo-tools exec -- jsdoc -c .jsdoc.js",
|
||||
"generate-scaffolding": "repo-tools generate all && repo-tools generate lib_samples_readme -l samples/ --config ../.cloud-repo-tools.json",
|
||||
"lint": "repo-tools lint --cmd eslint -- src/ samples/ system-test/ test/",
|
||||
"prettier": "repo-tools exec -- prettier --write src/**/*.js samples/*.js samples/**/*.js system-test/**/*.js test/**/*.js",
|
||||
"samples-test": "npm link && cd samples/ && npm link @google-cloud/storage && npm test && cd ../",
|
||||
"system-test": "repo-tools test run --cmd mocha -- system-test/ --timeout 600000",
|
||||
"test": "repo-tools test run --cmd npm -- run cover",
|
||||
"test-no-cover": "repo-tools test run --cmd mocha -- test/"
|
||||
},
|
||||
"version": "1.7.0"
|
||||
}
|
766
express-server/node_modules/@google-cloud/storage/src/acl.js
generated
vendored
Normal file
766
express-server/node_modules/@google-cloud/storage/src/acl.js
generated
vendored
Normal file
@ -0,0 +1,766 @@
|
||||
/*!
|
||||
* Copyright 2014 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const arrify = require('arrify');
|
||||
const common = require('@google-cloud/common');
|
||||
const extend = require('extend');
|
||||
const is = require('is');
|
||||
const util = require('util');
|
||||
|
||||
/**
|
||||
* Cloud Storage uses access control lists (ACLs) to manage object and
|
||||
* bucket access. ACLs are the mechanism you use to share objects with other
|
||||
* users and allow other users to access your buckets and objects.
|
||||
*
|
||||
* An ACL consists of one or more entries, where each entry grants permissions
|
||||
* to an entity. Permissions define the actions that can be performed against an
|
||||
* object or bucket (for example, `READ` or `WRITE`); the entity defines who the
|
||||
* permission applies to (for example, a specific user or group of users).
|
||||
*
|
||||
* Where an `entity` value is accepted, we follow the format the Cloud Storage
|
||||
* API expects.
|
||||
*
|
||||
* Refer to
|
||||
* https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls
|
||||
* for the most up-to-date values.
|
||||
*
|
||||
* - `user-userId`
|
||||
* - `user-email`
|
||||
* - `group-groupId`
|
||||
* - `group-email`
|
||||
* - `domain-domain`
|
||||
* - `project-team-projectId`
|
||||
* - `allUsers`
|
||||
* - `allAuthenticatedUsers`
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* - The user "liz@example.com" would be `user-liz@example.com`.
|
||||
* - The group "example@googlegroups.com" would be
|
||||
* `group-example@googlegroups.com`.
|
||||
* - To refer to all members of the Google Apps for Business domain
|
||||
* "example.com", the entity would be `domain-example.com`.
|
||||
*
|
||||
* For more detailed information, see
|
||||
* [About Access Control Lists](http://goo.gl/6qBBPO).
|
||||
*
|
||||
* @constructor Acl
|
||||
* @mixin
|
||||
* @param {object} options Configuration options.
|
||||
*/
|
||||
function Acl(options) {
|
||||
AclRoleAccessorMethods.call(this);
|
||||
|
||||
this.pathPrefix = options.pathPrefix;
|
||||
this.request_ = options.request;
|
||||
}
|
||||
|
||||
/**
|
||||
* An object of convenience methods to add or delete owner ACL permissions for a
|
||||
* given entity.
|
||||
*
|
||||
* The supported methods include:
|
||||
*
|
||||
* - `myFile.acl.owners.addAllAuthenticatedUsers`
|
||||
* - `myFile.acl.owners.deleteAllAuthenticatedUsers`
|
||||
* - `myFile.acl.owners.addAllUsers`
|
||||
* - `myFile.acl.owners.deleteAllUsers`
|
||||
* - `myFile.acl.owners.addDomain`
|
||||
* - `myFile.acl.owners.deleteDomain`
|
||||
* - `myFile.acl.owners.addGroup`
|
||||
* - `myFile.acl.owners.deleteGroup`
|
||||
* - `myFile.acl.owners.addProject`
|
||||
* - `myFile.acl.owners.deleteProject`
|
||||
* - `myFile.acl.owners.addUser`
|
||||
* - `myFile.acl.owners.deleteUser`
|
||||
*
|
||||
* @return {object}
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const myBucket = storage.bucket('my-bucket');
|
||||
* const myFile = myBucket.file('my-file');
|
||||
*
|
||||
* //-
|
||||
* // Add a user as an owner of a file.
|
||||
* //-
|
||||
* myFile.acl.owners.addUser('email@example.com', function(err, aclObject) {});
|
||||
*
|
||||
* //-
|
||||
* // For reference, the above command is the same as running the following.
|
||||
* //-
|
||||
* myFile.acl.add({
|
||||
* entity: 'user-email@example.com',
|
||||
* role: gcs.acl.OWNER_ROLE
|
||||
* }, function(err, aclObject) {});
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* myFile.acl.owners.addUser('email@example.com').then(function(data) {
|
||||
* const aclObject = data[0];
|
||||
* const apiResponse = data[1];
|
||||
* });
|
||||
*/
|
||||
Acl.prototype.owners = {};
|
||||
|
||||
/**
|
||||
* An object of convenience methods to add or delete reader ACL permissions for
|
||||
* a given entity.
|
||||
*
|
||||
* The supported methods include:
|
||||
*
|
||||
* - `myFile.acl.readers.addAllAuthenticatedUsers`
|
||||
* - `myFile.acl.readers.deleteAllAuthenticatedUsers`
|
||||
* - `myFile.acl.readers.addAllUsers`
|
||||
* - `myFile.acl.readers.deleteAllUsers`
|
||||
* - `myFile.acl.readers.addDomain`
|
||||
* - `myFile.acl.readers.deleteDomain`
|
||||
* - `myFile.acl.readers.addGroup`
|
||||
* - `myFile.acl.readers.deleteGroup`
|
||||
* - `myFile.acl.readers.addProject`
|
||||
* - `myFile.acl.readers.deleteProject`
|
||||
* - `myFile.acl.readers.addUser`
|
||||
* - `myFile.acl.readers.deleteUser`
|
||||
*
|
||||
* @return {object}
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const myBucket = storage.bucket('my-bucket');
|
||||
* const myFile = myBucket.file('my-file');
|
||||
*
|
||||
* //-
|
||||
* // Add a user as a reader of a file.
|
||||
* //-
|
||||
* myFile.acl.readers.addUser('email@example.com', function(err, aclObject) {});
|
||||
*
|
||||
* //-
|
||||
* // For reference, the above command is the same as running the following.
|
||||
* //-
|
||||
* myFile.acl.add({
|
||||
* entity: 'user-email@example.com',
|
||||
* role: gcs.acl.READER_ROLE
|
||||
* }, function(err, aclObject) {});
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* myFile.acl.readers.addUser('email@example.com').then(function(data) {
|
||||
* const aclObject = data[0];
|
||||
* const apiResponse = data[1];
|
||||
* });
|
||||
*/
|
||||
Acl.prototype.readers = {};
|
||||
|
||||
/**
|
||||
* An object of convenience methods to add or delete writer ACL permissions for
|
||||
* a given entity.
|
||||
*
|
||||
* The supported methods include:
|
||||
*
|
||||
* - `myFile.acl.writers.addAllAuthenticatedUsers`
|
||||
* - `myFile.acl.writers.deleteAllAuthenticatedUsers`
|
||||
* - `myFile.acl.writers.addAllUsers`
|
||||
* - `myFile.acl.writers.deleteAllUsers`
|
||||
* - `myFile.acl.writers.addDomain`
|
||||
* - `myFile.acl.writers.deleteDomain`
|
||||
* - `myFile.acl.writers.addGroup`
|
||||
* - `myFile.acl.writers.deleteGroup`
|
||||
* - `myFile.acl.writers.addProject`
|
||||
* - `myFile.acl.writers.deleteProject`
|
||||
* - `myFile.acl.writers.addUser`
|
||||
* - `myFile.acl.writers.deleteUser`
|
||||
*
|
||||
* @return {object}
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const myBucket = storage.bucket('my-bucket');
|
||||
* const myFile = myBucket.file('my-file');
|
||||
*
|
||||
* //-
|
||||
* // Add a user as a writer of a file.
|
||||
* //-
|
||||
* myFile.acl.writers.addUser('email@example.com', function(err, aclObject) {});
|
||||
*
|
||||
* //-
|
||||
* // For reference, the above command is the same as running the following.
|
||||
* //-
|
||||
* myFile.acl.add({
|
||||
* entity: 'user-email@example.com',
|
||||
* role: gcs.acl.WRITER_ROLE
|
||||
* }, function(err, aclObject) {});
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* myFile.acl.writers.addUser('email@example.com').then(function(data) {
|
||||
* const aclObject = data[0];
|
||||
* const apiResponse = data[1];
|
||||
* });
|
||||
*/
|
||||
Acl.prototype.writers = {};
|
||||
|
||||
util.inherits(Acl, AclRoleAccessorMethods);
|
||||
|
||||
/**
|
||||
* @typedef {array} AddAclResponse
|
||||
* @property {object} 0 The Acl Objects.
|
||||
* @property {object} 1 The full API response.
|
||||
*/
|
||||
/**
|
||||
* @callback AddAclCallback
|
||||
* @param {?Error} err Request error, if any.
|
||||
* @param {object} acl The Acl Objects.
|
||||
* @param {object} apiResponse The full API response.
|
||||
*/
|
||||
/**
|
||||
* Add access controls on a {@link Bucket} or {@link File}.
|
||||
*
|
||||
* @see [BucketAccessControls: insert API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert}
|
||||
* @see [ObjectAccessControls: insert API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert}
|
||||
*
|
||||
* @param {object} options Configuration options.
|
||||
* @param {string} options.entity Whose permissions will be added.
|
||||
* @param {string} options.role Permissions allowed for the defined entity.
|
||||
* See {@link https://cloud.google.com/storage/docs/access-control Access Control}.
|
||||
* @param {number} [options.generation] **File Objects Only** Select a specific
|
||||
* revision of this file (as opposed to the latest version, the default).
|
||||
* @param {string} [options.userProject] The ID of the project which will be
|
||||
* billed for the request.
|
||||
* @param {AddAclCallback} [callback] Callback function.
|
||||
* @returns {Promise<AddAclResponse>}
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const myBucket = storage.bucket('my-bucket');
|
||||
* const myFile = myBucket.file('my-file');
|
||||
*
|
||||
* const options = {
|
||||
* entity: 'user-useremail@example.com',
|
||||
* role: gcs.acl.OWNER_ROLE
|
||||
* };
|
||||
*
|
||||
* myBucket.acl.add(options, function(err, aclObject, apiResponse) {});
|
||||
*
|
||||
* //-
|
||||
* // For file ACL operations, you can also specify a `generation` property.
|
||||
* // Here is how you would grant ownership permissions to a user on a specific
|
||||
* // revision of a file.
|
||||
* //-
|
||||
* myFile.acl.add({
|
||||
* entity: 'user-useremail@example.com',
|
||||
* role: gcs.acl.OWNER_ROLE,
|
||||
* generation: 1
|
||||
* }, function(err, aclObject, apiResponse) {});
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* myBucket.acl.add(options).then(function(data) {
|
||||
* const aclObject = data[0];
|
||||
* const apiResponse = data[1];
|
||||
* });
|
||||
*
|
||||
* @example <caption>include:samples/acl.js</caption>
|
||||
* region_tag:storage_add_file_owner
|
||||
* Example of adding an owner to a file:
|
||||
*
|
||||
* @example <caption>include:samples/acl.js</caption>
|
||||
* region_tag:storage_add_bucket_owner
|
||||
* Example of adding an owner to a bucket:
|
||||
*
|
||||
* @example <caption>include:samples/acl.js</caption>
|
||||
* region_tag:storage_add_bucket_default_owner
|
||||
* Example of adding a default owner to a bucket:
|
||||
*/
|
||||
Acl.prototype.add = function(options, callback) {
|
||||
const self = this;
|
||||
|
||||
const query = {};
|
||||
|
||||
if (options.generation) {
|
||||
query.generation = options.generation;
|
||||
}
|
||||
|
||||
if (options.userProject) {
|
||||
query.userProject = options.userProject;
|
||||
}
|
||||
|
||||
this.request(
|
||||
{
|
||||
method: 'POST',
|
||||
uri: '',
|
||||
qs: query,
|
||||
json: {
|
||||
entity: options.entity,
|
||||
role: options.role.toUpperCase(),
|
||||
},
|
||||
},
|
||||
function(err, resp) {
|
||||
if (err) {
|
||||
callback(err, null, resp);
|
||||
return;
|
||||
}
|
||||
|
||||
callback(null, self.makeAclObject_(resp), resp);
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* @typedef {array} RemoveAclResponse
|
||||
* @property {object} 0 The full API response.
|
||||
*/
|
||||
/**
|
||||
* @callback RemoveAclCallback
|
||||
* @param {?Error} err Request error, if any.
|
||||
* @param {object} apiResponse The full API response.
|
||||
*/
|
||||
/**
|
||||
* Delete access controls on a {@link Bucket} or {@link File}.
|
||||
*
|
||||
* @see [BucketAccessControls: delete API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete}
|
||||
* @see [ObjectAccessControls: delete API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete}
|
||||
*
|
||||
* @param {object} options Configuration object.
|
||||
* @param {string} options.entity Whose permissions will be revoked.
|
||||
* @param {number} [options.generation] **File Objects Only** Select a specific
|
||||
* revision of this file (as opposed to the latest version, the default).
|
||||
* @param {string} [options.userProject] The ID of the project which will be
|
||||
* billed for the request.
|
||||
* @param {RemoveAclCallback} callback The callback function.
|
||||
* @returns {Promise<RemoveAclResponse>}
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const myBucket = storage.bucket('my-bucket');
|
||||
* const myFile = myBucket.file('my-file');
|
||||
*
|
||||
* myBucket.acl.delete({
|
||||
* entity: 'user-useremail@example.com'
|
||||
* }, function(err, apiResponse) {});
|
||||
*
|
||||
* //-
|
||||
* // For file ACL operations, you can also specify a `generation` property.
|
||||
* //-
|
||||
* myFile.acl.delete({
|
||||
* entity: 'user-useremail@example.com',
|
||||
* generation: 1
|
||||
* }, function(err, apiResponse) {});
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* myFile.acl.delete().then(function(data) {
|
||||
* const apiResponse = data[0];
|
||||
* });
|
||||
*
|
||||
* @example <caption>include:samples/acl.js</caption>
|
||||
* region_tag:storage_remove_bucket_owner
|
||||
* Example of removing an owner from a bucket:
|
||||
*
|
||||
* @example <caption>include:samples/acl.js</caption>
|
||||
* region_tag:storage_remove_bucket_default_owner
|
||||
* Example of removing a default owner from a bucket:
|
||||
*
|
||||
* @example <caption>include:samples/acl.js</caption>
|
||||
* region_tag:storage_remove_file_owner
|
||||
* Example of removing an owner from a file:
|
||||
*/
|
||||
Acl.prototype.delete = function(options, callback) {
|
||||
const query = {};
|
||||
|
||||
if (options.generation) {
|
||||
query.generation = options.generation;
|
||||
}
|
||||
|
||||
if (options.userProject) {
|
||||
query.userProject = options.userProject;
|
||||
}
|
||||
|
||||
this.request(
|
||||
{
|
||||
method: 'DELETE',
|
||||
uri: '/' + encodeURIComponent(options.entity),
|
||||
qs: query,
|
||||
},
|
||||
function(err, resp) {
|
||||
callback(err, resp);
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* @typedef {array} GetAclResponse
|
||||
* @property {object|object[]} 0 Single or array of Acl Objects.
|
||||
* @property {object} 1 The full API response.
|
||||
*/
|
||||
/**
|
||||
* @callback GetAclCallback
|
||||
* @param {?Error} err Request error, if any.
|
||||
* @param {object|object[]} acl Single or array of Acl Objects.
|
||||
* @param {object} apiResponse The full API response.
|
||||
*/
|
||||
/**
|
||||
* Get access controls on a {@link Bucket} or {@link File}. If
|
||||
* an entity is omitted, you will receive an array of all applicable access
|
||||
* controls.
|
||||
*
|
||||
* @see [BucketAccessControls: get API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get}
|
||||
* @see [ObjectAccessControls: get API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get}
|
||||
*
|
||||
* @param {object|function} [options] Configuration options. If you want to
|
||||
* receive a list of all access controls, pass the callback function as the
|
||||
* only argument.
|
||||
* @param {string} [options.entity] Whose permissions will be fetched.
|
||||
* @param {number} [options.generation] **File Objects Only** Select a specific
|
||||
* revision of this file (as opposed to the latest version, the default).
|
||||
* @param {string} [options.userProject] The ID of the project which will be
|
||||
* billed for the request.
|
||||
* @param {GetAclCallback} [callback] Callback function.
|
||||
* @returns {Promise<GetAclResponse>}
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const myBucket = storage.bucket('my-bucket');
|
||||
* const myFile = myBucket.file('my-file');
|
||||
*
|
||||
* myBucket.acl.get({
|
||||
* entity: 'user-useremail@example.com'
|
||||
* }, function(err, aclObject, apiResponse) {});
|
||||
*
|
||||
* //-
|
||||
* // Get all access controls.
|
||||
* //-
|
||||
* myBucket.acl.get(function(err, aclObjects, apiResponse) {
|
||||
* // aclObjects = [
|
||||
* // {
|
||||
* // entity: 'user-useremail@example.com',
|
||||
* // role: 'owner'
|
||||
* // }
|
||||
* // ]
|
||||
* });
|
||||
*
|
||||
* //-
|
||||
* // For file ACL operations, you can also specify a `generation` property.
|
||||
* //-
|
||||
* myFile.acl.get({
|
||||
* entity: 'user-useremail@example.com',
|
||||
* generation: 1
|
||||
* }, function(err, aclObject, apiResponse) {});
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* myBucket.acl.get().then(function(data) {
|
||||
* const aclObject = data[0];
|
||||
* const apiResponse = data[1];
|
||||
* });
|
||||
*
|
||||
* @example <caption>include:samples/acl.js</caption>
|
||||
* region_tag:storage_print_file_acl
|
||||
* Example of printing a file's ACL:
|
||||
*
|
||||
* @example <caption>include:samples/acl.js</caption>
|
||||
* region_tag:storage_print_file_acl_for_user
|
||||
* Example of printing a file's ACL for a specific user:
|
||||
*
|
||||
* @example <caption>include:samples/acl.js</caption>
|
||||
* region_tag:storage_print_bucket_acl
|
||||
* Example of printing a bucket's ACL:
|
||||
*
|
||||
* @example <caption>include:samples/acl.js</caption>
|
||||
* region_tag:storage_print_bucket_acl_for_user
|
||||
* Example of printing a bucket's ACL for a specific user:
|
||||
*/
|
||||
Acl.prototype.get = function(options, callback) {
|
||||
const self = this;
|
||||
let path = '';
|
||||
const query = {};
|
||||
|
||||
if (is.fn(options)) {
|
||||
callback = options;
|
||||
options = null;
|
||||
} else {
|
||||
path = '/' + encodeURIComponent(options.entity);
|
||||
|
||||
if (options.generation) {
|
||||
query.generation = options.generation;
|
||||
}
|
||||
|
||||
if (options.userProject) {
|
||||
query.userProject = options.userProject;
|
||||
}
|
||||
}
|
||||
|
||||
this.request(
|
||||
{
|
||||
uri: path,
|
||||
qs: query,
|
||||
},
|
||||
function(err, resp) {
|
||||
if (err) {
|
||||
callback(err, null, resp);
|
||||
return;
|
||||
}
|
||||
|
||||
let results;
|
||||
|
||||
if (resp.items) {
|
||||
results = arrify(resp.items).map(self.makeAclObject_);
|
||||
} else {
|
||||
results = self.makeAclObject_(resp);
|
||||
}
|
||||
|
||||
callback(null, results, resp);
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* @typedef {array} UpdateAclResponse
|
||||
* @property {object} 0 The updated Acl Objects.
|
||||
* @property {object} 1 The full API response.
|
||||
*/
|
||||
/**
|
||||
* @callback UpdateAclCallback
|
||||
* @param {?Error} err Request error, if any.
|
||||
* @param {object} acl The updated Acl Objects.
|
||||
* @param {object} apiResponse The full API response.
|
||||
*/
|
||||
/**
|
||||
* Update access controls on a {@link Bucket} or {@link File}.
|
||||
*
|
||||
* @see [BucketAccessControls: update API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update}
|
||||
* @see [ObjectAccessControls: update API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update}
|
||||
*
|
||||
* @param {object} options Configuration options.
|
||||
* @param {string} options.entity Whose permissions will be updated.
|
||||
* @param {string} options.role Permissions allowed for the defined entity.
|
||||
* See {@link Storage.acl}.
|
||||
* @param {number} [options.generation] **File Objects Only** Select a specific
|
||||
* revision of this file (as opposed to the latest version, the default).
|
||||
* @param {string} [options.userProject] The ID of the project which will be
|
||||
* billed for the request.
|
||||
* @param {UpdateAclCallback} [callback] Callback function.
|
||||
* @returns {Promise<UpdateAclResponse>}
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const myBucket = storage.bucket('my-bucket');
|
||||
* const myFile = myBucket.file('my-file');
|
||||
*
|
||||
* const options = {
|
||||
* entity: 'user-useremail@example.com',
|
||||
* role: gcs.acl.WRITER_ROLE
|
||||
* };
|
||||
*
|
||||
* myBucket.acl.update(options, function(err, aclObject, apiResponse) {});
|
||||
*
|
||||
* //-
|
||||
* // For file ACL operations, you can also specify a `generation` property.
|
||||
* //-
|
||||
* myFile.acl.update({
|
||||
* entity: 'user-useremail@example.com',
|
||||
* role: gcs.acl.WRITER_ROLE,
|
||||
* generation: 1
|
||||
* }, function(err, aclObject, apiResponse) {});
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* myFile.acl.update(options).then(function(data) {
|
||||
* const aclObject = data[0];
|
||||
* const apiResponse = data[1];
|
||||
* });
|
||||
*/
|
||||
Acl.prototype.update = function(options, callback) {
|
||||
const self = this;
|
||||
|
||||
const query = {};
|
||||
|
||||
if (options.generation) {
|
||||
query.generation = options.generation;
|
||||
}
|
||||
|
||||
if (options.userProject) {
|
||||
query.userProject = options.userProject;
|
||||
}
|
||||
|
||||
this.request(
|
||||
{
|
||||
method: 'PUT',
|
||||
uri: '/' + encodeURIComponent(options.entity),
|
||||
qs: query,
|
||||
json: {
|
||||
role: options.role.toUpperCase(),
|
||||
},
|
||||
},
|
||||
function(err, resp) {
|
||||
if (err) {
|
||||
callback(err, null, resp);
|
||||
return;
|
||||
}
|
||||
|
||||
callback(null, self.makeAclObject_(resp), resp);
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Transform API responses to a consistent object format.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
Acl.prototype.makeAclObject_ = function(accessControlObject) {
|
||||
const obj = {
|
||||
entity: accessControlObject.entity,
|
||||
role: accessControlObject.role,
|
||||
};
|
||||
|
||||
if (accessControlObject.projectTeam) {
|
||||
obj.projectTeam = accessControlObject.projectTeam;
|
||||
}
|
||||
|
||||
return obj;
|
||||
};
|
||||
|
||||
/**
|
||||
* Patch requests up to the bucket's request object.
|
||||
*
|
||||
* @private
|
||||
*
|
||||
* @param {object} reqOpts Request options.
|
||||
* @param {string} [reqOpts.method] HTTP method (defaults to `GET`).
|
||||
* @param {string} reqOpts.uri Request path, relative to this ACL's path prefix.
|
||||
* @param {object} [reqOpts.qs] Request query object.
|
||||
* @param {object} [reqOpts.json] Request body contents.
|
||||
* @param {function} callback Callback function.
|
||||
*/
|
||||
Acl.prototype.request = function(reqOpts, callback) {
|
||||
reqOpts.uri = this.pathPrefix + reqOpts.uri;
|
||||
this.request_(reqOpts, callback);
|
||||
};
|
||||
|
||||
/*! Developer Documentation
|
||||
*
|
||||
* All async methods (except for streams) will return a Promise in the event
|
||||
* that a callback is omitted.
|
||||
*/
|
||||
common.util.promisifyAll(Acl);
|
||||
|
||||
module.exports = Acl;
|
||||
|
||||
/**
|
||||
* Attach functionality to a {@link Storage.acl} instance. This will add an
|
||||
* object for each role group (owners, readers, and writers), with each object
|
||||
* containing methods to add or delete a type of entity.
|
||||
*
|
||||
* As an example, here are a few methods that are created.
|
||||
*
|
||||
* myBucket.acl.readers.deleteGroup('groupId', function(err) {});
|
||||
*
|
||||
* myBucket.acl.owners.addUser('email@example.com', function(err, acl) {});
|
||||
*
|
||||
* myBucket.acl.writers.addDomain('example.com', function(err, acl) {});
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
function AclRoleAccessorMethods() {
|
||||
AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this));
|
||||
}
|
||||
|
||||
AclRoleAccessorMethods.accessMethods = ['add', 'delete'];
|
||||
|
||||
AclRoleAccessorMethods.entities = [
|
||||
// Special entity groups that do not require further specification.
|
||||
'allAuthenticatedUsers',
|
||||
'allUsers',
|
||||
|
||||
// Entity groups that require specification, e.g. `user-email@example.com`.
|
||||
'domain-',
|
||||
'group-',
|
||||
'project-',
|
||||
'user-',
|
||||
];
|
||||
|
||||
AclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER'];
|
||||
|
||||
AclRoleAccessorMethods.prototype._assignAccessMethods = function(role) {
|
||||
const self = this;
|
||||
|
||||
const accessMethods = AclRoleAccessorMethods.accessMethods;
|
||||
const entities = AclRoleAccessorMethods.entities;
|
||||
const roleGroup = role.toLowerCase() + 's';
|
||||
|
||||
this[roleGroup] = entities.reduce(function(acc, entity) {
|
||||
const isPrefix = entity.charAt(entity.length - 1) === '-';
|
||||
|
||||
accessMethods.forEach(function(accessMethod) {
|
||||
let method = accessMethod + entity[0].toUpperCase() + entity.substr(1);
|
||||
|
||||
if (isPrefix) {
|
||||
method = method.replace('-', '');
|
||||
}
|
||||
|
||||
// Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the
|
||||
// more complex API of specifying an `entity` and `role`.
|
||||
acc[method] = function(entityId, options, callback) {
|
||||
let apiEntity;
|
||||
|
||||
if (is.fn(options)) {
|
||||
callback = options;
|
||||
options = {};
|
||||
}
|
||||
|
||||
if (isPrefix) {
|
||||
apiEntity = entity + entityId;
|
||||
} else {
|
||||
// If the entity is not a prefix, it is a special entity group that
|
||||
// does not require further details. The accessor methods only accept
|
||||
// a callback.
|
||||
apiEntity = entity;
|
||||
callback = entityId;
|
||||
}
|
||||
|
||||
options = extend(
|
||||
{
|
||||
entity: apiEntity,
|
||||
role: role,
|
||||
},
|
||||
options
|
||||
);
|
||||
|
||||
const args = [options];
|
||||
|
||||
if (is.fn(callback)) {
|
||||
args.push(callback);
|
||||
}
|
||||
|
||||
return self[accessMethod].apply(self, args);
|
||||
};
|
||||
});
|
||||
|
||||
return acc;
|
||||
}, {});
|
||||
};
|
||||
|
||||
module.exports.AclRoleAccessorMethods = AclRoleAccessorMethods;
|
2458
express-server/node_modules/@google-cloud/storage/src/bucket.js
generated
vendored
Normal file
2458
express-server/node_modules/@google-cloud/storage/src/bucket.js
generated
vendored
Normal file
File diff suppressed because it is too large
116
express-server/node_modules/@google-cloud/storage/src/channel.js
generated
vendored
Normal file
116
express-server/node_modules/@google-cloud/storage/src/channel.js
generated
vendored
Normal file
@ -0,0 +1,116 @@
|
||||
/*!
|
||||
* Copyright 2015 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const common = require('@google-cloud/common');
|
||||
const util = require('util');
|
||||
|
||||
/**
|
||||
* Create a channel object to interact with a Cloud Storage channel.
|
||||
*
|
||||
* @see [Object Change Notification]{@link https://cloud.google.com/storage/docs/object-change-notification}
|
||||
*
|
||||
* @class
|
||||
*
|
||||
* @param {Storage} storage A {@link Storage} instance.
|
||||
* @param {string} id The ID of the channel.
|
||||
* @param {string} resourceId The resource ID of the channel.
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const channel = storage.channel('id', 'resource-id');
|
||||
*/
|
||||
function Channel(storage, id, resourceId) {
|
||||
const config = {
|
||||
parent: storage,
|
||||
baseUrl: '/channels',
|
||||
|
||||
// An ID shouldn't be included in the API requests.
|
||||
// RE: https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145
|
||||
id: '',
|
||||
|
||||
methods: {
|
||||
// Only need `request`.
|
||||
},
|
||||
};
|
||||
|
||||
common.ServiceObject.call(this, config);
|
||||
|
||||
this.metadata.id = id;
|
||||
this.metadata.resourceId = resourceId;
|
||||
}
|
||||
|
||||
util.inherits(Channel, common.ServiceObject);
|
||||
|
||||
/**
|
||||
* @typedef {array} StopResponse
|
||||
* @property {object} 0 The full API response.
|
||||
*/
|
||||
/**
|
||||
* @callback StopCallback
|
||||
* @param {?Error} err Request error, if any.
|
||||
* @param {object} apiResponse The full API response.
|
||||
*/
|
||||
/**
|
||||
* Stop this channel.
|
||||
*
|
||||
* @param {StopCallback} [callback] Callback function.
|
||||
* @returns {Promise<StopResponse>}
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const channel = storage.channel('id', 'resource-id');
|
||||
* channel.stop(function(err, apiResponse) {
|
||||
* if (!err) {
|
||||
* // Channel stopped successfully.
|
||||
* }
|
||||
* });
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* channel.stop().then(function(data) {
|
||||
* const apiResponse = data[0];
|
||||
* });
|
||||
*/
|
||||
Channel.prototype.stop = function(callback) {
|
||||
callback = callback || common.util.noop;
|
||||
|
||||
this.request(
|
||||
{
|
||||
method: 'POST',
|
||||
uri: '/stop',
|
||||
json: this.metadata,
|
||||
},
|
||||
function(err, apiResponse) {
|
||||
callback(err, apiResponse);
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
/*! Developer Documentation
|
||||
*
|
||||
* All async methods (except for streams) will return a Promise in the event
|
||||
* that a callback is omitted.
|
||||
*/
|
||||
common.util.promisifyAll(Channel);
|
||||
|
||||
/**
|
||||
* Reference to the {@link Channel} class.
|
||||
* @name module:@google-cloud/storage.Channel
|
||||
* @see Channel
|
||||
*/
|
||||
module.exports = Channel;
|
2475
express-server/node_modules/@google-cloud/storage/src/file.js
generated
vendored
Normal file
2475
express-server/node_modules/@google-cloud/storage/src/file.js
generated
vendored
Normal file
File diff suppressed because it is too large
300
express-server/node_modules/@google-cloud/storage/src/iam.js
generated
vendored
Normal file
300
express-server/node_modules/@google-cloud/storage/src/iam.js
generated
vendored
Normal file
@ -0,0 +1,300 @@
|
||||
/*!
|
||||
* Copyright 2017 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const arrify = require('arrify');
|
||||
const common = require('@google-cloud/common');
|
||||
const extend = require('extend');
|
||||
const is = require('is');
|
||||
|
||||
/**
|
||||
* Get and set IAM policies for your Cloud Storage bucket.
|
||||
*
|
||||
* @see [Cloud Storage IAM Management](https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management)
|
||||
* @see [Granting, Changing, and Revoking Access](https://cloud.google.com/iam/docs/granting-changing-revoking-access)
|
||||
* @see [IAM Roles](https://cloud.google.com/iam/docs/understanding-roles)
|
||||
*
|
||||
* @constructor Iam
|
||||
* @mixin
|
||||
*
|
||||
* @param {Bucket} bucket The parent instance.
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const bucket = storage.bucket('my-bucket');
|
||||
* // bucket.iam
|
||||
*/
|
||||
function Iam(bucket) {
|
||||
this.request_ = bucket.request.bind(bucket);
|
||||
this.resourceId_ = 'buckets/' + bucket.id;
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef {object} GetPolicyRequest
|
||||
* @property {string} userProject The ID of the project which will be billed for
|
||||
* the request.
|
||||
*/
|
||||
/**
|
||||
* @typedef {array} GetPolicyResponse
|
||||
* @property {object} 0 The policy.
|
||||
* @property {object} 1 The full API response.
|
||||
*/
|
||||
/**
|
||||
* @callback GetPolicyCallback
|
||||
* @param {?Error} err Request error, if any.
|
||||
* @param {object} policy The policy.
|
||||
* @param {object} apiResponse The full API response.
|
||||
*/
|
||||
/**
|
||||
* Get the IAM policy.
|
||||
*
|
||||
* @param {GetPolicyRequest} [options] Request options.
|
||||
* @param {GetPolicyCallback} [callback] Callback function.
|
||||
* @returns {Promise<GetPolicyResponse>}
|
||||
*
|
||||
* @see [Buckets: getIamPolicy API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy}
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const bucket = storage.bucket('my-bucket');
|
||||
* bucket.iam.getPolicy(function(err, policy, apiResponse) {});
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* bucket.iam.getPolicy().then(function(data) {
|
||||
* const policy = data[0];
|
||||
* const apiResponse = data[1];
|
||||
* });
|
||||
*
|
||||
* @example <caption>include:samples/iam.js</caption>
|
||||
* region_tag:storage_view_bucket_iam_members
|
||||
* Example of retrieving a bucket's IAM policy:
|
||||
*/
|
||||
Iam.prototype.getPolicy = function(options, callback) {
|
||||
if (is.fn(options)) {
|
||||
callback = options;
|
||||
options = {};
|
||||
}
|
||||
|
||||
this.request_(
|
||||
{
|
||||
uri: '/iam',
|
||||
qs: options,
|
||||
},
|
||||
callback
|
||||
);
|
||||
};
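/*
 * A short sketch of reading the policy returned above, assuming a `bucket`
 * created as in the example (`storage.bucket('my-bucket')`) and the
 * {role, members} binding shape used by setPolicy below:
 *
 *   bucket.iam.getPolicy().then(function(data) {
 *     const policy = data[0];
 *     (policy.bindings || []).forEach(function(binding) {
 *       console.log(binding.role, binding.members);
 *     });
 *   });
 */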
|
||||
|
||||
/**
|
||||
* @typedef {array} SetPolicyResponse
|
||||
* @property {object} 0 The policy.
|
||||
* @property {object} 1 The full API response.
|
||||
*/
|
||||
/**
|
||||
* @callback SetPolicyCallback
|
||||
* @param {?Error} err Request error, if any.
|
||||
* @param {object} policy The policy.
|
||||
* @param {object} apiResponse The full API response.
|
||||
*/
|
||||
/**
|
||||
* Set the IAM policy.
|
||||
*
|
||||
* @throws {Error} If no policy is provided.
|
||||
*
|
||||
* @param {object} policy The policy.
|
||||
* @param {array} policy.bindings Bindings associate members with roles.
|
||||
* @param {string} [policy.etag] Etags are used to perform a read-modify-write.
|
||||
* @param {object} [options] Configuration object.
|
||||
* @param {string} [options.userProject] The ID of the project which will be
|
||||
* billed for the request.
|
||||
* @param {SetPolicyCallback} callback Callback function.
|
||||
* @returns {Promise<SetPolicyResponse>}
|
||||
*
|
||||
* @see [Buckets: setIamPolicy API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy}
|
||||
* @see [IAM Roles](https://cloud.google.com/iam/docs/understanding-roles)
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const bucket = storage.bucket('my-bucket');
|
||||
*
|
||||
* const myPolicy = {
|
||||
* bindings: [
|
||||
* {
|
||||
* role: 'roles/storage.admin',
|
||||
* members: ['serviceAccount:myotherproject@appspot.gserviceaccount.com']
|
||||
* }
|
||||
* ]
|
||||
* };
|
||||
*
|
||||
* bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {});
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* bucket.iam.setPolicy(myPolicy).then(function(data) {
|
||||
* const policy = data[0];
|
||||
* const apiResponse = data[1];
|
||||
* });
|
||||
*
|
||||
* @example <caption>include:samples/iam.js</caption>
|
||||
* region_tag:storage_add_bucket_iam_member
|
||||
* Example of adding to a bucket's IAM policy:
|
||||
*
|
||||
* @example <caption>include:samples/iam.js</caption>
|
||||
* region_tag:storage_remove_bucket_iam_member
|
||||
* Example of removing from a bucket's IAM policy:
|
||||
*/
|
||||
Iam.prototype.setPolicy = function(policy, options, callback) {
|
||||
if (!is.object(policy)) {
|
||||
throw new Error('A policy object is required.');
|
||||
}
|
||||
|
||||
if (is.fn(options)) {
|
||||
callback = options;
|
||||
options = {};
|
||||
}
|
||||
|
||||
this.request_(
|
||||
{
|
||||
method: 'PUT',
|
||||
uri: '/iam',
|
||||
json: extend(
|
||||
{
|
||||
resourceId: this.resourceId_,
|
||||
},
|
||||
policy
|
||||
),
|
||||
qs: options,
|
||||
},
|
||||
callback
|
||||
);
|
||||
};
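/*
 * A read-modify-write sketch using the etag mentioned above. The bucket,
 * role, and member below are placeholders:
 *
 *   bucket.iam.getPolicy().then(function(data) {
 *     const policy = data[0];
 *
 *     policy.bindings = policy.bindings || [];
 *     policy.bindings.push({
 *       role: 'roles/storage.objectViewer',
 *       members: ['user:jane@example.com'],
 *     });
 *
 *     // The policy still carries the etag returned by getPolicy, so the
 *     // write is rejected if the policy changed in the meantime.
 *     return bucket.iam.setPolicy(policy);
 *   });
 */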
|
||||
|
||||
/**
|
||||
* @typedef {array} TestIamPermissionsResponse
|
||||
* @property {object} 0 A hash of the requested permissions, with boolean values indicating whether the caller has each one.
|
||||
* @property {object} 1 The full API response.
|
||||
*/
|
||||
/**
|
||||
* @callback TestIamPermissionsCallback
|
||||
* @param {?Error} err Request error, if any.
|
||||
* @param {object} permissions A hash of the requested permissions, with boolean values indicating whether the caller has each one.
|
||||
* @param {object} apiResponse The full API response.
|
||||
*/
|
||||
/**
|
||||
* Test a set of permissions for a resource.
|
||||
*
|
||||
* @throws {Error} If permissions are not provided.
|
||||
*
|
||||
* @param {string|string[]} permissions The permission(s) to test for.
|
||||
* @param {object} [options] Configuration object.
|
||||
* @param {string} [options.userProject] The ID of the project which will be
|
||||
* billed for the request.
|
||||
* @param {TestIamPermissionsCallback} [callback] Callback function.
|
||||
* @returns {Promise<TestIamPermissionsResponse>}
|
||||
*
|
||||
* @see [Buckets: testIamPermissions API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions}
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const bucket = storage.bucket('my-bucket');
|
||||
*
|
||||
* //-
|
||||
* // Test a single permission.
|
||||
* //-
|
||||
* const test = 'storage.buckets.delete';
|
||||
*
|
||||
* bucket.iam.testPermissions(test, function(err, permissions, apiResponse) {
|
||||
* console.log(permissions);
|
||||
* // {
|
||||
* // "storage.buckets.delete": true
|
||||
* // }
|
||||
* });
|
||||
*
|
||||
* //-
|
||||
* // Test several permissions at once.
|
||||
* //-
|
||||
* const tests = [
|
||||
* 'storage.buckets.delete',
|
||||
* 'storage.buckets.get'
|
||||
* ];
|
||||
*
|
||||
* bucket.iam.testPermissions(tests, function(err, permissions) {
|
||||
* console.log(permissions);
|
||||
* // {
|
||||
* // "storage.buckets.delete": false,
|
||||
* // "storage.buckets.get": true
|
||||
* // }
|
||||
* });
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* bucket.iam.testPermissions(test).then(function(data) {
|
||||
* const permissions = data[0];
|
||||
* const apiResponse = data[1];
|
||||
* });
|
||||
*/
|
||||
Iam.prototype.testPermissions = function(permissions, options, callback) {
|
||||
if (!is.array(permissions) && !is.string(permissions)) {
|
||||
throw new Error('Permissions are required.');
|
||||
}
|
||||
|
||||
if (is.fn(options)) {
|
||||
callback = options;
|
||||
options = {};
|
||||
}
|
||||
|
||||
options = extend(
|
||||
{
|
||||
permissions: arrify(permissions),
|
||||
},
|
||||
options
|
||||
);
|
||||
|
||||
this.request_(
|
||||
{
|
||||
uri: '/iam/testPermissions',
|
||||
qs: options,
|
||||
useQuerystring: true,
|
||||
},
|
||||
function(err, resp) {
|
||||
if (err) {
|
||||
callback(err, null, resp);
|
||||
return;
|
||||
}
|
||||
|
||||
const availablePermissions = arrify(resp.permissions);
|
||||
|
||||
const permissionsHash = permissions.reduce(function(acc, permission) {
|
||||
acc[permission] = availablePermissions.indexOf(permission) > -1;
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
callback(null, permissionsHash, resp);
|
||||
}
|
||||
);
|
||||
};
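/*
 * A sketch of acting on the permissions hash built above; the permission
 * name and `bucket` are placeholders:
 *
 *   bucket.iam.testPermissions('storage.buckets.delete', function(err, permissions) {
 *     if (err) {
 *       return;
 *     }
 *
 *     if (permissions['storage.buckets.delete']) {
 *       // The caller may delete this bucket.
 *     }
 *   });
 */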
|
||||
|
||||
/*! Developer Documentation
|
||||
*
|
||||
* All async methods (except for streams) will return a Promise in the event
|
||||
* that a callback is omitted.
|
||||
*/
|
||||
common.util.promisifyAll(Iam);
|
||||
|
||||
module.exports = Iam;
|
591
express-server/node_modules/@google-cloud/storage/src/index.js
generated
vendored
Normal file
@ -0,0 +1,591 @@
|
||||
/**
|
||||
* Copyright 2014-2017 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const arrify = require('arrify');
|
||||
const common = require('@google-cloud/common');
|
||||
const extend = require('extend');
|
||||
const util = require('util');
|
||||
|
||||
const Bucket = require('./bucket.js');
|
||||
const Channel = require('./channel.js');
|
||||
const File = require('./file.js');
|
||||
|
||||
/**
|
||||
* @typedef {object} ClientConfig
|
||||
* @property {string} [projectId] The project ID from the Google Developer's
|
||||
* Console, e.g. 'grape-spaceship-123'. We will also check the environment
|
||||
* variable `GCLOUD_PROJECT` for your project ID. If your app is running in
|
||||
* an environment which supports {@link https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application Application Default Credentials},
|
||||
* your project ID will be detected automatically.
|
||||
* @property {string} [keyFilename] Full path to a .json, .pem, or .p12 key
|
||||
* downloaded from the Google Developers Console. If you provide a path to a
|
||||
* JSON file, the `projectId` option above is not necessary. NOTE: .pem and
|
||||
* .p12 require you to specify the `email` option as well.
|
||||
* @property {string} [email] Account email address. Required when using a .pem
|
||||
* or .p12 keyFilename.
|
||||
* @property {object} [credentials] Credentials object.
|
||||
* @property {string} [credentials.client_email]
|
||||
* @property {string} [credentials.private_key]
|
||||
* @property {boolean} [autoRetry=true] Automatically retry requests if the
|
||||
* response is related to rate limits or certain intermittent server errors.
|
||||
* We will exponentially backoff subsequent requests by default.
|
||||
* @property {number} [maxRetries=3] Maximum number of automatic retries
|
||||
* attempted before returning the error.
|
||||
* @property {Constructor} [promise] Custom promise module to use instead of
|
||||
* native Promises.
|
||||
*/
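/*
 * A configuration sketch using the `credentials` option above instead of
 * `keyFilename`; the project ID, email, and key are placeholders:
 *
 *   const Storage = require('@google-cloud/storage');
 *   const storage = new Storage({
 *     projectId: 'your-project-id',
 *     credentials: {
 *       client_email: 'service-account@your-project-id.iam.gserviceaccount.com',
 *       private_key: '-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n',
 *     },
 *   });
 */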
|
||||
|
||||
/*! Developer Documentation
|
||||
*
|
||||
* Invoke this method to create a new Storage object bound with pre-determined
|
||||
* configuration options. For each object that can be created (e.g., a bucket),
|
||||
* there is an equivalent static and instance method. While they are classes,
|
||||
* they can be instantiated without use of the `new` keyword.
|
||||
*/
|
||||
/**
|
||||
* <h4>ACLs</h4>
|
||||
* Cloud Storage uses access control lists (ACLs) to manage object and
|
||||
* bucket access. ACLs are the mechanism you use to share files with other users
|
||||
* and allow other users to access your buckets and files.
|
||||
*
|
||||
* To learn more about ACLs, read this overview on
|
||||
* [Access Control](https://cloud.google.com/storage/docs/access-control).
|
||||
*
|
||||
* @see [Cloud Storage overview]{@link https://cloud.google.com/storage/docs/overview}
|
||||
* @see [Access Control]{@link https://cloud.google.com/storage/docs/access-control}
|
||||
*
|
||||
* @class
|
||||
* @hideconstructor
|
||||
*
|
||||
* @example <caption>Create a client that uses Application Default Credentials (ADC)</caption>
|
||||
* const Storage = require('@google-cloud/storage');
|
||||
* const storage = new Storage();
|
||||
*
|
||||
* @example <caption>Create a client with explicit credentials</caption>
|
||||
* const Storage = require('@google-cloud/storage');
|
||||
* const storage = new Storage({
|
||||
* projectId: 'your-project-id',
|
||||
* keyFilename: '/path/to/keyfile.json'
|
||||
* });
|
||||
*
|
||||
* @param {ClientConfig} [options] Configuration options.
|
||||
*/
|
||||
function Storage(options) {
|
||||
if (!(this instanceof Storage)) {
|
||||
return new Storage(options);
|
||||
}
|
||||
|
||||
options = common.util.normalizeArguments(this, options);
|
||||
|
||||
const config = {
|
||||
baseUrl: 'https://www.googleapis.com/storage/v1',
|
||||
projectIdRequired: false,
|
||||
scopes: [
|
||||
'https://www.googleapis.com/auth/iam',
|
||||
'https://www.googleapis.com/auth/cloud-platform',
|
||||
'https://www.googleapis.com/auth/devstorage.full_control',
|
||||
],
|
||||
packageJson: require('../package.json'),
|
||||
};
|
||||
|
||||
common.Service.call(this, config, options);
|
||||
}
|
||||
|
||||
util.inherits(Storage, common.Service);
|
||||
|
||||
/**
|
||||
* Cloud Storage uses access control lists (ACLs) to manage object and
|
||||
* bucket access. ACLs are the mechanism you use to share objects with other
|
||||
* users and allow other users to access your buckets and objects.
|
||||
*
|
||||
* This object provides constants to refer to the three permission levels that
|
||||
* can be granted to an entity:
|
||||
*
|
||||
* - `gcs.acl.OWNER_ROLE` - ("OWNER")
|
||||
* - `gcs.acl.READER_ROLE` - ("READER")
|
||||
* - `gcs.acl.WRITER_ROLE` - ("WRITER")
|
||||
*
|
||||
* @see [About Access Control Lists]{@link https://cloud.google.com/storage/docs/access-control/lists}
|
||||
*
|
||||
* @name Storage.acl
|
||||
* @type {object}
|
||||
* @property {string} OWNER_ROLE
|
||||
* @property {string} READER_ROLE
|
||||
* @property {string} WRITER_ROLE
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const albums = storage.bucket('albums');
|
||||
*
|
||||
* //-
|
||||
* // Make all of the files currently in a bucket publicly readable.
|
||||
* //-
|
||||
* const options = {
|
||||
* entity: 'allUsers',
|
||||
* role: storage.acl.READER_ROLE
|
||||
* };
|
||||
*
|
||||
* albums.acl.add(options, function(err, aclObject) {});
|
||||
*
|
||||
* //-
|
||||
* // Make any new objects added to a bucket publicly readable.
|
||||
* //-
|
||||
* albums.acl.default.add(options, function(err, aclObject) {});
|
||||
*
|
||||
* //-
|
||||
* // Grant a user ownership permissions to a bucket.
|
||||
* //-
|
||||
* albums.acl.add({
|
||||
* entity: 'user-useremail@example.com',
|
||||
* role: storage.acl.OWNER_ROLE
|
||||
* }, function(err, aclObject) {});
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* albums.acl.add(options).then(function(data) {
|
||||
* const aclObject = data[0];
|
||||
* const apiResponse = data[1];
|
||||
* });
|
||||
*/
|
||||
Storage.acl = {
|
||||
OWNER_ROLE: 'OWNER',
|
||||
READER_ROLE: 'READER',
|
||||
WRITER_ROLE: 'WRITER',
|
||||
};
|
||||
|
||||
/**
|
||||
* Reference to {@link Storage.acl}.
|
||||
*
|
||||
* @name Storage#acl
|
||||
* @see Storage.acl
|
||||
*/
|
||||
Storage.prototype.acl = Storage.acl;
|
||||
|
||||
/**
|
||||
* Get a reference to a Cloud Storage bucket.
|
||||
*
|
||||
* @param {string} name Name of the bucket.
|
||||
* @param {object} [options] Configuration object.
|
||||
* @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to
|
||||
* encrypt objects inserted into this bucket, if no encryption method is
|
||||
* specified.
|
||||
* @param {string} [options.userProject] User project to be billed for all
|
||||
* requests made from this Bucket object.
|
||||
* @returns {Bucket}
|
||||
* @see Bucket
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const albums = storage.bucket('albums');
|
||||
* const photos = storage.bucket('photos');
|
||||
*/
|
||||
Storage.prototype.bucket = function(name, options) {
|
||||
if (!name) {
|
||||
throw new Error('A bucket name is needed to use Cloud Storage.');
|
||||
}
|
||||
|
||||
return new Bucket(this, name, options);
|
||||
};
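/*
 * A sketch of the optional second argument described above; the KMS key name
 * and billing project are placeholders:
 *
 *   const logs = storage.bucket('logs', {
 *     kmsKeyName:
 *       'projects/my-project/locations/us/keyRings/my-keyring/cryptoKeys/my-key',
 *     userProject: 'billed-project-id',
 *   });
 */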
|
||||
|
||||
/**
|
||||
* Reference a channel to receive notifications about changes to your bucket.
|
||||
*
|
||||
* @param {string} id The ID of the channel.
|
||||
* @param {string} resourceId The resource ID of the channel.
|
||||
* @returns {Channel}
|
||||
* @see Channel
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const channel = storage.channel('id', 'resource-id');
|
||||
*/
|
||||
Storage.prototype.channel = function(id, resourceId) {
|
||||
return new Channel(this, id, resourceId);
|
||||
};
|
||||
|
||||
/**
|
||||
* Metadata to set for the bucket.
|
||||
*
|
||||
* @typedef {object} CreateBucketRequest
|
||||
* @property {boolean} [coldline=false] Specify the storage class as Coldline.
|
||||
* @property {boolean} [dra=false] Specify the storage class as Durable Reduced
|
||||
* Availability.
|
||||
* @property {boolean} [multiRegional=false] Specify the storage class as
|
||||
* Multi-Regional.
|
||||
* @property {boolean} [nearline=false] Specify the storage class as Nearline.
|
||||
* @property {boolean} [regional=false] Specify the storage class as Regional.
|
||||
* @property {boolean} [requesterPays=false] **Early Access Testers Only**
|
||||
* Force the use of the User Project metadata field to assign operational
|
||||
* costs when an operation is made on a Bucket and its objects.
|
||||
* @property {string} [userProject] The ID of the project which will be billed
|
||||
* for the request.
|
||||
*/
|
||||
/**
|
||||
* @typedef {array} CreateBucketResponse
|
||||
* @property {Bucket} 0 The new {@link Bucket}.
|
||||
* @property {object} 1 The full API response.
|
||||
*/
|
||||
/**
|
||||
* @callback CreateBucketCallback
|
||||
* @param {?Error} err Request error, if any.
|
||||
* @param {Bucket} bucket The new {@link Bucket}.
|
||||
* @param {object} apiResponse The full API response.
|
||||
*/
|
||||
/**
|
||||
* Create a bucket.
|
||||
*
|
||||
* Cloud Storage uses a flat namespace, so you can't create a bucket with
|
||||
* a name that is already in use. For more information, see
|
||||
* [Bucket Naming Guidelines](https://cloud.google.com/storage/docs/bucketnaming.html#requirements).
|
||||
*
|
||||
* @see [Buckets: insert API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert}
|
||||
* @see [Storage Classes]{@link https://cloud.google.com/storage/docs/storage-classes}
|
||||
*
|
||||
* @param {string} name Name of the bucket to create.
|
||||
* @param {CreateBucketRequest} [metadata] Metadata to set for the bucket.
|
||||
* @param {CreateBucketCallback} [callback] Callback function.
|
||||
* @returns {Promise<CreateBucketResponse>}
|
||||
* @throws {Error} If a name is not provided.
|
||||
* @see Bucket#create
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const callback = function(err, bucket, apiResponse) {
|
||||
* // `bucket` is a Bucket object.
|
||||
* };
|
||||
*
|
||||
* storage.createBucket('new-bucket', callback);
|
||||
*
|
||||
* //-
|
||||
* // Create a bucket in a specific location and region. <em>See the <a
|
||||
* // href="https://cloud.google.com/storage/docs/json_api/v1/buckets/insert">
|
||||
* // Official JSON API docs</a> for complete details on the `location` option.
|
||||
* // </em>
|
||||
* //-
|
||||
* const metadata = {
|
||||
* location: 'US-CENTRAL1',
|
||||
* regional: true
|
||||
* };
|
||||
*
|
||||
* storage.createBucket('new-bucket', metadata, callback);
|
||||
*
|
||||
* //-
|
||||
* // Enable versioning on a new bucket.
|
||||
* //-
|
||||
* const metadata = {
|
||||
* versioning: {
|
||||
* enabled: true
|
||||
* }
|
||||
* };
|
||||
*
|
||||
* storage.createBucket('new-bucket', metadata, callback);
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* storage.createBucket('new-bucket').then(function(data) {
|
||||
* const bucket = data[0];
|
||||
* const apiResponse = data[1];
|
||||
* });
|
||||
*
|
||||
* @example <caption>include:samples/buckets.js</caption>
|
||||
* region_tag:storage_create_bucket
|
||||
* Another example:
|
||||
*/
|
||||
Storage.prototype.createBucket = function(name, metadata, callback) {
|
||||
const self = this;
|
||||
|
||||
if (!name) {
|
||||
throw new Error('A name is required to create a bucket.');
|
||||
}
|
||||
|
||||
if (!callback) {
|
||||
callback = metadata;
|
||||
metadata = {};
|
||||
}
|
||||
|
||||
const body = extend({}, metadata, {
|
||||
name: name,
|
||||
});
|
||||
|
||||
const storageClasses = {
|
||||
coldline: 'COLDLINE',
|
||||
dra: 'DURABLE_REDUCED_AVAILABILITY',
|
||||
multiRegional: 'MULTI_REGIONAL',
|
||||
nearline: 'NEARLINE',
|
||||
regional: 'REGIONAL',
|
||||
};
|
||||
|
||||
Object.keys(storageClasses).forEach(function(storageClass) {
|
||||
if (body[storageClass]) {
|
||||
body.storageClass = storageClasses[storageClass];
|
||||
delete body[storageClass];
|
||||
}
|
||||
});
|
||||
|
||||
if (body.requesterPays) {
|
||||
body.billing = {
|
||||
requesterPays: body.requesterPays,
|
||||
};
|
||||
delete body.requesterPays;
|
||||
}
|
||||
|
||||
const query = {
|
||||
project: this.projectId,
|
||||
};
|
||||
|
||||
if (body.userProject) {
|
||||
query.userProject = body.userProject;
|
||||
delete body.userProject;
|
||||
}
|
||||
|
||||
this.request(
|
||||
{
|
||||
method: 'POST',
|
||||
uri: '/b',
|
||||
qs: query,
|
||||
json: body,
|
||||
},
|
||||
function(err, resp) {
|
||||
if (err) {
|
||||
callback(err, null, resp);
|
||||
return;
|
||||
}
|
||||
|
||||
const bucket = self.bucket(name);
|
||||
bucket.metadata = resp;
|
||||
|
||||
callback(null, bucket, resp);
|
||||
}
|
||||
);
|
||||
};
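/*
 * A sketch of the storage-class shorthand handled above: `nearline: true` is
 * rewritten to `storageClass: 'NEARLINE'` before the request is sent. The
 * bucket name and location are placeholders:
 *
 *   storage.createBucket('archive-bucket', {
 *     location: 'US',
 *     nearline: true,
 *   }, function(err, bucket, apiResponse) {
 *     // On success, `bucket.metadata.storageClass` should be 'NEARLINE'.
 *   });
 */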
|
||||
|
||||
/**
|
||||
* Query object for listing buckets.
|
||||
*
|
||||
* @typedef {object} GetBucketsRequest
|
||||
* @property {boolean} [autoPaginate=true] Have pagination handled
|
||||
* automatically.
|
||||
* @property {number} [maxApiCalls] Maximum number of API calls to make.
|
||||
* @property {number} [maxResults] Maximum number of items plus prefixes to
|
||||
* return.
|
||||
* @property {string} [pageToken] A previously-returned page token
|
||||
* representing part of the larger set of results to view.
|
||||
* @property {string} [userProject] The ID of the project which will be billed
|
||||
* for the request.
|
||||
*/
|
||||
/**
|
||||
* @typedef {array} GetBucketsResponse
|
||||
* @property {Bucket[]} 0 Array of {@link Bucket} instances.
|
||||
*/
|
||||
/**
|
||||
* @callback GetBucketsCallback
|
||||
* @param {?Error} err Request error, if any.
|
||||
* @param {Bucket[]} buckets Array of {@link Bucket} instances.
|
||||
*/
|
||||
/**
|
||||
* Get Bucket objects for all of the buckets in your project.
|
||||
*
|
||||
* @see [Buckets: list API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list}
|
||||
*
|
||||
* @param {GetBucketsRequest} [query] Query object for listing buckets.
|
||||
* @param {GetBucketsCallback} [callback] Callback function.
|
||||
* @returns {Promise<GetBucketsResponse>}
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* storage.getBuckets(function(err, buckets) {
|
||||
* if (!err) {
|
||||
* // buckets is an array of Bucket objects.
|
||||
* }
|
||||
* });
|
||||
*
|
||||
* //-
|
||||
* // To control how many API requests are made and page through the results
|
||||
* // manually, set `autoPaginate` to `false`.
|
||||
* //-
|
||||
* const callback = function(err, buckets, nextQuery, apiResponse) {
|
||||
* if (nextQuery) {
|
||||
* // More results exist.
|
||||
* storage.getBuckets(nextQuery, callback);
|
||||
* }
|
||||
*
|
||||
* // The `metadata` property is populated for you with the metadata at the
|
||||
* // time of fetching.
|
||||
* buckets[0].metadata;
|
||||
*
|
||||
* // However, in cases where you are concerned the metadata could have
|
||||
* // changed, use the `getMetadata` method.
|
||||
* buckets[0].getMetadata(function(err, metadata, apiResponse) {});
|
||||
* };
|
||||
*
|
||||
* storage.getBuckets({
|
||||
* autoPaginate: false
|
||||
* }, callback);
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* storage.getBuckets().then(function(data) {
|
||||
* const buckets = data[0];
|
||||
* });
|
||||
*
|
||||
* @example <caption>include:samples/buckets.js</caption>
|
||||
* region_tag:storage_list_buckets
|
||||
* Another example:
|
||||
*/
|
||||
Storage.prototype.getBuckets = function(query, callback) {
|
||||
const self = this;
|
||||
|
||||
if (!callback) {
|
||||
callback = query;
|
||||
query = {};
|
||||
}
|
||||
|
||||
query.project = query.project || this.projectId;
|
||||
|
||||
this.request(
|
||||
{
|
||||
uri: '/b',
|
||||
qs: query,
|
||||
},
|
||||
function(err, resp) {
|
||||
if (err) {
|
||||
callback(err, null, null, resp);
|
||||
return;
|
||||
}
|
||||
|
||||
const buckets = arrify(resp.items).map(function(bucket) {
|
||||
const bucketInstance = self.bucket(bucket.id);
|
||||
bucketInstance.metadata = bucket;
|
||||
return bucketInstance;
|
||||
});
|
||||
|
||||
let nextQuery = null;
|
||||
if (resp.nextPageToken) {
|
||||
nextQuery = extend({}, query, {pageToken: resp.nextPageToken});
|
||||
}
|
||||
|
||||
callback(null, buckets, nextQuery, resp);
|
||||
}
|
||||
);
|
||||
};
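/*
 * A promise-based sketch of listing bucket names, relying on the `metadata`
 * assignment made above for each returned bucket:
 *
 *   storage.getBuckets().then(function(data) {
 *     const names = data[0].map(function(bucket) {
 *       return bucket.metadata.name;
 *     });
 *     console.log(names);
 *   });
 */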
|
||||
|
||||
/**
|
||||
* Get {@link Bucket} objects for all of the buckets in your project as
|
||||
* a readable object stream.
|
||||
*
|
||||
* @method Storage#getBucketsStream
|
||||
* @param {GetBucketsRequest} [query] Query object for listing buckets.
|
||||
* @returns {ReadableStream} A readable stream that emits {@link Bucket} instances.
|
||||
*
|
||||
* @example
|
||||
* storage.getBucketsStream()
|
||||
* .on('error', console.error)
|
||||
* .on('data', function(bucket) {
|
||||
* // bucket is a Bucket object.
|
||||
* })
|
||||
* .on('end', function() {
|
||||
* // All buckets retrieved.
|
||||
* });
|
||||
*
|
||||
* //-
|
||||
* // If you anticipate many results, you can end a stream early to prevent
|
||||
* // unnecessary processing and API requests.
|
||||
* //-
|
||||
* storage.getBucketsStream()
|
||||
* .on('data', function(bucket) {
|
||||
* this.end();
|
||||
* });
|
||||
*/
|
||||
Storage.prototype.getBucketsStream = common.paginator.streamify('getBuckets');
|
||||
|
||||
/*! Developer Documentation
|
||||
*
|
||||
* These methods can be auto-paginated.
|
||||
*/
|
||||
common.paginator.extend(Storage, 'getBuckets');
|
||||
|
||||
/*! Developer Documentation
|
||||
*
|
||||
* All async methods (except for streams) will return a Promise in the event
|
||||
* that a callback is omitted.
|
||||
*/
|
||||
common.util.promisifyAll(Storage, {
|
||||
exclude: ['bucket', 'channel'],
|
||||
});
|
||||
|
||||
/**
|
||||
* {@link Bucket} class.
|
||||
*
|
||||
* @name Storage.Bucket
|
||||
* @see Bucket
|
||||
* @type {Constructor}
|
||||
*/
|
||||
Storage.Bucket = Bucket;
|
||||
|
||||
/**
|
||||
* {@link Channel} class.
|
||||
*
|
||||
* @name Storage.Channel
|
||||
* @see Channel
|
||||
* @type {Constructor}
|
||||
*/
|
||||
Storage.Channel = Channel;
|
||||
|
||||
/**
|
||||
* {@link File} class.
|
||||
*
|
||||
* @name Storage.File
|
||||
* @see File
|
||||
* @type {Constructor}
|
||||
*/
|
||||
Storage.File = File;
|
||||
|
||||
/**
|
||||
* The default export of the `@google-cloud/storage` package is the
|
||||
* {@link Storage} class, which also serves as a factory function which produces
|
||||
* {@link Storage} instances.
|
||||
*
|
||||
* See {@link Storage} and {@link ClientConfig} for client methods and
|
||||
* configuration options.
|
||||
*
|
||||
* @module {Storage} @google-cloud/storage
|
||||
* @alias nodejs-storage
|
||||
*
|
||||
* @example <caption>Install the client library with <a href="https://www.npmjs.com/">npm</a>:</caption>
|
||||
* npm install --save @google-cloud/storage
|
||||
*
|
||||
* @example <caption>Import the client library</caption>
|
||||
* const Storage = require('@google-cloud/storage');
|
||||
*
|
||||
* @example <caption>Create a client that uses <a href="https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application">Application Default Credentials (ADC)</a>:</caption>
|
||||
* const storage = new Storage();
|
||||
*
|
||||
* @example <caption>Create a client with <a href="https://cloud.google.com/docs/authentication/production#obtaining_and_providing_service_account_credentials_manually">explicit credentials</a>:</caption>
|
||||
* const storage = new Storage({
|
||||
* projectId: 'your-project-id',
|
||||
* keyFilename: '/path/to/keyfile.json'
|
||||
* });
|
||||
*
|
||||
* @example <caption>include:samples/quickstart.js</caption>
|
||||
* region_tag:storage_quickstart
|
||||
* Full quickstart example:
|
||||
*/
|
||||
module.exports = Storage;
|
350
express-server/node_modules/@google-cloud/storage/src/notification.js
generated
vendored
Normal file
@ -0,0 +1,350 @@
|
||||
/*!
|
||||
* Copyright 2017 Google Inc. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const common = require('@google-cloud/common');
|
||||
const is = require('is');
|
||||
const util = require('util');
|
||||
|
||||
/**
|
||||
* A Notification object is created from your {@link Bucket} object using
|
||||
* {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub
|
||||
* notifications.
|
||||
*
|
||||
* @see [Cloud Pub/Sub Notifications for Google Cloud Storage]{@link https://cloud.google.com/storage/docs/pubsub-notifications}
|
||||
*
|
||||
* @class
|
||||
* @hideconstructor
|
||||
*
|
||||
* @param {Bucket} bucket The bucket instance this notification is attached to.
|
||||
* @param {string} id The ID of the notification.
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const myBucket = storage.bucket('my-bucket');
|
||||
*
|
||||
* const notification = myBucket.notification('1');
|
||||
*/
|
||||
function Notification(bucket, id) {
|
||||
const methods = {
|
||||
/**
|
||||
* Creates a notification subscription for the bucket.
|
||||
*
|
||||
* @see [Notifications: insert]{@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert}
|
||||
*
|
||||
* @param {Topic|string} topic The Cloud PubSub topic to which this
|
||||
* subscription publishes. If the project ID is omitted, the current
|
||||
* project ID will be used.
|
||||
*
|
||||
* Acceptable formats are:
|
||||
* - `projects/grape-spaceship-123/topics/my-topic`
|
||||
*
|
||||
* - `my-topic`
|
||||
* @param {CreateNotificationRequest} [options] Metadata to set for
|
||||
* the notification.
|
||||
* @param {CreateNotificationCallback} [callback] Callback function.
|
||||
* @returns {Promise<CreateNotificationResponse>}
|
||||
* @throws {Error} If a valid topic is not provided.
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const myBucket = storage.bucket('my-bucket');
|
||||
* const notification = myBucket.notification('1');
|
||||
*
|
||||
* notification.create(function(err, notification, apiResponse) {
|
||||
* if (!err) {
|
||||
* // The notification was created successfully.
|
||||
* }
|
||||
* });
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* notification.create().then(function(data) {
|
||||
* const notification = data[0];
|
||||
* const apiResponse = data[1];
|
||||
* });
|
||||
*/
|
||||
create: true,
|
||||
|
||||
/**
|
||||
* @typedef {array} NotificationExistsResponse
|
||||
* @property {boolean} 0 Whether the notification exists or not.
|
||||
*/
|
||||
/**
|
||||
* @callback NotificationExistsCallback
|
||||
* @param {?Error} err Request error, if any.
|
||||
* @param {boolean} exists Whether the notification exists or not.
|
||||
*/
|
||||
/**
|
||||
* Check if the notification exists.
|
||||
*
|
||||
* @param {NotificationExistsCallback} [callback] Callback function.
|
||||
* @returns {Promise<NotificationExistsResponse>}
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const myBucket = storage.bucket('my-bucket');
|
||||
* const notification = myBucket.notification('1');
|
||||
*
|
||||
* notification.exists(function(err, exists) {});
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* notification.exists().then(function(data) {
|
||||
* const exists = data[0];
|
||||
* });
|
||||
*/
|
||||
exists: true,
|
||||
};
|
||||
|
||||
common.ServiceObject.call(this, {
|
||||
parent: bucket,
|
||||
baseUrl: '/notificationConfigs',
|
||||
id: id.toString(),
|
||||
createMethod: bucket.createNotification.bind(bucket),
|
||||
methods: methods,
|
||||
});
|
||||
}
|
||||
|
||||
util.inherits(Notification, common.ServiceObject);
|
||||
|
||||
/**
|
||||
* @typedef {array} DeleteNotificationResponse
|
||||
* @property {object} 0 The full API response.
|
||||
*/
|
||||
/**
|
||||
* @callback DeleteNotificationCallback
|
||||
* @param {?Error} err Request error, if any.
|
||||
* @param {object} apiResponse The full API response.
|
||||
*/
|
||||
/**
|
||||
* Permanently deletes a notification subscription.
|
||||
*
|
||||
* @see [Notifications: delete API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete}
|
||||
*
|
||||
* @param {object} [options] Configuration options.
|
||||
* @param {string} [options.userProject] The ID of the project which will be
|
||||
* billed for the request.
|
||||
* @param {DeleteNotificationCallback} [callback] Callback function.
|
||||
* @returns {Promise<DeleteNotificationResponse>}
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const myBucket = storage.bucket('my-bucket');
|
||||
* const notification = myBucket.notification('1');
|
||||
*
|
||||
* notification.delete(function(err, apiResponse) {});
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* notification.delete().then(function(data) {
|
||||
* const apiResponse = data[0];
|
||||
* });
|
||||
*
|
||||
* @example <caption>include:samples/notifications.js</caption>
|
||||
* region_tag:storage_delete_notification
|
||||
* Another example:
|
||||
*/
|
||||
Notification.prototype.delete = function(options, callback) {
|
||||
if (is.fn(options)) {
|
||||
callback = options;
|
||||
options = {};
|
||||
}
|
||||
|
||||
this.request(
|
||||
{
|
||||
method: 'DELETE',
|
||||
uri: '',
|
||||
qs: options,
|
||||
},
|
||||
callback || common.util.noop
|
||||
);
|
||||
};
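/*
 * A sketch of the `userProject` option documented above; the project ID is a
 * placeholder:
 *
 *   notification.delete({
 *     userProject: 'billed-project-id',
 *   }, function(err, apiResponse) {});
 */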
|
||||
|
||||
/**
|
||||
* @typedef {array} GetNotificationResponse
|
||||
* @property {Notification} 0 The {@link Notification}
|
||||
* @property {object} 1 The full API response.
|
||||
*/
|
||||
/**
|
||||
* @callback GetNotificationCallback
|
||||
* @param {?Error} err Request error, if any.
|
||||
* @param {Notification} notification The {@link Notification}.
|
||||
* @param {object} apiResponse The full API response.
|
||||
*/
|
||||
/**
|
||||
* Get a notification and its metadata if it exists.
|
||||
*
|
||||
* @see [Notifications: get API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get}
|
||||
*
|
||||
* @param {object} [options] Configuration options.
|
||||
* See {@link Bucket#createNotification} for create options.
|
||||
* @param {boolean} [options.autoCreate] Automatically create the object if
|
||||
* it does not exist. Default: `false`.
|
||||
* @param {string} [options.userProject] The ID of the project which will be
|
||||
* billed for the request.
|
||||
* @param {GetNotificationCallback} [callback] Callback function.
|
||||
* @returns {Promise<GetNotificationResponse>}
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const myBucket = storage.bucket('my-bucket');
|
||||
* const notification = myBucket.notification('1');
|
||||
*
|
||||
* notification.get(function(err, notification, apiResponse) {
|
||||
* // `notification.metadata` has been populated.
|
||||
* });
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* notification.get().then(function(data) {
|
||||
* const notification = data[0];
|
||||
* const apiResponse = data[1];
|
||||
* });
|
||||
*/
|
||||
Notification.prototype.get = function(options, callback) {
|
||||
const self = this;
|
||||
|
||||
if (is.fn(options)) {
|
||||
callback = options;
|
||||
options = {};
|
||||
}
|
||||
|
||||
const autoCreate = options.autoCreate;
|
||||
delete options.autoCreate;
|
||||
|
||||
function onCreate(err, notification, apiResponse) {
|
||||
if (err) {
|
||||
if (err.code === 409) {
|
||||
self.get(options, callback);
|
||||
return;
|
||||
}
|
||||
|
||||
callback(err, null, apiResponse);
|
||||
return;
|
||||
}
|
||||
|
||||
callback(null, notification, apiResponse);
|
||||
}
|
||||
|
||||
this.getMetadata(options, function(err, metadata) {
|
||||
if (err) {
|
||||
if (err.code === 404 && autoCreate) {
|
||||
const args = [];
|
||||
|
||||
if (!is.empty(options)) {
|
||||
args.push(options);
|
||||
}
|
||||
|
||||
args.push(onCreate);
|
||||
|
||||
self.create.apply(self, args);
|
||||
return;
|
||||
}
|
||||
|
||||
callback(err, null, metadata);
|
||||
return;
|
||||
}
|
||||
|
||||
callback(null, self, metadata);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* @typedef {array} GetNotificationMetadataResponse
|
||||
* @property {object} 0 The notification metadata.
|
||||
* @property {object} 1 The full API response.
|
||||
*/
|
||||
/**
|
||||
* @callback GetNotificationMetadataCallback
|
||||
* @param {?Error} err Request error, if any.
|
||||
* @param {object} metadata The notification metadata.
|
||||
* @param {object} apiResponse The full API response.
|
||||
*/
|
||||
/**
|
||||
* Get the notification's metadata.
|
||||
*
|
||||
* @see [Notifications: get API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get}
|
||||
*
|
||||
* @param {object} [options] Configuration options.
|
||||
* @param {string} [options.userProject] The ID of the project which will be
|
||||
* billed for the request.
|
||||
* @param {GetNotificationMetadataCallback} [callback] Callback function.
|
||||
* @returns {Promise<GetNotificationMetadataResponse>}
|
||||
*
|
||||
* @example
|
||||
* const storage = require('@google-cloud/storage')();
|
||||
* const myBucket = storage.bucket('my-bucket');
|
||||
* const notification = myBucket.notification('1');
|
||||
*
|
||||
* notification.getMetadata(function(err, metadata, apiResponse) {});
|
||||
*
|
||||
* //-
|
||||
* // If the callback is omitted, we'll return a Promise.
|
||||
* //-
|
||||
* notification.getMetadata().then(function(data) {
|
||||
* const metadata = data[0];
|
||||
* const apiResponse = data[1];
|
||||
* });
|
||||
*
|
||||
* @example <caption>include:samples/notifications.js</caption>
|
||||
* region_tag:storage_notifications_get_metadata
|
||||
* Another example:
|
||||
*/
|
||||
Notification.prototype.getMetadata = function(options, callback) {
|
||||
const self = this;
|
||||
|
||||
if (is.fn(options)) {
|
||||
callback = options;
|
||||
options = {};
|
||||
}
|
||||
|
||||
this.request(
|
||||
{
|
||||
uri: '',
|
||||
qs: options,
|
||||
},
|
||||
function(err, resp) {
|
||||
if (err) {
|
||||
callback(err, null, resp);
|
||||
return;
|
||||
}
|
||||
|
||||
self.metadata = resp;
|
||||
|
||||
callback(null, self.metadata, resp);
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
/*! Developer Documentation
|
||||
*
|
||||
* All async methods (except for streams) will return a Promise in the event
|
||||
* that a callback is omitted.
|
||||
*/
|
||||
common.util.promisifyAll(Notification);
|
||||
|
||||
/**
|
||||
* Reference to the {@link Notification} class.
|
||||
* @name module:@google-cloud/storage.Notification
|
||||
* @see Notification
|
||||
*/
|
||||
module.exports = Notification;
|