Name: google-cloud-node
Owner: Google Cloud Platform
Description: Google Cloud Client Library for Node.js
Created: 2014-05-06 06:18:00.0
Updated: 2018-01-17 15:48:34.0
Pushed: 2018-01-18 15:21:45.0
Homepage: https://googlecloudplatform.github.io/google-cloud-node/
Size: 23681
Language: JavaScript
GitHub Committers
User | Most Recent Commit | # Commits |
---|
Other Committers
User | Most Recent Commit | # Commits |
---|
Node.js idiomatic client for Google Cloud Platform services.
This client supports the following Google Cloud Platform services at a General Availability (GA) quality level:
This client supports the following Google Cloud Platform services at a Beta quality level:
This client supports the following Google Cloud Platform services at an Alpha quality level:
The following client libraries are deprecated due to the underlying API also being deprecated:
If you need support for other Google APIs, check out the Google Node.js API Client library.
We recommend installing the individual packages that you need, which are provided under the @google-cloud
namespace. For example:
npm install --save @google-cloud/datastore
npm install --save @google-cloud/storage
var config = {
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
};
var datastore = require('@google-cloud/datastore')(config);
var storage = require('@google-cloud/storage')(config);
We also provide a meta-package, google-cloud
, which provides all of the individual APIs. However, in order to keep file size and memory use low, the use of this package is not recommended.
If you want the kitchen sink, however, get it with:
npm install --save google-cloud
var gcloud = require('google-cloud')({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});
var datastore = gcloud.datastore();
var storage = gcloud.storage();
With google-cloud
it's incredibly easy to get authenticated and start using Google's APIs. You can set your credentials on a global basis as well as on a per-API basis. See each individual API section below to see how you can auth on a per-API-basis. This is useful if you want to use different accounts for different Cloud services.
If you are running this client on Google Cloud Platform, we handle authentication for you with no configuration. You just need to make sure that when you set up the GCE instance, you add the correct scopes for the APIs you want to access.
var storage = require('@google-cloud/storage')();
If you're using the google-cloud meta-package:
var gcloud = require('google-cloud');
var storage = gcloud.storage();
...you're good to go! See the next section to get started using the APIs.
If you are not running this client on Google Cloud Platform, you need a Google Developers service account. To create a service account:
Authenticating on a global basis.
var projectId = process.env.GCLOUD_PROJECT; // E.g. 'grape-spaceship-123'
var gcloud = require('google-cloud')({
  projectId: projectId,
  // The path to your key file:
  keyFilename: '/path/to/keyfile.json'
  // Or the contents of the key file:
  credentials: require('./path/to/keyfile.json')
  // For any APIs that accept an API key:
  key: '...'
});
...you're good to go! See the next section to get started using the APIs.
You can also set auth on a per-API-instance basis. The examples below show you how.
Follow the activation instructions to use the Cloud Datastore API with your project.
npm install --save @google-cloud/datastore
var datastore = require('@google-cloud/datastore');
See Authentication.
var datastoreClient = datastore({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});
var key = datastoreClient.key(['Product', 'Computer']);
datastoreClient.get(key, function(err, entity) {
  console.log(err || entity);
});
// Save data to Datastore.
var blogPostData = {
  title: 'How to make the perfect homemade pasta',
  author: 'Andrew Chilton',
  isDraft: true
};
var blogPostKey = datastoreClient.key('BlogPost');
datastoreClient.save({
  key: blogPostKey,
  data: blogPostData
}, function(err) {
  // `blogPostKey` has been updated with an ID so you can do more operations
  // with it, such as an update.
  blogPostData.isDraft = false;
  datastoreClient.save({
    key: blogPostKey,
    data: blogPostData
  }, function(err) {
    if (!err) {
      // The blog post is now published!
    }
  });
});
npm install --save @google-cloud/language
var language = require('@google-cloud/language');
See Authentication.
var languageClient = language({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});
var content = 'Hello, world!';
var type = language.v1.types.Document.Type.PLAIN_TEXT;
var document = {
  content : content,
  type : type
};
languageClient.analyzeSentiment({document: document}).then(function(responses) {
  var response = responses[0];
  // doThingsWith(response)
}).catch(function(err) {
  console.error(err);
});
npm install --save @google-cloud/storage
var storage = require('@google-cloud/storage');
See Authentication.
var fs = require('fs');
var gcs = storage({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});
// Create a new bucket.
gcs.createBucket('my-new-bucket', function(err, bucket) {
  if (!err) {
    // "my-new-bucket" was successfully created.
  }
});
// Reference an existing bucket.
var bucket = gcs.bucket('my-existing-bucket');
// Upload a local file to a new file to be created in your bucket.
bucket.upload('/photos/zoo/zebra.jpg', function(err, file) {
  if (!err) {
    // "zebra.jpg" is now in your bucket.
  }
});
// Download a file from your bucket.
bucket.file('giraffe.jpg').download({
  destination: '/photos/zoo/giraffe.jpg'
}, function(err) {});
// Streams are also supported for reading and writing files.
var remoteReadStream = bucket.file('giraffe.jpg').createReadStream();
var localWriteStream = fs.createWriteStream('/photos/zoo/giraffe.jpg');
remoteReadStream.pipe(localWriteStream);
var localReadStream = fs.createReadStream('/photos/zoo/zebra.jpg');
var remoteWriteStream = bucket.file('zebra.jpg').createWriteStream();
localReadStream.pipe(remoteWriteStream);
npm install --save @google-cloud/translate
var translate = require('@google-cloud/translate');
See Authentication.
var translateClient = translate({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});
// Translate a string of text.
translateClient.translate('Hello', 'es', function(err, translation) {
  if (!err) {
    // translation = 'Hola'
  }
});
// Detect a language from a string of text.
translateClient.detect('Hello', function(err, results) {
  if (!err) {
    // results = {
    //   language: 'en',
    //   confidence: 1,
    //   input: 'Hello'
    // }
  }
});
// Get a list of supported languages.
translateClient.getLanguages(function(err, languages) {
  if (!err) {
    // languages = [
    //   'af',
    //   'ar',
    //   'az',
    //   ...
    // ]
  }
});
npm install --save @google-cloud/logging
var logging = require('@google-cloud/logging');
See Authentication.
var loggingClient = logging({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});
// Create a sink using a Bucket as a destination.
var gcs = storage();
loggingClient.createSink('my-new-sink', {
  destination: gcs.bucket('my-sink')
}, function(err, sink) {});
// Write a critical entry to a log.
var syslog = loggingClient.log('syslog');
var metadata = {
  resource: {
    type: 'gce_instance',
    labels: {
      zone: 'global',
      instance_id: '3'
    }
  }
};
var entry = syslog.entry(metadata, {
  delegate: process.env.user
});
syslog.critical(entry, function(err) {});
// Get all entries in your project.
loggingClient.getEntries(function(err, entries) {
  if (!err) {
    // `entries` contains all of the entries from the logs in your project.
  }
});
npm install --save @google-cloud/firestore
const Firestore = require('@google-cloud/firestore');
See Authentication.
const firestore = new Firestore({
  projectId: 'YOUR_PROJECT_ID',
  keyFilename: '/path/to/keyfile.json',
});
const document = firestore.doc('posts/intro-to-firestore');
// Enter new data into the document.
document.set({
  title: 'Welcome to Firestore',
  body: 'Hello World',
}).then(() => {
  // Document created successfully.
});
// Update an existing document.
document.update({
  body: 'My first Firestore app',
}).then(() => {
  // Document updated successfully.
});
// Read the document.
document.get().then(doc => {
  // Document read successfully.
});
// Delete the document.
document.delete().then(() => {
  // Document deleted successfully.
});
npm install --save @google-cloud/pubsub
var pubsub = require('@google-cloud/pubsub');
See Authentication.
var pubsubClient = pubsub({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});
// Reference a topic that has been previously created.
var topic = pubsubClient.topic('my-topic');
// Publish a message to the topic.
var publisher = topic.publisher();
var message = new Buffer('New message!');
publisher.publish(message, function(err, messageId) {});
// Subscribe to the topic.
topic.createSubscription('subscription-name', function(err, subscription) {
  // Register listeners to start pulling for messages.
  function onError(err) {}
  function onMessage(message) {}
  subscription.on('error', onError);
  subscription.on('message', onMessage);
  // Remove listeners to stop pulling for messages.
  subscription.removeListener('message', onMessage);
  subscription.removeListener('error', onError);
});
npm install --save @google-cloud/spanner
var spanner = require('@google-cloud/spanner');
See Authentication.
var spannerClient = spanner({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});
var instance = spannerClient.instance('my-instance');
var database = instance.database('my-database');
// Create a table.
var schema = `
  CREATE TABLE Singers (
    SingerId INT64 NOT NULL,
    FirstName STRING(1024),
    LastName STRING(1024),
    SingerInfo BYTES(MAX),
  ) PRIMARY KEY(SingerId)
`;
database.createTable(schema, function(err, table, operation) {
  if (err) {
    // Error handling omitted.
  }
  operation
    .on('error', function(err) {})
    .on('complete', function() {
      // Table created successfully.
    });
});
// Insert data into the table.
var table = database.table('Singers');
table.insert({
  SingerId: 10,
  FirstName: 'Eddie',
  LastName: 'Wilson'
}, function(err) {
  if (!err) {
    // Row inserted successfully.
  }
});
// Run a query as a readable object stream.
database.runStream('SELECT * FROM Singers')
  .on('error', function(err) {})
  .on('data', function(row) {
    // row.toJSON() = {
    //   SingerId: 10,
    //   FirstName: 'Eddie',
    //   LastName: 'Wilson'
    // }
  })
  .on('end', function() {
    // All results retrieved.
  });
npm install --save @google-cloud/speech
var speech = require('@google-cloud/speech');
See Authentication.
var speechClient = speech({
  projectId: 'my-project',
  keyFilename: '/path/to/keyfile.json'
});
var languageCode = 'en-US';
var sampleRateHertz = 44100;
var encoding = speech.v1.types.RecognitionConfig.AudioEncoding.FLAC;
var config = {
  languageCode : languageCode,
  sampleRateHertz : sampleRateHertz,
  encoding : encoding
};
var uri = 'gs://gapic-toolkit/hello.flac';
var audio = {
  uri : uri
};
var request = {
  config: config,
  audio: audio
};
speechClient.recognize(request).then(function(responses) {
  var response = responses[0];
  // doThingsWith(response)
}).catch(function(err) {
  console.error(err);
});
npm install --save @google-cloud/vision
var vision = require('@google-cloud/vision');
See Authentication.
var visionClient = vision({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});
var gcsImageUri = 'gs://gapic-toolkit/President_Barack_Obama.jpg';
var source = {
  gcsImageUri : gcsImageUri
};
var image = {
  source : source
};
var type = vision.v1.types.Feature.Type.FACE_DETECTION;
var featuresElement = {
  type : type
};
var features = [featuresElement];
var requestsElement = {
  image : image,
  features : features
};
var requests = [requestsElement];
visionClient.batchAnnotateImages({requests: requests}).then(function(responses) {
  var response = responses[0];
  // doThingsWith(response)
}).catch(function(err) {
  console.error(err);
});
npm install --save @google-cloud/bigquery
var bigquery = require('@google-cloud/bigquery');
See Authentication.
var bigqueryClient = bigquery({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});
// Access an existing dataset and table.
var schoolsDataset = bigqueryClient.dataset('schools');
var schoolsTable = schoolsDataset.table('schoolsData');
// Import data into a table.
schoolsTable.import('/local/file.json', function(err, job) {});
// Get results from a query job.
var job = bigqueryClient.job('job-id');
// Use a callback.
job.getQueryResults(function(err, rows) {});
// Or get the same results as a readable stream.
job.getQueryResults().on('data', function(row) {});
It does not follow the conventions you're familiar with from other parts of our library. A handwritten layer is not yet available.
The example below shows you how to instantiate the generated client. For further documentation, please browse the Monitoring .proto files on GitHub.
npm install --save @google-cloud/monitoring
var monitoring = require('@google-cloud/monitoring');
See Authentication.
var client = monitoring.metric({
  // optional auth parameters.
});
// Iterate over all elements.
var formattedName = client.projectPath(projectId);
client.listMonitoredResourceDescriptors({name: formattedName}).then(function(responses) {
  var resources = responses[0];
  for (var i = 0; i < resources.length; ++i) {
    // doThingsWith(resources[i])
  }
}).catch(function(err) {
  console.error(err);
});
// Or obtain the paged response.
var formattedName = client.projectPath(projectId);
var options = {autoPaginate: false};
function callback(responses) {
  // The actual resources in a response.
  var resources = responses[0];
  // The next request if the response shows there's more responses.
  var nextRequest = responses[1];
  // The actual response object, if necessary.
  // var rawResponse = responses[2];
  for (var i = 0; i < resources.length; ++i) {
    // doThingsWith(resources[i]);
  }
  if (nextRequest) {
    // Fetch the next page.
    return client.listMonitoredResourceDescriptors(nextRequest, options).then(callback);
  }
}
client.listMonitoredResourceDescriptors({name: formattedName}, options)
  .then(callback)
  .catch(function(err) {
    console.error(err);
  });
You may need to create an instance to use the Cloud Bigtable API with your project.
npm install --save @google-cloud/bigtable
var bigtable = require('@google-cloud/bigtable');
See Authentication.
var bigtableClient = bigtable({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});
var instance = bigtableClient.instance('my-instance');
var table = instance.table('prezzy');
table.getRows(function(err, rows) {});
// Update a row in your table.
var row = table.row('alincoln');
row.save('follows:gwashington', 1, function(err) {
  if (err) {
    // Error handling omitted.
  }
  row.get('follows:gwashington', function(err, data) {
    if (err) {
      // Error handling omitted.
    }
    // data = {
    //   follows: {
    //     gwashington: [
    //       {
    //         value: 1
    //       }
    //     ]
    //   }
    // }
  });
});
npm install --save @google-cloud/dns
var dns = require('@google-cloud/dns');
See Authentication.
var dnsClient = dns({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});
// Create a managed zone.
dnsClient.createZone('my-new-zone', {
  dnsName: 'my-domain.com.'
}, function(err, zone) {});
// Reference an existing zone.
var zone = dnsClient.zone('my-existing-zone');
// Create an NS record.
var nsRecord = zone.record('ns', {
  ttl: 86400,
  name: 'my-domain.com.',
  data: 'ns-cloud1.googledomains.com.'
});
zone.addRecords([nsRecord], function(err, change) {});
// Create a zonefile from the records in your zone.
zone.export('/zonefile.zone', function(err) {});
npm install --save @google-cloud/resource
var resource = require('@google-cloud/resource');
See Authentication.
var resourceClient = resource({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});
// Get all of the projects you maintain.
resourceClient.getProjects(function(err, projects) {
  if (!err) {
    // `projects` contains all of your projects.
  }
});
// Get the metadata from your project. (defaults to `grape-spaceship-123`)
var project = resourceClient.project();
project.getMetadata(function(err, metadata) {
  // `metadata` describes your project.
});
npm install --save @google-cloud/compute
var compute = require('@google-cloud/compute');
See Authentication.
var gce = compute({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});
// Create a new VM using the latest OS image of your choice.
var zone = gce.zone('us-central1-a');
var name = 'ubuntu-http';
zone.createVM(name, { os: 'ubuntu' }, function(err, vm, operation) {
  // `operation` lets you check the status of long-running tasks.
  operation
    .on('error', function(err) {})
    .on('running', function(metadata) {})
    .on('complete', function(metadata) {
      // Virtual machine created!
    });
});
npm install --save @google-cloud/prediction
var prediction = require('@google-cloud/prediction');
See Authentication.
var predictionClient = prediction({
  projectId: 'grape-spaceship-123',
  keyFilename: '/path/to/keyfile.json'
});
// Get all of the trained models in your project.
predictionClient.getModels(function(err, models) {
  if (!err) {
    // `models` is an array of Model objects.
  }
});
// Reference an existing trained model.
var model = predictionClient.model('my-existing-model');
// Train a model.
model.train('english', 'Hello from your friends at Google!', function(err) {});
// Query a model.
model.query('Hello', function(err, results) {
  if (!err) {
    // results.winner == 'english'
    // results.scores == [
    //   {
    //     label: 'english',
    //     score: 1
    //   },
    //   {
    //     label: 'spanish',
    //     score: 0
    //   }
    // ]
  }
});
The source code for the Node.js Cloud Debugger Agent lives in a separate repo.
npm install --save @google-cloud/debug-agent
require('@google-cloud/debug-agent').start({ allowExpressions: true });
For more details on API usage, please see the Stackdriver Debug Agent Github Repository.
npm install --save @google-cloud/error-reporting
The module provides automatic uncaught exception handling, manual error reporting, and integration with common frameworks like express and hapi.
var errors = require('@google-cloud/error-reporting')();
See Authentication.
errors.report(new Error('Something broke!'));
For more details on API usage, please see the documentation.
The source code for the Node.js Cloud Trace Agent lives in a separate repo.
npm install --save @google-cloud/trace-agent
var trace = require('@google-cloud/trace-agent').start();
For more details on API usage, please see the Stackdriver Trace Agent Github Repository.
This library follows Semantic Versioning.
Please note it is currently under active development. Any release versioned 0.x.y
is subject to backwards-incompatible changes at any time.
GA: Libraries defined at the GA (general availability) quality level are stable. The code surface will not change in backwards-incompatible ways unless absolutely necessary (e.g. because of critical security issues) or with an extensive deprecation period. Issues and requests against GA libraries are addressed with the highest priority.
Please note that the auto-generated portions of the GA libraries (the ones in modules such as v1
or v2
) are considered to be of Beta quality, even if the libraries that wrap them are GA.
Beta: Libraries defined at the Beta quality level are expected to be mostly stable, while we work towards their release candidate. We will address issues and requests with a higher priority.
Alpha: Libraries defined at the Alpha quality level are still a work-in-progress and are more likely to get backwards-incompatible updates.
Contributions to this library are always welcome and highly encouraged.
See CONTRIBUTING for more information on how to get started.
Apache 2.0 - See LICENSE for more information.