// const chalk = require('chalk');
// const fs = require('fs');
// const path = require('path');
// const common = require(path.join(__dirname, '..', 'common'));
/**
* Get authenticated HTTP request headers that can be used to authorize a request.
*
 * @example
 * // Get authenticated HTTP request headers using Application Default Credentials.
 * node ./bin/cli.js auth headers
 *
 * @return {Promise<object>} Authenticated HTTP request headers.
*/
exports.headers = async () => {
const {GoogleAuth} = require('google-auth-library');
const auth = new GoogleAuth({
scopes: 'https://www.googleapis.com/auth/cloud-platform',
});
// obtain an authenticated client
const client = await auth.getClient();
// Use the client to get authenticated request headers
const headers = await client.getRequestHeaders();
console.log(headers);
return headers;
};
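// Usage sketch (illustrative only, not exported): the headers object returned by
// exports.headers() can be passed to an outbound request. The target URL below is
// an assumption chosen for demonstration; run the snippet inside an async context.
//
//   const headers = await exports.headers();
//   const https = require('https');
//   https.get(
//     'https://cloudresourcemanager.googleapis.com/v1/projects',
//     {headers},
//     (res) => res.pipe(process.stdout)
//   );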
/**
* Generates a signed JSON Web Token using a Google API Service Account.
*
* @see [google.auth.jwt.Credentials]
* (http://google-auth.readthedocs.io/en/latest/reference/google.auth.jwt.html#google.auth.jwt.Credentials)
* @see [Why and when to use API keys](https://cloud.google.com/endpoints/docs/openapi/when-why-api-key)
*
* @example
* // Generates a signed JSON Web Token using a Google API Service Account.
* node ./bin/cli.js auth create jwt <path>
*
 * @param {string} [sub] - Optional subject (user email) to impersonate when signing the JWT.
 * @param {string} [path=process.env.GOOGLE_APPLICATION_CREDENTIALS] - File system path to the Service Account
 * JSON credentials file used for authorization.
 * @return {Promise<string>} Access token obtained by exchanging the signed JSON Web Token.
*/
exports.jwt = async (sub, path = process.env.GOOGLE_APPLICATION_CREDENTIALS) => {
const {JWT} = require('google-auth-library');
let client;
if (path) {
const keys = require(path);
    client = new JWT({
      email: keys.client_email,
      key: keys.private_key,
      subject: sub, // impersonate the optional subject, when one is supplied
      scopes: ['https://www.googleapis.com/auth/cloud-platform'],
    });
} else {
const {GoogleAuth} = require('google-auth-library');
const auth = new GoogleAuth();
const authClient = await auth.getClient();
    client = new JWT({
      email: authClient.email,
      key: authClient.key,
      subject: sub, // impersonate the optional subject, when one is supplied
      scopes: ['https://www.googleapis.com/auth/cloud-platform'],
    });
}
  // Requesting headers for an endpoint forces a token fetch, populating client.credentials.
  const url = `https://dns.googleapis.com/dns/v1/projects/${client.projectId}`;
  await client.getRequestHeaders(url);
console.log(client.credentials.access_token);
return client.credentials.access_token;
};
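// Usage sketch (illustrative only): the access token returned by exports.jwt() can be
// sent as a Bearer token. The DNS endpoint and file path below are assumptions chosen
// for demonstration; run the snippet inside an async context.
//
//   const token = await exports.jwt(null, '/path/to/service-account.json');
//   const https = require('https');
//   https.get(
//     'https://dns.googleapis.com/dns/v1/projects/my-project',
//     {headers: {Authorization: `Bearer ${token}`}},
//     (res) => res.pipe(process.stdout)
//   );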
/**
 * Create a Google ID token to authorize Cloud Run and Identity-Aware Proxy (IAP) requests.
*
* @see [google.auth.jwt.Credentials]
* (http://google-auth.readthedocs.io/en/latest/reference/google.auth.jwt.html#google.auth.jwt.Credentials)
* @see [Why and when to use API keys](https://cloud.google.com/endpoints/docs/openapi/when-why-api-key)
*
* @example
 * // Create a Google ID token using OAuth2 web client credentials.
 * node ./bin/cli.js auth createTokenId <url> <audience>
 *
 * @param {string} sub - Subject to associate with the token request (currently used for logging only).
 * @param {string} path - File system path to the OAuth2 web client ID JSON credentials file used for authorization.
 * @return {Promise<object>} Token info (audience, scopes, and expiry) for the acquired access token.
*/
exports.tokenId = async (sub, path) => {
const {OAuth2Client} = require('google-auth-library');
const http = require('http');
const url = require('url');
const open = require('open');
const destroyer = require('server-destroy');
// load OAuth2 Webserver Client ID credentials from JSON file.
const keys = require(path);
console.log(`BEGIN - create Token Id sub: ${sub} path: ${path}`);
/**
 * Create a new OAuth2Client, and go through the OAuth2 consent
 * workflow. Return the full client to the callback.
 * @param {object} keys - web oauth client_id, client_secret and redirect_uris.
 * @return {Promise<OAuth2Client>} - Authorized OAuth2Client object.
*/
function getAuthenticatedClient(keys) {
return new Promise((resolve, reject) => {
console.log(`BEGIN - getAuthenticatedClient Promise`);
// create an oAuth client to authorize the API call. Secrets are kept in a `keys.json` file,
// which should be downloaded from the Google Developers Console.
console.log(`keys.web.client_id: ${keys.web.client_id}`);
const oAuth2Client = new OAuth2Client(keys.web.client_id, keys.web.client_secret, keys.web.redirect_uris[0]);
// Generate the url that will be used for the consent dialog.
const authorizeUrl = oAuth2Client.generateAuthUrl({
access_type: 'offline',
scope: 'https://www.googleapis.com/auth/userinfo.profile',
});
// Open an http server to accept the oauth callback. In this simple example, the
// only request to our webserver is to /oauth2callback?code=<code>
const server = http
.createServer(async (req, res) => {
try {
if (req.url.indexOf('/oauth2callback') > -1) {
// acquire the code from the querystring, and close the web server.
const qs = new url.URL(req.url, 'http://localhost:3000').searchParams;
const code = qs.get('code');
console.log(`Code is ${code}`);
res.end('Authentication successful! Please return to the console.');
server.destroy();
// Now that we have the code, use that to acquire tokens.
const r = await oAuth2Client.getToken(code);
// Make sure to set the credentials on the OAuth2 client.
oAuth2Client.setCredentials(r.tokens);
console.info('Tokens acquired.');
resolve(oAuth2Client);
}
} catch (e) {
reject(e);
}
})
.listen(3000, () => {
// open authorize url in browser to start the workflow
open(authorizeUrl, {wait: false}).then((cp) => cp.unref());
});
destroyer(server);
});
}
const oAuth2Client = await getAuthenticatedClient(keys);
const reqUrl = 'https://people.googleapis.com/v1/people/me?personFields=names';
await oAuth2Client.getRequestHeaders(reqUrl);
// After acquiring an access_token, you may want to check on the audience, expiration,
// or original scopes requested. You can do that with the `getTokenInfo` method.
const tokenInfo = await oAuth2Client.getTokenInfo(oAuth2Client.credentials.access_token);
console.log(tokenInfo);
return tokenInfo;
};
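// Usage sketch (illustrative only): inspect the token info returned by exports.tokenId().
// The field names below (aud, scopes, expiry_date) come from google-auth-library's
// TokenInfo; the subject and key file path are placeholders. Run inside an async context.
//
//   const info = await exports.tokenId('user@example.com', './oauth2.keys.json');
//   console.log(`audience: ${info.aud}`);
//   console.log(`scopes:   ${info.scopes.join(', ')}`);
//   console.log(`expires:  ${new Date(info.expiry_date).toISOString()}`);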
/**
* Create Google Cloud Platform PubSub Topic.
* @see https://googleapis.dev/nodejs/pubsub/latest/v1.PublisherClient.html#createTopic
* @see https://cloud.google.com/nodejs/docs/reference/pubsub/0.28.x/v1.PublisherClient#createTopic
*
* @example
* // create PubSub Topic named "commander-test" in the yeti-dev-edw Google Project
* node ./bin/cli.js topic create commander-test storage-notifications yeti-dev-edw 643664262938 publisher
*
 * @param {string} name - The short name of the topic; it is expanded to "projects/{project}/topics/{topic}" internally.
 * The name must start with a letter, and contain only letters ([A-Za-z]), numbers ([0-9]), dashes (-), underscores (_),
* periods (.), tildes (~), plus (+) or percent signs (%). It must be between 3 and 255 characters in length,
* and it must not start with "goog".
* @param {(string|null)} [accountId=null] - Id (aka name) of a user managed service account to grant Topic permissions.
* @param {(string|null)} [projectId=process.env.GCP_PROJECT_ID] - String project id of the Topic GCP project.
* @param {(string|null)} [projectNum=process.env.GCP_PROJECT_NUM] - Numeric project id of the Topic GCP project.
* @param {(string|null)} [role=publisher] - Name of the PubSub Topic IAM Role to grant to the supplied user managed
* service account and the Google managed PubSub service account. This is used to grant the service accounts
 * permissions required to forward undeliverable messages to dead-letter subscriptions.
* @param {(Object|null)} [labels=null] - Object with string properties.
 * @param {(Object|null)} [messageStoragePolicy={allowedPersistenceRegions: ['us-central1']}] - Policy constraining
 * how messages published to the topic may be stored. If omitted, it is determined when the topic is created based on
 * the policy configured at the project level. This field is populated in the responses for GetTopic, CreateTopic,
 * and UpdateTopic; if not present in the response, then no constraints are in effect.
 * This object should have the same structure as MessageStoragePolicy.
* @see https://cloud.google.com/nodejs/docs/reference/pubsub/0.28.x/google.pubsub.v1#.MessageStoragePolicy
* @param {(string[]|null)} [messageStoragePolicy.allowedPersistenceRegions=['us-central1']] - The list of GCP
* region IDs where messages that are published to the topic may be persisted in storage. Messages published by
* publishers running in non-allowed GCP regions (or running outside of GCP altogether) will be routed for storage in
* one of the allowed regions. An empty list indicates a misconfiguration at the project or organization level,
* which will result in all Publish operations failing.
* @param {(string|null)} [kmsKeyName=null] - The resource name of the Cloud KMS CryptoKey to be used to protect access
* to messages published on this topic. The expected format is projects/* /locations/* /keyRings/* /cryptoKeys/*.
* @return {Promise<void>}
*/
exports.topic = async (name, accountId = null, projectId = null, projectNum = null, role = null, labels = null, messageStoragePolicy = null, kmsKeyName = null) => {
const pubsub = require('@google-cloud/pubsub');
if (!projectId) {
projectId = process.env.GCP_PROJECT_ID;
}
// google-cloud/pubsub - PubSub : PubSub CLASS, a reliable, many-to-many, asynchronous messaging service.
const PubSub = new pubsub.PubSub();
// google-cloud/pubsub - v1.PublisherClient : Service applications use to manipulate topics, and send messages.
const client = new pubsub.v1.PublisherClient();
/**
* Grant the supplied user managed service account and the
* Google managed Cloud Pub/Sub service account permissions
* required to forward messages to dead-letter topics.
* @see https://cloud.google.com/nodejs/docs/reference/pubsub/0.28.x/v1.PublisherClient
*
* @return {Promise<google.iam.v1.Policy>}
*/
async function setPolicy() {
    if (!projectNum) {
      // `this` is undefined when setPolicy runs in the createTopic promise chain,
      // so only the environment variable is consulted here.
      projectNum = process.env.GCP_PROJECT_NUM;
    }
if (!role) {
role = 'publisher';
}
const members = [`serviceAccount:service-${projectNum}@gcp-sa-pubsub.iam.gserviceaccount.com`];
if (accountId) {
members.push(`serviceAccount:${accountId}@${projectId}.iam.gserviceaccount.com`);
}
const formattedName = client.topicPath(projectId, name);
const request = {
resource: formattedName,
policy: {
bindings: [
{
role: `roles/pubsub.${role}`,
members: members,
},
],
},
};
    const [policy] = await client.setIamPolicy(request);
    return policy;
}
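  // For reference, setPolicy leaves the topic with a single binding shaped like
  //   {role: 'roles/pubsub.<role>', members: [...serviceAccounts]}
  // Note that setIamPolicy replaces any bindings previously set on the topic.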
  const topic = PubSub.topic(name);
  // topic.exists() resolves to an array whose first element is the boolean result.
  const [exists] = await topic.exists();
  if (exists) {
console.log();
// console.log(chalk.yellow(` ! Topic ${chalk.magenta(name)} already exists.`));
console.log(` ! Topic ${name} already exists.`);
console.log();
} else {
if (!labels) {
labels = {
owner: 'adam-cox',
department: 'edw',
environment: 'dev',
'gitlab-group': 'nodejs',
'gitlab-project': 'storage-notifications',
'technology-lifecycle': 'adopt',
tier: 'tier1',
};
}
if (!messageStoragePolicy) {
messageStoragePolicy = {
allowedPersistenceRegions: ['us-central1'],
};
}
const formattedName = client.topicPath(projectId, name);
const request = {
name: formattedName,
labels: labels,
messageStoragePolicy: messageStoragePolicy,
kmsKeyName: kmsKeyName,
};
client
.createTopic(request)
.then(setPolicy)
.then(() => {
console.log();
// console.log(` ✔ Created new ${chalk.magenta(name)} PubSub Topic.`);
console.log(` ✔ Created new ${name} PubSub Topic.`);
console.log();
})
.catch((err) => {
console.error(err.stack);
console.trace();
process.exitCode = -1;
});
}
};
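// Usage sketch (illustrative only): create a topic programmatically with explicit
// project settings. The ids below are placeholders, not real projects; run inside an
// async context with Application Default Credentials configured.
//
//   await exports.topic(
//     'example-topic',       // topic name
//     'example-publisher',   // user managed service account id
//     'example-project',     // GCP project id
//     '123456789012',        // GCP project number
//     'publisher'            // IAM role suffix -> roles/pubsub.publisher
//   );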