// Imports the Google Cloud client library
const language = require('@google-cloud/language');

// Instantiates a client
const client = new language.LanguageServiceClient();

// The text to analyze
const text = 'Google Natural Language API is fantastic. It gives me great results and I love it.';

const document = {
  content: text,
  type: 'PLAIN_TEXT',
};

// Detects the sentiment of the text
client
  .analyzeSentiment({document: document})
  .then(results => {
    const sentiment = results[0].documentSentiment;
    console.log(`Text: ${text}`);
    console.log(`Sentiment score: ${sentiment.score}`);
    console.log(`Sentiment magnitude: ${sentiment.magnitude}`);
  })
  .catch(err => {
    console.error('ERROR:', err);
  });
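// A hedged variant (not from the snippet above): the same call written with
// async/await. The API response also carries per-sentence sentiments; the
// function name below is illustrative.
async function analyzeSentimentWithSentences(text) {
  const language = require('@google-cloud/language');
  const client = new language.LanguageServiceClient();
  const document = {content: text, type: 'PLAIN_TEXT'};

  const [result] = await client.analyzeSentiment({document});
  console.log(`Document score: ${result.documentSentiment.score}`);
  // Each sentence in the response has its own sentiment score and magnitude.
  for (const sentence of result.sentences) {
    console.log(`"${sentence.text.content}" -> ${sentence.sentiment.score}`);
  }
}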
async function analyzeSyntaxOfText(text) {
  // [START language_syntax_text]
  // Imports the Google Cloud client library
  const language = require('@google-cloud/language');

  // Creates a client
  const client = new language.LanguageServiceClient();

  /**
   * TODO(developer): Uncomment the following line to run this code.
   */
  // const text = 'Your text to analyze, e.g. Hello, world!';

  // Prepares a document, representing the provided text
  const document = {
    content: text,
    type: 'PLAIN_TEXT',
  };

  // Detects syntax in the document
  const [syntax] = await client.analyzeSyntax({document});

  console.log('Tokens:');
  syntax.tokens.forEach(part => {
    console.log(`${part.partOfSpeech.tag}: ${part.text.content}`);
  });
  // [END language_syntax_text]
}
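// A hedged usage sketch: analyzeSyntax also returns a dependency parse. Each
// token carries a dependencyEdge with the index of its head token and a label.
// The helper name below is illustrative, not part of the sample above.
async function printDependencies(text) {
  const language = require('@google-cloud/language');
  const client = new language.LanguageServiceClient();

  const [syntax] = await client.analyzeSyntax({
    document: {content: text, type: 'PLAIN_TEXT'},
  });
  syntax.tokens.forEach((token, i) => {
    const edge = token.dependencyEdge;
    console.log(`${i}: ${token.text.content} -> head ${edge.headTokenIndex} (${edge.label})`);
  });
}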
'use strict';

const functions = require('firebase-functions');
const admin = require('firebase-admin');
// Follow instructions to set up admin credentials:
// https://firebase.google.com/docs/functions/local-emulator#set_up_admin_credentials_optional
admin.initializeApp({
  credential: admin.credential.applicationDefault(),
  // TODO: ADD YOUR DATABASE URL
  databaseURL: undefined,
});

const language = require('@google-cloud/language');
const client = new language.LanguageServiceClient();

const express = require('express');
const app = express();

// Express middleware that validates Firebase ID Tokens passed in the Authorization HTTP header.
// The Firebase ID token needs to be passed as a Bearer token in the Authorization HTTP header
// like this: `Authorization: Bearer <Firebase ID Token>`.
// When decoded successfully, the ID Token content will be added as `req.user`.
const authenticate = async (req, res, next) => {
  if (!req.headers.authorization || !req.headers.authorization.startsWith('Bearer ')) {
    res.status(403).send('Unauthorized');
    return;
  }
  const idToken = req.headers.authorization.split('Bearer ')[1];
  try {
    const decodedIdToken = await admin.auth().verifyIdToken(idToken);
    req.user = decodedIdToken;
    next();
  } catch (e) {
    res.status(403).send('Unauthorized');
  }
};
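// A hedged sketch of how this middleware is typically wired up: the route path
// and exported function name below are illustrative assumptions, not taken from
// the snippet above.
app.use(express.json());
app.use(authenticate);

app.post('/analyze', async (req, res) => {
  // req.user is available here because `authenticate` ran first.
  const [result] = await client.analyzeSentiment({
    document: {content: req.body.text, type: 'PLAIN_TEXT'},
  });
  res.json(result.documentSentiment);
});

// Expose the Express app as a single HTTPS Cloud Function.
exports.api = functions.https.onRequest(app);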
// TODO: Load the Natural Language ML API module
const Language = require('@google-cloud/language');
// END TODO

// TODO: Create a client object against the Language API
// using the Language.LanguageServiceClient function.
// The LanguageServiceClient function accepts an options
// object which is used to specify which project should be
// billed for use of the API via the projectId property.
// The projectId is retrieved from the config module.
// This module retrieves the project ID from the
// GCLOUD_PROJECT environment variable.
const language = new Language.LanguageServiceClient({
  projectId: config.get('GCLOUD_PROJECT')
});
// END TODO
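// A hedged alternative: if the config module above is not available, the same
// projectId option can be read straight from the environment. The variable name
// below is illustrative.
const languageClient = new Language.LanguageServiceClient({
  projectId: process.env.GCLOUD_PROJECT,
});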
function analyze(text) {
  // TODO: Create an object named document with the
  // correct structure for the Natural Language ML API

  // TODO: Initialize object content & type properties
  // TODO: Set content from text arg
  // TODO: Set type to PLAIN_TEXT
  const document = {
    content: text,
    type: 'PLAIN_TEXT'
  };
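  // The original snippet is truncated here. A minimal sketch of one plausible
  // continuation (an assumption, not the lab's actual code): send the document
  // to the API and return the document-level sentiment.
  return language.analyzeSentiment({document})
    .then(([result]) => result.documentSentiment);
}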
const path = __dirname;

// Client options: GOOGLE_APPLICATION_CREDENTIALS must point at a key *file*, so
// in production (where the key JSON lives in the GOOGLE_KEY environment variable)
// the parsed credentials are passed directly to the client instead.
const clientOptions = {};
if (process.env.NODE_ENV === 'production') {
  const creds = process.env.GOOGLE_KEY;
  clientOptions.credentials = JSON.parse(creds);
} else {
  process.env.GOOGLE_APPLICATION_CREDENTIALS = `${path}/../../config/google_key.json`;
}

// Imports the Google Cloud client library
const language = require('@google-cloud/language');

// Instantiates a client
const client = new language.LanguageServiceClient(clientOptions);
const Fup = require('../../models/Fup');
const Word = require('../../models/Word');
const Like = require('../../models/Like');
const validateFupInput = require('../../validation/fups');

const getWordsFromFup = (fup, req) => {
  const document = {
    content: req.body.text,
    type: 'PLAIN_TEXT',
  };

  return client
    .analyzeEntitySentiment({ document })
    .then((results) => {
      const entities = results[0].entities;
      // The original snippet is truncated here; each entity carries a name,
      // type, salience and sentiment that the caller can persist as Word records.
      return entities;
    })
    .catch((err) => console.error('ERROR:', err));
};
async function classifyTextInFile(bucketName, fileName) {
  // [START language_classify_gcs]
  // Imports the Google Cloud client library.
  const language = require('@google-cloud/language');

  // Creates a client.
  const client = new language.LanguageServiceClient();

  /**
   * TODO(developer): Uncomment the following lines to run this code
   */
  // const bucketName = 'Your bucket name, e.g. my-bucket';
  // const fileName = 'Your file name, e.g. my-file.txt';

  // Prepares a document, representing a text file in Cloud Storage
  const document = {
    gcsContentUri: `gs://${bucketName}/${fileName}`,
    type: 'PLAIN_TEXT',
  };

  // Classifies text in the document
  const [classification] = await client.classifyText({document});
  console.log('Categories:');
  classification.categories.forEach(category => {
    console.log(`Name: ${category.name}, Confidence: ${category.confidence}`);
  });
  // [END language_classify_gcs]
}
        // The sentiment field contains magnitude and score.
        node.send(msg);
    } // Input

    /**
     * Cleanup this node.
     */
    function Close() {
    } // Close

    // We must have EITHER credentials or a keyFilename. If neither are supplied, that
    // is an error. If both are supplied, then credentials will be used.
    if (credentials) {
        languageServiceClient = new language.LanguageServiceClient({
            "credentials": credentials
        });
    } else if (keyFilename) {
        languageServiceClient = new language.LanguageServiceClient({
            "keyFilename": keyFilename
        });
    } else {
        languageServiceClient = new language.LanguageServiceClient({});
    }

    node.on("input", Input);
    node.on("close", Close);
} // SentimentNode
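// A hedged aside: the credentials/keyFilename precedence above can be expressed
// as a small standalone helper. The function name is illustrative and not part
// of the node shown here.
function buildClientOptions({credentials, keyFilename} = {}) {
    // Explicit credentials win; otherwise fall back to a key file; otherwise
    // let the client use Application Default Credentials.
    if (credentials) return {credentials};
    if (keyFilename) return {keyFilename};
    return {};
}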
async function analyzeEntitySentimentOfText(text) {
  // [START language_entity_sentiment_text]
  // Imports the Google Cloud client library
  const language = require('@google-cloud/language');

  // Creates a client
  const client = new language.LanguageServiceClient();

  /**
   * TODO(developer): Uncomment the following line to run this code.
   */
  // const text = 'Your text to analyze, e.g. Hello, world!';

  // Prepares a document, representing the provided text
  const document = {
    content: text,
    type: 'PLAIN_TEXT',
  };

  // Detects sentiment of entities in the document
  const [result] = await client.analyzeEntitySentiment({document});
  const entities = result.entities;

  console.log('Entities and sentiments:');
  entities.forEach(entity => {
    console.log(`  Name: ${entity.name}`);
    console.log(`  Type: ${entity.type}`);
    console.log(`  Score: ${entity.sentiment.score}`);
    console.log(`  Magnitude: ${entity.sentiment.magnitude}`);
  });
  // [END language_entity_sentiment_text]
}
async function analyzeSentimentOfText(text) {
  // [START language_sentiment_text]
  // Imports the Google Cloud client library
  const language = require('@google-cloud/language');

  // Creates a client
  const client = new language.LanguageServiceClient();

  /**
   * TODO(developer): Uncomment the following line to run this code.
   */
  // const text = 'Your text to analyze, e.g. Hello, world!';

  // Prepares a document, representing the provided text
  const document = {
    content: text,
    type: 'PLAIN_TEXT',
  };

  // Detects the sentiment of the document
  const [result] = await client.analyzeSentiment({document});
  const sentiment = result.documentSentiment;

  console.log(`Text: ${text}`);
  console.log(`Sentiment score: ${sentiment.score}`);
  console.log(`Sentiment magnitude: ${sentiment.magnitude}`);
  // [END language_sentiment_text]
}
function main() {
  // Imports the Google Cloud client library
  const language = require('@google-cloud/language');
  const languageServiceClient = new language.LanguageServiceClient();
}
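// A hedged usage sketch: sample functions like the ones above are typically
// driven from a small CLI entry point. The wiring below is illustrative, not
// the original file's.
async function run() {
  const text = process.argv[2] || 'Google Natural Language API is fantastic.';
  await analyzeSentimentOfText(text);
  await analyzeEntitySentimentOfText(text);
  await analyzeSyntaxOfText(text);
}
run().catch(console.error);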