index.js
// This sample demonstrates handling intents from an Alexa skill using the Alexa Skills Kit SDK (v2).
// Please visit https://alexa.design/cookbook for additional examples of implementing slots, dialog management,
// session persistence, API calls, and more.
const Alexa = require('ask-sdk-core');
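// Pool of food facts that the skill picks from at random.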
const data = [
'Green, yellow, and red bell peppers are not actually the same vegetable.',
'White chocolate is not actually chocolate at all.',
'Scientists can turn peanut butter into diamonds.',
'Ripe cranberries will bounce like rubber balls.',
'Chocolate was once used as currency.',
'Fruit snacks and cars are coated in the same type of wax.',
'Potatoes can absorb and reflect Wi-Fi signals.',
'Carrots were originally purple.',
];
const GET_FACT_MESSAGE = 'This is your fact: ';
// Triggered when the user opens the skill with its invocation name (e.g. "Alexa, open food fact").
const LaunchRequestHandler = {
canHandle(handlerInput) {
return handlerInput.requestEnvelope.request.type === 'LaunchRequest';
},
handle(handlerInput) {
const speechText = 'Welcome to Food Fact. You can ask me about a food fact.';
return handlerInput.responseBuilder
.speak(speechText)
.reprompt(speechText)
.getResponse();
}
};
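// Handles the custom FoodFacts intent by responding with a randomly chosen fact.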
const FoodFactsHandler = {
canHandle(handlerInput) {
return handlerInput.requestEnvelope.request.type === 'IntentRequest'
&& handlerInput.requestEnvelope.request.intent.name === 'FoodFacts';
},
// Pick a random fact from the data array and build the speech output.
handle(handlerInput) {
const factIndex = Math.floor(Math.random() * data.length);
const randomFact = data[factIndex];
const speechText = GET_FACT_MESSAGE + randomFact;
return handlerInput.responseBuilder
.speak(speechText)
.reprompt(speechText)
.getResponse();
}
};
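// Handles AMAZON.HelpIntent by telling the user what they can ask for.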
const HelpIntentHandler = {
canHandle(handlerInput) {
return handlerInput.requestEnvelope.request.type === 'IntentRequest'
&& handlerInput.requestEnvelope.request.intent.name === 'AMAZON.HelpIntent';
},
handle(handlerInput) {
const speechText = 'You can ask me about a food fact';
return handlerInput.responseBuilder
.speak(speechText)
.reprompt(speechText)
.getResponse();
}
};
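// Handles AMAZON.CancelIntent and AMAZON.StopIntent by saying goodbye and ending the session.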
const CancelAndStopIntentHandler = {
canHandle(handlerInput) {
return handlerInput.requestEnvelope.request.type === 'IntentRequest'
&& (handlerInput.requestEnvelope.request.intent.name === 'AMAZON.CancelIntent'
|| handlerInput.requestEnvelope.request.intent.name === 'AMAZON.StopIntent');
},
handle(handlerInput) {
const speechText = 'Goodbye!';
return handlerInput.responseBuilder
.speak(speechText)
.getResponse();
}
};
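// Fires when the session ends (the user exits, does not respond, or an error occurs); no speech is returned.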
const SessionEndedRequestHandler = {
canHandle(handlerInput) {
return handlerInput.requestEnvelope.request.type === 'SessionEndedRequest';
},
handle(handlerInput) {
// Any cleanup logic goes here.
return handlerInput.responseBuilder.getResponse();
}
};
// The intent reflector is used for interaction model testing and debugging.
// It will simply repeat the intent the user said. You can create custom handlers
// for your intents by defining them above, then also adding them to the request
// handler chain below.
const IntentReflectorHandler = {
canHandle(handlerInput) {
return handlerInput.requestEnvelope.request.type === 'IntentRequest';
},
handle(handlerInput) {
const intentName = handlerInput.requestEnvelope.request.intent.name;
const speechText = `You just triggered ${intentName}`;
return handlerInput.responseBuilder
.speak(speechText)
//.reprompt('add a reprompt if you want to keep the session open for the user to respond')
.getResponse();
}
};
// Generic error handling to capture any syntax or routing errors. If you receive an error
// stating the request handler chain is not found, you have not implemented a handler for
// the intent being invoked, or have not included it in the skill builder below.
const ErrorHandler = {
canHandle() {
return true;
},
handle(handlerInput, error) {
console.log(`~~~~ Error handled: ${error.message}`);
const speechText = `Sorry, I couldn't understand what you said. Please try again.`;
return handlerInput.responseBuilder
.speak(speechText)
.reprompt(speechText)
.getResponse();
}
};
// This handler acts as the entry point for your skill, routing all request and response
// payloads to the handlers above. Make sure any new handlers or interceptors you've
// defined are included below. The order matters - they're processed top to bottom.
exports.handler = Alexa.SkillBuilders.custom()
.addRequestHandlers(
LaunchRequestHandler,
FoodFactsHandler,
HelpIntentHandler,
CancelAndStopIntentHandler,
SessionEndedRequestHandler,
IntentReflectorHandler) // make sure IntentReflectorHandler is last so it doesn't override your custom intent handlers
.addErrorHandlers(
ErrorHandler)
.lambda();