Skip to content

Commit 7f6cb8e

Browse files
authored
test(visual recognition): fixes visual recognition tests to correctly use environment variables (#709)
* test(visual recognition): fixes visual recognition tests to correctly use environment variables * test(visual recognition): removing console log statements in tests
1 parent a8b7066 commit 7f6cb8e

File tree

4 files changed

+101
-40
lines changed

4 files changed

+101
-40
lines changed

examples/assistant.v1.js

Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,60 @@
1+
'use strict';
2+
3+
var AssistantV1 = require('watson-developer-cloud/assistant/v1');
4+
5+
/**
6+
* Instantiate the Watson Assistant Service
7+
*/
8+
var assistant = new AssistantV1({
9+
username: process.env.ASSISTANT_USERNAME || '<assistant_username>',
10+
password: process.env.ASSISTANT_PASSWORD || '<assistant_password>',
11+
version: '2018-02-16'
12+
});
13+
14+
/**
15+
* Calls the assistant message api.
16+
* returns a promise
17+
*/
18+
var message = function(text, context) {
19+
var payload = {
20+
workspace_id: process.env.WORKSPACE_ID || '<workspace_id>',
21+
input: {
22+
text: text
23+
},
24+
context: context
25+
};
26+
return new Promise((resolve, reject) =>
27+
assistant.message(payload, function(err, data) {
28+
if (err) {
29+
reject(err);
30+
} else {
31+
resolve(data);
32+
}
33+
})
34+
);
35+
};
36+
37+
// This example makes two successive calls to assistant service.
38+
// Note how the context is passed:
39+
// In the first message the context is undefined. The service starts a new assistant.
40+
// The context returned from the first call is passed in the second request - to continue the assistant.
41+
message('first message', undefined)
42+
.then(response1 => {
43+
// APPLICATION-SPECIFIC CODE TO PROCESS THE DATA
44+
// FROM ASSISTANT SERVICE
45+
console.log(JSON.stringify(response1, null, 2), '\n--------');
46+
47+
// invoke a second call to assistant
48+
return message('second message', response1.context);
49+
})
50+
.then(response2 => {
51+
console.log(JSON.stringify(response2, null, 2), '\n--------');
52+
console.log(
53+
'Note that the two reponses should have the same context.conversation_id'
54+
);
55+
})
56+
.catch(err => {
57+
// APPLICATION-SPECIFIC CODE TO PROCESS THE ERROR
58+
// FROM ASSISTANT SERVICE
59+
console.error(JSON.stringify(err, null, 2));
60+
});

examples/discovery.v1.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ var discovery = new DiscoveryV1({
1111
// url: 'INSERT YOUR URL FOR THE SERVICE HERE'
1212
username: 'YOUR USERNAME',
1313
password: 'YOUR PASSWORD',
14-
version: '2017_04_27',
14+
version: '2018-03-05',
1515
url: 'https://gateway.watsonplatform.net/discovery/api/'
1616
});
1717

test/integration/test.visual_recognition.js

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,6 @@ describe('visual_recognition_integration', function() {
4040
if (err) {
4141
return done(err);
4242
}
43-
// console.log(JSON.stringify(result, null, 2));
4443
assert.equal(result.images_processed, 1);
4544
assert.equal(result.images[0].image, 'car.png');
4645
assert(result.images[0].classifiers.length);
@@ -63,7 +62,6 @@ describe('visual_recognition_integration', function() {
6362
if (err) {
6463
return done(err);
6564
}
66-
// console.log(JSON.stringify(result, null, 2));
6765
assert.equal(result.images_processed, 1);
6866
assert(result.images[0].classifiers.length);
6967
assert(

test/unit/test.visual_recognition.v3.js

Lines changed: 40 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -96,11 +96,11 @@ describe('visual_recognition', function() {
9696

9797
describe('credentials', function() {
9898
let env;
99-
before(function() {
99+
beforeEach(function() {
100100
env = process.env;
101101
process.env = {};
102102
});
103-
after(function() {
103+
afterEach(function() {
104104
process.env = env;
105105
});
106106

@@ -148,10 +148,8 @@ describe('visual_recognition', function() {
148148
});
149149

150150
it('should read VISUAL_RECOGNITION_USERNAME / PASSWORD from environment properties', function() {
151-
process.env = {
152-
VISUAL_RECOGNITION_USERNAME: 'foo',
153-
VISUAL_RECOGNITION_PASSWORD: 'bar',
154-
};
151+
process.env.VISUAL_RECOGNITION_USERNAME = 'foo';
152+
process.env.VISUAL_RECOGNITION_PASSWORD = 'bar';
155153
const instance = new watson.VisualRecognitionV3({
156154
version: '2016-05-20',
157155
});
@@ -161,48 +159,45 @@ describe('visual_recognition', function() {
161159
});
162160

163161
it('should read api_key from cf/bluemix environment properties', function() {
164-
process.env = {
165-
VCAP_SERVICES: JSON.stringify({
166-
watson_vision_combined: [
167-
{
168-
name: 'Visual Recognition-mj',
169-
label: 'watson_vision_combined',
170-
plan: 'free',
171-
credentials: {
172-
url: 'https://gateway-a.watsonplatform.net/visual-recognition/api',
173-
note: 'It may take up to 5 minutes for this key to become active',
174-
api_key: 'foo',
175-
},
162+
process.env.VCAP_SERVICES = JSON.stringify({
163+
watson_vision_combined: [
164+
{
165+
name: 'Visual Recognition-mj',
166+
label: 'watson_vision_combined',
167+
plan: 'free',
168+
credentials: {
169+
url: 'https://gateway-a.watsonplatform.net/visual-recognition/api',
170+
note: 'It may take up to 5 minutes for this key to become active',
171+
api_key: 'foo',
176172
},
177-
],
178-
}),
179-
};
173+
},
174+
],
175+
});
180176
const instance = new watson.VisualRecognitionV3({
181177
version: '2016-05-20',
182178
});
179+
console.log(instance._options);
183180
assert.equal(instance._options.api_key, 'foo');
184181
assert.equal(instance._options.username, undefined);
185182
assert.equal(instance._options.password, undefined);
186183
});
187184

188185
it('should read username / password from cf/bluemix environment properties', function() {
189-
process.env = {
190-
VCAP_SERVICES: JSON.stringify({
191-
watson_vision_combined: [
192-
{
193-
name: 'Visual Recognition-mj',
194-
label: 'watson_vision_combined',
195-
plan: 'free',
196-
credentials: {
197-
url: 'https://gateway-a.watsonplatform.net/visual-recognition/api',
198-
note: 'It may take up to 5 minutes for this key to become active',
199-
username: 'foo',
200-
password: 'bar',
201-
},
186+
process.env.VCAP_SERVICES = JSON.stringify({
187+
watson_vision_combined: [
188+
{
189+
name: 'Visual Recognition-mj',
190+
label: 'watson_vision_combined',
191+
plan: 'free',
192+
credentials: {
193+
url: 'https://gateway-a.watsonplatform.net/visual-recognition/api',
194+
note: 'It may take up to 5 minutes for this key to become active',
195+
username: 'foo',
196+
password: 'bar',
202197
},
203-
],
204-
}),
205-
};
198+
},
199+
],
200+
});
206201
const instance = new watson.VisualRecognitionV3({
207202
version: '2016-05-20',
208203
});
@@ -524,6 +519,14 @@ describe('visual_recognition', function() {
524519
});
525520

526521
describe('RC and CF urls', function() {
522+
let env;
523+
beforeEach(function() {
524+
env = process.env;
525+
process.env = {};
526+
});
527+
afterEach(function() {
528+
process.env = env;
529+
});
527530
it('should have the correct URL depending on rc/cf', function(done) {
528531
const visual_recognition_cf = new watson.VisualRecognitionV3({
529532
api_key: 'apikey',

0 commit comments

Comments
 (0)