Commit 82611fd

Fixes to address PR comments, fix config.ts
1 parent 0113036 commit 82611fd

3 files changed: 10 additions & 12 deletions

config.ts

Lines changed: 2 additions & 2 deletions
@@ -1,8 +1,8 @@
 export const firebaseConfig = {
-  ${YOUR_FIREBASE_CONFIG}
+  YOUR_FIREBASE_CONFIG
 };
 
 // Your ReCAPTCHA Enterprise site key (must be from the same project
 // as the Firebase config above).
 export const RECAPTCHA_ENTERPRISE_SITE_KEY =
-  "${YOUR_RECAPTCHA_KEY}";
+  "YOUR_RECAPTCHA_KEY";

Node/call-vertex-remote-config-server/functions/index.js

Lines changed: 3 additions & 4 deletions
@@ -114,8 +114,7 @@ exports.callVertexWithRC = onCall(
       generation_config: generationConfig,
     });
 
-    // Create the chat; append user input to Remote Config-defined prompt.
-    const chat = generativeModel.startChat();
+    // Combine prompt from Remote Config with optional user input.
     const chatInput = textPrompt + " " + userInput;
 
     if (!chatInput) {
@@ -141,7 +140,7 @@ exports.callVertexWithRC = onCall(
       "\n"
     );
 
-    const result = await chat.sendMessageStream(chatInput);
+    const result = await generativeModel.generateContentStream(chatInput);
 
     const chunks = [];
     for await (const item of result.stream) {
@@ -152,7 +151,7 @@ exports.callVertexWithRC = onCall(
 
     return chunks.join(""); // Return the concatenated chunks
   } catch (error) {
-    console.error(error);
+    logger.error(error);
     throw new HttpsError("internal", "Internal server error");
   }
 }
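The function previously opened a chat session only to send a single message; since nothing depends on chat history, the single-turn generateContentStream() call is the more direct fit, and the streamed chunks are concatenated exactly as before. Below is a minimal standalone sketch of that pattern, assuming the @google-cloud/vertexai client used by these samples; the project ID, location, and model name are placeholders, not values from the repo.

const { VertexAI } = require("@google-cloud/vertexai");

const vertexAI = new VertexAI({
  project: "your-project-id", // placeholder
  location: "us-central1",    // placeholder
});
const generativeModel = vertexAI.getGenerativeModel({ model: "gemini-1.5-flash" }); // placeholder model

async function generateOnce(prompt) {
  // Single stateless turn: no startChat(), no history to carry between calls.
  const result = await generativeModel.generateContentStream(prompt);
  const chunks = [];
  for await (const item of result.stream) {
    chunks.push(item.candidates[0].content.parts[0].text);
  }
  return chunks.join("");
}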

Node/remote-config-server-with-vertex/functions/index.js

Lines changed: 5 additions & 6 deletions
@@ -80,8 +80,7 @@ exports.generateWithVertex = onRequest(async (request, response) => {
       generation_config: generationConfig,
     });
 
-    // Create the chat; append user input to Remote Config-defined prompt.
-    const chat = generativeModel.startChat();
+    // Combine prompt from Remote Config with optional user input.
     const chatInput = textPrompt + " " + userInput;
 
     if (!chatInput) {
@@ -95,23 +94,23 @@ exports.generateWithVertex = onRequest(async (request, response) => {
       return;
     }
 
-    console.log("\nRunning with model ", textModel, ", prompt: ", textPrompt,
+    logger.log("\nRunning with model ", textModel, ", prompt: ", textPrompt,
       ", generationConfig: ", generationConfig, ", safetySettings: ",
       safetySettings, " in ", location, "\n");
 
-    const result = await chat.sendMessageStream(chatInput);
+    const result = await generativeModel.generateContentStream(chatInput);
     response.writeHead(200, { 'Content-Type': 'text/plain' });
 
     for await (const item of result.stream) {
       const chunk = item.candidates[0].content.parts[0].text;
-      console.log("Received chunk:", chunk);
+      logger.log("Received chunk:", chunk);
       response.write(chunk);
     }
 
     response.end();
 
   } catch (error) {
-    console.error(error);
+    logger.error(error);
     response.status(500).send('Internal server error');
   }
 });
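Replacing console.log/console.error with the firebase-functions logger gives each entry a proper severity level and structured payload in Cloud Logging instead of plain stdout/stderr text. The sketch below shows the kind of import and calls involved (the actual import line in the sample files is not part of this diff, and the logDemo endpoint is invented purely for illustration).

const { onRequest } = require("firebase-functions/v2/https");
const logger = require("firebase-functions/logger");

// Hypothetical endpoint just to show the logger calls used above.
exports.logDemo = onRequest((request, response) => {
  logger.log("Handling request", { path: request.path }); // INFO severity, structured payload
  try {
    response.send("ok");
  } catch (error) {
    logger.error(error); // ERROR severity, easy to filter on in Cloud Logging
    response.status(500).send("Internal server error");
  }
});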
