// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "google/cloud/aiplatform/v1/prediction_client.h"
#include "google/cloud/internal/getenv.h"
#include "google/cloud/location.h"
#include "google/cloud/testing_util/example_driver.h"
#include <iostream>
#include <string>
#include <vector>

namespace {

| 24 | +void GeminiGenerateFromTextInput(std::vector<std::string> const& argv) { |
| 25 | + if (argv.size() < 4) { |
| 26 | + throw google::cloud::testing_util::Usage( |
| 27 | + "gemini-generate-from-text-input <project> <location> <model-name> " |
| 28 | + "[<content>]+"); |
| 29 | + } |
| 30 | + // [START generativeaionvertexai_gemini_generate_from_text_input] |
| 31 | + namespace vertex_ai = ::google::cloud::aiplatform_v1; |
| 32 | + namespace vertex_ai_proto = ::google::cloud::aiplatform::v1; |
| 33 | + [](std::string const& project_id, std::string const& location_id, |
| 34 | + std::string const& model, std::vector<std::string> const& content) { |
| 35 | + google::cloud::Location location(project_id, location_id); |
| 36 | + auto client = vertex_ai::PredictionServiceClient( |
| 37 | + vertex_ai::MakePredictionServiceConnection(location.location_id())); |
| 38 | + |
| 39 | + std::vector<vertex_ai_proto::Content> contents; |
| 40 | + for (auto const& c : content) { |
| 41 | + vertex_ai_proto::Content content; |
| 42 | + content.set_role("user"); |
| 43 | + content.add_parts()->set_text(c); |
| 44 | + contents.push_back(std::move(content)); |
| 45 | + } |
| 46 | + auto response = client.GenerateContent( |
| 47 | + location.FullName() + "/publishers/google/models/" + model, contents); |
| 48 | + if (!response) throw std::move(response).status(); |
| 49 | + |
| 50 | + for (auto const& candidate : response->candidates()) { |
| 51 | + for (auto const& p : candidate.content().parts()) { |
| 52 | + std::cout << p.text() << "\n"; |
| 53 | + } |
| 54 | + } |
| 55 | + } |
| 56 | + // [END generativeaionvertexai_gemini_generate_from_text_input] |
| 57 | + (argv.at(0), argv.at(1), argv.at(2), {argv.begin() + 3, argv.end()}); |
| 58 | +} |
| 59 | + |
| 60 | +void GeminiGenerateWithImage(std::vector<std::string> const& argv) { |
| 61 | + if (argv.size() != 6) { |
| 62 | + throw google::cloud::testing_util::Usage( |
| 63 | + "gemini-generate-with-image <project> <location> <model-name> " |
| 64 | + "<prompt> <mime-type> <file-uri>"); |
| 65 | + } |
| 66 | + // [START generativeaionvertexai_gemini_get_started] |
| 67 | + namespace vertex_ai = ::google::cloud::aiplatform_v1; |
| 68 | + namespace vertex_ai_proto = ::google::cloud::aiplatform::v1; |
| 69 | + [](std::string const& project_id, std::string const& location_id, |
| 70 | + std::string const& model, std::string const& prompt, |
| 71 | + std::string const& mime_type, std::string const& file_uri) { |
| 72 | + google::cloud::Location location(project_id, location_id); |
| 73 | + auto client = vertex_ai::PredictionServiceClient( |
| 74 | + vertex_ai::MakePredictionServiceConnection(location.location_id())); |
| 75 | + |
| 76 | + vertex_ai_proto::GenerateContentRequest request; |
| 77 | + request.set_model(location.FullName() + "/publishers/google/models/" + |
| 78 | + model); |
| 79 | + auto generation_config = request.mutable_generation_config(); |
| 80 | + generation_config->set_temperature(0.4f); |
| 81 | + generation_config->set_top_k(32); |
| 82 | + generation_config->set_top_p(1); |
| 83 | + generation_config->set_max_output_tokens(2048); |
| 84 | + |
| 85 | + auto contents = request.add_contents(); |
| 86 | + contents->set_role("user"); |
| 87 | + contents->add_parts()->set_text(prompt); |
| 88 | + auto image_part = contents->add_parts(); |
| 89 | + image_part->mutable_file_data()->set_file_uri(file_uri); |
| 90 | + image_part->mutable_file_data()->set_mime_type(mime_type); |
| 91 | + |
| 92 | + auto response = client.GenerateContent(request); |
| 93 | + if (!response) throw std::move(response).status(); |
| 94 | + |
| 95 | + for (auto const& candidate : response->candidates()) { |
| 96 | + for (auto const& p : candidate.content().parts()) { |
| 97 | + std::cout << p.text() << "\n"; |
| 98 | + } |
| 99 | + } |
| 100 | + } |
| 101 | + // [END generativeaionvertexai_gemini_get_started] |
| 102 | + (argv.at(0), argv.at(1), argv.at(2), argv.at(3), argv.at(4), argv.at(5)); |
| 103 | +} |
| 104 | + |
| 105 | +void GeminiVideoWithAudio(std::vector<std::string> const& argv) { |
| 106 | + if (argv.size() != 6) { |
| 107 | + throw google::cloud::testing_util::Usage( |
| 108 | + "gemini-video-with-audio <project> <location> <model-name> " |
| 109 | + "<prompt> <mime-type> <file-uri>"); |
| 110 | + } |
| 111 | + // [START generativeaionvertexai_gemini_video_with_audio] |
| 112 | + namespace vertex_ai = ::google::cloud::aiplatform_v1; |
| 113 | + namespace vertex_ai_proto = ::google::cloud::aiplatform::v1; |
| 114 | + [](std::string const& project_id, std::string const& location_id, |
| 115 | + std::string const& model, std::string const& prompt, |
| 116 | + std::string const& mime_type, std::string const& file_uri) { |
| 117 | + google::cloud::Location location(project_id, location_id); |
| 118 | + auto client = vertex_ai::PredictionServiceClient( |
| 119 | + vertex_ai::MakePredictionServiceConnection(location.location_id())); |
| 120 | + |
| 121 | + vertex_ai_proto::GenerateContentRequest request; |
| 122 | + request.set_model(location.FullName() + "/publishers/google/models/" + |
| 123 | + model); |
| 124 | + auto contents = request.add_contents(); |
| 125 | + contents->set_role("user"); |
| 126 | + contents->add_parts()->set_text(prompt); |
| 127 | + auto image_part = contents->add_parts(); |
| 128 | + image_part->mutable_file_data()->set_file_uri(file_uri); |
| 129 | + image_part->mutable_file_data()->set_mime_type(mime_type); |
| 130 | + |
| 131 | + auto response = client.GenerateContent(request); |
| 132 | + if (!response) throw std::move(response).status(); |
| 133 | + |
| 134 | + for (auto const& candidate : response->candidates()) { |
| 135 | + for (auto const& p : candidate.content().parts()) { |
| 136 | + std::cout << p.text() << "\n"; |
| 137 | + } |
| 138 | + } |
| 139 | + } |
| 140 | + // [END generativeaionvertexai_gemini_video_with_audio] |
| 141 | + (argv.at(0), argv.at(1), argv.at(2), argv.at(3), argv.at(4), argv.at(5)); |
| 142 | +} |
| 143 | + |
| 144 | +void AutoRun(std::vector<std::string> const& argv) { |
| 145 | + namespace examples = ::google::cloud::testing_util; |
| 146 | + if (!argv.empty()) throw examples::Usage{"auto"}; |
| 147 | + examples::CheckEnvironmentVariablesAreSet({ |
| 148 | + "GOOGLE_CLOUD_PROJECT", |
| 149 | + }); |
| 150 | + auto const project_id = |
| 151 | + google::cloud::internal::GetEnv("GOOGLE_CLOUD_PROJECT").value(); |
| 152 | + |
| 153 | + std::cout << "Executing GeminiGenerateFromTextInput sample:\n"; |
| 154 | + GeminiGenerateFromTextInput( |
| 155 | + {project_id, "us-central1", "gemini-1.5-flash-001", |
| 156 | + "What's a good name for a flower shop that specializes in selling " |
| 157 | + "bouquets of dried flowers?"}); |
| 158 | + |
| 159 | + std::cout << "\nExecuting GeminiGenerateWithImage sample:\n"; |
| 160 | + GeminiGenerateWithImage({project_id, "us-central1", "gemini-1.5-flash-001", |
| 161 | + "What's in this photo?", "image/png", |
| 162 | + "gs://generativeai-downloads/images/scones.jpg"}); |
| 163 | + |
| 164 | + std::cout << "\nExecuting GeminiVideoWithAudio sample:\n"; |
| 165 | + GeminiVideoWithAudio( |
| 166 | + {project_id, "us-central1", "gemini-1.5-flash-001", |
| 167 | + "Provide a description of the video.\n" |
| 168 | + "The description should also contain anything important which people " |
| 169 | + "say in the video.", |
| 170 | + "video/mp4", "gs://cloud-samples-data/generative-ai/video/pixel8.mp4"}); |
| 171 | + |
| 172 | + std::cout << "\nAutoRun done" << std::endl; |
| 173 | +} |
| 174 | + |
}  // namespace

| 177 | +int main(int argc, char* argv[]) { // NOLINT(bugprone-exception-escape) |
| 178 | + google::cloud::testing_util::Example example( |
| 179 | + {{"gemini-generate-with-image", GeminiGenerateWithImage}, |
| 180 | + {"gemini-generate-from-text-input", GeminiGenerateFromTextInput}, |
| 181 | + {"gemini-video-with-audio", GeminiVideoWithAudio}, |
| 182 | + {"auto", AutoRun}}); |
| 183 | + return example.Run(argc, argv); |
| 184 | +} |