|
85 | 85 | }, |
86 | 86 | { |
87 | 87 | "cell_type": "code", |
88 | | - "execution_count": 2, |
| 88 | + "execution_count": null, |
89 | 89 | "metadata": { |
90 | 90 | "id": "ikIM1o9cHNcS" |
91 | 91 | }, |
92 | | - "outputs": [ |
93 | | - { |
94 | | - "name": "stderr", |
95 | | - "output_type": "stream", |
96 | | - "text": [ |
97 | | - "/Users/amna.mubashar/Library/Python/3.9/lib/python/site-packages/urllib3/__init__.py:35: NotOpenSSLWarning: urllib3 v2 only supports OpenSSL 1.1.1+, currently the 'ssl' module is compiled with 'LibreSSL 2.8.3'. See: https://github.com/urllib3/urllib3/issues/3020\n", |
98 | | - " warnings.warn(\n", |
99 | | - "/Users/amna.mubashar/Library/Python/3.9/lib/python/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", |
100 | | - " from .autonotebook import tqdm as notebook_tqdm\n" |
101 | | - ] |
102 | | - } |
103 | | - ], |
| 92 | + "outputs": [], |
104 | 93 | "source": [ |
105 | 94 | "from haystack.telemetry import tutorial_running\n", |
106 | 95 | "\n", |
|
147 | 136 | "from haystack.dataclasses import ChatMessage\n", |
148 | 137 | "from haystack.components.generators.chat import HuggingFaceLocalChatGenerator\n", |
149 | 138 | "\n", |
150 | | - "template = [ChatMessage.from_user(\"\"\"\n", |
| 139 | + "template = [\n", |
| 140 | + " ChatMessage.from_user(\n", |
| 141 | + " \"\"\"\n", |
151 | 142 | "Please create a summary about the following topic:\n", |
152 | 143 | "{{ topic }}\n", |
153 | | - "\"\"\")]\n", |
154 | | - "chat_template = \"{% for message in messages %}{% if message['role'] == 'user' %}{{ ' ' }}{% endif %}{{ message['content'] }}{% if not loop.last %}{{ ' ' }}{% endif %}{% endfor %}{{ eos_token }}\"\n", |
| 144 | + "\"\"\"\n", |
| 145 | + " )\n", |
| 146 | + "]\n", |
155 | 147 | "\n", |
156 | 148 | "builder = ChatPromptBuilder(template=template)\n", |
157 | | - "llm = HuggingFaceLocalChatGenerator(\n", |
158 | | - " model=\"google/flan-t5-large\", task=\"text2text-generation\", generation_kwargs={\"max_new_tokens\": 150},\n", |
159 | | - " chat_template=chat_template\n", |
160 | | - ")\n", |
| 149 | + "llm = HuggingFaceLocalChatGenerator(model=\"Qwen/Qwen2.5-1.5B-Instruct\", generation_kwargs={\"max_new_tokens\": 150})\n", |
161 | 150 | "\n", |
162 | 151 | "pipeline = Pipeline()\n", |
163 | 152 | "pipeline.add_component(name=\"builder\", instance=builder)\n", |
|
181 | 170 | "name": "stdout", |
182 | 171 | "output_type": "stream", |
183 | 172 | "text": [ |
184 | | - "Climate change is a major threat to the planet.\n" |
| 173 | + "Climate change is a global issue that has been on the rise in recent years due to various factors such as human activities, natural disasters, and climate variability. The impacts of climate change can be seen in extreme weather events, rising sea levels, melting ice caps, and changing ecosystems.\n", |
| 174 | + "There are different types of climate change, including global warming, local climate change, and regional climate change. Global warming refers to changes in the Earth's temperature caused by greenhouse gas emissions from human activities, while local climate change refers to changes in the climate of specific areas or regions.\n", |
| 175 | + "To address climate change, there needs to be a concerted effort from governments, businesses, and individuals around the world to reduce greenhouse gas emissions, protect natural habitats, and invest in renewable energy sources\n" |
185 | 176 | ] |
186 | 177 | } |
187 | 178 | ], |
|
240 | 231 | " max_new_tokens: 150\n", |
241 | 232 | " stop_sequences: []\n", |
242 | 233 | " huggingface_pipeline_kwargs:\n", |
243 | | - " device: mps\n", |
244 | | - " model: google/flan-t5-large\n", |
245 | | - " task: text2text-generation\n", |
| 234 | + " device: cpu\n", |
| 235 | + " model: Qwen/Qwen2.5-1.5B-Instruct\n", |
| 236 | + " task: text-generation\n", |
246 | 237 | " streaming_callback: null\n", |
247 | 238 | " token:\n", |
248 | 239 | " env_vars:\n", |
|
300 | 291 | " stop_sequences: []\n", |
301 | 292 | " huggingface_pipeline_kwargs:\n", |
302 | 293 | " device: cpu\n", |
303 | | - " model: google/flan-t5-large\n", |
304 | | - " task: text2text-generation\n", |
| 294 | + " model: Qwen/Qwen2.5-1.5B-Instruct\n", |
| 295 | + " task: text-generation\n", |
305 | 296 | " streaming_callback: null\n", |
306 | 297 | " token:\n", |
307 | 298 | " env_vars:\n", |
|
356 | 347 | " stop_sequences: []\n", |
357 | 348 | " huggingface_pipeline_kwargs:\n", |
358 | 349 | " device: cpu\n", |
359 | | - " model: google/flan-t5-large\n", |
360 | | - " task: text2text-generation\n", |
| 350 | + " model: Qwen/Qwen2.5-1.5B-Instruct\n", |
| 351 | + " task: text-generation\n", |
361 | 352 | " streaming_callback: null\n", |
362 | 353 | " chat_template : \"{% for message in messages %}{% if message['role'] == 'user' %}{{ ' ' }}{% endif %}{{ message['content'] }}{% if not loop.last %}{{ ' ' }}{% endif %}{% endfor %}{{ eos_token }}\"\n", |
363 | 354 | " token:\n", |
|
395 | 386 | "outputs": [], |
396 | 387 | "source": [ |
397 | 388 | "from haystack import Pipeline\n", |
398 | | - "from haystack.components.builders import ChatPromptBuilder\n", |
399 | | - "from haystack.components.generators.chat import HuggingFaceLocalChatGenerator\n", |
400 | 389 | "\n", |
401 | 390 | "new_pipeline = Pipeline.loads(yaml_pipeline)" |
402 | 391 | ] |
|
424 | 413 | { |
425 | 414 | "data": { |
426 | 415 | "text/plain": [ |
427 | | - "{'llm': {'replies': [ChatMessage(content='Je me félicite des capybaras', role=<ChatRole.ASSISTANT: 'assistant'>, name=None, meta={'finish_reason': 'stop', 'index': 0, 'model': 'google/flan-t5-large', 'usage': {'completion_tokens': 13, 'prompt_tokens': 16, 'total_tokens': 29}})]}}" |
| 416 | + "{'llm': {'replies': [ChatMessage(content=\"J'aime les capybaras\", role=<ChatRole.ASSISTANT: 'assistant'>, name=None, meta={'finish_reason': 'stop', 'index': 0, 'model': 'Qwen/Qwen2.5-1.5B-Instruct', 'usage': {'completion_tokens': 13, 'prompt_tokens': 16, 'total_tokens': 29}})]}}" |
428 | 417 | ] |
429 | 418 | }, |
430 | 419 | "execution_count": 7, |
|
0 commit comments