Skip to content

Conversation

deepankarm
Copy link

@deepankarm deepankarm commented Jul 2, 2025

A proposal to fix #1143

Since llm.Attachment performs its I/O through httpx's synchronous (non-async) primitives, we can make build_messages async-friendly by running it on a separate thread via asyncio.to_thread.

This can be verified with the following code (the event-loop block is detected with pyleak).

import os
import asyncio

from llm.default_plugins.openai_models import AsyncChat
from llm.models import Attachment
from pyleak import no_event_loop_blocking


async def main():
    """Reproduce the event-loop block caused by Attachment's sync httpx I/O.

    Sends an image-URL attachment to an async OpenAI chat model while
    pyleak watches the event loop; any blocking call raises.
    """
    # Build the async chat model from environment credentials.
    chat = AsyncChat(
        model_id="gpt-4.1-2025-04-14",
        api_base="https://api.openai.com/v1",
        key=os.getenv("OPENAI_API_KEY"),
    )
    convo = chat.conversation()

    # The attachment's type resolution issues a synchronous httpx.head()
    # call, which is what blocks the loop without the to_thread fix.
    cat_image = Attachment(
        url="https://upload.wikimedia.org/wikipedia/commons/thumb/7/74/A-Cat.jpg/1200px-A-Cat.jpg?20101227100718",
    )

    # pyleak raises if anything blocks the event loop inside this context.
    async with no_event_loop_blocking(action="raise"):
        await convo.prompt(
            prompt="What is this image?",
            attachments=[cat_image],
        )

    print(convo.responses)


if __name__ == "__main__":
    asyncio.run(main())

Before

with messages = self.build_messages(prompt, conversation)

pyleak.eventloop.EventLoopBlockError: Detected 1 event loop blocks

Event Loop Block: block-1
  Duration: 0.305s (threshold: 0.100s)
  Timestamp: 1751442718.988
  Blocking Stack:
    File "/.../deepankarm/dummy/llm-run-1.py", line 30, in <module>
        asyncio.run(main())
      File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 194, in run
        return runner.run(main)
      File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 118, in run
        return self._loop.run_until_complete(task)
      File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 671, in run_until_complete
        self.run_forever()
      File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 638, in run_forever
        self._run_once()
      File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 1971, in _run_once
        handle._run()
      File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/events.py", line 84, in _run
        self._context.run(self._callback, *self._args)
      File "/.../deepankarm/dummy/llm-1.py", line 17, in main
        await conversation.prompt(
      File "/.../deepankarm/llm/llm/models.py", line 1382, in _force
        async for chunk in self:
      File "/.../deepankarm/llm/llm/models.py", line 1365, in __anext__
        chunk = await self._generator.__anext__()
      File "/.../deepankarm/llm/llm/default_plugins/openai_models.py", line 758, in execute
        messages = self.build_messages(prompt, conversation)
      File "/.../deepankarm/llm/llm/default_plugins/openai_models.py", line 593, in build_messages
        attachment_message.append(_attachment(attachment))
      File "/.../deepankarm/llm/llm/default_plugins/openai_models.py", line 437, in _attachment
        if attachment.resolve_type() == "application/pdf":
      File "/.../deepankarm/llm/llm/models.py", line 79, in resolve_type
        response = httpx.head(self.url)
      File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/site-packages/httpx/_api.py", line 267, in head
        return request(
      File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/site-packages/httpx/_api.py", line 102, in request
        with Client(
      File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/site-packages/httpx/_client.py", line 688, in __init__
        self._transport = self._init_transport(
      File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/site-packages/httpx/_client.py", line 731, in _init_transport
        return HTTPTransport(
      File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/site-packages/httpx/_transports/default.py", line 153, in __init__
        ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env)
      File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/site-packages/httpx/_config.py", line 40, in create_ssl_context
        ctx = ssl.create_default_context(cafile=certifi.where())
      File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/ssl.py", line 708, in create_default_context
        context.load_verify_locations(cafile, capath, cadata)

After

with messages = await asyncify(self.build_messages, prompt, conversation)

[<AsyncResponse prompt='What is this image?' text='This image shows a close-up of a tabby cat. The cat has greenish-yellow eyes, a pink nose, and a coat with distinctive stripes and markings typical of a tabby pattern. The background appears to be outdoors, possibly on a tiled surface with some dry grass or plants.'>]

I've also added a unit test that detects event-loop blocking using pytest.mark.no_leaks with a mocked httpx.head response.

Disclaimer: I'm the author of pyleak.


@pytest.mark.asyncio
@pytest.mark.no_leaks
async def test_async_chat_with_attachment_non_blocking(httpx_mock):
Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm not sure this is the right file in which to add this test.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment

Labels

None yet

Projects

None yet

Development

Successfully merging this pull request may close these issues.

Make attachment content loading properly asyncio-friendly

1 participant