3 | 3 | RSpec.describe DiscourseAi::AiBot::Playground do
4 | 4 | subject(:playground) { described_class.new(bot) } |
5 | 5 |
6 | | - fab!(:claude_2) { Fabricate(:llm_model, name: "claude-2") } |
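| | + # claude-2 now specifies the real Anthropic provider and messages URL, so the
| | + # cancellation spec added below can stub https://api.anthropic.com/v1/messages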
| 6 | + fab!(:claude_2) do |
| 7 | + Fabricate( |
| 8 | + :llm_model, |
| 9 | + provider: "anthropic", |
| 10 | + url: "https://api.anthropic.com/v1/messages", |
| 11 | + name: "claude-2", |
| 12 | + ) |
| 13 | + end |
7 | 14 | fab!(:opus_model) { Fabricate(:anthropic_model) } |
8 | 15 |
9 | 16 | fab!(:bot_user) do |
948 | 955 | end |
949 | 956 | end |
950 | 957 |
| 958 | + describe "#canceling a completion" do
| 959 | + after { DiscourseAi::AiBot::PostStreamer.on_callback = nil } |
| 960 | + |
| 961 | + it "should be able to cancel a completion halfway through" do |
| 962 | + body = (<<~STRING).strip |
| 963 | + event: message_start |
| 964 | + data: {"type": "message_start", "message": {"id": "msg_1nZdL29xx5MUA1yADyHTEsnR8uuvGzszyY", "type": "message", "role": "assistant", "content": [], "model": "claude-3-opus-20240229", "stop_reason": null, "stop_sequence": null, "usage": {"input_tokens": 25, "output_tokens": 1}}} |
| 965 | +
| 966 | + event: content_block_start |
| 967 | + data: {"type": "content_block_start", "index":0, "content_block": {"type": "text", "text": ""}} |
| 968 | +
| 969 | + event: ping |
| 970 | + data: {"type": "ping"} |
| 971 | +
| 972 | + |event: content_block_delta
| 973 | + data: {"type": "content_block_delta", "index": 0, "delta": {"type": "text_delta", "text": "Hello"}} |
| 974 | +
| 975 | + |event: content_block_delta
| 976 | + data: {"type": "content_block_delta", "index": 0, "delta": {"type": "text_delta", "text": "1"}} |
| 977 | +
| 978 | + |event: content_block_delta
| 979 | + data: {"type": "content_block_delta", "index": 0, "delta": {"type": "text_delta", "text": "2"}} |
| 980 | +
| 981 | + |event: content_block_delta
| 982 | + data: {"type": "content_block_delta", "index": 0, "delta": {"type": "text_delta", "text": "3"}} |
| 983 | +
| 984 | + event: content_block_stop |
| 985 | + data: {"type": "content_block_stop", "index": 0} |
| 986 | +
| 987 | + event: message_delta |
| 988 | + data: {"type": "message_delta", "delta": {"stop_reason": "end_turn", "stop_sequence":null, "usage":{"output_tokens": 15}}} |
| 989 | +
| 990 | + event: message_stop |
| 991 | + data: {"type": "message_stop"} |
| 992 | + STRING |
| 993 | + |
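| | + # the "|" markers embedded in the payload above are not part of the SSE data;
| | + # they only mark where the body should be cut into separate streamed chunks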
| 994 | + split = body.split("|") |
| 995 | + |
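| | + # PostStreamer.on_callback is a test hook fired as the reply is streamed out;
| | + # once it has fired twice we delete the gpt_cancel key for the streamed post,
| | + # which triggers the endpoint to cancel the completion partway through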
| 996 | + count = 0 |
| 997 | + DiscourseAi::AiBot::PostStreamer.on_callback = |
| 998 | + proc do |callback| |
| 999 | + count += 1 |
| 1000 | + if count == 2 |
| 1001 | + last_post = third_post.topic.posts.order(:id).last |
| 1002 | + Discourse.redis.del("gpt_cancel:#{last_post.id}") |
| 1003 | + end |
| 1004 | + raise "this should not happen" if count > 2 |
| 1005 | + end |
| 1006 | + |
| 1007 | + require_relative("../../completions/endpoints/endpoint_compliance") |
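| | + # EndpointMock.with_chunk_array_support comes from endpoint_compliance; it lets
| | + # the stubbed response body be an array, so `split` is streamed chunk by chunk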
| 1008 | + EndpointMock.with_chunk_array_support do |
| 1009 | + stub_request(:post, "https://api.anthropic.com/v1/messages").to_return( |
| 1010 | + status: 200, |
| 1011 | + body: split, |
| 1012 | + ) |
| 1013 | + # we need to use real data here because we want to trigger the
| 1014 | + # base endpoint to cancel partway through
| 1015 | + playground.reply_to(third_post) |
| 1016 | + end |
| 1017 | + |
| 1018 | + last_post = third_post.topic.posts.order(:id).last |
| 1019 | + |
| 1020 | + # not Hello123: we cancelled after "1" was streamed, so "2" may still arrive before the stream stops
| 1021 | + expect(last_post.raw).to eq("Hello12") |
| 1022 | + end |
| 1023 | + end |
| 1024 | + |
951 | 1025 | describe "#available_bot_usernames" do |
952 | 1026 | it "includes persona users" do |
953 | 1027 | persona = Fabricate(:ai_persona) |