 ]
 
 
+@pytest.fixture(scope="session", params=[pytest.param(True, id="streaming"), pytest.param(False, id="non-streaming")])
+def response_streaming(request):
+    return request.param
+
+
 @pytest.fixture(scope="module")
-def exercise_model(bedrock_converse_server):
+def exercise_model(bedrock_converse_server, response_streaming):
     def _exercise_model(message):
         inference_config = {"temperature": 0.7, "maxTokens": 100}
 
-        response = bedrock_converse_server.converse(
+        _response = bedrock_converse_server.converse(
+            modelId="anthropic.claude-3-sonnet-20240229-v1:0",
+            messages=message,
+            system=[{"text": "You are a scientist."}],
+            inferenceConfig=inference_config,
+        )
+
+    def _exercise_model_streaming(message):
+        inference_config = {"temperature": 0.7, "maxTokens": 100}
+
+        response = bedrock_converse_server.converse_stream(
             modelId="anthropic.claude-3-sonnet-20240229-v1:0",
             messages=message,
             system=[{"text": "You are a scientist."}],
             inferenceConfig=inference_config,
         )
+        _responses = list(response["stream"])  # Consume the response stream
 
-    return _exercise_model
+    return _exercise_model_streaming if response_streaming else _exercise_model
 
 
 @reset_core_stats_engine()
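
For reference, parametrizing a session-scoped fixture this way makes pytest collect every test that depends on it (directly or transitively) once per parameter, with the `id` strings appended to the test node IDs. Below is a minimal, self-contained sketch of the same pattern; the stand-in callables and the assertion are illustrative only and not part of this change:

# Minimal sketch of the fixture pattern in the diff above (stand-in callables
# only; the real fixtures wrap bedrock converse()/converse_stream()).
import pytest


@pytest.fixture(scope="session", params=[pytest.param(True, id="streaming"), pytest.param(False, id="non-streaming")])
def response_streaming(request):
    return request.param


@pytest.fixture(scope="module")
def exercise_model(response_streaming):
    def _exercise_model(message):
        return f"non-streaming:{message}"

    def _exercise_model_streaming(message):
        return f"streaming:{message}"

    # Select the callable once per parametrized value, as in the diff.
    return _exercise_model_streaming if response_streaming else _exercise_model


def test_exercise_model(exercise_model, response_streaming):
    # Collected twice: test_exercise_model[streaming] and test_exercise_model[non-streaming].
    result = exercise_model("hello")
    assert result.startswith("streaming" if response_streaming else "non-streaming")

One design note on the streaming branch: `converse_stream` returns its events lazily under a `"stream"` key, so the `list(response["stream"])` call is what actually drains the stream; without it, the test would return before any streamed content (and any instrumentation hooked into iteration) is exercised.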