|
1 | 1 | local helpers = require "spec.helpers" |
2 | 2 | local shell = require "resty.shell" |
3 | 3 | local pl_file = require "pl.file" |
| 4 | +local timeout = 10 |
| 5 | +local step = 1 |
4 | 6 |
|
5 | 7 | local tcp_service_port = helpers.get_available_port() |
6 | 8 | local tcp_proxy_port = helpers.get_available_port() |
@@ -814,14 +816,16 @@ describe("Plugin: prometheus (access) AI metrics", function() |
814 | 816 | assert.res_status(200, res) |
815 | 817 |
|
816 | 818 | local body |
| 819 | +      -- Wait until the histogram observation has finished and the metrics report the correct value. |
817 | 820 | helpers.wait_until(function() |
818 | 821 | local res = assert(admin_client:send { |
819 | 822 | method = "GET", |
820 | 823 | path = "/metrics", |
821 | 824 | }) |
822 | 825 | body = assert.res_status(200, res) |
823 | | - return res.status == 200 |
824 | | - end) |
| 826 | + return body:find('ai_llm_provider_latency_ms_bucket{ai_provider="openai",ai_model="gpt-3.5-turbo",cache_status="",vector_db="",embeddings_provider="",embeddings_model="",workspace="default",le="+Inf"} 1', |
| 827 | + nil, true) |
| 828 | + end, timeout, step) |
825 | 829 |
|
826 | 830 | assert.matches('kong_nginx_metric_errors_total 0', body, nil, true) |
827 | 831 | assert.matches('http_requests_total{service="empty_service",route="http-route",code="200",source="service",workspace="default",consumer=""} 2', body, nil, true) |
@@ -858,8 +862,9 @@ describe("Plugin: prometheus (access) AI metrics", function() |
858 | 862 | path = "/metrics", |
859 | 863 | }) |
860 | 864 | body = assert.res_status(200, res) |
861 | | - return res.status == 200 |
862 | | - end) |
| 865 | + return body:find('ai_llm_provider_latency_ms_bucket{ai_provider="openai",ai_model="gpt-3.5-turbo",cache_status="",vector_db="",embeddings_provider="",embeddings_model="",workspace="default",le="+Inf"} 2', |
| 866 | + nil, true) |
| 867 | + end, timeout, step) |
863 | 868 |
|
864 | 869 | assert.matches('kong_nginx_metric_errors_total 0', body, nil, true) |
865 | 870 | assert.matches('http_requests_total{service="empty_service",route="http-route",code="200",source="service",workspace="default",consumer=""} 3', body, nil, true) |
@@ -892,8 +897,9 @@ describe("Plugin: prometheus (access) AI metrics", function() |
892 | 897 | path = "/metrics", |
893 | 898 | }) |
894 | 899 | body = assert.res_status(200, res) |
895 | | - return res.status == 200 |
896 | | - end) |
| 900 | + return body:find('ai_llm_provider_latency_ms_bucket{ai_provider="openai",ai_model="gpt-3.5-turbo",cache_status="",vector_db="",embeddings_provider="",embeddings_model="",workspace="default",le="+Inf"} 2', |
| 901 | + nil, true) |
| 902 | + end, timeout, step) |
897 | 903 |
|
898 | 904 | assert.matches('http_requests_total{service="empty_service",route="http-route",code="400",source="kong",workspace="default",consumer=""} 1', body, nil, true) |
899 | 905 | assert.matches('kong_nginx_metric_errors_total 0', body, nil, true) |
|
0 commit comments