File tree (Expand / Collapse): 7 files changed, +20 −24 lines changed
lines changed Original file line number Diff line number Diff line change 1+ Dockerfile
Original file line number Diff line number Diff line change @@ -28,6 +28,11 @@ run-ci: format lint type ## Running all CI checks
2828run-benchmarks : # # Run benchmarks
2929 @echo " Running benchmarks..."
3030 @cd $(GIT_ROOT ) /tests/benchmarks && python benchmark_eval.py
31+ run-benchmarks-in-docker : # # Run benchmarks in docker
32+ @echo " Running benchmarks in docker..."
33+ @cd $(GIT_ROOT )
34+ docker buildx build --build-arg OPENAI_API_KEY=$(OPENAI_API_KEY ) -t ragas-benchmark -f $(GIT_ROOT ) /tests/benchmarks/Dockerfile .
35+ docker inspect ragas-benchmark:latest | jq " .[0].Size" | numfmt --to=si
3136test : # # Run tests
3237 @echo " Running tests..."
3338 @pytest tests/unit $(shell if [ -n "$(k ) " ]; then echo "-k $(k ) "; fi)
Original file line number Diff line number Diff line change 22name = " ragas"
33dependencies = [
44 " numpy" ,
5- " transformers" ,
6- " sentence-transformers" ,
75 " datasets" ,
86 " tiktoken" ,
97 " langchain" ,
@@ -13,6 +11,11 @@ dependencies = [
1311]
1412dynamic = [" version" , " readme" ]
1513
14+ [project .optional-dependencies ]
15+ all = [
16+ " sentence-transformers" ,
17+ ]
18+
1619[tool .setuptools ]
1720package-dir = {"" = " src" }
1821
Original file line number Diff line number Diff line change 1616from langchain .callbacks .manager import CallbackManager , trace_as_chain_group
1717from tqdm import tqdm
1818
19+ from ragas .embeddings .base import RagasEmbeddings
1920from ragas .llms import llm_factory
2021
2122if t .TYPE_CHECKING :
2223 from langchain .callbacks .base import Callbacks
2324
24- from ragas .embeddings .base import RagasEmbeddings
2525 from ragas .llms import RagasLLM
2626
2727
Original file line number Diff line number Diff line change 22
33import logging
44import os
5- import typing as t
65from functools import lru_cache
7- from warnings import warn
86
9- import torch
10- from torch import device as Device
11-
12- DEVICES = ["cpu" , "cuda" ]
137DEBUG_ENV_VAR = "RAGAS_DEBUG"
148# constant to tell us that there is no key passed to the llm/embeddings
159NO_KEY = "no-key"
1610
1711
18- def device_check (device : t .Literal ["cpu" , "cuda" ] | Device ) -> torch .device :
19- if isinstance (device , Device ):
20- return device
21- if device not in DEVICES :
22- raise ValueError (f"Invalid device { device } " )
23- if device == "cuda" and not torch .cuda .is_available ():
24- warn ("cuda not available, using cpu" )
25- device = "cpu"
26-
27- return torch .device (device )
28-
29-
3012@lru_cache (maxsize = 1 )
3113def get_debug_mode () -> bool :
3214 if os .environ .get (DEBUG_ENV_VAR , str (False )).lower () == "true" :
Original file line number Diff line number Diff line change 1+ FROM python:3.9-slim
2+ RUN apt-get update && apt-get install -y git make
3+ COPY . /app
4+ WORKDIR /app
5+ RUN pip install -e /app/
6+ ARG OPENAI_API_KEY
7+ ENV OPENAI_API_KEY=$OPENAI_API_KEY
8+ RUN make run-benchmarks
Original file line number Diff line number Diff line change 11import time
22
33from datasets import DatasetDict , load_dataset
4- from torch .cuda import is_available
54
65from ragas import evaluate
76from ragas .metrics import (
1211)
1312from ragas .metrics .critique import harmfulness
1413
15- DEVICE = "cuda" if is_available () else "cpu"
16-
1714# data
1815ds = load_dataset ("explodinggradients/fiqa" , "ragas_eval" )
1916assert isinstance (ds , DatasetDict )
You can’t perform that action at this time.
0 commit comments