chore: removed async time tests from benchmarks (#1061)
jjmachan authored Jul 2, 2024
1 parent e0181fd commit 710b384
Showing 6 changed files with 29 additions and 55 deletions.
11 changes: 6 additions & 5 deletions src/ragas/metrics/base.py
@@ -63,11 +63,13 @@ def get_required_columns(
 class Metric(ABC):
     @property
     @abstractmethod
-    def name(self) -> str: ...
+    def name(self) -> str:
+        ...

     @property
     @abstractmethod
-    def evaluation_mode(self) -> EvaluationMode: ...
+    def evaluation_mode(self) -> EvaluationMode:
+        ...

     @abstractmethod
     def init(self, run_config: RunConfig):
@@ -136,9 +138,8 @@ async def ascore(
         return score

     @abstractmethod
-    async def _ascore(
-        self, row: t.Dict, callbacks: Callbacks, is_async: bool
-    ) -> float: ...
+    async def _ascore(self, row: t.Dict, callbacks: Callbacks, is_async: bool) -> float:
+        ...


 @dataclass
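For context, concrete metrics implement this `Metric` ABC by filling in the abstract members shown in the hunk above. A minimal illustrative sketch, not a real ragas metric: the class name, the `EvaluationMode.qa` member, and the toy scoring rule are all assumptions for illustration.

    import typing as t

    from ragas.metrics.base import EvaluationMode, Metric


    class AnswerLengthMetric(Metric):
        # Invented example metric: scores answers by length, capped at 1.0.

        @property
        def name(self) -> str:
            return "answer_length"

        @property
        def evaluation_mode(self) -> EvaluationMode:
            return EvaluationMode.qa  # assumes a question/answer mode on this enum

        def init(self, run_config):
            ...  # this toy metric needs no setup

        async def _ascore(self, row: t.Dict, callbacks, is_async: bool) -> float:
            # Toy score: longer answers approach 1.0.
            return min(len(row["answer"]) / 500.0, 1.0)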
4 changes: 2 additions & 2 deletions src/ragas/testset/generator.py
@@ -218,10 +218,10 @@ def generate_with_langchain_docs(

     def init_evolution(self, evolution: Evolution) -> None:
         evolution.docstore = self.docstore

         if evolution.generator_llm is None:
             evolution.generator_llm = self.generator_llm

         if evolution.question_filter is None:
             evolution.question_filter = QuestionFilter(llm=self.critic_llm)
         if evolution.node_filter is None:
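The `init_evolution` hook above only fills in collaborators the caller left unset, so a partially configured `Evolution` keeps whatever it was given. A hypothetical call, assuming `my_llm` and a `TestsetGenerator` instance named `generator` already exist (`simple` is a stock evolution in `ragas.testset.evolutions`):

    from ragas.testset.evolutions import simple

    evolution = simple
    evolution.generator_llm = my_llm     # assumed: a ragas-wrapped LLM; kept as-is
    generator.init_evolution(evolution)  # unset docstore and filters get defaults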
6 changes: 3 additions & 3 deletions src/ragas/utils.py
@@ -148,8 +148,8 @@ def emit_warning(*args, **kwargs):

 def get_or_init(
     dictionary: t.Dict[str, t.Any], key: str, default: t.Callable[[], t.Any]
 ) -> t.Any:
     _value = dictionary.get("key")
     value = _value if _value is not None else default()
     return value
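A quick usage sketch of this helper; the dictionary and key below are invented. Note that the body as committed reads `dictionary.get("key")` with a string literal, which looks like it should be `dictionary.get(key)`: as written, any key other than the literal "key" falls through to the default.

    from ragas.utils import get_or_init

    config = {"max_workers": 8}

    # Intended behavior: return the stored value when present, else build one.
    workers = get_or_init(config, "max_workers", lambda: 16)
    # With the body as committed, the literal "key" lookup misses, so this
    # returns the default 16 rather than the stored 8.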
29 changes: 8 additions & 21 deletions tests/benchmarks/benchmark_eval.py
@@ -34,28 +34,15 @@
 ]

 # os.environ["PYTHONASYNCIODEBUG"] = "1"
-IGNORE_THREADS = True
-IGNORE_ASYNCIO = False

 if __name__ == "__main__":
-    # asyncio
-    if not IGNORE_ASYNCIO:
-        print("Starting [Asyncio]")
-        start = time.time()
-        _ = evaluate(
-            eval_dataset,
-            metrics=metrics,
-            is_async=True,
-        )
-        print(f"Time taken [Asyncio]: {time.time() - start:.2f}s")

-    # Threads
-    if not IGNORE_THREADS:
-        print("Starting [Threads]")
-        start = time.time()
-        _ = evaluate(
-            eval_dataset,
-            metrics=metrics,
-            is_async=False,
-        )
-        print(f"Time taken [Threads]: {time.time() - start:.2f}s")
+    print("Starting [Asyncio]")
+    start = time.time()
+    _ = evaluate(
+        eval_dataset,
+        metrics=metrics,
+        is_async=True,
+    )
+    print(f"Time taken [Asyncio]: {time.time() - start:.2f}s")
32 changes: 9 additions & 23 deletions tests/benchmarks/benchmark_testsetgen.py
@@ -26,33 +26,19 @@ def get_documents():
     return documents


-IGNORE_THREADS = False
-IGNORE_ASYNCIO = False
 # os.environ["PYTHONASYNCIODEBUG"] = "1"

 if __name__ == "__main__":
     documents = get_documents()

-    # asyncio
-    if not IGNORE_ASYNCIO:
-        print("Starting [Asyncio]")
-        start = time.time()
-        generator.generate_with_llamaindex_docs(
-            documents=documents,
-            test_size=50,
-            distributions=distributions,
-            is_async=True,
-        )
-        print(f"Time taken: {time.time() - start:.2f}s")

-    # Threads
-    if not IGNORE_THREADS:
-        print("Starting [Threads]")
-        start = time.time()
-        generator.generate_with_llamaindex_docs(
-            documents=documents,
-            test_size=50,
-            distributions=distributions,
-            is_async=False,
-        )
-        print(f"Time taken [Threads]: {time.time() - start:.2f}s")
+    print("Starting [Asyncio]")
+    start = time.time()
+    generator.generate_with_llamaindex_docs(
+        documents=documents,
+        test_size=50,
+        distributions=distributions,
+        is_async=True,
+    )
+    print(f"Time taken: {time.time() - start:.2f}s")
2 changes: 1 addition & 1 deletion tests/unit/test_metric.py
@@ -1,5 +1,5 @@
-from ragas.metrics.utils import get_available_metrics
 from ragas.metrics.base import EvaluationMode
+from ragas.metrics.utils import get_available_metrics


 def test_get_available_metrics():
