Commit d8ad801: Factor out count_tokens

nicovank committed Dec 19, 2023
1 parent b50b1c0
Showing 2 changed files with 2 additions and 10 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -8,7 +8,7 @@ version = "0.2.2"
 authors = [
   { name="Emery Berger", email="[email protected]" },
 ]
-dependencies = ["llm_utils==0.2.2", "openai>=0.27.0", "tiktoken>=0.4.0"]
+dependencies = ["llm_utils==0.2.2", "openai>=0.27.0"]
 description = "ChatDBG."
 readme = "README.md"
 requires-python = ">=3.7"
10 changes: 1 addition & 9 deletions src/chatdbg/chatdbg_utils.py
@@ -1,5 +1,4 @@
 import os
-import tiktoken
 import openai
 
 from llm_utils import llm_utils
@@ -55,13 +54,6 @@ def read_lines(file_path: str, start_line: int, end_line: int) -> str:
     return "\n".join(lines[start_line:end_line])
 
 
-def num_tokens_from_string(string: str, model: str) -> int:
-    """Returns the number of tokens in a text string."""
-    encoding = tiktoken.encoding_for_model(model)
-    num_tokens = len(encoding.encode(string))
-    return num_tokens
-
-
 def explain(source_code: str, traceback: str, exception: str, really_run=True) -> None:
     import httpx
 
@@ -77,7 +69,7 @@ def explain(source_code: str, traceback: str, exception: str, really_run=True) -> None:
     if not model:
         return
 
-    input_tokens = num_tokens_from_string(user_prompt, model)
+    input_tokens = llm_utils.count_tokens(model, user_prompt)
 
     if not really_run:
         print(user_prompt)
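
Note on the refactor: the removed helper wrapped tiktoken directly, and the new call site delegates to llm_utils.count_tokens with the argument order flipped (model first, then the string). The actual implementation lives in the llm_utils package and is not shown in this diff; a minimal sketch of what it plausibly looks like, assuming it mirrors the removed helper, would be:

# Hypothetical sketch of llm_utils.count_tokens; not part of this commit.
# Assumes llm_utils still relies on tiktoken, as the deleted helper did.
import tiktoken

def count_tokens(model: str, string: str) -> int:
    """Returns the number of tokens `string` encodes to for `model`."""
    encoding = tiktoken.encoding_for_model(model)
    return len(encoding.encode(string))

Centralizing this in llm_utils lets ChatDBG drop its direct tiktoken dependency from pyproject.toml, as the first diff shows.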
