Skip to content

Commit

Permalink
temporary: Added a preview checkbox to replace the tabs to display the preview of the user question.
Browse files Browse the repository at this point in the history

Added an empty Chatbot component, which does not do anything yet.
  • Loading branch information
anirbanbasu committed Aug 16, 2024
1 parent 81df7d9 commit 9f81653
Show file tree
Hide file tree
Showing 3 changed files with 50 additions and 21 deletions.
14 changes: 9 additions & 5 deletions src/coder_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -194,11 +194,13 @@ def solve(self, state: AgentState) -> dict:
"""
# Get the inputs for the solver
inputs = {
# FIXME: Check if this is a human message at all!
constants.CHAIN_DICT__KEY_INPUT: state[constants.AGENT_STATE__KEY_MESSAGES][
-1
].content
# FIXME: Check if this is a human message at all! Must be able to parse all the messages.
# constants.CHAIN_DICT__KEY_INPUT: state[constants.AGENT_STATE__KEY_MESSAGES][
# -1
# ].content
constants.CHAIN_DICT__KEY_INPUT: state[constants.AGENT_STATE__KEY_MESSAGES]
}
ic(inputs[constants.CHAIN_DICT__KEY_INPUT])
# Have we been presented with examples?
has_examples = bool(state.get(constants.AGENT_STATE__KEY_EXAMPLES))
ic(state)
Expand Down Expand Up @@ -286,7 +288,9 @@ def evaluate(self, state: AgentState) -> dict:
ai_message: AIMessage = state[constants.AGENT_STATE__KEY_MESSAGES][-1]
json_dict = ai_message.content[0]
if not json_dict[constants.PYDANTIC_MODEL__CODE_OUTPUT__CODE]:
# If there was no tool call, add a `HumanMessage` to prompt the agent to generate code.
# If there was no code, add a `HumanMessage` to prompt the agent to generate code.
# FIXME: Must prompt the LLM with the last human message that specified the coding question, or ensure
# that the LLM gets a chat log of all the messages.
return {
constants.AGENT_STATE__KEY_MESSAGES: [
HumanMessage(
Expand Down
14 changes: 8 additions & 6 deletions src/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,31 +114,33 @@
ENV_VAR_VALUE__LLM_CODER_SYSTEM_PROMPT = """
You are a world-class Python programmer. You write concise and well-documented code following the PEP8 style guide.
Please respond with a Python 3 solution to the problem below.
Please respond with a Python 3 solution to the given problem below.
First, output a reasoning through the problem and conceptualise a solution. Whenever possible, add a time and a space complexity analysis for your solution.
Then, output a pseudocode in Pascal to implement your concept solution.
Then, output the working Python 3 code for your solution. Do not use external libraries. Your code must be able to accept inputs from `sys.stdin` and write the final output to `sys.stdout` (or, to `sys.stderr` in case of errors).
Then, output a well-documented working Python 3 code for your solution. Do not use external libraries. Your code must be able to accept inputs from `sys.stdin` and write the final output to `sys.stdout` (or, to `sys.stderr` in case of errors).
Finally, output a one sentence summary describing what your solution does, as if you are explaining your solution to the human user.
Optional examples of similar problems and solutions (may not be in Python):
{examples}
Given problem:
Given problem and your conversation with the user about it:
"""

ENV_VAR_VALUE__LLM_CODER_SYSTEM_PROMPT = """
You are a world-class Python programmer. You write concise and well-documented code following the PEP8 style guide.
Please respond with a Python 3 solution to the problem below.
Please respond with a Python 3 solution to the given problem below.
First, output a reasoning through the problem and conceptualise a solution. Whenever possible, add a time and a space complexity analysis for your solution.
Then, output a pseudocode in Pascal to implement your concept solution.
Then, output the working Python 3 code for your solution. Do not use external libraries. Your code must be able to accept inputs from `sys.stdin` and write the final output to `sys.stdout` (or, to `sys.stderr` in case of errors).
Then, output a well-documented working Python 3 code for your solution. Do not use external libraries. Your code must be able to accept inputs from `sys.stdin` and write the final output to `sys.stdout` (or, to `sys.stderr` in case of errors).
Finally, output a one sentence summary describing what your solution does, as if you are explaining your solution to the human user.
Optional examples of similar problems and solutions (may not be in Python):
{examples}
Given problem:
Given problem and your conversation with the user about it:
"""


Expand Down
43 changes: 33 additions & 10 deletions src/webapp.py
Original file line number Diff line number Diff line change
Expand Up @@ -253,7 +253,7 @@ def find_solution(
]

def add_test_case(
self, test_cases: list[TestCase], test_case_in: str, test_case_out: str
self, test_cases: list[TestCase] | None, test_case_in: str, test_case_out: str
) -> list[TestCase]:
"""
Add a test case to the list of test cases.
Expand Down Expand Up @@ -321,19 +321,25 @@ def construct_interface(self):
with gr.Row(elem_id="ui_main"):
with gr.Column(elem_id="ui_main_left"):
gr.Markdown("# Coding challenge")
gr.Chatbot(
bubble_full_width=True,
likeable=True,
placeholder="Your conversation with AI will appear here...",
)
btn_code = gr.Button(
value="Let's code!",
variant="primary",
)
with gr.Tab(label="The coding question"):
input_user_question = gr.TextArea(
label="Question (in Markdown)",
placeholder="Enter the coding question that you want to ask...",
lines=10,
elem_id="user_question",
)
with gr.Tab(label="Question preview"):
user_input_preview = gr.Markdown()
chk_show_user_input_preview = gr.Checkbox(
value=False, label="Preview question (Markdown formatted)"
)
input_user_question = gr.TextArea(
label="Question (in Markdown)",
placeholder="Enter the coding question that you want to ask...",
lines=10,
elem_id="user_question",
)
user_input_preview = gr.Markdown(visible=False)
with gr.Accordion(label="Code evaluation", open=False):
with gr.Row(equal_height=True):
input_test_cases_in = gr.Textbox(
Expand Down Expand Up @@ -412,6 +418,23 @@ def construct_interface(self):
api_name=False,
)

chk_show_user_input_preview.change(
fn=lambda checked: (
(
gr.update(visible=False),
gr.update(visible=True),
)
if checked
else (
gr.update(visible=True),
gr.update(visible=False),
)
),
inputs=[chk_show_user_input_preview],
outputs=[input_user_question, user_input_preview],
api_name=False,
)

def run(self):
"""Run the Gradio app by launching a server."""
self.construct_interface()
Expand Down

0 comments on commit 9f81653

Please sign in to comment.