question_prerequsite.py
from prompt_builder import PromptBuilder
from llm_model import LLMModel, OpenAIModel


class QuestionPrerequisite:
    def __init__(self, model: LLMModel):
        """
        Initializes the QuestionPrerequisite with a specified model.

        Args:
            model: An instance of a language model (e.g., OpenAIModel).
        """
        self.model = model
        self.prompt_builder = PromptBuilder()

    def question_prerequisites(self, question: str, options: list[str]) -> list[str]:
        """
        Generates prerequisites for a given question based on the provided answer options.

        Args:
            question (str): The question for which prerequisites are to be generated.
            options (list[str]): A list of answer options related to the question.

        Returns:
            list[str]: A list of prerequisite strings generated by the model.

        Raises:
            RuntimeError: If prompt generation or model response retrieval fails.
            ValueError: If the response is not a list of strings.
        """
        try:
            prompt = self.prompt_builder.get_prerequisites_prompt(question, options)
        except Exception as e:
            raise RuntimeError(f"Failed to generate prompt: {e}") from e

        try:
            response = self.model.get_response([{"role": "user", "content": prompt}])
        except Exception as e:
            raise RuntimeError(f"Failed to get response from model: {e}") from e

        # Validate that the model returned a list of prerequisite strings.
        if not isinstance(response, list) or not all(isinstance(msg, str) for msg in response):
            raise ValueError("Expected response to be a list of strings.")

        return response


if __name__ == "__main__":
    model = OpenAIModel()
    question_prerequisite = QuestionPrerequisite(model)
    question = "What is the unit of force?"
    options = ["A) Joule", "B) Newton", "C) Pascal", "D) Watt"]
    response = question_prerequisite.question_prerequisites(question, options)
    print(response)
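

# --- Illustrative only: a minimal offline sketch, not part of the module's API ---
# Assumptions: LLMModel.get_response(messages) returns a list of strings (as the
# validation above implies), PromptBuilder.get_prerequisites_prompt(question, options)
# works without network access, and the `model: LLMModel` annotation is not enforced
# at runtime, so any duck-typed object with get_response() is accepted. The name
# _StubModel is hypothetical and exists only to show how QuestionPrerequisite can
# be exercised without calling OpenAI.
class _StubModel:
    """Hypothetical stand-in for LLMModel that returns canned prerequisites."""

    def get_response(self, messages: list[dict]) -> list[str]:
        # Ignore the incoming prompt and return a fixed list of strings,
        # matching the shape question_prerequisites() expects.
        return [
            "Familiarity with SI base and derived units",
            "Newton's second law (F = m * a)",
        ]

# Example (run manually):
#   stub_runner = QuestionPrerequisite(_StubModel())
#   print(stub_runner.question_prerequisites(
#       "What is the unit of force?",
#       ["A) Joule", "B) Newton", "C) Pascal", "D) Watt"],
#   ))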