Uncompiled CoTWithThoughtSimplifiedBaleen as baseline
src/cot.py (new file, 34 lines)
@@ -0,0 +1,34 @@
import dspy
from modaic import PrecompiledAgent, PrecompiledConfig


# DSPy code
class GenerateAnswer(dspy.Signature):
    """Generate answers to the questions"""

    context = dspy.InputField(
        desc="may contain relevant facts and psychological insights"
    )
    question = dspy.InputField()
    answer_choices = dspy.InputField()
    answer = dspy.OutputField(desc="often between 1 and 5 words")


class CoTSimplifiedBaleenConfig(PrecompiledConfig):
    model: str = "gpt-3.5-turbo"
    max_tokens: int = 1000


class CoTSimplifiedBaleen(PrecompiledAgent):
    config: CoTSimplifiedBaleenConfig

    def __init__(self, config: CoTSimplifiedBaleenConfig, **kwargs):
        super().__init__(config, **kwargs)
        self.generate_answer = dspy.ChainOfThought(GenerateAnswer)
        self.generate_answer.set_lm(dspy.LM(model=config.model, max_tokens=config.max_tokens))

    def forward(self, question, context, answer_choices):
        pred = self.generate_answer(
            context=context, question=question, answer_choices=answer_choices
        )
        return dspy.Prediction(context=context, answer=pred.answer)
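
A minimal usage sketch (not part of the commit), assuming src/cot.py is importable as src.cot, that the modaic PrecompiledAgent/PrecompiledConfig API behaves as used above, and that credentials for the configured model are available in the environment; the question, context, and answer choices are purely illustrative:

# Illustrative only -- not part of this commit.
# Assumes src/cot.py is importable as src.cot and an OpenAI API key is set in the environment.
from src.cot import CoTSimplifiedBaleen, CoTSimplifiedBaleenConfig

config = CoTSimplifiedBaleenConfig()  # defaults: model="gpt-3.5-turbo", max_tokens=1000
agent = CoTSimplifiedBaleen(config)

# forward() is called directly, matching the method defined in the diff above.
pred = agent.forward(
    question="How often do you feel nervous before speaking to a group?",  # hypothetical item
    context="Self-report survey on social anxiety.",                       # hypothetical context
    answer_choices="1. Never, 2. Rarely, 3. Sometimes, 4. Often, 5. Always",
)
print(pred.answer)  # expected to name one of the answer choices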