Compiled CoTWithThoughtSimplifiedBaleen with bootstrap_fewshot_with_random_search for location-fo-fine
This commit is contained in:
0
src/__init__.py
Normal file
0
src/__init__.py
Normal file
34
src/cot.py
Normal file
34
src/cot.py
Normal file
@@ -0,0 +1,34 @@
|
||||
import dspy
|
||||
from modaic import PrecompiledAgent, PrecompiledConfig
|
||||
|
||||
|
||||
# DSPy code
|
||||
# DSPy signature for the answering step. NOTE: in dspy the class docstring is
# sent to the LM as the task instruction and each field's `desc` becomes prompt
# text, so those strings are runtime behavior — do not edit them casually.
class GenerateAnswer(dspy.Signature):
    """Generate answers to the questions"""

    # Background passages supplied by the caller (retrieval output, presumably).
    context = dspy.InputField(
        desc="may contain relevant facts and psychological insights"
    )
    # The question to be answered.
    question = dspy.InputField()
    # Candidate answers the model should choose among.
    answer_choices = dspy.InputField()
    # Model's short free-text answer.
    answer = dspy.OutputField(desc="often between 1 and 5 words")
|
||||
|
||||
|
||||
# Configuration for CoTSimplifiedBaleen; fields are forwarded to dspy.LM.
class CoTSimplifiedBaleenConfig(PrecompiledConfig):
    # LM backend identifier passed as dspy.LM(model=...).
    model: str = "gpt-3.5-turbo"
    # Completion token cap passed as dspy.LM(max_tokens=...).
    max_tokens: int = 1000
|
||||
|
||||
|
||||
class CoTSimplifiedBaleen(PrecompiledAgent):
    """Single-step chain-of-thought QA agent.

    Wraps one ``dspy.ChainOfThought`` module over :class:`GenerateAnswer`
    and backs it with an LM built from the config's model/max_tokens.
    """

    config: CoTSimplifiedBaleenConfig

    def __init__(self, config: CoTSimplifiedBaleenConfig, **kwargs):
        super().__init__(config, **kwargs)
        # Build the LM once from config, then attach it to the CoT module.
        lm = dspy.LM(model=config.model, max_tokens=config.max_tokens)
        self.generate_answer = dspy.ChainOfThought(GenerateAnswer)
        self.generate_answer.set_lm(lm)

    def forward(self, question, context, answer_choices):
        """Answer `question` from `context`, choosing among `answer_choices`.

        Returns a ``dspy.Prediction`` carrying the original context and the
        model's answer.
        """
        prediction = self.generate_answer(
            question=question,
            context=context,
            answer_choices=answer_choices,
        )
        return dspy.Prediction(context=context, answer=prediction.answer)
|
||||
51
src/cot_with_thought.py
Normal file
51
src/cot_with_thought.py
Normal file
@@ -0,0 +1,51 @@
|
||||
import dspy
|
||||
from modaic import PrecompiledAgent, PrecompiledConfig
|
||||
|
||||
|
||||
# DSPy code
|
||||
# DSPy signature for the answering step of the two-stage pipeline. NOTE: in
# dspy the class docstring is the task instruction and each field's `desc` is
# prompt text, so those strings are runtime behavior — do not edit them casually.
class GenerateAnswer(dspy.Signature):
    """Generate answers to the questions"""

    # Background passages supplied by the caller (retrieval output, presumably).
    context = dspy.InputField(
        desc="may contain relevant facts and psychological insights"
    )
    # The question to be answered.
    question = dspy.InputField()
    # Auxiliary reasoning produced upstream by GenerateThought.
    thought = dspy.InputField(desc="a thought that might help answer the question")
    # Candidate answers the model should choose among.
    answer_choices = dspy.InputField()
    # Model's short free-text answer.
    answer = dspy.OutputField(desc="often between 1 and 5 words")
|
||||
|
||||
|
||||
# DSPy signature for the first stage: produce an auxiliary "thought" that is
# later fed into GenerateAnswer's `thought` input. Docstring and `desc` strings
# are prompt text (runtime behavior) — keep them unchanged.
class GenerateThought(dspy.Signature):
    """Generate thoughts about questions"""

    # Background passages supplied by the caller (retrieval output, presumably).
    context = dspy.InputField(
        desc="may contain relevant facts and psychological insights"
    )
    # The question to reason about.
    question = dspy.InputField()
    # Intermediate reasoning consumed by the answering stage.
    thought = dspy.OutputField(desc="a thought that might help answer the question")
|
||||
|
||||
|
||||
# Configuration for CoTWithThoughtSimplifiedBaleen; fields are forwarded to dspy.LM.
class CoTWithThoughtSimplifiedBaleenConfig(PrecompiledConfig):
    # LM backend identifier passed as dspy.LM(model=...).
    model: str = "gpt-3.5-turbo"
    # Completion token cap passed as dspy.LM(max_tokens=...).
    max_tokens: int = 1000
|
||||
|
||||
|
||||
class CoTWithThoughtSimplifiedBaleen(PrecompiledAgent):
    """Two-stage chain-of-thought QA agent.

    Stage 1 (:class:`GenerateThought`) produces an auxiliary thought from the
    context and question; stage 2 (:class:`GenerateAnswer`) consumes that
    thought together with the answer choices to produce the final answer.
    """

    config: CoTWithThoughtSimplifiedBaleenConfig

    def __init__(self, config: CoTWithThoughtSimplifiedBaleenConfig, **kwargs):
        super().__init__(config, **kwargs)
        # Fix: the original constructed two identically-configured dspy.LM
        # instances, one per sub-module. A single shared LM is equivalent
        # (dspy modules routinely share one LM) and avoids duplicating
        # client/config state.
        lm = dspy.LM(model=config.model, max_tokens=config.max_tokens)
        self.generate_thought = dspy.ChainOfThought(GenerateThought)
        self.generate_answer = dspy.ChainOfThought(GenerateAnswer)
        self.generate_thought.set_lm(lm)
        self.generate_answer.set_lm(lm)

    def forward(self, question, context, answer_choices):
        """Generate a thought for `question`, then answer using it.

        Returns a ``dspy.Prediction`` carrying the original context and the
        model's answer.
        """
        pred_thought = self.generate_thought(context=context, question=question)
        pred = self.generate_answer(
            context=context,
            question=question,
            thought=pred_thought.thought,
            answer_choices=answer_choices,
        )
        return dspy.Prediction(context=context, answer=pred.answer)
|
||||
Reference in New Issue
Block a user