Working with Llama 3
Imtihan Ahmed
Machine Learning Engineer
Conversation
from llama_cpp import Llama

class Conversation:
    def __init__(self, llm: Llama, system_prompt='', history=[]):
        self.llm = llm
        self.system_prompt = system_prompt
        # Start the history with the system prompt, followed by any prior messages
        self.history = [{"role": "system", "content": self.system_prompt}] + history

    def create_completion(self, user_prompt=''):
        self.history.append({"role": "user", "content": user_prompt})  # Append input
        output = self.llm.create_chat_completion(messages=self.history)
        conversation_result = output['choices'][0]['message']
        self.history.append(conversation_result)  # Append output
        return conversation_result['content']  # Return output
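The example below assumes llm is an existing Llama instance. As a minimal sketch, it could be loaded from a local GGUF file with llama-cpp-python; the model path here is hypothetical and should point to a model downloaded on your machine:

# Hypothetical path to a locally downloaded GGUF model file
llm = Llama(model_path="path/to/llama-3-8b-instruct.gguf", n_ctx=2048)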
conversation = Conversation(llm, system_prompt="You are a virtual travel assistant helping with planning trips.")
response1 = conversation.create_completion("What are some destinations in France for a short weekend break?")
print(f"Response 1: {response1}")
response2 = conversation.create_completion("How about Spain?")
print(f"Response 2: {response2}")
print(f"Response 1: {response1}")
print(f"Response 2: {response2}")
print(f"Response 1: {response1}")
print(f"Response 2: {response2}")
Working with Llama 3