Python:
import requests
import json
import threading
import time
# Initialize conversation history
conversation_history = []
# Global variables for async operation
is_working = False
current_reply = ""
def talk_to_waifu(prompt, history):
    global is_working, current_reply
    # Build the full prompt with conversation history
    full_prompt = "This is a conversation with Potatoe, a loving waifubot:\n\n"
    # Add previous conversation history (keep last 6 messages for context)
    for message in history[-6:]:
        full_prompt += f"{message}\n"
    # Add current prompt
    full_prompt += f"Human: {prompt}\nPotatoe:"
    full_reply = ""
    try:
        response = requests.post(
            "http://localhost:11434/api/generate",
            json={"model": "llama3", "prompt": full_prompt},
            stream=True,
        )
        # Ollama streams one JSON object per line; accumulate the "response" field
        for line in response.iter_lines():
            if line:
                try:
                    data = json.loads(line.decode("utf-8"))
                    full_reply += data.get("response", "")
                except Exception as e:
                    print("Error decoding chunk:", e)
    except requests.RequestException as e:
        # Surface connection problems as the reply instead of hanging the main loop
        full_reply = f"(connection error: {e})"
    finally:
        current_reply = (prompt, full_reply)  # Store both input and reply
        is_working = False
    return full_reply
def start_waifu_conversation(prompt):
"""Start the waifu conversation in a daemon thread"""
global is_working
is_working = True
thread = threading.Thread(
target=talk_to_waifu,
args=(user_input, conversation_history),
daemon=True
)
thread.start()
print("Waifu: Hello darling~ Ready to chat? Type 'exit' to leave 💕")
# Initial system prompt to set up the character
initial_prompt = "Your name is Potatoe. You're affectionate, playful, and always supportive."
conversation_history.append(f"System: {initial_prompt}")
while True:
    if is_working:
        print("Waifu: Thinking... 💭")
        time.sleep(0.5)
        continue
    if current_reply:
        user_input, reply = current_reply
        print(f"Waifu: {reply}")
        # Add both user input and bot response to history
        conversation_history.append(f"Human: {user_input}")
        conversation_history.append(f"Potatoe: {reply}")
        # Optional: limit history size to prevent it from growing too large
        if len(conversation_history) > 20:  # Keep last 20 messages
            conversation_history = conversation_history[-20:]
        current_reply = ""
        continue
    user_input = input("You: ")
    if user_input.lower() in ["exit", "quit"]:
        print("Waifu: Bye bye~ I'll miss you! 💖")
        break
    # Hand the user's input off to the background thread
    start_waifu_conversation(user_input)