128 lines
3.9 KiB
Python
128 lines
3.9 KiB
Python
import multiprocessing
|
|
import sys
|
|
from langchain_community.chat_models import ChatLlamaCpp
|
|
from langchain_community.embeddings import HuggingFaceEmbeddings
|
|
from langchain_community.vectorstores import FAISS
|
|
from langchain_core.messages import SystemMessage, HumanMessage
|
|
|
|
# --- 1. GLOBAL SETUP (Loads once) ---

# Absolute path to a locally cached GGUF model file.
# NOTE(review): machine-specific path — will not exist on other hosts; confirm
# it should be configurable (env var / CLI arg) rather than hard-coded.
local_model = "/home/sortedcord/.cache/huggingface/hub/models--ggml-org--gemma-4-E4B-it-GGUF/snapshots/6b352c53e1d2e4bb974d9f8cafcf85887c224219/gemma-4-e4b-it-Q4_K_M.gguf"

print("--- Initializing Models (Please wait...) ---")

# Embedding model used by EntityMemory to vectorize NPC observations for FAISS.
embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")

# Local llama.cpp-backed chat model shared by every NPC interaction.
llm = ChatLlamaCpp(
    temperature=0.2,  # low temperature: keep in-character replies mostly stable
    model_path=local_model,
    n_ctx=4096,  # prompt + completion context window, in tokens
    n_gpu_layers=8,  # partial GPU offload; remaining layers run on CPU
    max_tokens=256,  # cap reply length so NPCs don't ramble
    n_threads=multiprocessing.cpu_count() - 1,  # leave one core for the OS/UI
    repeat_penalty=1.5,  # NOTE(review): 1.5 is aggressive — verify output quality
)
|
|
|
|
|
|
# --- 2. THE ARCHITECTURE ---
|
|
class EntityMemory:
    """Per-entity semantic memory backed by a lazily created FAISS index.

    The index is only built on the first ``save`` call, so entities that
    never observe anything carry no vector-store cost.
    """

    def __init__(self):
        # No index until the first observation is saved.
        self.vector_store = None

    def save(self, text: str):
        """Embed ``text`` and add it to this entity's memory."""
        if self.vector_store is not None:
            self.vector_store.add_texts([text])
        else:
            # First memory: create the index from this single text.
            self.vector_store = FAISS.from_texts([text], embeddings)

    def retrieve(self, query: str, k=2):
        """Return the ``k`` stored memories most similar to ``query``, joined
        into a single string; a fixed phrase if nothing was ever saved."""
        if self.vector_store is None:
            return "I have no memory of this."
        hits = self.vector_store.similarity_search(query, k=k)
        return " ".join(hit.page_content for hit in hits)
|
|
|
|
|
|
class NPC:
    """A game character: identity, personality, mutable state, and memory."""

    def __init__(self, name, traits, stats):
        self.name = name
        self.traits = traits
        self.stats = stats
        # Mutable state, updated by game events.
        self.current_mood = "Neutral"
        self.current_activity = "Waiting"
        # Each NPC has its own isolated memory store.
        self.memory = EntityMemory()

    def perceive(self, observation: str):
        """Store an observation in this NPC's memory."""
        self.memory.save(observation)

    def get_context(self, query: str):
        """Return ``(recalled_facts, internal_state)`` for prompt building.

        ``recalled_facts`` are the memories most relevant to ``query``;
        ``internal_state`` summarizes mood and activity as one string.
        """
        recalled = self.memory.retrieve(query)
        state = f"Mood: {self.current_mood}. Activity: {self.current_activity}."
        return recalled, state
|
|
|
|
|
|
# --- 3. THE INTERACTION HANDLER ---
|
|
def ask_npc(npc: NPC, player_query: str):
    """Ask ``npc`` the player's question and print its in-character reply.

    Builds a system prompt from the NPC's persona, internal state, and the
    memories most relevant to ``player_query``, then invokes the shared LLM.

    Returns:
        The stripped reply text. (Previously the reply was only printed and
        the return value was ``None``; returning it lets callers use the
        answer programmatically without changing existing call sites.)
    """
    facts, state = npc.get_context(player_query)
    prompt = [
        SystemMessage(
            content=f"""
Role: You are {npc.name}.
Persona Traits: {", ".join(npc.traits)}.
INTERNAL STATE: {state}
STRICT RULES:
1. You ONLY know what is in your 'MEMORIES'.
2. Answer in character, reflecting your traits and current mood.
MEMORIES: {facts}
"""
        ),
        HumanMessage(content=player_query),
    ]
    response = llm.invoke(prompt)
    reply = response.content.strip()
    print(f"\n[{npc.name.upper()}] says: {reply}")
    return reply
|
|
|
|
|
|
# --- 4. DATA INITIALIZATION ---

# Two sample NPCs: (name, personality traits, minimal stats dict).
barnaby = NPC("Barnaby", ["Grumbling", "Duty-bound"], {"Str": 15})
sybil = NPC("Sybil", ["Mysterious", "Gloomy"], {"Mag": 20})

# Seed each NPC with first-person observations; these become the only
# "MEMORIES" the prompt allows them to draw on.
barnaby.perceive("I saw the Merchant enter the Blue Tavern at sunset.")
barnaby.perceive("The Bard was tuning his instrument near the fireplace.")
sybil.perceive("I smelled bitter almonds (poison) coming from the Bard's bag.")
sybil.current_mood = "Deeply troubled"

# Lookup table keyed by lowercase name, matching the lowercased player
# input in start_game().
npcs = {"barnaby": barnaby, "sybil": sybil}
|
|
|
|
|
|
# --- 5. THE EVENT LOOP ---
|
|
def start_game():
    """Interactive loop: pick an NPC by name and chat until 'exit'/'quit'."""
    print("\n==========================================")
    print("WORLD INITIALIZED. TYPE 'exit' TO QUIT.")
    print("==========================================\n")

    while True:
        # Ask who to address; normalize to match the lowercase npcs keys.
        target = (
            input("\nWho do you want to talk to? (Barnaby/Sybil): ").lower().strip()
        )

        if target in ("exit", "quit"):
            print("Exiting simulation...")
            break

        npc = npcs.get(target)
        if npc is None:
            print(f"I don't see anyone named '{target}' here.")
            continue

        # Collect the actual message for the chosen NPC.
        user_msg = input(f"What do you say to {target.capitalize()}?: ")
        if user_msg.lower().strip() in ("exit", "quit"):
            break

        ask_npc(npc, user_msg)
|
|
|
|
|
|
if __name__ == "__main__":
    # Only launch the interactive loop when run as a script, not on import.
    start_game()
|