Testing an RPG engine with a small RAG system

This commit is contained in:
2026-04-11 13:56:06 +05:30
commit 113889575f
6 changed files with 2568 additions and 0 deletions

10
.gitignore vendored Normal file
View File

@@ -0,0 +1,10 @@
# Python-generated files
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info
# Virtual environments
.venv

1
.python-version Normal file
View File

@@ -0,0 +1 @@
3.12

0
README.md Normal file
View File

92
main.py Normal file
View File

@@ -0,0 +1,92 @@
import multiprocessing
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_community.chat_models import ChatLlamaCpp
from langchain_core.messages import SystemMessage, HumanMessage
# Local GGUF checkpoint served via llama.cpp; path is machine-specific.
local_model = "/home/sortedcord/.cache/huggingface/hub/models--ggml-org--gemma-4-E4B-it-GGUF/snapshots/6b352c53e1d2e4bb974d9f8cafcf85887c224219/gemma-4-e4b-it-Q4_K_M.gguf"

# Chat model used for every NPC reply.
llm = ChatLlamaCpp(
    temperature=0.2,  # Lower temperature for consistency in logic
    model_path=local_model,
    n_ctx=4096,  # context window size in tokens
    n_gpu_layers=8,  # layers offloaded to GPU -- assumes enough VRAM; TODO confirm
    max_tokens=256,  # cap on generated tokens per reply
    n_threads=multiprocessing.cpu_count() - 1,  # leave one core for the OS
    repeat_penalty=1.2,
)

# Sentence-embedding model used to index and retrieve NPC memories.
embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
class GameWorld:
    """Objective world state plus a subjective memory store per NPC.

    The objective truth lives in ``global_state`` and is never exposed to
    NPCs directly. Each NPC instead owns a private FAISS vector store of
    observations, so retrieval can only surface what that character
    personally witnessed.
    """

    def __init__(self):
        # Objective facts about the world (hidden from NPCs).
        self.global_state = {
            "murderer": "The Bard",
            "weapon": "Poisoned Lute String",
            "location": "The Blue Tavern",
            "body_discovered": False,
        }
        # npc_name -> FAISS store of that NPC's observations.
        self.npc_memories = {}

    def add_npc_memory(self, npc_name, observation):
        """Injects a specific fact into an NPC's subjective reality."""
        if npc_name not in self.npc_memories:
            # First observation bootstraps this NPC's vector store.
            self.npc_memories[npc_name] = FAISS.from_texts([observation], embeddings)
        else:
            self.npc_memories[npc_name].add_texts([observation])

    def get_npc_context(self, npc_name, query, k=2):
        """Retrieves only what the NPC knows regarding a query.

        Args:
            npc_name: NPC whose memory store is searched.
            query: Player question used for similarity search.
            k: Number of memories to retrieve. Defaults to 2, preserving
               the original hard-coded behavior.

        Returns:
            The retrieved memories joined into one string, or a stock
            "don't know" line when the NPC has no memory store at all.
        """
        if npc_name not in self.npc_memories:
            return "I don't know anything about that."
        docs = self.npc_memories[npc_name].similarity_search(query, k=k)
        return " ".join(d.page_content for d in docs)
# Build the world and seed each NPC's subjective memories.
world = GameWorld()

# THE TRUTH: The Bard killed the Merchant.
# NPC "Guard Barnaby" only saw the Merchant enter the tavern.
_initial_observations = [
    ("Barnaby", "I saw the Merchant enter the Blue Tavern at sunset. He looked happy."),
    ("Barnaby", "The Bard was tuning his instrument near the fireplace."),
    ("Sybil", "I smelled bitter almonds (poison) coming from the Bard's bag."),
]
for _npc, _observation in _initial_observations:
    world.add_npc_memory(_npc, _observation)
def ask_npc(npc_name, player_query):
    """Asks an NPC the player's question using only that NPC's memories.

    Retrieves the NPC's subjective context from the world, wraps it in a
    system prompt that forbids outside knowledge, prints the exchange, and
    returns the model's reply.

    Args:
        npc_name: NPC to question (a name previously seeded via
            ``world.add_npc_memory``).
        player_query: The player's question, passed verbatim to the model.

    Returns:
        The model's reply text, stripped of surrounding whitespace. The
        original returned None; existing callers that ignore the return
        value are unaffected.
    """
    # Retrieve ONLY this NPC's memories
    subjective_knowledge = world.get_npc_context(npc_name, player_query)
    prompt = [
        SystemMessage(
            content=f"""
You are {npc_name}, a character in a fantasy world.
Strict Rule: You ONLY know what is in your 'Memory' block.
If the information isn't there, you must honestly say you don't know or speculate based ONLY on your memory.
Do not use outside knowledge.
Your Memory: {subjective_knowledge}
"""
        ),
        HumanMessage(content=player_query),
    ]
    response = llm.invoke(prompt)
    reply = response.content.strip()
    print(f"\n--- {npc_name.upper()} ---")
    print(f"Player: {player_query}")
    print(f"Response: {reply}")
    return reply
# Demo interrogation: each NPC answers only from its own memories.
_demo_questions = (
    # Ask the Guard about the murder (He shouldn't know it happened)
    ("Barnaby", "Did you see Bard?"),
    # Ask the Witch about the Bard (She has a suspicious clue)
    ("Sybil", "Do you know anything about bard? Did you see him?"),
)
for _npc, _question in _demo_questions:
    ask_npc(_npc, _question)

13
pyproject.toml Normal file
View File

@@ -0,0 +1,13 @@
[project]
name = "langchain-tutorial"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
"faiss-cpu>=1.13.2",
"langchain-community>=0.4.1",
"langchain[llms]>=1.2.15",
"llama-cpp-python>=0.3.20",
"sentence-transformers>=5.4.0",
]

2452
uv.lock generated Normal file

File diff suppressed because it is too large Load Diff