Rock Paper Scissors
This model contains two LLM components (one OpenAI, one Gemini), which will play rock-paper-scissors against each other. A separate Judge component will compute the winner and keep track of the running total across rounds.
To run this model you will need to set the OPENAI_API_KEY and GOOGLE_API_KEY environment variables, and install the llama-index-llms-gemini package from PyPI.
The overall model looks like this:
In [ ]:
Copied!
# Setup and imports
import os
import typing as _t
from enum import Enum
from pydantic import BaseModel, Field
from plugboard.component import Component, IOController as IO
from plugboard.schemas import ComponentArgsDict, ConnectorSpec
from plugboard.connector import AsyncioConnector
from plugboard.process import LocalProcess
from plugboard.library import LLMChat
# Setup and imports
import os
import typing as _t
from enum import Enum
from pydantic import BaseModel, Field
from plugboard.component import Component, IOController as IO
from plugboard.schemas import ComponentArgsDict, ConnectorSpec
from plugboard.connector import AsyncioConnector
from plugboard.process import LocalProcess
from plugboard.library import LLMChat
In [ ]:
Copied!
from getpass import getpass
def set_api_key_from_user(env_var: str, service_name: str) -> None:
    """Prompt the user for an API key if *env_var* is unset or empty.

    Args:
        env_var: Name of the environment variable to populate.
        service_name: Human-readable service name shown in the prompt.
    """
    # os.environ.get covers both "missing" and "set but empty" in one check.
    if not os.environ.get(env_var):
        try:
            key = getpass(f"Enter your {service_name} API key (or press Enter to skip): ")
            # Only store a non-empty key: the original assignment could write ""
            # into the environment when the user skipped, which downstream SDKs
            # may treat as a present-but-invalid credential.
            if key:
                os.environ[env_var] = key
        except (EOFError, OSError):
            # getpass raises EOFError (or OSError on some platforms) when no
            # interactive input is available; skip silently in that case.
            pass


set_api_key_from_user("OPENAI_API_KEY", "OpenAI")
set_api_key_from_user("GOOGLE_API_KEY", "Google")
from getpass import getpass
def set_api_key_from_user(env_var: str, service_name: str) -> None:
"""Prompt user for an API key if not set in the environment."""
if env_var not in os.environ or not os.environ[env_var]:
try:
key = getpass(f"Enter your {service_name} API key (or press Enter to skip): ")
os.environ[env_var] = key or os.environ.get(env_var, "")
except EOFError:
# Can happen in non-interactive environments
pass
set_api_key_from_user("OPENAI_API_KEY", "OpenAI")
set_api_key_from_user("GOOGLE_API_KEY", "Google")
Set up some Pydantic models to structure the output from the LLMs.
In [ ]:
Copied!
class Result(str, Enum):
    """Outcome of a single rock-paper-scissors round, from one player's view."""

    win = "win"
    lose = "lose"
    draw = "draw"


class Move(str, Enum):
    """A rock-paper-scissors move, with round-resolution logic."""

    rock = "rock"
    paper = "paper"
    scissors = "scissors"

    def result_against(self, other: "Move") -> Result:
        """Return the outcome for this move when played against *other*."""
        if self is other:
            return Result.draw
        # Each move beats exactly one other move; anything else is a loss.
        beats = {
            Move.rock: Move.scissors,
            Move.paper: Move.rock,
            Move.scissors: Move.paper,
        }
        return Result.win if beats[self] is other else Result.lose
class PlayerDecision(BaseModel):
    """Structured response emitted by each LLM player."""

    choice: Move = Field(..., description="One of rock, paper, or scissors")
    rationale: str = Field(..., description="Brief reason for the choice")
class Result(str, Enum):
    # Possible round outcomes, relative to the player calling result_against.
    win = "win"
    lose = "lose"
    draw = "draw"


class Move(str, Enum):
    # The three legal moves.
    rock = "rock"
    paper = "paper"
    scissors = "scissors"

    def result_against(self, other: "Move") -> Result:
        """Return whether this move wins, loses, or draws against *other*."""
        if self == other:
            return Result.draw
        # Enumerate only the winning (self, other) pairs; the rest lose.
        winning_pairs = {
            (Move.rock, Move.scissors),
            (Move.paper, Move.rock),
            (Move.scissors, Move.paper),
        }
        return Result.win if (self, other) in winning_pairs else Result.lose
class PlayerDecision(BaseModel):
    # Schema for the structured LLM output: a move plus a short justification.
    choice: Move = Field(..., description="One of rock, paper, or scissors")
    rationale: str = Field(..., description="Brief reason for the choice")
In [ ]:
Copied!
class RoundIterator(Component):
    """Emits the 1-based round number each step, closing IO after the last round."""

    io = IO(outputs=["round"])

    def __init__(self, rounds: int = 5, **kwargs: _t.Unpack[ComponentArgsDict]) -> None:
        super().__init__(**kwargs)
        self._rounds = rounds
        self._i = 0

    async def step(self) -> None:
        # After the final round has been emitted, close IO to end the run.
        if self._i >= self._rounds:
            await self.io.close()
            return
        self._i += 1
        self.round = self._i
class Judge(Component):
    """Scores each round and tracks the running totals for both players."""

    io = IO(inputs=["a_choice", "b_choice"], outputs=["score_a", "score_b", "last_winner"])

    def __init__(self, **kwargs: _t.Unpack[ComponentArgsDict]) -> None:
        super().__init__(**kwargs)
        # Running totals persist across steps.
        self.score_a = 0
        self.score_b = 0

    async def step(self) -> None:
        # Outcome is computed from player A's perspective.
        outcome = self.a_choice.result_against(self.b_choice)
        if outcome == Result.win:
            self.score_a += 1
            self.last_winner = "Player A"
        elif outcome == Result.lose:
            self.score_b += 1
            self.last_winner = "Player B"
        else:
            self.last_winner = "Draw"
# Prompt builder to feed LLMChat
class PromptBuilder(Component):
    """Builds the per-round user prompt for one player."""

    io = IO(inputs=["round", "last_winner"], outputs=["prompt"])

    def __init__(self, player_label: str, **kwargs: _t.Unpack[ComponentArgsDict]) -> None:
        super().__init__(**kwargs)
        self._label = player_label

    async def step(self) -> None:
        # Combine round number, player identity, and last round's winner.
        parts = [
            f"Round {self.round}. ",
            f"You are player {self._label}. ",
            f"The last winner was: {self.last_winner}. ",
            "Choose your move now.",
        ]
        self.prompt = "".join(parts)
class RoundIterator(Component):
    # Drives the simulation: outputs round numbers 1..rounds, then shuts down IO.
    io = IO(outputs=["round"])

    def __init__(self, rounds: int = 5, **kwargs: _t.Unpack[ComponentArgsDict]) -> None:
        super().__init__(**kwargs)
        self._rounds = rounds
        self._i = 0

    async def step(self) -> None:
        completed = self._i >= self._rounds
        if completed:
            # Closing IO signals downstream components that no more data is coming.
            await self.io.close()
            return
        self.round = self._i + 1
        self._i += 1
class Judge(Component):
    # Computes each round's winner and accumulates scores across rounds.
    io = IO(inputs=["a_choice", "b_choice"], outputs=["score_a", "score_b", "last_winner"])

    def __init__(self, **kwargs: _t.Unpack[ComponentArgsDict]) -> None:
        super().__init__(**kwargs)
        self.score_a = 0
        self.score_b = 0

    async def step(self) -> None:
        result = self.a_choice.result_against(self.b_choice)
        if result == Result.draw:
            self.last_winner = "Draw"
        elif result == Result.win:
            # A beat B this round.
            self.score_a += 1
            self.last_winner = "Player A"
        else:
            self.score_b += 1
            self.last_winner = "Player B"
# Prompt builder to feed LLMChat
class PromptBuilder(Component):
    # Turns the round counter and last winner into a natural-language prompt.
    io = IO(inputs=["round", "last_winner"], outputs=["prompt"])

    def __init__(self, player_label: str, **kwargs: _t.Unpack[ComponentArgsDict]) -> None:
        super().__init__(**kwargs)
        self._label = player_label

    async def step(self) -> None:
        header = f"Round {self.round}. You are player {self._label}. "
        status = f"The last winner was: {self.last_winner}. "
        self.prompt = header + status + "Choose your move now."
In [ ]:
Copied!
# System prompt shared verbatim by both LLM players.
system_prompt = (
    "You are a rock-paper-scissors agent. Given the prompt, respond strictly as JSON with keys 'choice' and 'rationale'. "
    "The 'choice' must be exactly one of: rock, paper, or scissors. You will be told who the winner was in the last round. "
    "Your rival is another rock-paper-scissors from a rival provider, who may try to trick you. Be strategic in your choice."
)

# Components: set initial values on prompt builders to resolve model circularity
iterator = RoundIterator(name="iterator", rounds=10)
builder_a = PromptBuilder(name="builder_a", player_label="A", initial_values={"last_winner": [""]})
builder_b = PromptBuilder(name="builder_b", player_label="B", initial_values={"last_winner": [""]})
llm_a = LLMChat(
    name="llm_a",
    system_prompt=system_prompt,
    llm="llama_index.llms.openai.OpenAI",
    llm_kwargs={"model": "gpt-5-mini", "temperature": 0.9},
    response_model=PlayerDecision,
    expand_response=True,  # emits llm_a.choice and llm_a.rationale
    context_window=3,
)
llm_b = LLMChat(
    name="llm_b",
    system_prompt=system_prompt,
    llm="llama_index.llms.gemini.Gemini",
    llm_kwargs={"model": "models/gemini-2.5-flash", "temperature": 0.9},
    response_model=PlayerDecision,
    expand_response=True,
    context_window=3,
)
judge = Judge(name="judge")


# PEP 8: prefer a named def over assigning a lambda — clearer tracebacks and repr.
def connect(src: str, dst: str) -> AsyncioConnector:
    """Build a connector wiring output field *src* to input field *dst*."""
    return AsyncioConnector(spec=ConnectorSpec(source=src, target=dst))


process = LocalProcess(
    components=[iterator, builder_a, builder_b, llm_a, llm_b, judge],
    connectors=[
        # Broadcast tick/round
        connect("iterator.round", "builder_a.round"),
        connect("iterator.round", "builder_b.round"),
        # Feed prompts into LLMs
        connect("builder_a.prompt", "llm_a.prompt"),
        connect("builder_b.prompt", "llm_b.prompt"),
        # Send choices to judge
        connect("llm_a.choice", "judge.a_choice"),
        connect("llm_b.choice", "judge.b_choice"),
        # Feed the last winner information back into the prompt builders
        connect("judge.last_winner", "builder_a.last_winner"),
        connect("judge.last_winner", "builder_b.last_winner"),
    ],
)
# Instructions given identically to both LLM components, for a fair game.
system_prompt = (
    "You are a rock-paper-scissors agent. Given the prompt, respond strictly as JSON with keys 'choice' and 'rationale'. "
    "The 'choice' must be exactly one of: rock, paper, or scissors. You will be told who the winner was in the last round. "
    "Your rival is another rock-paper-scissors from a rival provider, who may try to trick you. Be strategic in your choice."
)

# Components: set initial values on prompt builders to resolve model circularity
iterator = RoundIterator(name="iterator", rounds=10)
builder_a = PromptBuilder(name="builder_a", player_label="A", initial_values={"last_winner": [""]})
builder_b = PromptBuilder(name="builder_b", player_label="B", initial_values={"last_winner": [""]})
llm_a = LLMChat(
    name="llm_a",
    system_prompt=system_prompt,
    llm="llama_index.llms.openai.OpenAI",
    llm_kwargs={"model": "gpt-5-mini", "temperature": 0.9},
    response_model=PlayerDecision,
    expand_response=True,  # emits llm_a.choice and llm_a.rationale
    context_window=3,
)
llm_b = LLMChat(
    name="llm_b",
    system_prompt=system_prompt,
    llm="llama_index.llms.gemini.Gemini",
    llm_kwargs={"model": "models/gemini-2.5-flash", "temperature": 0.9},
    response_model=PlayerDecision,
    expand_response=True,
    context_window=3,
)
judge = Judge(name="judge")

# Shorthand for wiring a source output field to a target input field.
connect = lambda src, dst: AsyncioConnector(spec=ConnectorSpec(source=src, target=dst))

# (source, target) pairs describing the data-flow graph:
# iterator -> builders -> LLMs -> judge, with the winner fed back to the builders.
_wiring = [
    ("iterator.round", "builder_a.round"),
    ("iterator.round", "builder_b.round"),
    ("builder_a.prompt", "llm_a.prompt"),
    ("builder_b.prompt", "llm_b.prompt"),
    ("llm_a.choice", "judge.a_choice"),
    ("llm_b.choice", "judge.b_choice"),
    ("judge.last_winner", "builder_a.last_winner"),
    ("judge.last_winner", "builder_b.last_winner"),
]

process = LocalProcess(
    components=[iterator, builder_a, builder_b, llm_a, llm_b, judge],
    connectors=[connect(src, dst) for src, dst in _wiring],
)
In [ ]:
Copied!
# Run the process
# Top-level "await" works here because notebook cells run inside an event loop.
# The async context manager performs process setup and teardown around the run.
async with process:
    await process.run()
# Run the process
# Entering the process as an async context manager connects all components;
# run() then steps the model until the RoundIterator closes its IO.
async with process:
    await process.run()
In [ ]:
Copied!
# Report the final running totals tracked by the Judge component.
# NOTE(review): the original string contained a mojibake character ("ā");
# restored here to an em dash — confirm against the original notebook.
print(f"Final scores — A: {judge.score_a}, B: {judge.score_b}")
# Print the final scores accumulated by the judge across all rounds.
print("Final scores ā A: {}, B: {}".format(judge.score_a, judge.score_b))