Documentation Index: fetch the complete documentation index at https://docs.pixeltable.com/llms.txt
Use this file to discover all available pages before exploring further.
If you’ve been building AI agents with LangGraph or CrewAI — defining state graphs, tool nodes, conditional edges, and bolting on separate memory stores — this guide shows how Pixeltable replaces the graph DSL with declarative tables.
Concept Mapping
| Agent Framework | Pixeltable Equivalent |
| --- | --- |
| StateGraph / AgentExecutor | pxt.create_table() with computed columns |
| Graph nodes (functions) | Computed columns — dependencies resolved automatically |
| Graph edges / conditional routing | Column references — Pixeltable infers the DAG |
| ToolNode / @tool | pxt.tools() + invoke_tools() |
| MemorySaver / checkpointer | Tables are persistent by default |
| Separate vector DB for RAG | add_embedding_index() + @pxt.query |
| LangSmith for observability | t.select() on any column — every step is queryable |
An agent that picks tools, calls them, and answers based on the results.
# LangGraph version: an agent that picks tools, calls them, and answers
# based on the results.
from typing import Annotated, Sequence, TypedDict

from langchain_core.messages import BaseMessage, HumanMessage
from langchain_core.tools import tool
from langchain_openai import ChatOpenAI
from langgraph.graph import StateGraph, END, add_messages
from langgraph.prebuilt import ToolNode


class AgentState(TypedDict):
    # Running conversation history; add_messages merges new messages in.
    messages: Annotated[Sequence[BaseMessage], add_messages]


@tool
def get_weather(city: str) -> str:
    """Get current weather for a city."""
    return f'Weather in {city}: 72°F, sunny'


@tool
def search_docs(query: str) -> str:
    """Search internal documents."""
    return f'Results for: {query}'


tools = [get_weather, search_docs]
model = ChatOpenAI(model='gpt-4o-mini').bind_tools(tools)


def call_model(state):
    """Agent node: run the LLM over the accumulated messages."""
    return {'messages': [model.invoke(state['messages'])]}


def should_continue(state):
    """Route to the tool node if the last message requested tool calls."""
    last = state['messages'][-1]
    return 'tools' if last.tool_calls else END


# Wire up the graph: agent -> (tools -> agent)* -> END.
workflow = StateGraph(AgentState)
workflow.add_node('agent', call_model)
workflow.add_node('tools', ToolNode(tools))
workflow.set_entry_point('agent')
workflow.add_conditional_edges(
    'agent', should_continue, {'tools': 'tools', END: END})
workflow.add_edge('tools', 'agent')
graph = workflow.compile()

result = graph.invoke(
    {'messages': [HumanMessage(content='Weather in SF?')]})
print(result['messages'][-1].content)
Packages: langgraph, langchain-openai, langchain-core, plus a vector DB client for RAG

import pixeltable as pxt
# Pixeltable version: the same agent expressed as declarative computed columns.
from pixeltable.functions.openai import chat_completions, invoke_tools


@pxt.udf
def get_weather(city: str) -> str:
    """Get current weather for a city."""
    return f'Weather in {city}: 72°F, sunny'


@pxt.udf
def search_docs(query: str) -> str:
    """Search internal documents."""
    return f'Results for: {query}'


# Register the UDFs as LLM-callable tools.
tools = pxt.tools(get_weather, search_docs)

# One row per interaction; computed columns define the pipeline (the DAG
# is inferred from column references — no explicit edges).
agent = pxt.create_table('agents.assistant', {'message': pxt.String})

# Step 1: ask the model, allowing it to request tool calls.
agent.add_computed_column(response=chat_completions(
    messages=[{'role': 'user', 'content': agent.message}],
    model='gpt-4o-mini', tools=tools))

# Step 2: execute whatever tools the model requested.
agent.add_computed_column(
    tool_output=invoke_tools(tools, agent.response))


@pxt.udf
def build_followup(message: str, tool_output: dict) -> list[dict]:
    """Build a follow-up chat asking the model to answer from tool results."""
    # Flatten all tool results (tool_output maps tool name -> list of results).
    results = [
        str(r) for vals in (tool_output or {}).values()
        if vals for r in vals
    ]
    return [
        {'role': 'user', 'content': message},
        {'role': 'assistant', 'content': '\n'.join(results)},
        {'role': 'user', 'content':
         'Answer my original question using that information.'},
    ]


# Step 3: second LLM pass over the tool results, then extract the text.
agent.add_computed_column(
    followup=build_followup(agent.message, agent.tool_output))
agent.add_computed_column(
    final=chat_completions(messages=agent.followup, model='gpt-4o-mini'))
agent.add_computed_column(
    answer=agent.final.choices[0].message.content)

# Inserting a row runs the whole pipeline; every intermediate step persists.
agent.insert([{'message': 'What is the weather in SF?'}])
agent.select(agent.message, agent.answer).collect()
Packages: pixeltable, openai
What Changes
| Aspect | LangGraph / CrewAI | Pixeltable |
| --- | --- | --- |
| State | Ephemeral — lost when the process ends | Persistent — every row survives restarts |
| Caching | No built-in caching of tool results | Same input returns cached result |
| Observability | LangSmith (separate service + API key) | agent.select(agent.tool_output).collect() |
| Adding RAG | Separate vector DB integration | add_embedding_index() + @pxt.query — no extra service |
| Graph definition | Nodes, edges, conditional routing DSL | Computed columns — Pixeltable infers the DAG |
| MCP tools | Custom integration | pxt.mcp_udfs() loads tools from any MCP server |
Common Patterns
Adding persistent memory
# LangGraph: persistence is opt-in, via a checkpointer attached at compile time.
from langgraph.checkpoint.memory import MemorySaver

saver = MemorySaver()
graph = workflow.compile(checkpointer=saver)
# In-process only — lost on restart
# Pixeltable: memories live in a table, so they survive restarts by default.
from pixeltable.functions.openai import embeddings

memories = pxt.create_table('agents.memories', {
    'content': pxt.String, 'timestamp': pxt.Timestamp})

# Index memory text so it can be recalled by semantic similarity.
memories.add_embedding_index('content',
    string_embed=embeddings.using(model='text-embedding-3-small'))


@pxt.query
def recall(query: str, top_k: int = 5) -> pxt.Query:
    """Return the top_k stored memories most similar to the query."""
    sim = memories.content.similarity(string=query)
    return memories.order_by(sim, asc=False) \
        .limit(top_k).select(memories.content)
Adding RAG to an agent
# LangChain: RAG needs a separate vector store plus a retriever wrapped as a tool.
from langchain_pinecone import PineconeVectorStore

vector_store = PineconeVectorStore(
    index_name='docs', embedding=embeddings)


@tool
def search_kb(query: str) -> str:
    """Search the knowledge base."""
    docs = vector_store.as_retriever() \
        .get_relevant_documents(query)
    return '\n'.join(d.page_content for d in docs)
# Must add tool to graph, re-compile...
# Pixeltable: a @pxt.query over an embedding-indexed table ('chunks',
# created elsewhere) is directly usable as a tool — no extra service.
@pxt.query
def search_kb(query: str) -> pxt.Query:
    """Search the knowledge base."""
    sim = chunks.text.similarity(string=query)
    return chunks.order_by(sim, asc=False) \
        .limit(5).select(chunks.text)


# No graph rebuild needed: register the query alongside the other tools.
tools = pxt.tools(get_weather, search_kb)
Inspecting agent behavior
# LangGraph: observability requires LangSmith — set LANGSMITH_API_KEY and
# LANGSMITH_PROJECT, then view traces in the dashboard.
# Pixeltable: every intermediate step is an ordinary column you can query.
agent.select(
    agent.message,
    agent.tool_output,
    agent.answer,
).collect()
Next Steps
Agents & MCP Full use case walkthrough
Agentic Patterns All 8 agentic patterns as Pixeltable tables
Tool Calling Register UDFs and queries as LLM tools
Pixelagent Lightweight agent framework built on Pixeltable