
AgentService

AgentService is a class that builds a collection of feature services from a list of tools, prompts, and knowledge bases.

Example

Let's build a chatbot that helps a student shop for items.

from tecton_gen_ai.testing import make_local_source, make_local_batch_feature_view, set_dev_mode
from tecton_gen_ai.testing.utils import make_local_vector_db_config

set_dev_mode()  # Enable dev mode to avoid a Tecton login

# A batch feature view holding per-student attributes, keyed by student_id
student = make_local_batch_feature_view(
    "student",
    {"student_id": 1, "name": "Jim", "teacher": "Mr. Smith", "preference": "fruit"},
    ["student_id"],
    description="Student information including name, teacher and shopping preference",
)

# Items for sale, tagged with the zip code where they are available
df = [
    {"zip": "98005", "item_id": 1, "description": "pencil"},
    {"zip": "98005", "item_id": 2, "description": "car"},
    {"zip": "98005", "item_id": 3, "description": "paper"},
    {"zip": "10065", "item_id": 4, "description": "boat"},
    {"zip": "10065", "item_id": 5, "description": "cheese"},
    {"zip": "10065", "item_id": 6, "description": "apple"},
]
src = make_local_source(
    "for_sale",
    df,
    description="Items information",  # required for source_as_knowledge
)
vdb_conf = make_local_vector_db_config()

# Create a prompt and a knowledge base from the source
from tecton_gen_ai.fco import prompt, source_as_knowledge

@prompt(sources=[student])
def sys_prompt(student) -> str:
    return "You are serving a 4-year-old child " + student["name"]

knowledge = source_as_knowledge(
    src,
    vector_db_config=vdb_conf,
    vectorize_column="description",
    filter=[("zip", str, "the zip code of the item for sale")],
)

# Serve the prompt, tool, and knowledge base as a single service
from tecton_gen_ai.fco import AgentService

service = AgentService(
    "app",
    prompts=[sys_prompt],
    tools=[student],
    knowledge=[knowledge],
)

# Test locally with a LangChain chat model
from langchain_openai import ChatOpenAI

openai = ChatOpenAI(model="gpt-4o", temperature=0)

from tecton_gen_ai.fco import AgentClient

client = AgentClient.from_local(service)
with client.set_context({"zip": "98005", "student_id": 1}):
    print(client.invoke_agent(openai, "Suggest something for me to buy"))
with client.set_context({"zip": "10065", "student_id": 1}):
    print(client.invoke_agent(openai, "Suggest something for me to buy"))
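
The two invocations differ only in their context: the zip value filters the knowledge base to items sold in that area, while the student_id drives the sys_prompt and the student tool. The same question should therefore produce location-specific suggestions tailored to the student's preference.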

Methods

Name             Description
__init__(...)    Initialize AgentService

__init__(...)

Parameters

  • name (str) - The name of the agent service

  • prompts (Optional[List[Any]]) - A list of prompts. Default: None

  • tools (Optional[List[Any]]) - A list of tools. Default: None

  • knowledge (Optional[List[Any]]) - A list of knowledge bases. Default: None

  • feature_service_kwargs (Any) - Additional keyword arguments, presumably forwarded to the underlying feature services (see the sketch after this list)
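
As a quick reference, here is a minimal construction sketch annotating each parameter. sys_prompt, student, and knowledge stand for the prompt, feature view, and knowledge base built in the example above; the service name "app" is just an illustration.

from tecton_gen_ai.fco import AgentService

service = AgentService(
    "app",                  # name: the name of the agent service
    prompts=[sys_prompt],   # prompts exposed to the agent
    tools=[student],        # feature views served as tools
    knowledge=[knowledge],  # knowledge bases for retrieval
    # Any extra keyword arguments are captured by feature_service_kwargs;
    # based on the parameter name, they appear to be forwarded to the
    # underlying feature services (assumption, not confirmed by this page).
)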


