AgentService
Example
Let's build a chatbot helping a student to shop for items.

```python
from tecton_gen_ai.testing import make_local_source, make_local_batch_feature_view, set_dev_mode
from tecton_gen_ai.testing.utils import make_local_vector_db_config

set_dev_mode()  # Set the dev mode to avoid tecton login

student = make_local_batch_feature_view(
    "student",
    {"student_id": 1, "name": "Jim", "teacher": "Mr. Smith", "preference": "fruit"},
    ["student_id"],
    description="Student information including name, teacher and shopping preference",
)

df = [
    {"zip": "98005", "item_id": 1, "description": "pencil"},
    {"zip": "98005", "item_id": 2, "description": "car"},
    {"zip": "98005", "item_id": 3, "description": "paper"},
    {"zip": "10065", "item_id": 4, "description": "boat"},
    {"zip": "10065", "item_id": 5, "description": "cheese"},
    {"zip": "10065", "item_id": 6, "description": "apple"},
]

src = make_local_source(
    "for_sale",
    df,
    description="Items information",  # required for source_as_knowledge
)

vdb_conf = make_local_vector_db_config()

# Create a knowledge base from the source
from tecton_gen_ai.fco import prompt, source_as_knowledge

@prompt(sources=[student])
def sys_prompt(student) -> str:
    return "You are serving a 4 years old child " + student["name"]

knowledge = source_as_knowledge(
    src,
    vector_db_config=vdb_conf,
    vectorize_column="description",
    filter=[("zip", str, "the zip code of the item for sale")],
)

# Serve the knowledge base
from tecton_gen_ai.fco import AgentService

service = AgentService(
    "app",
    prompts=[sys_prompt],
    tools=[student],
    knowledge=[knowledge],
)

# Test locally
from langchain_openai import ChatOpenAI

openai = ChatOpenAI(model="gpt-4o", temperature=0)

from tecton_gen_ai.fco import AgentClient

client = AgentClient.from_local(service)

with client.set_context({"zip": "98005", "student_id": 1}):
    print(client.invoke_agent(openai, "Suggest something for me to buy"))

with client.set_context({"zip": "10065", "student_id": 1}):
    print(client.invoke_agent(openai, "Suggest something for me to buy"))
```
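The context passed to `client.set_context` supplies both the entity key of the `student` feature view (`student_id`) and the value for the knowledge base's `zip` filter, so the agent only retrieves items on sale in the caller's zip code. As a purely illustrative sketch (plain Python with a hypothetical helper, not the Tecton API), this is roughly how that filter narrows the candidate items:

```python
# Illustration only (hypothetical helper, not part of tecton_gen_ai):
# shows how the ("zip", str, ...) filter restricts which for_sale rows
# are eligible for vector retrieval under a given context.
for_sale = [
    {"zip": "98005", "description": "pencil"},
    {"zip": "98005", "description": "car"},
    {"zip": "98005", "description": "paper"},
    {"zip": "10065", "description": "boat"},
    {"zip": "10065", "description": "cheese"},
    {"zip": "10065", "description": "apple"},
]

def candidate_items(context: dict) -> list[str]:
    # Only rows whose zip matches the context can be returned by retrieval.
    return [row["description"] for row in for_sale if row["zip"] == context["zip"]]

print(candidate_items({"zip": "98005", "student_id": 1}))  # ['pencil', 'car', 'paper']
print(candidate_items({"zip": "10065", "student_id": 1}))  # ['boat', 'cheese', 'apple']
```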
Methods
| Name | Description |
|---|---|
| `__init__(...)` | Initialize AgentService |
`__init__(...)`
Parameters

- `name` (`str`) - The name of the agent service
- `prompts` (`Optional[List[Any]]`) - A list of prompts. Default: `None`
- `tools` (`Optional[List[Any]]`) - A list of tools. Default: `None`
- `knowledge` (`Optional[List[Any]]`) - A list of knowledge bases. Default: `None`
- `feature_service_kwargs` (`Any`)
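Putting these parameters together, construction mirrors the example above; `sys_prompt`, `student`, and `knowledge` in this minimal sketch are assumed to be the objects built earlier with the `tecton_gen_ai` helpers (a `@prompt` function, a feature view used as a tool, and a knowledge base from `source_as_knowledge`).

```python
from tecton_gen_ai.fco import AgentService

# Minimal sketch reusing the objects created in the example above.
service = AgentService(
    "app",                  # name: the name of the agent service
    prompts=[sys_prompt],   # prompts: list of @prompt functions
    tools=[student],        # tools: feature views or other tools the agent can call
    knowledge=[knowledge],  # knowledge: knowledge bases built with source_as_knowledge
)
```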