# Non-streaming search: one request, full answer returned at once.
result = client_sync.search(
    query="What are the latest developments in AI?",
    model="o3-mini-high",
    date_context="2024-08-25",
    location="us",
    response_language="en",
    answer_type="text",
    search_type="general",
    return_citations=False,
    return_sources=False,
    return_images=False,
    recency_filter="anytime",
)
print(result)

# Streaming search: the answer arrives incrementally as chunks.
for chunk in client_sync.search_stream(
    query="Explain quantum computing",
    model="o3-mini-high",
    date_context="2024-08-25",
    location="us",
    response_language="en",
    answer_type="text",
    search_type="general",
    return_citations=False,
    return_sources=False,
    return_images=False,
    recency_filter="anytime",
):
    # Bug fix: original printed `result` (the earlier non-streaming response)
    # on every iteration instead of the streamed chunk.
    print(chunk)
Asynchronous:
import asyncio


async def search_async():
    """Demonstrate the async client: one blocking-style call, then a stream."""
    # Non-streaming search — await the complete answer in one shot.
    result = await client_async.search(
        query="What are the latest developments in AI?",
        model='o3-mini-high',
        date_context="2024-08-25",
        location="us",
        response_language="en",
        answer_type="text",
        search_type="general",
        return_citations=False,
        return_sources=False,
        return_images=False,
        recency_filter="anytime",
    )
    print(result)

    # Streaming search — consume chunks as they arrive.
    async for chunk in client_async.search_stream(
        query="Explain quantum computing",
        model='o3-mini-high',
        date_context="2024-08-25",
        location="us",
        response_language="en",
        answer_type="text",
        search_type="general",
        return_citations=False,
        return_sources=False,
        return_images=False,
        recency_filter="anytime",
    ):
        print(chunk)


asyncio.run(search_async())
The custom search endpoints allow you to tailor both your system prompts and user prompts to fit your specific needs.
Synchronous:
# Non-streaming custom search: both the system prompt and the user prompt
# are supplied by the caller.
result = client_sync.custom_search(
    system_prompt="You are a helpful assistant.",
    user_prompt="Explain the theory of relativity",
    model='o3-mini-high',
    location="us",
    search_type="general",
    return_images=False,
    return_sources=False,
    temperature=0.2,
    top_p=0.9,
    recency_filter="anytime",
)
print(result)

# Streaming custom search: identical parameters, answer delivered chunk by chunk.
for chunk in client_sync.custom_search_stream(
    system_prompt="You are a helpful assistant.",
    user_prompt="Explain the theory of relativity",
    model='o3-mini-high',
    location="us",
    search_type="general",
    return_images=False,
    return_sources=False,
    temperature=0.2,
    top_p=0.9,
    recency_filter="anytime",
):
    print(chunk)
#syncresponse = client_sync.query_from_url(url="https://www.example.com/article",query="What is the main topic of this article?",model='o3-mini-high',response_language="it",answer_type="text", # default is "text" if not specified)print(response)#asyncresponse = await client_async.query_from_url(url="https://www.example.com/article",query="What is the main topic of this article?",model='o3-mini-high',response_language="it",answer_type="text"# default is "text" if not specified)print(response)