instructor
instructor copied to clipboard
[Bug] VertexAI client always fails when `response_model` is None
The code below always fails because `response_model` is set to `None`.
# Minimal reproduction: patch a Vertex AI GenerativeModel with instructor,
# then call create() with response_model=None.
import instructor
import vertexai.generative_models as gm
import vertexai

vertexai.init()
client = instructor.from_vertexai(gm.GenerativeModel("gemini-1.5-pro-preview-0409"))

if __name__ == "__main__":
    # Fails: with response_model=None the kwargs pass through unchanged, so
    # generate_content() receives an unsupported `messages` keyword (TypeError).
    resp = client.create(
        response_model=None,
        messages=[
            {
                "role": "user",
                "content": 'Extract the following entities: "Jason is 20"',
            },
        ],
    )
    print(resp)
# Output:
"""
{
"name": "InstructorRetryException",
"message": "RetryError[<Future at 0x19eb55d1610 state=finished raised TypeError>]",
"stack": "---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
File c:\\project\\.venv\\Lib\\site-packages\\instructor\\retry.py:161, in retry_sync(func, response_model, validation_context, args, kwargs, max_retries, strict, mode)
160 try:
--> 161 response = func(*args, **kwargs)
162 stream = kwargs.get(\"stream\", False)
TypeError: _GenerativeModel.generate_content() got an unexpected keyword argument 'messages'
"""
The reason for the error is that the function `instructor.process_response.handle_response_model` (shown below) returns `new_kwargs` as-is, without any provider-specific adjustment.
# instructor.client_vertexai
def from_vertexai(
    client: gm.GenerativeModel,
    mode: instructor.Mode = instructor.Mode.VERTEXAI_TOOLS,
    _async: bool = False,
    **kwargs: Any,
) -> instructor.Instructor:
    """Wrap a Vertex AI GenerativeModel in an instructor Instructor.

    Patches the model's generate_content (or generate_content_async when
    `_async` is True) so instructor can inject response_model handling.
    """
    # NOTE(review): the message only names VERTEXAI_TOOLS although
    # VERTEXAI_JSON is also accepted by the membership check above it.
    assert mode in {
        instructor.Mode.VERTEXAI_TOOLS,
        instructor.Mode.VERTEXAI_JSON,
    }, "Mode must be instructor.Mode.VERTEXAI_TOOLS"
    assert isinstance(
        client, gm.GenerativeModel
    ), "Client must be an instance of vertexai.generative_models.GenerativeModel"
    # Select the sync or async SDK entry point to patch.
    create = client.generate_content_async if _async else client.generate_content
    return instructor.Instructor(
        client=client,
        create=instructor.patch(create=create, mode=mode),
        provider=instructor.Provider.VERTEXAI,
        mode=mode,
        **kwargs,
    )
# instructor.patch
def patch(
    client: Union[OpenAI, AsyncOpenAI] = None,
    create: Callable[T_ParamSpec, T_Retval] = None,
    mode: Mode = Mode.TOOLS,
) -> Union[OpenAI, AsyncOpenAI]:
    """Patch a client (or a bare `create` callable) with instructor's
    response_model / retry machinery. Excerpt — elisions marked `...`.
    """
    if create is not None:
        func = create
    ...

    @wraps(func)
    def new_create_sync(
        response_model: type[T_Model] = None,
        validation_context: dict = None,
        max_retries: int = 1,
        strict: bool = True,
        *args: T_ParamSpec.args,
        **kwargs: T_ParamSpec.kwargs,
    ) -> T_Model:
        # When response_model is None, handle_response_model returns the
        # kwargs unchanged (see excerpt below), so no provider-specific
        # renaming (e.g. `messages` -> `contents` for Vertex AI) happens.
        response_model, new_kwargs = handle_response_model(
            response_model=response_model, mode=mode, **kwargs
        )
        # retry_sync forwards new_kwargs verbatim to func; for Vertex AI,
        # func is generate_content(), which rejects the `messages` keyword.
        response = retry_sync(
            func=func,
            response_model=response_model,
            validation_context=validation_context,
            max_retries=max_retries,
            args=args,
            strict=strict,
            kwargs=new_kwargs,
            mode=mode,
        )
        return response

    new_create = new_create_async if func_is_async else new_create_sync
    if client is not None:
        client.chat.completions.create = new_create
    return client
...
# instructor.process_response.handle_response_model
def handle_response_model(
    response_model: type[T] | None, mode: Mode = Mode.TOOLS, **kwargs: Any
) -> tuple[type[T], dict[str, Any]]:
    """Prepare provider-call kwargs for a response_model/mode combination.
    Excerpt — elisions marked `...`.
    """
    new_kwargs = kwargs.copy()
    if response_model is not None:
        ...
    elif response_model is None and mode in {
        Mode.COHERE_JSON_SCHEMA,
        Mode.COHERE_TOOLS,
    }:
        ...
    # No branch handles the VERTEXAI_* modes when response_model is None,
    # so new_kwargs (still keyed by `messages`) is returned unchanged.
    return response_model, new_kwargs
This is likely because the `generate_content` function's keyword parameter is named `contents` instead of `messages`.
However, even if you use the `contents` keyword argument instead, it still results in another error, as shown below:
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
Cell In[38], line 11
7 client = instructor.from_vertexai(gm.GenerativeModel("gemini-1.5-pro-preview-0409"))
10 if __name__ == "__main__":
---> 11 resp = client.chat.completions.create(
12 response_model=None,
13 contents=[
14 {
15 "role": "user",
16 "content": 'Extract the following entities: "Jason is 20"',
17 },
18 ],
19 )
20 print(resp)
TypeError: Instructor.create() missing 1 required positional argument: 'messages'
# vertexai.generative_models
def generate_content(
    self,
    contents: ContentsType,  # NOTE: the input parameter is `contents`, not `messages`
    *,
    generation_config: Optional[GenerationConfigType] = None,
    safety_settings: Optional[SafetySettingsType] = None,
    tools: Optional[List["Tool"]] = None,
    tool_config: Optional["ToolConfig"] = None,
    stream: bool = False,
) -> Union["GenerationResponse", Iterable["GenerationResponse"],]:
    ...
I think appropriate exception handling, or modification of the arguments, is required when `response_model` is `None` for the Vertex AI client.