UNBOUND_SQL_PARAMETER with Runtime 15.4
Hello, I have trouble using parameters with databricks-sql-python against a Unity Catalog-enabled Databricks cluster running Databricks Runtime 15.4.
This will fail:
from databricks import sql as databricks_sql
with databricks_sql.connect(
server_hostname="...",
http_path="...",
auth_type="databricks-oauth",
catalog="...",
schema="...",
) as connection:
with connection.cursor() as cursor:
cursor.execute(
"SELECT * FROM experimentation.time_series WHERE experimentation.time_series.name = :myparam",
{"myparam": "myvalue"},
)
result = cursor.fetchall()
result
with the following exception: ServerOperationError: [UNBOUND_SQL_PARAMETER] Found the unbound parameter: myparam. Please, fix args and provide a mapping of the parameter to a SQL literal.; line 1 pos 83
Stacktrace
---------------------------------------------------------------------------
ServerOperationError Traceback (most recent call last)
Cell In[1], line 14
6 with databricks_sql.connect(
7 server_hostname="...",
8 http_path="...",
(...)
11 schema="...",
12 ) as connection:
13 with connection.cursor() as cursor:
---> 14 cursor.execute(
15 "SELECT * FROM experimentation.time_series WHERE experimentation.time_series.name = :myparam",
16 {"myparam": "myvalue"},
17 )
18 result = cursor.fetchall()
19 result
File ...\Lib\site-packages\databricks\sql\client.py:796, in Cursor.execute(self, operation, parameters)
794 self._check_not_closed()
795 self._close_and_clear_active_result_set()
--> 796 execute_response = self.thrift_backend.execute_command(
797 operation=prepared_operation,
798 session_handle=self.connection._session_handle,
799 max_rows=self.arraysize,
800 max_bytes=self.buffer_size_bytes,
801 lz4_compression=self.connection.lz4_compression,
802 cursor=self,
803 use_cloud_fetch=self.connection.use_cloud_fetch,
804 parameters=prepared_params,
805 )
806 self.active_result_set = ResultSet(
807 self.connection,
808 execute_response,
(...)
811 self.arraysize,
812 )
814 if execute_response.is_staging_operation:
File ...\Lib\site-packages\databricks\sql\thrift_backend.py:849, in ThriftBackend.execute_command(self, operation, session_handle, max_rows, max_bytes, lz4_compression, cursor, use_cloud_fetch, parameters)
831 req = ttypes.TExecuteStatementReq(
832 sessionHandle=session_handle,
833 statement=operation,
(...)
846 parameters=parameters,
847 )
848 resp = self.make_request(self._client.ExecuteStatement, req)
--> 849 return self._handle_execute_response(resp, cursor)
File ...\Lib\site-packages\databricks\sql\thrift_backend.py:941, in ThriftBackend._handle_execute_response(self, resp, cursor)
938 cursor.active_op_handle = resp.operationHandle
939 self._check_direct_results_for_error(resp.directResults)
--> 941 final_operation_state = self._wait_until_command_done(
942 resp.operationHandle,
943 resp.directResults and resp.directResults.operationStatus,
944 )
946 return self._results_message_to_execute_response(resp, final_operation_state)
File ...\Lib\site-packages\databricks\sql\thrift_backend.py:787, in ThriftBackend._wait_until_command_done(self, op_handle, initial_operation_status_resp)
785 poll_resp = self._poll_for_status(op_handle)
786 operation_state = poll_resp.operationState
--> 787 self._check_command_not_in_error_or_closed_state(op_handle, poll_resp)
788 return operation_state
File ...\Lib\site-packages\databricks\sql\thrift_backend.py:572, in ThriftBackend._check_command_not_in_error_or_closed_state(self, op_handle, get_operations_resp)
570 if get_operations_resp.operationState == ttypes.TOperationState.ERROR_STATE:
571 if get_operations_resp.displayMessage:
--> 572 raise ServerOperationError(
573 get_operations_resp.displayMessage,
574 {
575 "operation-id": op_handle
576 and self.guid_to_hex_id(op_handle.operationId.guid),
577 "diagnostic-info": get_operations_resp.diagnosticInfo,
578 },
579 )
580 else:
581 raise ServerOperationError(
582 get_operations_resp.errorMessage,
583 {
(...)
587 },
588 )
Dependencies
sqlalchemy = "^2.0.36"
jupyter = "^1.1.1"
databricks-connect = "15.4.3"
databricks-sql-connector = {version = "^3.6.0", extras = ["sqlalchemy", "alembic"]}
However, this works using a Databricks notebook:
%python
myparam = 'myvalue'
%sql
SELECT * FROM experimentation.time_series
WHERE experimentation.time_series.name = :myparam
Please note that this issue seems related to https://github.com/databricks/databricks-sql-python/issues/288.
As a temporary workaround, I can inline the parameters into the query as documented here for SQLAlchemy: https://docs.sqlalchemy.org/en/20/faq/sqlexpressions.html#rendering-bound-parameters-inline. However, this is not recommended for security reasons.