@@ -229,6 +229,19 @@ def _get_partitions(
         partition_names = [desc[0] for desc in res.cursor.description]
         return partition_names
 
+    def _connector_is_hive(self, connection: Connection, catalog_name: str):
+        query = dedent(
+            """
+            SELECT
+                COUNT(*)
+            FROM "system"."metadata"."table_properties"
+            WHERE "catalog_name" = :catalog_name
+                AND "property_name" = 'bucketing_version'
+            """
+        ).strip()
+        res = connection.execute(sql.text(query), {"catalog_name": catalog_name})
+        return res.scalar() == 1
+
     def get_pk_constraint(self, connection: Connection, table_name: str, schema: str = None, **kw) -> Dict[str, Any]:
         """Trino has no support for primary keys. Returns a dummy"""
         return dict(name=None, constrained_columns=[])
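The new `_connector_is_hive` helper identifies a Hive-backed catalog by probing `system.metadata.table_properties` for the `bucketing_version` property, which this change treats as unique to the Hive connector. Below is a minimal sketch of the same check run outside the dialect, assuming a reachable Trino cluster at `localhost:8080` and a catalog named `hive` (both illustrative, not part of this change):

```python
from sqlalchemy import create_engine, text

# Illustrative connection details; adjust host, port, user, and catalog.
engine = create_engine("trino://user@localhost:8080/system")

query = text(
    """
    SELECT COUNT(*)
    FROM "system"."metadata"."table_properties"
    WHERE "catalog_name" = :catalog_name
      AND "property_name" = 'bucketing_version'
    """
)

with engine.connect() as connection:
    # Per the change above, the Hive connector registers the
    # 'bucketing_version' table property, so a count of exactly 1
    # marks the catalog as Hive-backed.
    is_hive = connection.execute(query, {"catalog_name": "hive"}).scalar() == 1
    print(is_hive)
```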
@@ -322,11 +335,17 @@ def get_indexes(self, connection: Connection, table_name: str, schema: str = None
         if not self.has_table(connection, table_name, schema):
             raise exc.NoSuchTableError(f"schema={schema}, table={table_name}")
 
+        catalog_name = self._get_default_catalog_name(connection)
+        if catalog_name is None:
+            raise exc.NoSuchTableError("catalog is required in connection")
+        if not self._connector_is_hive(connection, catalog_name):
+            return []
+
         partitioned_columns = None
         try:
             partitioned_columns = self._get_partitions(connection, f"{table_name}", schema)
         except Exception as e:
-            # e.g. it's not a Hive table or an unpartitioned Hive table
+            # e.g. it's an unpartitioned Hive table
             logger.debug("Couldn't fetch partition columns. schema: %s, table: %s, error: %s", schema, table_name, e)
         if not partitioned_columns:
             return []
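With this guard, `get_indexes` short-circuits for non-Hive catalogs before attempting to fetch partitions, so the `except` branch is left to handle unpartitioned Hive tables only. The `_get_default_catalog_name` helper it calls is defined elsewhere in the dialect and is not shown in this hunk; the following is a hypothetical sketch of what such a helper could look like, assuming the catalog is the leading segment of the connection URL's database path (e.g. `trino://host:8080/hive/default`):

```python
from typing import Optional

from sqlalchemy.engine import Connection


def _get_default_catalog_name(connection: Connection) -> Optional[str]:
    # Hypothetical: the real helper lives elsewhere in the dialect and may
    # differ. Here the catalog is read from the engine URL, whose database
    # portion looks like "<catalog>" or "<catalog>/<schema>".
    database = connection.engine.url.database
    if not database:
        return None
    return database.split("/")[0]
```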