@@ -4,11 +4,9 @@
 from datetime import datetime
 from sempy_labs._helper_functions import (
     _get_column_aggregate,
-    resolve_workspace_name_and_id,
     resolve_lakehouse_name_and_id,
     save_as_delta_table,
-    _base_api,
-    _create_dataframe,
+    resolve_workspace_id,
     _read_delta_table,
     _get_delta_table,
     _mount,
@@ -24,6 +22,7 @@
 import sempy_labs._icons as icons
 from sempy._utils._log import log
 from uuid import UUID
+from sempy_labs.lakehouse._schemas import list_tables
 
 
 @log
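Note on the import hunks above: workspace resolution narrows to the ID-only resolve_workspace_id, and the removed _base_api/_create_dataframe plumbing gives way to the shared list_tables helper from sempy_labs.lakehouse._schemas. A minimal sketch of the resulting resolve-then-delegate flow, with placeholder names, assuming list_tables returns a DataFrame carrying the columns the old inline builder produced (the surviving code below still indexes "Schema Name", "Table Name", "Format", and "Type"):

from sempy_labs._helper_functions import (
    resolve_workspace_id,
    resolve_lakehouse_name_and_id,
)
from sempy_labs.lakehouse._schemas import list_tables

# Placeholders; both helpers accept a name or a UUID.
workspace_id = resolve_workspace_id("My Workspace")
(lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
    lakehouse="My Lakehouse", workspace=workspace_id
)

# Assumption: the returned frame keeps the old column set
# ("Workspace Name" ... "Location") so downstream lookups still work.
df = list_tables(lakehouse="My Lakehouse", workspace="My Workspace")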
@@ -70,84 +69,14 @@ def get_lakehouse_tables(
     Shows the tables/columns within a lakehouse and their properties.
     """
 
-    columns = {
-        "Workspace Name": "string",
-        "Lakehouse Name": "string",
-        "Schema Name": "string",
-        "Table Name": "string",
-        "Format": "string",
-        "Type": "string",
-        "Location": "string",
-    }
-    df = _create_dataframe(columns=columns)
-
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    workspace_id = resolve_workspace_id(workspace)
     (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
         lakehouse=lakehouse, workspace=workspace_id
     )
 
-    # Test if valid lakehouse:
-    x = _base_api(f"v1/workspaces/{workspace_id}/lakehouses/{lakehouse_id}")
+    df = list_tables(lakehouse=lakehouse, workspace=workspace)
 
-    if count_rows:  # Setting countrows defaults to extended=True
-        extended = True
-
-    API_called = True
-    try:
-        responses = _base_api(
-            request=f"v1/workspaces/{workspace_id}/lakehouses/{lakehouse_id}/tables",
-            uses_pagination=True,
-            client="fabric_sp",
-        )
-
-    except Exception:
-        API_called = False
-
-    rows = []
-    local_path = None
-    if API_called:
-        if not responses[0].get("data"):
-            return df
-
-        for r in responses:
-            for i in r.get("data", []):
-                rows.append(
-                    {
-                        "Workspace Name": workspace_name,
-                        "Lakehouse Name": lakehouse_name,
-                        "Schema Name": "",
-                        "Table Name": i.get("name"),
-                        "Format": i.get("format"),
-                        "Type": i.get("type"),
-                        "Location": i.get("location"),
-                    }
-                )
-    else:
-        local_path = _mount(lakehouse=lakehouse_id, workspace=workspace_id)
-        tables_path = os.path.join(local_path, "Tables")
-        list_schema = os.listdir(tables_path)
-
-        for schema_name in list_schema:
-            schema_table_path = os.path.join(local_path, "Tables", schema_name)
-            list_tables = os.listdir(schema_table_path)
-            for table_name in list_tables:
-                location_path = create_abfss_path(
-                    lakehouse_id, workspace_id, table_name, schema_name
-                )
-                rows.append(
-                    {
-                        "Workspace Name": workspace_name,
-                        "Lakehouse Name": lakehouse_name,
-                        "Schema Name": schema_name,
-                        "Table Name": table_name,
-                        "Format": "delta",
-                        "Type": "Managed",
-                        "Location": location_path,
-                    }
-                )
-
-    if rows:
-        df = pd.DataFrame(rows, columns=list(columns.keys()))
+    local_path = _mount(lakehouse=lakehouse_id, workspace=workspace_id)
 
     if extended:
         sku_value = get_sku_size(workspace_id)
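The replacement body mounts the lakehouse unconditionally because the extended/count_rows pass now reads Delta tables from the local mount rather than the REST API. A hedged sketch of one such row-count step over the mount, using the open-source deltalake package purely for illustration (the module itself imports the internal _get_delta_table and _get_column_aggregate helpers for this):

import os

from deltalake import DeltaTable  # illustration only; not imported by this module

def count_rows_from_mount(local_path: str, schema_name: str, table_name: str) -> int:
    # Schema-enabled lakehouses nest tables as Tables/<schema>/<table>;
    # schema-less lakehouses keep them directly under Tables/<table>.
    parts = [local_path, "Tables"]
    if schema_name:
        parts.append(schema_name)
    parts.append(table_name)
    # Count rows through the PyArrow dataset built from the Delta log.
    return DeltaTable(os.path.join(*parts)).to_pyarrow_dataset().count_rows()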
@@ -161,7 +90,6 @@ def get_lakehouse_tables(
         df["Row Count"] = None
 
         for i, r in df.iterrows():
-            use_schema = True
             schema_name = r["Schema Name"]
             table_name = r["Table Name"]
             if r["Type"] == "Managed" and r["Format"] == "delta":
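For orientation, a hedged end-to-end call of the refactored function; the lakehouse/workspace values are placeholders, and the parameter behavior is taken from this diff (the removed code forced extended = True whenever count_rows was set, and the extended pass calls get_sku_size and adds a "Row Count" column):

from sempy_labs.lakehouse import get_lakehouse_tables

df = get_lakehouse_tables(
    lakehouse="My Lakehouse",  # placeholder; a UUID also resolves
    workspace="My Workspace",  # placeholder
    extended=True,             # adds SKU-derived properties and "Row Count"
    count_rows=True,           # counts rows for managed delta tables only
)
print(df[["Schema Name", "Table Name", "Format", "Type"]].head())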