Skip to content

Commit 705aa99

Browse files
committed
Merge branch 'm-kovalsky/graph_model'
2 parents def9a99 + 7714169 commit 705aa99

File tree

7 files changed

+273
-47
lines changed

7 files changed

+273
-47
lines changed

src/sempy_labs/_helper_functions.py

Lines changed: 31 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1568,7 +1568,9 @@ def lro(
15681568
status_codes: Optional[List[str]] = [200, 202],
15691569
sleep_time: Optional[int] = 1,
15701570
return_status_code: bool = False,
1571+
job_scheduler: bool = False,
15711572
):
1573+
from sempy_labs._job_scheduler import _get_item_job_instance
15721574

15731575
if response.status_code not in status_codes:
15741576
raise FabricHTTPException(response)
@@ -1578,20 +1580,32 @@ def lro(
15781580
else:
15791581
result = response
15801582
if response.status_code == status_codes[1]:
1581-
operationId = response.headers["x-ms-operation-id"]
1582-
response = client.get(f"/v1/operations/{operationId}")
1583-
response_body = json.loads(response.content)
1584-
while response_body["status"] not in ["Succeeded", "Failed"]:
1585-
time.sleep(sleep_time)
1586-
response = client.get(f"/v1/operations/{operationId}")
1587-
response_body = json.loads(response.content)
1588-
if response_body["status"] != "Succeeded":
1589-
raise FabricHTTPException(response)
1590-
if return_status_code:
1591-
result = response.status_code
1583+
if job_scheduler:
1584+
status_url = response.headers.get("Location").split("fabric.microsoft.com")[
1585+
1
1586+
]
1587+
status = None
1588+
while status not in ["Completed", "Failed"]:
1589+
response = _base_api(request=status_url)
1590+
status = response.json().get("status")
1591+
time.sleep(3)
1592+
1593+
return _get_item_job_instance(url=status_url)
15921594
else:
1593-
response = client.get(f"/v1/operations/{operationId}/result")
1594-
result = response
1595+
operation_id = response.headers["x-ms-operation-id"]
1596+
response = client.get(f"/v1/operations/{operation_id}")
1597+
response_body = json.loads(response.content)
1598+
while response_body["status"] not in ["Succeeded", "Failed"]:
1599+
time.sleep(sleep_time)
1600+
response = client.get(f"/v1/operations/{operation_id}")
1601+
response_body = json.loads(response.content)
1602+
if response_body["status"] != "Succeeded":
1603+
raise FabricHTTPException(response)
1604+
if return_status_code:
1605+
result = response.status_code
1606+
else:
1607+
response = client.get(f"/v1/operations/{operation_id}/result")
1608+
result = response
15951609

15961610
return result
15971611

@@ -2212,6 +2226,7 @@ def _base_api(
22122226
uses_pagination: bool = False,
22132227
lro_return_json: bool = False,
22142228
lro_return_status_code: bool = False,
2229+
lro_return_df: bool = False,
22152230
):
22162231
import notebookutils
22172232
from sempy_labs._authentication import _get_headers
@@ -2267,7 +2282,9 @@ def get_token(self, *scopes, **kwargs) -> AccessToken:
22672282
json=payload,
22682283
)
22692284

2270-
if lro_return_json:
2285+
if lro_return_df:
2286+
return lro(c, response, status_codes, job_scheduler=True)
2287+
elif lro_return_json:
22712288
return lro(c, response, status_codes).json()
22722289
elif lro_return_status_code:
22732290
return lro(c, response, status_codes, return_status_code=True)

src/sempy_labs/_utils.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -71,4 +71,6 @@
7171
"UserDataFunction": "UserDataFunctions",
7272
"SQLDatabase": "sqlDatabases",
7373
"Reflex": "reflexes",
74+
"GraphModel": "GraphModels",
75+
"GraphQuerySet": "GraphQuerySets",
7476
}
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
# Public API of the graph model subpackage.
# Re-exports the item-level operations from ._items and the on-demand
# refresh job from ._background_jobs so callers can import them directly
# from this package.
from ._items import (
    list_graph_models,
    execute_query,
    get_queryable_graph_type,
)
from ._background_jobs import (
    refresh_graph,
)

# Explicit public surface; keeps `from <package> import *` limited to
# the four supported functions.
__all__ = [
    "list_graph_models",
    "execute_query",
    "get_queryable_graph_type",
    "refresh_graph",
]
Lines changed: 63 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,63 @@
1+
import pandas as pd
2+
from uuid import UUID
3+
from typing import Optional
4+
from sempy._utils._log import log
5+
from sempy_labs._helper_functions import (
6+
_base_api,
7+
resolve_item_name_and_id,
8+
resolve_workspace_name_and_id,
9+
)
10+
import sempy_labs._icons as icons
11+
12+
13+
@log
def refresh_graph(
    graph_model: str | UUID, workspace: Optional[str | UUID] = None
) -> pd.DataFrame:
    """
    Runs an on-demand refresh job for a graph model and waits for it to finish.

    This is a wrapper function for the following API: `Background Jobs - Run On Demand Refresh Graph <https://learn.microsoft.com/rest/api/fabric/graphmodel/background-jobs/run-on-demand-refresh-graph>`_.

    Parameters
    ----------
    graph_model : str | uuid.UUID
        The graph model name or ID.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the result of the refresh operation.
    """

    # Resolve both display names (for messages) and IDs (for the REST path).
    workspace_name, workspace_id = resolve_workspace_name_and_id(workspace)
    item_name, item_id = resolve_item_name_and_id(
        item=graph_model, type="GraphModel", workspace=workspace_id
    )

    print(
        f"{icons.in_progress} The refresh graph job for the '{item_name}' graph model within the '{workspace_name}' workspace has been initiated."
    )

    # lro_return_df makes _base_api poll the job-scheduler status URL until the
    # job reaches a terminal state and return the job instance as a dataframe.
    job_df = _base_api(
        request=f"/v1/workspaces/{workspace_id}/GraphModels/{item_id}/jobs/instances?jobType=RefreshGraph",
        method="post",
        lro_return_df=True,
    )

    final_status = job_df["Status"].iloc[0]
    if final_status == "Completed":
        print(
            f"{icons.green_dot} The refresh graph job for the '{item_name}' graph model within the '{workspace_name}' workspace has succeeded."
        )
        return job_df

    # Surface the raw terminal status before the failure message to aid debugging.
    print(final_status)
    print(
        f"{icons.red_dot} The refresh graph job for the '{item_name}' graph model within the '{workspace_name}' workspace has failed."
    )
    return job_df
Lines changed: 149 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,149 @@
1+
import pandas as pd
2+
from uuid import UUID
3+
from typing import Optional
4+
from sempy._utils._log import log
5+
from sempy_labs._helper_functions import (
6+
_base_api,
7+
_create_dataframe,
8+
resolve_item_id,
9+
resolve_workspace_id,
10+
resolve_item_name_and_id,
11+
resolve_workspace_name_and_id,
12+
)
13+
import sempy_labs._icons as icons
14+
15+
16+
@log
def list_graph_models(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
    """
    Shows the graph models within a workspace.

    This is a wrapper function for the following API: `Items - List Graph Models <https://learn.microsoft.com/rest/api/fabric/graphmodel/items/list-graph-models>`_.

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the graph models within a workspace.
    """

    columns = {
        "Graph Model Name": "string",
        "Graph Model Id": "string",
        "Description": "string",
        "OneLake Root Path": "string",
    }
    # Start from an empty, correctly-typed frame so the no-results case
    # still returns the expected schema.
    df = _create_dataframe(columns=columns)

    workspace_id = resolve_workspace_id(workspace)

    # The endpoint is paginated; _base_api returns one response per page.
    responses = _base_api(
        request=f"/v1/workspaces/{workspace_id}/GraphModels",
        uses_pagination=True,
    )

    records = [
        {
            "Graph Model Name": item.get("displayName"),
            "Graph Model Id": item.get("id"),
            "Description": item.get("description"),
            "OneLake Root Path": item.get("properties", {}).get("oneLakeRootPath"),
        }
        for page in responses
        for item in page.get("value", [])
    ]

    if records:
        df = pd.DataFrame(records, columns=list(columns.keys()))

    return df
67+
68+
69+
@log
def execute_query(
    graph_model: str | UUID, query: str, workspace: Optional[str | UUID] = None
) -> dict:
    """
    Executes a query on the specified graph model.

    This is a wrapper function for the following API: `Items - ExecuteQuery <https://learn.microsoft.com/rest/api/fabric/graphmodel/items/execute-query(preview)>`_.

    Parameters
    ----------
    graph_model : str | uuid.UUID
        The graph model name or ID.
    query : str
        The query string.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    dict
        The response from the API.
    """

    # Names are needed for the success message; IDs for the REST path.
    workspace_name, workspace_id = resolve_workspace_name_and_id(workspace)
    item_name, item_id = resolve_item_name_and_id(
        item=graph_model, type="GraphModel", workspace=workspace_id
    )

    # NOTE: this endpoint is in preview, hence the ?preview=True query flag.
    response = _base_api(
        request=f"/v1/workspaces/{workspace_id}/GraphModels/{item_id}/executeQuery?preview=True",
        method="post",
        payload={
            "query": query,
        },
    )

    print(
        f"{icons.green_dot} Executed query on Graph Model '{item_name}' in workspace '{workspace_name}' successfully."
    )

    return response.json()
114+
115+
116+
@log
def get_queryable_graph_type(
    graph_model: str | UUID, workspace: Optional[str | UUID] = None
) -> dict:
    """
    Gets the current queryable graph type.

    This is a wrapper function for the following API: `Items - GetQueryableGraphType <https://learn.microsoft.com/rest/api/fabric/graphmodel/items/get-queryable-graph-type(preview)>`_.

    Parameters
    ----------
    graph_model : str | uuid.UUID
        The graph model name or ID.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    dict
        A dictionary showing the current queryable graph type.
    """

    # Only IDs are required here — no user-facing messages are printed.
    workspace_id = resolve_workspace_id(workspace)
    item_id = resolve_item_id(
        item=graph_model, type="GraphModel", workspace=workspace_id
    )

    # Preview endpoint; a plain GET with no payload.
    result = _base_api(
        request=f"/v1/workspaces/{workspace_id}/GraphModels/{item_id}/getQueryableGraphType?preview=True"
    )

    return result.json()

src/sempy_labs/lakehouse/_lakehouse.py

Lines changed: 8 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -14,11 +14,7 @@
1414
)
1515
import sempy_labs._icons as icons
1616
import re
17-
import time
1817
import pandas as pd
19-
from sempy_labs._job_scheduler import (
20-
_get_item_job_instance,
21-
)
2218

2319

2420
@log
@@ -327,26 +323,20 @@ def is_valid_format(time_string):
327323
if vacuum and retention_period is not None:
328324
payload["executionData"]["vacuumSettings"]["retentionPeriod"] = retention_period
329325

330-
response = _base_api(
326+
print(
327+
f"{icons.in_progress} The table maintenance job for the '{table_name}' table in the '{lakehouse_name}' lakehouse within the '{workspace_name}' workspace has been initiated."
328+
)
329+
330+
df = _base_api(
331331
request=f"/v1/workspaces/{workspace_id}/lakehouses/{lakehouse_id}/jobs/instances?jobType=TableMaintenance",
332332
method="post",
333333
payload=payload,
334-
status_codes=202,
334+
status_codes=[200, 202],
335335
client="fabric_sp",
336+
lro_return_df=True,
336337
)
337338

338-
print(
339-
f"{icons.in_progress} The table maintenance job for the '{table_name}' table in the '{lakehouse_name}' lakehouse within the '{workspace_name}' workspace has been initiated."
340-
)
341-
342-
status_url = response.headers.get("Location").split("fabric.microsoft.com")[1]
343-
status = None
344-
while status not in ["Completed", "Failed"]:
345-
response = _base_api(request=status_url)
346-
status = response.json().get("status")
347-
time.sleep(3)
348-
349-
df = _get_item_job_instance(url=status_url)
339+
status = df["Status"].iloc[0]
350340

351341
if status == "Completed":
352342
print(

src/sempy_labs/lakehouse/_materialized_lake_views.py

Lines changed: 5 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -9,10 +9,6 @@
99
from uuid import UUID
1010
from sempy._utils._log import log
1111
import sempy_labs._icons as icons
12-
import time
13-
from sempy_labs._job_scheduler import (
14-
_get_item_job_instance,
15-
)
1612
import pandas as pd
1713

1814

@@ -46,22 +42,16 @@ def refresh_materialized_lake_views(
4642
lakehouse=lakehouse, workspace=workspace_id
4743
)
4844

49-
response = _base_api(
50-
request=f"/v1/workspaces/{workspace_id}/lakehouses/{lakehouse_id}/jobs/instances?jobType=RefreshMaterializedLakeViews",
51-
)
52-
5345
print(
5446
f"{icons.in_progress} The refresh materialized lake views job for the '{lakehouse_name}' lakehouse within the '{workspace_name}' workspace has been initiated."
5547
)
5648

57-
status_url = response.headers.get("Location").split("fabric.microsoft.com")[1]
58-
status = None
59-
while status not in ["Completed", "Failed"]:
60-
response = _base_api(request=status_url)
61-
status = response.json().get("status")
62-
time.sleep(3)
49+
df = _base_api(
50+
request=f"/v1/workspaces/{workspace_id}/lakehouses/{lakehouse_id}/jobs/instances?jobType=RefreshMaterializedLakeViews",
51+
lro_return_df=True,
52+
)
6353

64-
df = _get_item_job_instance(url=status_url)
54+
status = df["Status"].iloc[0]
6555

6656
if status == "Completed":
6757
print(

0 commit comments

Comments
 (0)