Commit 9236a16

apache_airflow_job
1 parent 0ec9ee8 commit 9236a16

File tree

3 files changed: +280 -0 lines changed

Lines changed: 17 additions & 0 deletions
@@ -0,0 +1,17 @@
from ._items import (
    list_apache_airflow_jobs,
    delete_apache_airflow_job,
)
from ._files import (
    list_apache_airflow_job_files,
    delete_apache_airflow_job_file,
    create_or_update_apache_airflow_job_file,
)

__all__ = [
    "list_apache_airflow_jobs",
    "delete_apache_airflow_job",
    "list_apache_airflow_job_files",
    "delete_apache_airflow_job_file",
    "create_or_update_apache_airflow_job_file",
]
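
Together with the two private modules below, this __init__ defines the public surface of the new subpackage. A minimal usage sketch, assuming the functions are re-exported through the top-level sempy_labs namespace as other semantic-link-labs modules are (the exact import path is not shown in this diff):

    # Sketch only: the top-level re-export path is an assumption, not part of this diff.
    import sempy_labs as labs

    jobs = labs.list_apache_airflow_jobs()  # lists jobs in the default workspace
    print(jobs)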
Lines changed: 174 additions & 0 deletions
@@ -0,0 +1,174 @@
import pandas as pd
from typing import Optional
from sempy_labs._helper_functions import (
    resolve_item_name_and_id,
    resolve_workspace_id,
    _base_api,
    _create_dataframe,
    resolve_item_id,
    resolve_workspace_name_and_id,
)
from uuid import UUID
from sempy._utils._log import log
import sempy_labs._icons as icons


@log
def list_apache_airflow_job_files(
    apache_airflow_job: str | UUID,
    workspace: Optional[str | UUID] = None,
    root_path: Optional[str] = None,
) -> pd.DataFrame:
    """
    Shows a list of files from the specified Apache Airflow job.

    This is a wrapper function for the following API: `Files - List Apache Airflow Job Files <https://learn.microsoft.com/rest/api/fabric/apacheairflowjob/files/list-apache-airflow-job-files>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    apache_airflow_job : str | uuid.UUID
        The Apache Airflow job name or ID.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None, which resolves to the workspace of the attached lakehouse
        or, if no lakehouse is attached, to the workspace of the notebook.
    root_path : str, default=None
        The folder path to list. If not provided, the root directory is used.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of files from the specified Apache Airflow job.
    """

    columns = {
        "File Path": "string",
        "Size In Bytes": "int",
    }

    df = _create_dataframe(columns=columns)

    workspace_id = resolve_workspace_id(workspace)

    apache_airflow_job_id = resolve_item_id(
        item=apache_airflow_job,
        type="ApacheAirflowJob",
        workspace=workspace_id,
    )

    url = f"/v1/workspaces/{workspace_id}/ApacheAirflowJobs/{apache_airflow_job_id}/files?beta=True"
    if root_path:
        url += f"&rootPath={root_path}"
    responses = _base_api(
        request=url,
        client="fabric_sp",
        uses_pagination=True,
    )

    rows = []
    for r in responses:
        for v in r.get("value", []):
            rows.append(
                {
                    "File Path": v.get("filePath"),
                    "Size In Bytes": v.get("sizeInBytes"),
                }
            )

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))

    return df


@log
def delete_apache_airflow_job_file(
    apache_airflow_job: str | UUID,
    file_path: str,
    workspace: Optional[str | UUID] = None,
):
    """
    Deletes a file from a Fabric Apache Airflow Job.

    This is a wrapper function for the following API: `Files - Delete Apache Airflow Job File <https://learn.microsoft.com/rest/api/fabric/apacheairflowjob/files/delete-apache-airflow-job-file>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    apache_airflow_job : str | uuid.UUID
        The Apache Airflow job name or ID.
    file_path : str
        The file path relative to the Apache Airflow job root. It must begin with either 'dags/' or 'plugins/' (for example, 'dags/example_dag.py').
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None, which resolves to the workspace of the attached lakehouse
        or, if no lakehouse is attached, to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (item_name, item_id) = resolve_item_name_and_id(
        item=apache_airflow_job,
        type="ApacheAirflowJob",
        workspace=workspace_id,
    )

    _base_api(
        request=f"/v1/workspaces/{workspace_id}/ApacheAirflowJobs/{item_id}/files/{file_path}?beta=True",
        client="fabric_sp",
        method="delete",
    )

    print(
        f"{icons.green_dot} The '{file_path}' file has been deleted from the '{item_name}' Apache Airflow Job within the '{workspace_name}' workspace."
    )


@log
def create_or_update_apache_airflow_job_file(
    apache_airflow_job: str | UUID,
    file_path: str,
    file_content: bytes,
    workspace: Optional[str | UUID] = None,
):
    """
    Creates or updates an Apache Airflow job file.

    This is a wrapper function for the following API: `Files - Create Or Update Apache Airflow Job File <https://learn.microsoft.com/rest/api/fabric/apacheairflowjob/files/create-or-update-apache-airflow-job-file>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    apache_airflow_job : str | uuid.UUID
        The Apache Airflow job name or ID.
    file_path : str
        The file path relative to the Apache Airflow job root. It must begin with either 'dags/' or 'plugins/' (for example, 'dags/example_dag.py').
    file_content : bytes
        The file content. Text files must be UTF-8 encoded.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None, which resolves to the workspace of the attached lakehouse
        or, if no lakehouse is attached, to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (item_name, item_id) = resolve_item_name_and_id(
        item=apache_airflow_job,
        type="ApacheAirflowJob",
        workspace=workspace_id,
    )

    _base_api(
        request=f"/v1/workspaces/{workspace_id}/ApacheAirflowJobs/{item_id}/files/{file_path}?beta=True",
        client="fabric_sp",
        method="put",
        payload=file_content,
    )

    print(
        f"{icons.green_dot} The '{file_path}' file has been created/updated in the '{item_name}' Apache Airflow Job within the '{workspace_name}' workspace."
    )
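
For orientation, a hedged end-to-end sketch of the three file helpers above; the job name and DAG path are placeholder values, and the functions are assumed to be imported from the module this diff adds:

    # Placeholder job name and file path; file content must be bytes (UTF-8 for text).
    dag_source = b"from airflow import DAG\n"

    create_or_update_apache_airflow_job_file(
        apache_airflow_job="My Airflow Job",
        file_path="dags/example_dag.py",  # must start with 'dags/' or 'plugins/'
        file_content=dag_source,
    )

    files = list_apache_airflow_job_files("My Airflow Job", root_path="dags")
    print(files)

    delete_apache_airflow_job_file("My Airflow Job", file_path="dags/example_dag.py")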
Lines changed: 89 additions & 0 deletions
@@ -0,0 +1,89 @@
import pandas as pd
from typing import Optional
from sempy_labs._helper_functions import (
    resolve_workspace_id,
    _base_api,
    _create_dataframe,
    delete_item,
)
from uuid import UUID
from sempy._utils._log import log


@log
def list_apache_airflow_jobs(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
    """
    Shows the Apache Airflow Jobs within a workspace.

    This is a wrapper function for the following API: `Items - List Apache Airflow Jobs <https://learn.microsoft.com/rest/api/fabric/apacheairflowjob/items/list-apache-airflow-jobs>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None, which resolves to the workspace of the attached lakehouse
        or, if no lakehouse is attached, to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the Apache Airflow Jobs within a workspace.
    """

    columns = {
        "Apache Airflow Job Name": "string",
        "Apache Airflow Job Id": "string",
        "Description": "string",
    }

    df = _create_dataframe(columns=columns)

    workspace_id = resolve_workspace_id(workspace)

    responses = _base_api(
        request=f"/v1/workspaces/{workspace_id}/ApacheAirflowJobs",
        client="fabric_sp",
        uses_pagination=True,
    )

    rows = []
    for r in responses:
        for v in r.get("value", []):
            rows.append(
                {
                    "Apache Airflow Job Name": v.get("displayName"),
                    "Apache Airflow Job Id": v.get("id"),
                    "Description": v.get("description"),
                }
            )

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))

    return df


@log
def delete_apache_airflow_job(
    apache_airflow_job: str | UUID, workspace: Optional[str | UUID] = None
):
    """
    Deletes a Fabric Apache Airflow Job.

    Parameters
    ----------
    apache_airflow_job : str | uuid.UUID
        The name or ID of the Apache Airflow Job to delete.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None, which resolves to the workspace of the attached lakehouse
        or, if no lakehouse is attached, to the workspace of the notebook.
    """

    delete_item(
        item_id=apache_airflow_job,
        type="ApacheAirflowJob",
        workspace=workspace,
    )
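
And a matching sketch for the item-level helpers; again the workspace and job names are placeholders and the import is assumed:

    # Placeholder workspace and job names.
    df = list_apache_airflow_jobs(workspace="My Workspace")
    print(df[["Apache Airflow Job Name", "Apache Airflow Job Id"]])

    delete_apache_airflow_job("My Airflow Job", workspace="My Workspace")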
