
Commit 189cd39

adealdag authored and lhercot committed
[minor_change] Add module nd_instant_assurance_analysis and nd_delta_analysis
1 parent 5304216 commit 189cd39

11 files changed: +971 additions, -74 deletions

galaxy.yml

Lines changed: 18 additions & 16 deletions
@@ -5,25 +5,27 @@ name: nd
 version: 0.1.2
 readme: README.md
 authors:
-  - Lionel Hercot (@lhercot)
+  - "Lionel Hercot (@lhercot)"
+  - "Cindy Zhao (@cizhao)"
+  - "Alejandro de Alda (@adealdag)"
 description: An Ansible collection for managing Cisco Nexus Dashboard
-license_file: 'LICENSE'
-tags:
-  - cisco
-  - nd
-  - aci
-  - dcnm
-  - orchestrator
-  - multisite
-  - insights
-  - nexus
-  - cloud
-  - collection
-  - networking
-  - sdn
+license_file: "LICENSE"
+tags:
+  - cisco
+  - nd
+  - aci
+  - dcnm
+  - orchestrator
+  - multisite
+  - insights
+  - nexus
+  - cloud
+  - collection
+  - networking
+  - sdn
 dependencies:
   "ansible.netcommon": "*"
 repository: https://github.com/CiscoDevNet/ansible-nd
 #documentation: https://docs.ansible.com/ansible/latest/scenario_guides/guide_nd.html
 homepage: https://cisco.com/go/nexusdashboard
-issues: https://github.com/CiscoDevNet/ansible-nd/issues
+issues: https://github.com/CiscoDevNet/ansible-nd/issues

plugins/module_utils/ndi.py

Lines changed: 65 additions & 0 deletions
@@ -25,6 +25,9 @@ def __init__(self, nd_module):
         self.event_insight_group_path = "events/insightsGroup/{0}/fabric/{1}"
         self.compliance_path = "model/aciPolicy/complianceAnalysis"
         self.epoch_delta_ig_path = "epochDelta/insightsGroup/{0}/fabric/{1}/job/{2}/health/view"
+        self.run_analysis_ig_path = "{0}/fabric/{1}/runOnlineAnalysis"
+        self.run_epoch_delta_ig_path = "{0}/fabric/{1}/runEpochDelta"
+        self.jobs_ig_path = "jobs/summary.json"
 
     def get_site_id(self, ig_name, site_name, **kwargs):
         obj = self.nd.query_obj(self.config_ig_path, **kwargs)
@@ -50,6 +53,12 @@ def get_last_epoch(self, ig_name, site_name):
         obj = self.nd.query_obj(path, prefix=self.prefix)
         return obj["value"]["data"][0]
 
+    def get_epoch_by_jobid(self, ig_name, site_name, job_id):
+        ig_base_path = self.event_insight_group_path.format(ig_name, site_name)
+        path = "{0}/epochs?analysisId={1}".format(ig_base_path, job_id)
+        obj = self.nd.query_obj(path, prefix=self.prefix)
+        return obj["value"]["data"][0]
+
     def query_data(self, path):
         obj = self.nd.query_obj(path, prefix=self.prefix)
         return obj["value"]["data"]
@@ -97,6 +106,62 @@ def query_anomalies(self, ig_name, site_name, epoch_delta_job_id, epoch_choice,
             result.append(entry)
         return result
 
+    def query_instant_assurance_analysis(self, ig_name, site_name, jobId=None):
+        instant_assurance_jobs_path = (
+            self.jobs_ig_path
+            + "?insightsGroupName={0}&fabricName={1}&orderBy=startTs,desc&filter=(jobType:ONLINE\\-ANALYSIS*%20AND%20triggeredBy:INSTANT)&startTs={2}".format(
+                ig_name, site_name, 0
+            )
+        )
+        if jobId:
+            instant_assurance_jobs_path = instant_assurance_jobs_path + "&jobId={0}".format(jobId)
+
+        size = 1000
+        path = instant_assurance_jobs_path
+        # + "&count={0}&offset={1}" does not work with current implementation of query_entry
+
+        entries = self.query_entry(path, size)
+        return entries
+
+    def query_delta_analysis(self, ig_name, site_name, jobId=None, jobName=None):
+        if jobId:
+            delta_job_path = (
+                self.jobs_ig_path
+                + "?jobType=EPOCH-DELTA-ANALYSIS&insightsGroupName={0}&fabricName={1}&filter=(!configData:pcvJobId%20AND%20jobId:{2})".format(
+                    ig_name, site_name, jobId
+                )
+            )
+            entries = self.query_entry(delta_job_path, 1)
+            if len(entries) == 1:
+                return entries[0]
+            else:
+                return {}
+        elif jobName:
+            delta_job_path = (
+                self.jobs_ig_path
+                + "?jobType=EPOCH-DELTA-ANALYSIS&insightsGroupName={0}&fabricName={1}&filter=(!configData:pcvJobId%20AND%20jobName:{2})".format(
+                    ig_name, site_name, jobName
+                )
+            )
+            entries = self.query_entry(delta_job_path, 1)
+            if len(entries) == 1:
+                return entries[0]
+            else:
+                return {}
+        else:
+            delta_jobs_path = (
+                self.jobs_ig_path
+                + "?jobType=EPOCH-DELTA-ANALYSIS&insightsGroupName={0}&fabricName={1}&filter=(!configData:pcvJobId)&orderBy=startTs,desc&startTs={2}".format(
+                    ig_name, site_name, 0
+                )
+            )
+            size = 1000
+            path = delta_jobs_path
+            # + "&count={0}&offset={1}" does not work with current implementation of query_entry
+
+            entries = self.query_entry(path, size)
+            return entries
+
     def format_event_severity(self, events_severity):
         result = {}
         for each in events_severity:
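
For context on how these helpers are used, the new path attributes compose into full NDI REST URLs by plain string formatting. The following standalone Python sketch illustrates the expansion; the config/insightsGroup prefix and the example group and site names are assumptions for illustration only, not taken from this commit:

# Illustrative sketch: how the new NDI path templates expand into request URLs.
run_epoch_delta_ig_path = "{0}/fabric/{1}/runEpochDelta"
jobs_ig_path = "jobs/summary.json"
config_ig_path = "config/insightsGroup"  # assumed value of the pre-existing prefix attribute

insights_group, site_name = "exampleIG", "siteName"

# Path POSTed to when triggering a new epoch delta analysis (used by nd_delta_analysis below)
trigger_path = config_ig_path + "/" + run_epoch_delta_ig_path.format(insights_group, site_name)
print(trigger_path)  # config/insightsGroup/exampleIG/fabric/siteName/runEpochDelta

# Path queried by query_delta_analysis() when listing all delta jobs for a fabric
delta_jobs_path = jobs_ig_path + "?jobType=EPOCH-DELTA-ANALYSIS&insightsGroupName={0}&fabricName={1}&filter=(!configData:pcvJobId)&orderBy=startTs,desc&startTs={2}".format(
    insights_group, site_name, 0
)
print(delta_jobs_path)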
plugins/modules/nd_delta_analysis.py

Lines changed: 268 additions & 0 deletions
@@ -0,0 +1,268 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright: (c) 2023, Alejandro de Alda (@adealdag) <[email protected]>
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+ANSIBLE_METADATA = {"metadata_version": "1.1", "status": ["preview"], "supported_by": "community"}
+
+DOCUMENTATION = r"""
+---
+module: nd_delta_analysis
+version_added: "0.2.0"
+short_description: Manage delta analysis jobs
+description:
+- Manage delta analysis jobs on Cisco Nexus Dashboard Insights (NDI).
+author:
+- Alejandro de Alda (@adealdag)
+options:
+  insights_group:
+    description:
+    - The name of the insights group.
+    type: str
+    required: yes
+    aliases: [ fab_name, ig_name ]
+  site_name:
+    description:
+    - Name of the Assurance Entity.
+    type: str
+    required: yes
+    aliases: [ site ]
+  name:
+    description:
+    - The name of the delta analysis job.
+    type: str
+    aliases: [ job_name, delta_name ]
+  earlier_epoch_id:
+    description:
+    - Epoch UUID for the earlier epoch.
+    - Ignored if state is C(query) or C(absent).
+    type: str
+    aliases: [ earlier_epoch_uuid, earlier_epoch ]
+  later_epoch_id:
+    description:
+    - Epoch UUID for the later epoch.
+    - Ignored if state is C(query) or C(absent).
+    type: str
+    aliases: [ later_epoch_uuid, later_epoch ]
+  earlier_epoch_time:
+    description:
+    - Epoch collection time, in ISO format, for the earlier epoch.
+    - Ignored if state is C(query) or C(absent).
+    type: str
+  later_epoch_time:
+    description:
+    - Epoch collection time, in ISO format, for the later epoch.
+    - Ignored if state is C(query) or C(absent).
+    type: str
+  state:
+    description:
+    - Use C(present) or C(absent) for creating or deleting a delta analysis job.
+    - Use C(query) for querying existing delta analysis jobs.
+    - Use C(validate) to wait for completion, validate and return an error if any unacknowledged (non-info) anomalies exist.
+    type: str
+    choices: [ absent, present, query, validate ]
+    default: query
+extends_documentation_fragment: cisco.nd.modules
+"""
+
+EXAMPLES = r"""
+- name: Create a new delta analysis job using epoch UUIDs
+  cisco.nd.nd_delta_analysis:
+    insights_group: exampleIG
+    site_name: siteName
+    name: testDeltaAnalysis
+    earlier_epoch_id: 0e5604f9-53b9c234-03dc-3997-9850-501b925f7d65
+    later_epoch_id: 0e5604f9-ad5b12ae-9834-348b-aed1-8ca124e32e9b
+    state: present
+- name: Create a new delta analysis job using epoch times
+  cisco.nd.nd_delta_analysis:
+    insights_group: exampleIG
+    site_name: siteName
+    name: testDeltaAnalysis
+    earlier_epoch_time: 2023-01-15T12:24:34Z
+    later_epoch_time: 2023-01-17T18:27:34Z
+    state: present
+- name: Validate a running delta analysis job
+  cisco.nd.nd_delta_analysis:
+    insights_group: exampleIG
+    site_name: siteName
+    name: testDeltaAnalysis
+    state: validate
+- name: Delete an existing delta analysis job
+  cisco.nd.nd_delta_analysis:
+    insights_group: exampleIG
+    site_name: siteName
+    name: testDeltaAnalysis
+    state: absent
+- name: Query existing delta analysis jobs
+  cisco.nd.nd_delta_analysis:
+    insights_group: exampleIG
+    site_name: siteName
+    state: query
+  register: query_results
+- name: Query a specific delta analysis job
+  cisco.nd.nd_delta_analysis:
+    insights_group: exampleIG
+    site_name: siteName
+    name: testDeltaAnalysis
+    state: query
+  register: query_results
+"""
+
+RETURN = r"""
+"""
+
+import datetime
+from ansible_collections.cisco.nd.plugins.module_utils.ndi import NDI
+from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule, nd_argument_spec
+from ansible.module_utils.basic import AnsibleModule
+
+epoch_map = {
+    "epoch2": "EPOCH2_ONLY",
+    "epoch1": "EPOCH1_ONLY",
+    "both_epoch": "BOTH_EPOCHS",
+    "all": None,
+}
+
+
+def main():
+    argument_spec = nd_argument_spec()
+    argument_spec.update(
+        insights_group=dict(type="str", required=True, aliases=["fab_name", "ig_name"]),
+        site_name=dict(type="str", required=True, aliases=["site"]),
+        name=dict(type="str", aliases=["job_name", "delta_name"]),
+        earlier_epoch_id=dict(type="str", aliases=["earlier_epoch_uuid", "earlier_epoch"]),
+        later_epoch_id=dict(type="str", aliases=["later_epoch_uuid", "later_epoch"]),
+        earlier_epoch_time=dict(type="str"),
+        later_epoch_time=dict(type="str"),
+        state=dict(type="str", default="query", choices=["query", "absent", "present", "validate"]),
+    )
+
+    module = AnsibleModule(
+        argument_spec=argument_spec,
+        supports_check_mode=True,
+        required_if=[
+            ["state", "validate", ["name"]],
+            ["state", "absent", ["name"]],
+            ["state", "present", ["name"]],
+            ["state", "present", ("earlier_epoch_id", "earlier_epoch_time"), True],
+            ["state", "present", ("later_epoch_id", "later_epoch_time"), True],
+        ],
+        mutually_exclusive=[("earlier_epoch_id", "earlier_epoch_time"), ("later_epoch_id", "later_epoch_time")],
+        required_together=[("earlier_epoch_id", "later_epoch_id"), ("earlier_epoch_time", "later_epoch_time")],
+    )
+
+    nd = NDModule(module)
+    ndi = NDI(nd)
+
+    state = nd.params.get("state")
+    insights_group = nd.params.get("insights_group")
+    site_name = nd.params.get("site_name")
+    name = nd.params.get("name")
+    earlier_epoch_id = nd.params.get("earlier_epoch_id")
+    later_epoch_id = nd.params.get("later_epoch_id")
+    earlier_epoch_time = nd.params.get("earlier_epoch_time")
+    later_epoch_time = nd.params.get("later_epoch_time")
+
+    if name:
+        nd.existing = ndi.query_delta_analysis(insights_group, site_name, jobName=name)
+    else:
+        nd.existing = {}
+
+    if state == "query":
+        if name is None:
+            delta_job_list = ndi.query_delta_analysis(insights_group, site_name)
+            nd.existing = delta_job_list
+
+    elif state == "present":
+        if earlier_epoch_id and later_epoch_id:
+            data = {"jobName": name, "priorEpochUuid": earlier_epoch_id, "laterEpochUuid": later_epoch_id}
+        elif earlier_epoch_time and later_epoch_time:
+            earlier_epoch_dt = datetime.datetime.fromisoformat(earlier_epoch_time.replace("Z", ""))
+            later_epoch_dt = datetime.datetime.fromisoformat(later_epoch_time.replace("Z", ""))
+            data = {
+                "jobName": name,
+                "priorEpochTime": round(earlier_epoch_dt.timestamp() * 1000),
+                "laterEpochTime": round(later_epoch_dt.timestamp() * 1000),
+            }
+
+        if module.check_mode:
+            nd.existing = data
+            nd.exit_json()
+
+        nd.previous = nd.existing
+        if nd.existing:
+            nd.exit_json()
+
+        trigger_path = ndi.config_ig_path + "/" + ndi.run_epoch_delta_ig_path.format(insights_group, site_name)
+        resp = nd.request(trigger_path, method="POST", data=data, prefix=ndi.prefix)
+
+        if resp["success"] is True:
+            job_id = resp["value"]["data"]["configId"]
+            delta_job_info = ndi.query_delta_analysis(insights_group, site_name, jobId=job_id)
+            nd.existing = delta_job_info
+        else:
+            nd.fail_json(msg="Creating delta analysis job failed")
+
+    elif state == "validate":
+        epoch_choice = "epoch2"
+        exclude_ack_anomalies = True
+        # Wait for Epoch Delta Analysis to complete
+        while nd.existing.get("operSt") not in ["COMPLETE", "FAILED"]:
+            try:
+                nd.existing = ndi.query_delta_analysis(insights_group, site_name, jobName=name)
+                if nd.existing.get("operSt") == "FAILED":
+                    nd.fail_json(msg="Epoch Delta Analysis {0} has failed".format(name))
+                if nd.existing.get("operSt") == "COMPLETE":
+                    break
+            except BaseException:
+                nd.fail_json(msg="Epoch Delta Analysis {0} not found".format(name))
+        # Evaluate Epoch Delta Analysis
+        if nd.existing.get("operSt") == "FAILED":
+            nd.fail_json(msg="Epoch Delta Analysis {0} has failed".format(name))
+
+        job_id = nd.existing.get("jobId")
+        nd.existing["anomaly_count"] = ndi.query_event_severity(insights_group, site_name, job_id)
+        anomalies = ndi.query_anomalies(insights_group, site_name, job_id, epoch_map[epoch_choice], exclude_ack_anomalies)
+        nd.existing["anomalies"] = anomalies
+        # nd.existing["unhealthy_resources"] = ndi.query_impacted_resource(
+        #     insights_group, site_name, job_id)
+        if anomalies:
+            anomalies_count = {"minor": 0, "major": 0, "critical": 0, "warning": 0}
+            for anomaly in anomalies:
+                severity = anomaly.get("severity")
+                if severity in anomalies_count.keys():
+                    anomalies_count[severity] += 1
+            nd.fail_json(
+                msg="Epoch Delta Analysis failed. The above {0} (critical({1})|major({2})|minor({3})|warning({4})) anomalies have been detected.".format(
+                    len(anomalies), anomalies_count.get("critical"), anomalies_count.get("major"), anomalies_count.get("minor"), anomalies_count.get("warning")
+                )
+            )
+
+    elif state == "absent":
+        nd.previous = nd.existing
+        job_id = nd.existing.get("jobId")
+        if nd.existing and job_id:
+            if module.check_mode:
+                nd.existing = {}
+            else:
+                rm_path = ndi.config_ig_path + "/" + "{0}/fabric/{1}/deleteEpochDelta".format(insights_group, site_name)
+                rm_payload = [job_id]
+
+                rm_resp = nd.request(rm_path, method="POST", data=rm_payload, prefix=ndi.prefix)
+
+                if rm_resp["success"] is True:
+                    nd.existing = {}
+                else:
+                    nd.fail_json(msg="Delta Analysis {0} could not be deleted".format(name))
+
+    nd.exit_json()
+
+
+if __name__ == "__main__":
+    main()
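
One implementation detail worth calling out: when earlier_epoch_time and later_epoch_time are supplied, the module converts the ISO timestamps into millisecond epochs before posting the payload. Below is a minimal standalone Python sketch of that conversion, using the timestamps from the EXAMPLES block above; iso_to_ms is a hypothetical helper name used only for illustration, not part of the module:

import datetime

def iso_to_ms(iso_time):
    # fromisoformat() does not accept a trailing "Z", so it is stripped first,
    # mirroring the replace("Z", "") call in the module. The resulting naive
    # datetime is interpreted in the local timezone by timestamp().
    dt = datetime.datetime.fromisoformat(iso_time.replace("Z", ""))
    return round(dt.timestamp() * 1000)

data = {
    "jobName": "testDeltaAnalysis",
    "priorEpochTime": iso_to_ms("2023-01-15T12:24:34Z"),
    "laterEpochTime": iso_to_ms("2023-01-17T18:27:34Z"),
}
print(data)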
