-
Notifications
You must be signed in to change notification settings - Fork 3
/
contract_parser.py
executable file
·2120 lines (1945 loc) · 86 KB
/
contract_parser.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/usr/bin/python
import logging, re, json, time, sys, os, subprocess
import math
# ----------------------------------------------------------------------------
# Static Globals/Defaults
# ----------------------------------------------------------------------------
# directory used when extracting offline bundles / caching
CACHE_DIR = "/tmp/"
# execution modes: online queries the local API via icurl, offline reads
# previously collected class data extracted from a tar bundle
EXEC_ONLINE = 0
EXEC_OFFLINE = 1
EXEC_MODE = EXEC_ONLINE
# toggles for resolving rule->contract names and service graph info
SHOW_CONTRACT = True
SHOW_GRAPH = True
# pcTags within [UNIQUE_PCTAG_MIN, UNIQUE_PCTAG_MAX] are treated as globally
# unique (shared-service range) across all scopes
UNIQUE_PCTAG_MAX = 0x4000
UNIQUE_PCTAG_MIN = 16
# well-known reserved pcTags that are always globally unique
STATIC_UNIQUE = {
    13: "ext-shrsvc",
    14: "int-shrsvc",
    15: "pfx-0.0.0.0/0"
}
# list of epg classes for name resolution
EPG_CLASSES = ["vzToEPg", "fvEpP", "fvAREpP", "fvABD", "fvACtx",
    "fvInBEpP", "fvOoBEpP"]
# class used for vrf name/vnid/pcTag resolution
VRF_CLASSES = ["l3Ctx"]
ACTRL_CLASSES = [
    # actlr classes for software rule/entries/stats
    "actrlRule", "actrlEntry", "actrlRuleHit5min",
    # pbr classes
    "svcredirDest", "svcredirRsDestAtt", "svcredirDestGrp",
    "actrlRsToRedirDestGrp",
    # copy service classes
    "svccopyDest", "svccopyDestGrp", "svccopyRsCopyDestAtt",
    "actrlRsToCopyDestGrp",
]
# classes used to map a concrete rule back to the contract that created it
CONTRACT_CLASSES = [
    "actrlRsToEpgConn", "vzTrCreatedBy", "vzRuleOwner","vzObservableRuleOwner"
]
# service-graph classes describing L4-L7 device attachment
GRAPH_CLASSES = ["vnsNodeInst", "vnsRsNodeInstToLDevCtx", "vnsLDevCtx",
    "vnsLIfCtx", "vnsRsLIfCtxToBD", "vnsRsLIfCtxToLIf",
    "vnsRsLDevCtxToLDev", "vnsLDevVip",
    "vnsCDev", "vnsCIf", "vnsRsCIfPathAtt", "vnsLIf", "vnsRsCIfAtt",
    "vnsRsCIfAttN",
]
# fixed regex for extracting node-id from an object dn
node_regex = re.compile("^topology/pod-[0-9]+/node-(?P<node>[0-9]+)/")
def td(start, end, milli=True):
    """ format the delta between two timestamps as a readable string

        milli=True  -> "<n>.nnn msecs"
        milli=False -> "<n>.nnn secs"
    """
    delta = end - start
    if milli:
        return "{0:.3f} msecs".format(delta * 1000)
    return "{0:.3f} secs".format(delta)
# ----------------------------------------------------------------------------
# Common Functions
# ----------------------------------------------------------------------------
def pretty_print(js):
    """ best-effort render of an object as sorted, indented json;
        falls back to the object's string form if it is not serializable
    """
    dump_opts = {"indent": 4, "separators": (",", ":"), "sort_keys": True}
    try:
        return json.dumps(js, **dump_opts)
    except Exception:
        return "%s" % js
def str_to_protocol(prot):
    """ map common protocol strings to int value (0 if unknown);
        numeric input is returned as its int value
    """
    # numeric input (int or numeric string) wins immediately
    try:
        return int(prot)
    except Exception:
        pass
    known = {
        "icmp": 1,
        "igmp": 2,
        "tcp": 6,
        "udp": 17,
        "gre": 47,
        "ah": 51,
        "icmp6": 58, "icmpv6": 58,
        "eigrp": 88,
        "ospf": 89, "ospfigp": 89,
        "pim": 103,
    }
    return known.get(prot.lower(), 0)
def protocol_to_str(prot):
    """ map supported protocol int to string value; non-numeric or
        unknown values are returned unchanged
    """
    names = {
        1: "icmp",
        2: "igmp",
        6: "tcp",
        8: "egp",
        9: "igp",
        17: "udp",
        58: "icmpv6",
        88: "eigrp",
        89: "ospfigp",
        103: "pim",
        115: "l2tp",
    }
    try:
        prot = int(prot)
    except Exception:
        return prot
    return names.get(prot, prot)
def port_to_str(port):
    """ map supported int L4 ports to string value; non-numeric or
        unknown ports are returned unchanged
    """
    well_known = {
        20: "ftpData",
        25: "smtp",
        53: "dns",
        80: "http",
        110: "pop3",
        443: "https",
        554: "rtsp",
    }
    try:
        port = int(port)
    except Exception:
        return port
    return well_known.get(port, port)
def str_to_port(port):
    """ map supported str l4ports to int value; anything not in the
        well-known table is returned unchanged
    """
    well_known = {
        "ftpData": 20,
        "smtp": 25,
        "dns": 53,
        "http": 80,
        "pop3": 110,
        "https": 443,
        "rtsp": 554,
    }
    return well_known.get(port, port)
def icmp_opcode_to_str(opcode):
    """ map icmpv4 opcode to string; unknown opcodes render as
        "opcode-<value>"
    """
    names = {
        0: "echo-reply",
        3: "dst-unreach",
        4: "src-quench",
        8: "echo-request",
        11: "time-exceeded",
    }
    if opcode in names:
        return names[opcode]
    return "opcode-%s" % opcode
def icmp6_opcode_to_str(opcode):
    """ map icmpv6 opcode to string; unknown opcodes render as
        "opcode-<value>"
    """
    names = {
        1: "dst-unreach",
        2: "pkt-too-big",
        3: "time-exceeded",
        128: "echo-request",
        129: "echo-reply",
        133: "router-solicit",
        134: "router-advert",
        135: "nbr-solicit",
        136: "nbr-advert",
        137: "redirect",
    }
    if opcode in names:
        return names[opcode]
    return "opcode-%s" % opcode
def tcpflags_to_str(tcpflags, mask):
    """ return tcp flags as "(name name ...)"; bits set in mask are
        ignored; empty string when no unmasked flag is set
    """
    flag_bits = [(1, "fin"), (2, "syn"), (4, "rst"),
                 (8, "psh"), (16, "ack"), (32, "urg")]
    effective = tcpflags & ((~mask) & 0xff)
    names = [name for bit, name in flag_bits if effective & bit]
    if names:
        return "(%s)" % " ".join(names)
    return ""
def offline_extract(tgz, **kwargs):
    """
    extract files in tar bundle to tmp directory. Only files matching
    provided offline_keys dict (which is also used as key in returned dict)

        tgz = path to the .tgz bundle to extract
        kwargs:
            offline_dir  = extraction target (a file path is also accepted;
                           only its directory portion is used)
            offline_keys = iterable of class names; a member whose name
                           contains "<key>." is extracted and recorded

        returns dict: {<key>: <extracted file path>}
        NOTE: exits the process (sys.exit) on any extraction failure.
        NOTE(review): tarfile.extract uses member names straight from the
        archive, so a crafted bundle containing "../" paths could write
        outside odir - bundles are assumed trusted here; confirm.
    """
    offline_files = {}
    offline_dir = kwargs.get("offline_dir", "/tmp/")
    offline_keys = kwargs.get("offline_keys", {})
    import tarfile
    # force odir to real directory (incase 'file' is provided as offline_dir)
    odir = os.path.dirname(offline_dir)
    try:
        t = tarfile.open(tgz, "r:gz")
        for m in t.getmembers():
            # check for files matching offline_keys
            for tn in offline_keys:
                if "%s." % tn in m.name:
                    offline_files[tn] = "%s/%s" % (odir, m.name)
                    t.extract(m, path=odir)
                    logging.debug("extracting %s/%s" % (odir, m.name))
                    # first matching key wins for this member
                    break
    except Exception as e:
        logging.error("Failed to extract content from offline tar file")
        import traceback
        traceback.print_exc()
        # extraction failure is fatal for offline mode
        sys.exit()
    return offline_files
def online_get_cli(cmd):
    """ execute an online command and return result (None on error)

        cmd = full shell command string (built internally by callers;
              shell=True means this must never receive untrusted input)
    """
    clist = time.time()
    try:
        logging.debug("executing command \"%s\"" % cmd)
        # check_output in 2.7 only, apic may be on 2.6
        if hasattr(subprocess, "check_output"):
            # execute command
            data = subprocess.check_output(cmd, shell=True)
        else:
            # apic may not support check_output, use communicate
            # (drop the shell redirect since the command is no longer
            # interpreted by a shell)
            cmd = re.sub("2> /dev/null", "", cmd)
            p = subprocess.Popen(cmd.split(" "), stdout=subprocess.PIPE)
            data, err = p.communicate()
        logging.debug("cli collect time: %s" % td(clist, time.time()))
        return data
    except Exception as e:
        logging.error("error executing command (%s): %s" % (cmd,e))
        return None
def get_class_data(classname, fname=None, **kwargs):
    """ perform icurl or read fname to get class data, return json

        classname = object class to query
        fname     = optional path to a saved json query result; when
                    provided the file is read instead of issuing icurl
        kwargs:
            flt       = extra query-filter string appended to the request
            page_size = objects requested per page (default 75000)
            page      = starting page number (default 0)

        returns dict with "imdata"/"totalCount" keys, or {} on any error
    """
    # options for filter and page size
    flt = kwargs.get("flt", "")
    page_size = kwargs.get("page_size", 75000)
    page = kwargs.get("page", 0)
    if len(flt)>0: flt="&%s" % flt
    # always order by dn so paging returns a stable, non-overlapping set
    if "order-by" not in flt: flt="&order-by=%s.dn%s" % (classname, flt)
    if fname is not None:
        # offline mode - load the saved query result from disk.
        # decode errors are handled before the generic handler (they were
        # previously unreachable behind 'except Exception')
        try:
            logging.debug("reading file %s" % fname)
            with open(fname, "r") as f:
                jst = time.time()
                j = json.loads(f.read())
                logging.debug("json load time: %s" % td(jst, time.time()))
                return j
        except (ValueError, TypeError) as e:
            logging.warning("failed to decode json for class %s"%classname)
            return {}
        except Exception as e:
            # bug fix: previously referenced undefined name 'c' here,
            # raising NameError instead of logging the failure
            logging.error("unable to read %s: %s" % (fname,e))
            return {}
    else:
        # walk through pages until return count is less than page_size
        results = []
        while 1:
            cmd = "icurl -s 'http://127.0.0.1:7777/api/class/"
            cmd+= "%s.json?page-size=%s&page=%s%s'" % (classname,
                page_size, page, flt)
            cmd+= " 2> /dev/null"
            icurlst = time.time()
            data = online_get_cli(cmd)
            logging.debug("icurl time: %s" % td(icurlst, time.time()))
            # failed to get data
            if data is None:
                logging.warning("failed to get data for class: %s" % classname)
                return {}
            # parse json data
            try:
                jst = time.time()
                js = json.loads(data)
                logging.debug("json load time: %s" % td(jst, time.time()))
                if "imdata" not in js or "totalCount" not in js:
                    logging.error("invalid icurl result: %s" % js)
                    return {}
                results+=js["imdata"]
                logging.debug("results count: %s/%s" % (
                    len(results),js["totalCount"]))
                # last page reached when a short page is returned or we
                # have accumulated the advertised total
                if len(js["imdata"])<page_size or \
                    len(results)>=int(js["totalCount"]):
                    logging.debug("all pages received")
                    r = {
                        "imdata": results,
                        "totalCount": len(results)
                    }
                    return r
                page+= 1
            except (ValueError, TypeError) as e:
                logging.warning("failed to decode json for class %s"%classname)
                return {}
    # some unknown error, return empty result (defensive; both branches
    # above always return)
    logging.warning("unexpected error occurred when getting class %s"%classname)
    return {}
def get_epg_info(**kwargs):
    """
    icurl for epg info/read epg file and return dictionary of epgs
        epgs[vnid][pcTag] = epg_name

    kwargs:
        exec_mode     = EXEC_ONLINE or EXEC_OFFLINE
        offline_files = dict of class-name -> file path (offline mode only)
    """
    exec_mode = kwargs.get("exec_mode", EXEC_ONLINE)
    offline_files = kwargs.get("offline_files", {})
    epgs = {}
    # concrete classes to collect info from:
    for c in EPG_CLASSES:
        j = {}
        if exec_mode == EXEC_OFFLINE:
            if c in offline_files:
                j = get_class_data(c, offline_files[c])
        else:
            j = get_class_data(c)
        pst = time.time()
        # character class for name components within a dn
        _n = "\w\d_\-\."
        # tn is always present (required)
        rx = "uni/tn-(?P<tn>[%s]+)" % _n
        # subset of following are present (order dependent)
        rx+= "(/ctx-(?P<vrf>[%s]+))?" % _n
        rx+= "(/BD-(?P<bd>[%s]+))?" % _n
        rx+= "(/out-(?P<l3out>[%s]+))?" % _n
        rx+= "(/l2out-(?P<l2out>[%s]+))?" % _n
        rx+= "(/mgmtp-(?P<mgmtp>[%s]+))?" % _n
        rx+= "(/extmgmt-(?P<extmgmt>[%s]+))?" % _n
        rx+= "(/instp-(?P<instp>[%s]+))?" % _n
        rx+= "(/instP-(?P<instP>[%s]+))?" % _n
        rx+= "(/oob-(?P<oob>[%s]+))?" % _n
        rx+= "(/inb-(?P<inb>[%s]+))?" % _n
        rx+= "(/ap-(?P<ap>[%s]+))?" % _n
        rx+= "(/epg-(?P<epg>[%s]+))?" % _n
        rx+= "(.+?/G-(?P<G>.+?)-N-.+?-C-(?P<C>[%s]+))?" % _n
        # optional named groups appended (in order) to the resolved name
        rkeys = ["vrf", "bd", "l3out", "l2out", "mgmtp", "extmgmt",
            "instp", "instP", "oob", "inb", "ap", "epg", "G", "C"]
        if "imdata" in j:
            for d in j["imdata"]:
                # NOTE: d.values()[0] relies on python2 (values() is a list)
                if type(d) is dict and "attributes" in d.values()[0]:
                    attr = d.values()[0]["attributes"]
                    # prefer epgDn over dn when present and non-empty
                    if "epgDn" in attr and len(attr["epgDn"])>0: _dn = "epgDn"
                    else: _dn = "dn"
                    scope = 0
                    if _dn in attr and "pcTag" in attr and \
                        ("scope" in attr or "scopeId" in attr):
                        try:
                            pcTag = int(attr["pcTag"])
                            if "scope" in attr: scope = int(attr["scope"])
                            elif "scopeId" in attr: scope = int(attr["scopeId"])
                            r1 = re.search(rx, attr[_dn])
                            if r1 is not None:
                                # build readable name from matched dn parts
                                n = "tn-%s" % r1.group("tn")
                                for k in rkeys:
                                    if r1.group("%s"%k) is not None:
                                        n+= "/%s-%s"% (k,r1.group("%s"%k))
                                if scope not in epgs: epgs[scope] = {}
                                # don't overwrite previous entries
                                # (will be found multiple times...)
                                if pcTag not in epgs[scope]:
                                    epgs[scope][pcTag] = n
                        except Exception as e:
                            #skip pcTag/scope that aren't integers ('any')
                            err = "skipping pcTag: [dn,pcTag,"
                            err+= "scope]=[%s,%s,%s]" % (attr[_dn],
                                attr["pcTag"], scope)
                            logging.debug(err)
                    # for vzToEPg - add vrf epg tag (may not be local to leaf)
                    if "ctxDefDn" in attr and "ctxPcTag" in attr and \
                        "ctxSeg" in attr and attr["ctxPcTag"]!="any":
                        try:
                            scope = int(attr["ctxSeg"])
                            pcTag = int(attr["ctxPcTag"])
                            r1 = re.search(rx, attr["ctxDefDn"])
                            if r1 is not None:
                                n = "tn-%s" % r1.group("tn")
                                for k in rkeys:
                                    if r1.group("%s"%k) is not None:
                                        n+= "/%s-%s"% (k,r1.group("%s"%k))
                                if scope not in epgs: epgs[scope] = {}
                                epgs[scope][pcTag] = n
                        except Exception as e:
                            #skip pcTag/scope that aren't integers ('any')
                            err = "skipping pcTag: [dn,pcTag,"
                            err+= "scope]=[%s,%s,%s]" % (attr["ctxDefDn"],
                                attr["ctxPcTag"], attr["ctxSeg"])
                            logging.debug(err)
        logging.debug("json parse time: %s" % td(pst, time.time()))
    # return results
    return epgs
def get_vrf_info(**kwargs):
    """
    icurl for vrf info/read vrf file and return dictionary of vrfs. Each
    entry has 3 different indexes. Example:
        vrfs["name::<name>"] =
        vrfs["vnid::<vnid>"] = {
            "name": "<name>",
            "vnid": <vnid>,
            "pcTag": <pcTag>
        }

    kwargs:
        exec_mode     = EXEC_ONLINE or EXEC_OFFLINE
        offline_files = dict of class-name -> file path (offline mode only)
    """
    exec_mode = kwargs.get("exec_mode", EXEC_ONLINE)
    offline_files = kwargs.get("offline_files", {})
    vrfs = {}
    # concrete classes to collect info from:
    for c in VRF_CLASSES:
        j = {}
        if exec_mode == EXEC_OFFLINE:
            if c in offline_files:
                j = get_class_data(c, offline_files[c])
        else:
            j = get_class_data(c)
        pst = 0
        if "imdata" in j:
            for d in j["imdata"]:
                # NOTE: d.values()[0] relies on python2 (values() is a list)
                if type(d) is dict and "attributes" in d.values()[0]:
                    attr = d.values()[0]["attributes"]
                    # expect name, pcTag, scope(==vnid), and
                    # resourceId(==hwscope) or (?possibly secLbl)
                    # secLbl (==hwscope)
                    v={"name": None,"vnid": None,"scope": None,"pcTag":None}
                    if "name" in attr: v["name"] = attr["name"]
                    else:
                        logging.debug("skipping l3Ctx %s (no name)"%attr)
                        continue
                    if "pcTag" in attr:
                        # "any" maps to pcTag 0
                        if attr["pcTag"] == "any": v["pcTag"] = 0
                        else: v["pcTag"] = int(attr["pcTag"])
                    else:
                        logging.debug("skipping l3Ctx %s (no pcTag)"%attr)
                        continue
                    if "scope" in attr: v["vnid"] = int(attr["scope"])
                    else:
                        logging.debug("skipping l3Ctx %s (no scope)"%attr)
                        continue
                    # add triple-indexed entry to dict
                    vrfs["name::%s"%v["name"]] = v
                    vrfs["vnid::%s"%v["vnid"]] = v
                    # NOTE(review): timer set after parsing each object, so
                    # the debug value below is near-zero (or huge when no
                    # objects parsed and pst stayed 0) - looks unintended
                    pst = time.time()
        logging.debug("json parse time: %s" % td(pst, time.time()))
    # return results
    return vrfs
def get_bd_info(**kwargs):
    """
    build mapping for bd name/vnid. Return double-mapped dict
        bds["name::<name>"] =
        bds["vnid::<vnid>"] = {
            "name": "<name>",
            "vnid": <vnid>,
            "vrf": <vrf-vnid>,
            "pcTag": <pcTag>
        }

    kwargs:
        exec_mode     = EXEC_ONLINE or EXEC_OFFLINE
        offline_files = dict of class-name -> file path (offline mode only)
    """
    exec_mode = kwargs.get("exec_mode", EXEC_ONLINE)
    offline_files = kwargs.get("offline_files", {})
    bds = {}
    # concrete classes to collect info from:
    for c in ["fvABD"]:
        j = {}
        if exec_mode == EXEC_OFFLINE:
            if c in offline_files:
                j = get_class_data(c, offline_files[c])
        else:
            j = get_class_data(c)
        pst = 0
        if "imdata" in j:
            for d in j["imdata"]:
                # NOTE: d.values()[0] relies on python2 (values() is a list)
                if type(d) is dict and "attributes" in d.values()[0]:
                    attr = d.values()[0]["attributes"]
                    # set 'name' to dn
                    v={"name": None,"vnid": None,"pcTag":None}
                    if "bdDn" in attr: v["name"] = attr["bdDn"]
                    elif "dn" in attr: v["name"] = attr["dn"]
                    else:
                        logging.debug("skipping fvABD %s (no name)"%attr)
                        continue
                    if "pcTag" in attr:
                        # "any" maps to pcTag 0
                        if attr["pcTag"] == "any": v["pcTag"] = 0
                        else: v["pcTag"] = int(attr["pcTag"])
                    else:
                        logging.debug("skipping fvABD %s (no pcTag)"%attr)
                        continue
                    # scope attribute is the parent vrf vnid
                    if "scope" in attr: v["vrf"] = int(attr["scope"])
                    else:
                        logging.debug("skipping fvABD %s (no scope)"%attr)
                        continue
                    # seg attribute is the bd's own vnid
                    if "seg" in attr:
                        v["vnid"] = int(attr["seg"])
                    else:
                        logging.debug("skip fvABD %s (no seg)"%attr)
                        continue
                    # add triple-indexed entry to dict
                    bds["name::%s"%v["name"]] = v
                    bds["vnid::%s"%v["vnid"]] = v
                    # NOTE(review): timer set after parsing each object, so
                    # the debug value below is near-zero - looks unintended
                    pst = time.time()
        logging.debug("json parse time: %s" % td(pst, time.time()))
    # return results
    return bds
def get_contract_info(**kwargs):
    """
    read actrlRsToEpgConn and vzRuleOwner to build mapping of rule to contract.
    return dict indexed by actrlRule: {
        "rule": "contract"
    }

    kwargs:
        exec_mode     = EXEC_ONLINE or EXEC_OFFLINE
        offline_files = dict of class-name -> file path (offline mode only)
    """
    exec_mode = kwargs.get("exec_mode", EXEC_ONLINE)
    offline_files = kwargs.get("offline_files", {})
    contracts = {}
    # each regex extracts the rule dn (group 'r') and, when present, the
    # owning contract/taboo dn (group 'v') from a child object's dn
    # handle rstoEpgCon
    reg1 = "(?P<r>^.+?)/rstoEpgConn-\[cdef-.*?"
    reg1+= "\[(?P<v>uni/tn-[^/]+/(oob)?brc-[^\]]+)\]"
    reg1 = re.compile(reg1)
    # handle vzRuleOwner for implicit rules
    reg2 = "(?P<r>^.+?)/own-\[.+?"
    reg2+="(\[tdef-.*?\[(?P<v>uni/tn-[^/]+/taboo-[^\]]+)\]/rstabooRFltAtt.+?)?"
    reg2+="-tag"
    reg2 = re.compile(reg2)
    # handle taboo owners
    reg3 = "(?P<r>^.+?)/trCreatedBy-\[tdef-.*?"
    reg3+= "\[(?P<v>uni/tn-[^/]+/taboo-[^\]]+)\]/rstabooRFltAtt"
    reg3 = re.compile(reg3)
    # handle vzObservableRuleOwner
    reg4 = "(?P<r>^.+?)/oown-\[cdef-.*?"
    reg4+= "\[(?P<v>uni/tn-[^/]+/(oob)?brc-[^\]]+)\]"
    reg4 = re.compile(reg4)
    # per-class regex plus optional fallback value when 'v' did not match
    search = {
        "actrlRsToEpgConn": {"reg": reg1,},
        "vzRuleOwner": {"reg": reg2, "default": "implicit"},
        "vzTrCreatedBy": {"reg": reg3 },
        "vzObservableRuleOwner": {"reg": reg4},
    }
    # concrete classes to collect info from:
    for c in CONTRACT_CLASSES:
        j = {}
        if exec_mode == EXEC_OFFLINE:
            if c in offline_files: j = get_class_data(c, offline_files[c])
        else: j = get_class_data(c)
        if "imdata" in j:
            for d in j["imdata"]:
                # NOTE: d.values()[0]/d.keys()[0] rely on python2 dicts
                if type(d) is dict and "attributes" in d.values()[0]:
                    classname = d.keys()[0]
                    attr = d.values()[0]["attributes"]
                    if "dn" in attr and classname in search:
                        # statically defined 'search' per classname with 'r'
                        # named group for the rule that it matches and either
                        # 'default' defined in the dict or 'v' group in regex
                        # for the value to apply
                        s = search[classname]
                        r1 = s["reg"].search(attr["dn"])
                        if r1 is not None:
                            # ok to have duplicates, just continue
                            if r1.group("r") in contracts: continue
                            if r1.group("v") is not None:
                                contracts[r1.group("r")] = r1.group("v")
                            elif "default" in s:
                                contracts[r1.group("r")] = s["default"]
                        else:
                            logging.warn("failed to match against %s %s" % (
                                classname, attr["dn"]))
    return contracts
# ----------------------------------------------------------------------------
# actrl Object
# ----------------------------------------------------------------------------
class ActrlNode(object):
    """ per-node container for the zoning-rule state collected from one
        switch ("0" is used when data has no node prefix in its dn)
    """
    def __init__(self, node):
        self.node = node
        # rules indexed as self.rules[prio][vnid] = [list of rules]
        self.rules = {}
        # filters indexed by filter name (list of entries per filter)
        self.filters = {}
        # rule hit stats indexed by rule dn
        self.stats = {}
        # svcredirDestGrp (pbr redirect) info indexed by rule dn
        self.redirs = {}
        # svccopyDestGrp (copy service) info indexed by rule dn
        self.copys = {}
class Actrl(object):
    """ collector/parser for concrete zoning-rule state across nodes """
    # actrl:RulePrio for sorting rules
    # this is version of code dependent, for now setting to most recent version
    # dme/model/specific/mo/switch/feature/actrl/types.xml
    RULEPRIO = {
        "class-eq-filter": 1,
        "class-eq-deny": 2,
        "class-eq-allow": 3,
        "prov-nonshared-to-cons": 4,
        "black_list": 5,
        "fabric_infra": 6,
        "fully_qual": 7,
        "system_incomplete": 8,
        "src_dst_any": 9,
        "shsrc_any_filt_perm": 10,
        "shsrc_any_any_perm": 11,
        "shsrc_any_any_deny": 12,
        "src_any_filter": 13,
        "any_dest_filter": 14,
        "src_any_any": 15,
        "any_dest_any": 16,
        # fixed: was 16 (duplicating any_dest_any) while 17 was skipped in
        # an otherwise sequential enumeration - typo in the original table
        "any_any_filter": 17,
        "grp_src_any_any_deny": 18,
        "grp_any_dest_any_deny": 19,
        "grp_any_any_any_permit": 20,
        "any_any_any": 21,
        "any_vrf_any_deny": 22,
        "default_action": 23,
        "default": 0 # actual constant DEFAULT but use .lower()
    }
    def __init__(self, args):
        """ build complete rule/filter/stat/name state from parsed cli args

            args is expected to provide: cache, offline, noNames, nodes
        """
        # check cache settings - note that state is never saved to cache but
        # for offline mode, cache directory is used during file extraction.
        # if not supplied, use defaults in offline_extract function
        self.cache_file = args.cache
        if len(self.cache_file)==0 or self.cache_file=="0":
            self.cache_file = None
        # check exec_mode from arguments
        offline_keys = ACTRL_CLASSES + VRF_CLASSES + EPG_CLASSES + \
            CONTRACT_CLASSES + GRAPH_CLASSES
        self.exec_mode = EXEC_ONLINE
        self.offline_files = {}
        self.bds = {}
        self.vrfs = {}
        self.epgs = {}
        self.unique_epgs = {}
        self.contracts = {}
        self.graphs = {}
        if args.offline:
            self.exec_mode = EXEC_OFFLINE
            self.offline_files = offline_extract(args.offline,
                offline_dir = self.cache_file, # file is ok-func works it out
                offline_keys = offline_keys
            )
        # if name resolution is enabled...
        if not args.noNames:
            # grab vrf info, two-way indexed by name::%s, vnid::%s
            self.vrfs = get_vrf_info(
                exec_mode=self.exec_mode,
                offline_files=self.offline_files
            )
            # grab bd info, two-way indexed by name::%s, vnid::%s
            self.bds = get_bd_info(
                exec_mode=self.exec_mode,
                offline_files=self.offline_files
            )
            # grab epg names, epgs[scope][pcTag] = name
            self.epgs = get_epg_info(
                exec_mode=self.exec_mode,
                offline_files=self.offline_files
            )
            # build list of shared service 'unique' epgs
            for vnid in self.epgs:
                for pcTag in self.epgs[vnid]:
                    if pcTag >= UNIQUE_PCTAG_MIN and pcTag <= UNIQUE_PCTAG_MAX:
                        self.unique_epgs[pcTag] = self.epgs[vnid][pcTag]
            # add static uniques as well
            for pcTag in STATIC_UNIQUE:
                self.unique_epgs[pcTag] = STATIC_UNIQUE[pcTag]
        # build actrlRule to contract info if enabled
        if SHOW_CONTRACT:
            self.contracts = get_contract_info(
                exec_mode=self.exec_mode,
                offline_files=self.offline_files
            )
        # rules/filters/stats/redirs are all objects with a node
        self.nodes = {}
        self.filter_nodes = args.nodes
        # build rules, filters, stats, and redirects
        self.get_rules()
        self.get_filters()
        self.get_stats()
        self.get_redirs()
        self.get_copys()
        # build graph info if enabled
        if SHOW_GRAPH: self.get_graphs()
def get_node(self, n):
""" get/create an ActrlNode from self.nodes
return None if node is not allowed by filter
"""
if n not in self.nodes:
# check if this node is filtered
if len(self.filter_nodes)==0 or n in self.filter_nodes:
self.nodes[n] = ActrlNode(n)
logging.debug("new node %s added" % n)
else: return None
return self.nodes[n]
    def get_rules(self):
        """ collect actrlRule objects and index them onto each ActrlNode as
            node.rules[prio][scopeId] = [rule dicts], where prio is the
            integer from Actrl.RULEPRIO
        """
        classname = "actrlRule"
        j = {}
        if self.exec_mode == EXEC_OFFLINE:
            if classname in self.offline_files:
                j = get_class_data(classname, self.offline_files[classname])
        else:
            j = get_class_data(classname)
        if "imdata" in j:
            logging.debug("%s count: %s" % (classname, len(j["imdata"])))
            for d in j["imdata"]:
                # NOTE: d.values()[0] relies on python2 (values() is a list)
                if type(d) is dict and "attributes" in d.values()[0]:
                    attr = d.values()[0]["attributes"]
                    # all listed attributes are required; a rule missing any
                    # of them is skipped
                    rule = {
                        "dn": None, "id": None,
                        "fltId": None, "action": None,
                        "direction": None, "operSt": None,
                        "dPcTag": None, "sPcTag": None, "scopeId": None,
                        "type": None, "prio": None,
                        "markDscp": None, "qosGrp": None,
                    }
                    skip_rule = False
                    for key in rule:
                        if key not in attr:
                            logging.warn("skipping rule, %s missing: %s" % (
                                attr, key))
                            skip_rule = True
                            break
                        rule[key] = attr[key]
                    if skip_rule: continue
                    # if contract mapping is enabled, try to add contract
                    # attribute to rule
                    if SHOW_CONTRACT:
                        rule["contract"] = self.contracts.get(rule["dn"],None)
                    else:
                        rule["contract"] = None
                    # determine node-id - not present if executed on leaf
                    r1 = node_regex.search(attr["dn"])
                    if r1 is not None: node = self.get_node(r1.group("node"))
                    else: node = self.get_node("0")
                    # node filtered out
                    if node is None: continue
                    # index rules by int priority value
                    prio = Actrl.RULEPRIO.get(rule["prio"], 0)
                    if prio not in node.rules: node.rules[prio] = {}
                    if rule["scopeId"] not in node.rules[prio]:
                        node.rules[prio][rule["scopeId"]] = []
                    # add rule to self.rules
                    node.rules[prio][rule["scopeId"]].append(rule)
    def get_filters(self):
        """ collect actrlEntry objects and index them onto each ActrlNode as
            node.filters[filter-id] = [entry dicts]; entry names of the form
            "<flt>_<ent>" are grouped under their parent filter id
        """
        classname = "actrlEntry"
        j = {}
        if self.exec_mode == EXEC_OFFLINE:
            if classname in self.offline_files:
                j = get_class_data(classname, self.offline_files[classname])
        else:
            j = get_class_data(classname)
        if "imdata" in j:
            logging.debug("%s count: %s" % (classname, len(j["imdata"])))
            for d in j["imdata"]:
                # NOTE: d.values()[0] relies on python2 (values() is a list)
                if type(d) is dict and "attributes" in d.values()[0]:
                    attr = d.values()[0]["attributes"]
                    flt = {
                        "dn": None, "name": None,
                        "applyToFrag": None, "arpOpc": None,
                        "dFromPort": None, "dToPort": None,
                        "etherT": None, "icmpv4T": None, "icmpv6T": None,
                        "prot": None, "sFromPort": None,
                        "sToPort": None, "stateful": None, "tcpRules": None,
                        "matchDscp": "unspecified",
                    }
                    # optional keys (code dependent)
                    opt_keys = ["matchDscp"]
                    skip_flt = False
                    for key in flt:
                        if key in attr:
                            flt[key] = attr[key]
                        # missing optional keys keep their preset default
                        elif key in opt_keys: pass
                        else:
                            logging.debug("skipping flt, %s missing: %s" % (
                                attr, key))
                            skip_flt = True
                            break
                    if skip_flt: continue
                    # determine node-id - not present if executed on leaf
                    r1 = node_regex.search(attr["dn"])
                    if r1 is not None: node = self.get_node(r1.group("node"))
                    else: node = self.get_node("0")
                    # node filtered out
                    if node is None: continue
                    # group "<flt>_<ent>" names under the parent filter id
                    fkey = flt["name"]
                    r1 = re.search("^(?P<flt>[0-9]+)_(?P<ent>[0-9]+)$",
                        flt["name"])
                    if r1 is not None: fkey = "%s"%r1.group("flt")
                    if fkey not in node.filters: node.filters[fkey] = []
                    # format ports to integers
                    flt["sFromPort"] = str_to_port(flt["sFromPort"])
                    flt["sToPort"] = str_to_port(flt["sToPort"])
                    flt["dFromPort"] = str_to_port(flt["dFromPort"])
                    flt["dToPort"] = str_to_port(flt["dToPort"])
                    node.filters[fkey].append(flt)
                    # 'default' filter is same as 'any' filter, add second key
                    if fkey == "any":
                        if "default" not in node.filters:
                            node.filters["default"] = []
                        node.filters["default"].append(flt)
    def get_stats(self):
        """ collect actrlRuleHit5min counters and index them onto each
            ActrlNode as node.stats[rule-dn] = stat dict; missing counters
            default to "0"
        """
        classname = "actrlRuleHit5min"
        j = {}
        if self.exec_mode == EXEC_OFFLINE:
            if classname in self.offline_files:
                j = get_class_data(classname, self.offline_files[classname])
        else:
            j = get_class_data(classname)
        if "imdata" in j:
            logging.debug("%s count: %s" % (classname, len(j["imdata"])))
            for d in j["imdata"]:
                # NOTE: d.values()[0] relies on python2 (values() is a list)
                if type(d) is dict and "attributes" in d.values()[0]:
                    attr = d.values()[0]["attributes"]
                    if "dn" not in attr:
                        logging.debug("skipping stat, dn missing: %s"%attr)
                        continue
                    stat = {
                        "dn": None,
                        "ingrPktsCum": None, "egrPktsCum": None,
                        "ingrPktsLast": None, "egrPktsLast": None,
                        # SB counters do not have direction
                        "pktsCum": None, "pktsLast": None
                    }
                    # at least one known counter must be present
                    match_count = 0
                    for key in stat:
                        if key in attr:
                            match_count+= 1
                            stat[key] = attr[key]
                        else:
                            stat[key] = "0"
                    if match_count==0:
                        logging.debug("skipping stat, %s missing attributes"%(
                            attr))
                        continue
                    # determine node-id - not present if executed on leaf
                    r1 = node_regex.search(attr["dn"])
                    if r1 is not None: node = self.get_node(r1.group("node"))
                    else: node = self.get_node("0")
                    # node filtered out
                    if node is None: continue
                    # fixup dn by removing "/CDactrlRuleHit5min" - fixed len
                    # (19 chars) so the key matches the parent rule dn
                    dn = attr["dn"][0:len(attr["dn"])-19]
                    node.stats[dn] = stat
def generic_parse(self, obj, index_key, required_keys, relax=False):
""" generic verification/parsing of object ensuring all required keys
are present
obj = dictionary to parse
must contain 'imdata' and "attributes" for each object
required_keys = list of required keys in object to extract
index_key = unique index for object in return dictionary
relax = if true set missing 'required_keys' to empty value, if false
skip the entire object
"""
if index_key not in required_keys: required_keys.append(index_key)
final_ret = {}
if "imdata" in obj:
for d in obj["imdata"]:
if type(d) is not dict or "attributes" not in d.values()[0]:
logging.debug("skipping invalid object: %s" % d)
continue
d = d.values()[0]["attributes"]
ret = {}
valid=True
for key in required_keys:
if key not in d:
logging.debug("object missing key %s: %s"%(key,d))
if relax: d[key] = ""
else:
valid=False
break
ret[key] = d[key]
if not valid: continue
final_ret[ret[index_key]] = ret
return final_ret
def get_redirs(self):
""" get/build concrete redirect info """
# get redirect groups first
j1 = j2 = j3 = j4 = {}
classname1 = "svcredirDest"
classname2 = "svcredirRsDestAtt"
classname3 = "svcredirDestGrp"
classname4 = "actrlRsToRedirDestGrp"
if self.exec_mode == EXEC_OFFLINE:
if classname1 in self.offline_files:
j1 = get_class_data(classname1, self.offline_files[classname1])
if classname2 in self.offline_files:
j2 = get_class_data(classname2, self.offline_files[classname2])
if classname3 in self.offline_files:
j3 = get_class_data(classname3, self.offline_files[classname3])
if classname4 in self.offline_files:
j4 = get_class_data(classname4, self.offline_files[classname4])
else:
j1 = get_class_data(classname1)
# if no data was found in svcredirDest, then stop
if len(j1) == 0:
logger.debug("no %s found, skipping get_redirs" % classname1)
return
j2 = get_class_data(classname2)
j3 = get_class_data(classname3)
j4 = get_class_data(classname4)
# build dict of destinations indexed by dn
dest = self.generic_parse(j1, "dn", ["dn","ip","vMac","vrf",
"vrfEncap", "bdVnid", "operSt", "operStQual"])
# try to remap bdVnid to bd name
for dn in dest:
d = dest[dn]
d["bd"] = d["bdVnid"]
r1 = re.search("vxlan-(?P<vnid>[0-9]+)", d["bdVnid"])
if r1 is not None:
key = "vnid::%s" % r1.group("vnid")
if key in self.bds: d["bd"] = self.bds[key]["name"]
# build dict of destAtt indexed by dn
destAtt = self.generic_parse(j2, "dn", ["dn", "tDn"])
# re-index destgrps based on group number with pointer to actual dest
# grps[node-id][group-id] -> list(dest objects)
grps = {}
for dn in destAtt:
# determine node-id - not present if executed on leaf
node_id = "0"
r0 = node_regex.search(dn)
if r0 is not None: node_id = r0.group("node")
if node_id not in grps: grps[node_id] = {}
r1 = re.search("destgrp-(?P<id>[0-9]+)", dn)
if r1 is None:
logging.debug("invalid dn for svcredirRsDestAtt: %s" % dn)
continue
gid = r1.group("id")
if gid not in grps[node_id]: grps[node_id][gid] = []
if destAtt[dn]["tDn"] in dest:
grps[node_id][gid].append(dest[destAtt[dn]["tDn"]])
# build dict of redirs indexed by dn
redir_grps = self.generic_parse(j3, "dn", ["dn","operSt","operStQual",
"id","ctrl"])
# for each redir, check if group is in grps to have list of redirDest
for dn in redir_grps:
# determine node-id - not present if executed on leaf
node_id = "0"
r0 = node_regex.search(dn)
if r0 is not None: node_id = r0.group("node")
if node_id not in grps: grps[node_id] = {}
redir_grps[dn]["dests"] = []
r1 = re.search("destgrp-(?P<id>[0-9]+)", dn)
if r1 is None:
logging.debug("invalid dn for svcredirDestGrp: %s" % dn)
continue
if r1.group("id") in grps[node_id]:
redir_grps[dn]["dests"] = grps[node_id][r1.group("id")]
# build dict of actrlRsToRedirDestGrp indexed by dn
tmp_actrl = self.generic_parse(j4, "dn", ["dn", "tDn"])
for dn in tmp_actrl:
# fixup dn to drop rstoRedirDestGrp so it points to actrl
d = "/".join(dn.split("/")[0:-1])
# tDn should point to known destGroup
if tmp_actrl[dn]["tDn"] not in redir_grps:
logging.debug("%s not found in svcredirDestGrp" % (
tmp_actrl[dn]["tDn"]))
continue
# determine node-id - not present if executed on leaf
r1 = node_regex.search(dn)
if r1 is not None: node = self.get_node(r1.group("node"))
else: node = self.get_node("0")
if node is None: continue