forked from robshakir/pyangbind
-
Notifications
You must be signed in to change notification settings - Fork 10
Expand file tree
/
Copy pathpybind.py
More file actions
1406 lines (1301 loc) · 59.3 KB
/
pybind.py
File metadata and controls
1406 lines (1301 loc) · 59.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
"""
Copyright 2015, Rob Shakir (rjs@jive.com, rjs@rob.sh)
This project has been supported by:
* Jive Communications, Inc.
* BT plc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import optparse
import sys
import re
import string
import numpy as np
import decimal
import copy
import os
from bitarray import bitarray
from lib.yangtypes import safe_name, YANGBool
from pyang import plugin
from pyang import statements
# Development aid: when DEBUG is enabled a module-level pretty-printer (pp)
# is made available for dumping internal data structures (e.g., class_map).
DEBUG = True
if DEBUG:
  import pprint
  pp = pprint.PrettyPrinter(indent=2)
# YANG is quite flexible in terms of what it allows as input to a boolean
# value, this map is used to provide a mapping of these values to the python
# True and False boolean instances.
class_bool_map = {
  'false': False,
  'False': False,
  'true': True,
  'True': True,
}

class_map = {
  # this map is dynamically built upon but defines how we take
  # a YANG type and translate it into a native Python class
  # along with other attributes that are required for this mapping.
  #
  # key:                the name of the YANG type
  # native_type:        the Python class that is used to support this
  #                     YANG type natively.
  # map (optional):     a map to take input values and translate them
  #                     into valid values of the type.
  # base_type:          whether the class can be used as class(*args, **kwargs)
  #                     in Python, or whether it is a derived class (such as is
  #                     created based on a typedef, or for types that cannot be
  #                     supported natively, such as enumeration, or a string
  #                     with a restriction placed on it)
  # quote_arg (opt):    whether the argument to this class' __init__ needs to
  #                     be quoted (e.g., str("hello")) in the code that is
  #                     output.
  # pytype (opt):       A reference to the actual type that is used, this is
  #                     used where we infer types, such as for an input value
  #                     to a union since we need to actually compare the value
  #                     against the __init__ method and see whether it works.
  # parent_type (opt):  for "derived" types, then we store what the enclosed
  #                     type is such that we can create instances where
  #                     required e.g., a restricted string will have a
  #                     parent_type of a string. this can be a list if the
  #                     type is a union.
  # restriction ...:    where the type is a restricted type, then the
  # (optional)          class_map dict entry can store more information about
  #                     the type of restriction. this is generally used when
  #                     we need to re-initialise an instance of the class,
  #                     such as in the setter methods of containers.
  #
  # Other types may add their own types to this dictionary that have meaning
  # only for themselves. For example, a ReferenceType can add the path that it
  # references, and whether the require-instance keyword was set or not.
  'boolean': {"native_type": "YANGBool", "map": class_bool_map,
              "base_type": True, "quote_arg": True,
              "pytype": YANGBool},
  'binary': {"native_type": "bitarray", "base_type": True,
             "quote_arg": True, "pytype": bitarray},
  'uint8': {"native_type": "np.uint8", "base_type": True,
            "pytype": np.uint8},
  'uint16': {"native_type": "np.uint16", "base_type": True,
             "pytype": np.uint16},
  'uint32': {"native_type": "np.uint32", "base_type": True,
             "pytype": np.uint32},
  'uint64': {"native_type": "np.uint64", "base_type": True,
             "pytype": np.uint64},
  # NOTE: Python 2 only - 'unicode' is not a builtin under Python 3.
  'string': {"native_type": "unicode", "base_type": True,
             "quote_arg": True, "pytype": unicode},
  'decimal64': {"native_type": "Decimal", "base_type": True,
                "pytype": decimal.Decimal},
  # 'empty' leaves are represented as booleans (present/absent).
  'empty': {"native_type": "YANGBool", "map": class_bool_map,
            "base_type": True, "quote_arg": True,
            "pytype": YANGBool},
  'int8': {"native_type": "np.int8", "base_type": True,
           "pytype": np.int8},
  'int16': {"native_type": "np.int16", "base_type": True,
            "pytype": np.int16},
  'int32': {"native_type": "np.int32", "base_type": True,
            "pytype": np.int32},
  'int64': {"native_type": "np.int64", "base_type": True,
            "pytype": np.int64},
}

# We have a set of types which support "range" statements in RFC6020. This
# list determines types that should be allowed to have a "range" argument.
INT_RANGE_TYPES = ["uint8", "uint16", "uint32", "uint64",
                   "int8", "int16", "int32", "int64"]
# Base machinery to support operation as a plugin to pyang.
def pyang_plugin_init():
  """Register the pyangbind plugin with pyang (pyang's plugin entry point)."""
  plugin.register_plugin(BTPyClass())
class BTPyClass(plugin.PyangPlugin):
  """pyang plugin providing the 'pybind' output format.

  Hooks pyangbind into pyang: registers the output format, declares the
  command-line options that control code generation, and dispatches the
  actual generation to build_pybind().
  """

  def add_output_format(self, fmts):
    # Add the 'pybind' output format to pyang.
    self.multiple_modules = True
    fmts['pybind'] = self

  def emit(self, ctx, modules, fd):
    # When called, call the build_pyangbind function.
    build_pybind(ctx, modules, fd)

  def add_opts(self, optparser):
    # Add pyangbind specific operations to pyang. These are documented in the
    # options, but are essentially divided into three sets.
    #  * xpathhelper - How pyangbind should deal with xpath expressions. This
    #    module is documented in lib/xpathhelper and describes how support
    #    registration, updates, and retrieval of xpaths.
    #  * class output - whether a single file should be created, or whether a
    #    hierarchy of python modules should be created. The latter is
    #    preferable when one has large trees being compiled.
    #  * extensions - support for YANG extensions that pyangbind should look
    #    for, and add as a dictionary with each element.
    optlist = [
        optparse.make_option("--use-xpathhelper",
                             dest="use_xpathhelper",
                             action="store_true",
                             help="""Use the xpathhelper module to
                                     resolve leafrefs"""),
        optparse.make_option("--split-class-dir",
                             metavar="DIR",
                             dest="split_class_dir",
                             help="""Split the code output into
                                     multiple directories"""),
        optparse.make_option("--pybind-class-dir",
                             metavar="DIR",
                             dest="pybind_class_dir",
                             help="""Path in which the pyangbind
                                     'lib' directionary can be found
                                     - assumed to be the local
                                     directory if this option
                                     is not specified"""),
        optparse.make_option("--interesting-extension",
                             metavar="EXTENSION-MODULE",
                             default=[],
                             action="append",
                             type=str,
                             dest="pybind_interested_exts",
                             help="""A set of extensions that
                                     are interesting and should be
                                     stored with the class. They
                                     can be accessed through the
                                     "extension_dict()" argument.
                                     Multiple arguments can be
                                     specified."""),
        optparse.make_option("--use-extmethods",
                             dest="use_extmethods",
                             action="store_true",
                             help="""Allow a path-keyed dictionary
                                     to be used to specify methods
                                     related to a particular class"""),
    ]
    g = optparser.add_option_group("pyangbind output specific options")
    g.add_options(optlist)
# Core function to build the pyangbind output - starting with building the
# dependencies - and then working through the instantiated tree that pyang has
# already parsed.
def build_pybind(ctx, modules, fd):
  """Core pyangbind code-generation routine.

  Resolves the typedefs and identities for all relevant (sub)modules,
  then walks the data tree that pyang has already parsed for each module
  that pyang was explicitly asked to compile, emitting the generated
  Python classes to fd (or to a directory hierarchy when the
  --split-class-dir option is used).

  Args:
    ctx: the pyang Context for this compilation run.
    modules: list of pyang module statements supplied to pyang. NOTE:
      this list is extended in-place below so that imported modules are
      also traversed for definitions.
    fd: output file handle (ignored when --split-class-dir is set).

  Raises:
    AttributeError: a module has neither a prefix nor a parent module.
    SystemExit: pyang reported errors other than unused imports.
  """
  # Restrict the output of the plugin to only the modules that are supplied
  # to pyang. More modules are parsed by pyangbind to resolve typedefs and
  # identities.
  module_d = {}
  for mod in modules:
    module_d[mod.arg] = mod
  pyang_called_modules = module_d.keys()

  # Bail if there are pyang errors, since this certainly means that the
  # pyangbind output will fail - unless these are solely due to imports that
  # we provided but then unused.
  if len(ctx.errors):
    for e in ctx.errors:
      if not e[1] == "UNUSED_IMPORT":
        sys.stderr.write("FATAL: pyangbind cannot build module that pyang" +
                         " has found errors with.\n")
        sys.exit(127)

  # Build the common set of imports that all pyangbind files need.
  ctx.pybind_common_hdr = ""
  if ctx.opts.pybind_class_dir:
    # If we were asked to include a different directory for the library, then
    # the header needs to extend the python system path to be able to include
    # files from this directory.
    libdir = os.path.abspath(ctx.opts.pybind_class_dir)
    ctx.pybind_common_hdr += """import sys\n"""
    ctx.pybind_common_hdr += """sys.path.append("%s")\n""" % libdir
  ctx.pybind_common_hdr += "\n"
  ctx.pybind_common_hdr += "from operator import attrgetter\n"
  if ctx.opts.use_xpathhelper:
    ctx.pybind_common_hdr += "import lib.xpathhelper as xpathhelper\n"
  ctx.pybind_common_hdr += """from lib.yangtypes import """
  ctx.pybind_common_hdr += """RestrictedPrecisionDecimalType, """
  ctx.pybind_common_hdr += """RestrictedClassType, TypedListType\n"""
  ctx.pybind_common_hdr += """from lib.yangtypes import YANGBool, """
  ctx.pybind_common_hdr += """YANGListType, YANGDynClass, ReferenceType\n"""
  ctx.pybind_common_hdr += """from lib.base import PybindBase\n"""
  ctx.pybind_common_hdr += """from decimal import Decimal\n"""
  ctx.pybind_common_hdr += """import numpy as np\n"""
  ctx.pybind_common_hdr += """from bitarray import bitarray\n"""

  if not ctx.opts.split_class_dir:
    fd.write(ctx.pybind_common_hdr)
  else:
    ctx.pybind_split_basepath = os.path.abspath(ctx.opts.split_class_dir)
    if not os.path.exists(ctx.pybind_split_basepath):
      os.makedirs(ctx.pybind_split_basepath)

  # Determine all modules, and submodules that are needed, along with the
  # prefix that is used for it. We need to ensure that we understand all of
  # the prefixes that might be used to reference an identity or a typedef.
  all_mods = []
  for module in modules:
    local_module_prefix = module.search_one('prefix')
    if local_module_prefix is None:
      local_module_prefix = \
          module.search_one('belongs-to').search_one('prefix')
      if local_module_prefix is None:
        # BUGFIX: previously the "%s" placeholder was never filled in (the
        # two fragments were concatenated with no % operator), so the error
        # printed a literal "%s" instead of the module name.
        raise AttributeError(
            "A module (%s) must have a prefix or parent module"
            % module.arg)
      local_module_prefix = local_module_prefix.arg
    else:
      local_module_prefix = local_module_prefix.arg
    mods = [(local_module_prefix, module)]
    # 'include' statements specify the submodules of the existing module -
    # which also need to be parsed.
    for i in module.search('include'):
      subm = ctx.get_module(i.arg)
      if subm is not None:
        mods.append((local_module_prefix, subm))
    # 'import' statements specify the other modules that this module will
    # reference. 'modules' is deliberately extended here so the outer loop
    # also processes the imports of imported modules (transitive closure).
    for j in module.search('import'):
      mod = ctx.get_module(j.arg)
      if mod is not None:
        imported_module_prefix = j.search_one('prefix').arg
        mods.append((imported_module_prefix, mod))
        modules.append(mod)
    all_mods.extend(mods)

  # remove duplicates from the list (same module and prefix)
  new_all_mods = []
  for mod in all_mods:
    if not mod in new_all_mods:
      new_all_mods.append(mod)
  all_mods = new_all_mods

  # Build a list of the 'typedef' and 'identity' statements that are included
  # in the modules supplied.
  defn = {}
  for defnt in ['typedef', 'identity']:
    defn[defnt] = {}
    for m in all_mods:
      t = find_definitions(defnt, ctx, m[1], m[0])
      for k in t:
        if not k in defn[defnt]:
          defn[defnt][k] = t[k]

  # Build the identities and typedefs (these are added to the class_map which
  # is globally referenced).
  build_identities(ctx, defn['identity'])
  build_typedefs(ctx, defn['typedef'])

  # Iterate through the tree which pyang has built, solely for the modules
  # that pyang was asked to build.
  for modname in pyang_called_modules:
    module = module_d[modname]
    mods = [module]
    for i in module.search('include'):
      subm = ctx.get_module(i.arg)
      if subm is not None:
        mods.append(subm)
    for m in mods:
      # NOTE(review): this reads module.i_children (the parent module) for
      # every m, relying on pyang folding submodule children into the parent
      # module's tree - confirm m.i_children was not intended here.
      children = [ch for ch in module.i_children
                  if ch.keyword in statements.data_definition_keywords]
      get_children(ctx, fd, children, m, m)
def build_identities(ctx, defnd):
  """Build the class_map entries that correspond to YANG identities.

  An identity is modelled as a dictionary whose keys are the valid values
  for an identityref that is based upon it.

  Args:
    ctx: the pyang Context (unused here; kept for a call signature that
      is uniform with build_typedefs).
    defnd: map of identity name (both prefixed and unprefixed forms) to
      the pyang statement defining it, as built by find_definitions().

  Raises:
    TypeError: one or more identities could not have their base resolved.
  """
  # Track how many times each identity has been requeued, so that an
  # unresolvable (missing/circular) base can be detected and reported.
  unresolved_idc = {}
  for i in defnd:
    unresolved_idc[i] = 0
  unresolved_ids = defnd.keys()
  error_ids = []
  identity_d = {}

  # The order of an identity being built is important. Find those identities
  # that either have no "base" statement, or have a known base statement, and
  # queue these to be processed first.
  while len(unresolved_ids):
    ident = unresolved_ids.pop(0)
    base = defnd[ident].search_one('base')
    reprocess = False
    if base is None:
      # BUGFIX: previously this was "if base is None and ident not in
      # identity_d", so a base-less identity that was already registered
      # fell into the else branch and dereferenced base.arg on None,
      # raising AttributeError. An already-known base-less identity is
      # now simply a no-op.
      if not unicode(ident) in identity_d:
        identity_d[unicode(ident)] = {}
    else:
      # the identity has a base, so we need to check whether it
      # exists already
      if unicode(base.arg) in identity_d:
        base_id = unicode(base.arg)
        # if it did, then we can now define the value - we want to
        # define it as both the resolved value (i.e., with the prefix)
        # and the unresolved value.
        if ":" in ident:
          prefix, value = ident.split(":")
          prefix, value = unicode(prefix), unicode(value)
          if not value in identity_d[base_id]:
            identity_d[base_id][value] = {}
          if not value in identity_d:
            identity_d[value] = {}
          # check whether the base existed with the prefix that was
          # used for this value too, as long as the base_id is not
          # already resolved
          if not ":" in base_id:
            resolved_base = unicode("%s:%s" % (prefix, base_id))
            if not resolved_base in identity_d:
              reprocess = True
            else:
              identity_d[resolved_base][ident] = {}
              identity_d[resolved_base][value] = {}
        if not ident in identity_d[base_id]:
          identity_d[base_id][ident] = {}
        if not ident in identity_d:
          identity_d[ident] = {}
      else:
        reprocess = True
    if reprocess:
      # Fall-out from the loop of resolving the identity. If we've looped
      # around many times, we can't find a base for the identity, which means
      # it is invalid.
      if unresolved_idc[ident] > 1000:
        sys.stderr.write("could not find a match for %s base: %s\n" %
                         (ident, base.arg))
        error_ids.append(ident)
      else:
        unresolved_ids.append(ident)
        unresolved_idc[ident] += 1

  # Remove those identities that do not have any members. This would remove
  # identities that are solely bases, but have no other members. However, this
  # is a problem if particular modules are compiled.
  #for potential_identity in identity_d.keys():
  #  if len(identity_d[potential_identity]) == 0:
  #    del identity_d[potential_identity]

  if error_ids:
    raise TypeError("could not resolve identities %s" % error_ids)

  # Add entries to the class_map such that this identity can be referenced by
  # elements that use this identityref.
  for i in identity_d:
    id_type = {"native_type": """RestrictedClassType(base_type=unicode, """ +
                              """restriction_type="dict_key", """ +
                              """restriction_arg=%s,)""" % identity_d[i],
               "restriction_argument": identity_d[i],
               "restriction_type": "dict_key",
               "parent_type": "string",
               "base_type": False,}
    class_map[i] = id_type
def build_typedefs(ctx, defnd):
  """Build class_map entries for the typedefs specified within a model.

  Since typedefs are essentially derived from existing types, order of
  processing is important - typedefs are repeatedly requeued until every
  type they reference is known, and are then mapped into class_map.

  Args:
    ctx: the pyang Context, handed through to build_elemtype().
    defnd: map of typedef name (prefixed and unprefixed forms) to the
      pyang statement defining it, as built by find_definitions().

  Raises:
    TypeError: a typedef could not be resolved, duplicates an existing
      type name, or references an unsupported native type.
  """
  # Count how many times each typedef has been requeued, so unresolvable
  # type references can be detected and reported.
  unresolved_tc = {}
  for i in defnd:
    unresolved_tc[i] = 0
  unresolved_t = defnd.keys()
  error_ids = []
  known_types = class_map.keys()
  # Enumeration and leafref are supported, but are generated dynamically
  # rather than being entries in class_map, so add them explicitly.
  known_types.append('enumeration')
  known_types.append('leafref')
  process_typedefs_ordered = []
  while len(unresolved_t):
    t = unresolved_t.pop(0)
    base_t = defnd[t].search_one('type')
    if base_t.arg == "union":
      subtypes = [i for i in base_t.search('type')]
    elif base_t.arg == "identityref":
      subtypes = [base_t.search_one('base'),]
    else:
      subtypes = [base_t,]
    # A typedef can only be processed once all types it references are known.
    any_unknown = False
    for i in subtypes:
      if not i.arg in known_types:
        any_unknown = True
    if not any_unknown:
      process_typedefs_ordered.append((t, defnd[t]))
      known_types.append(t)
    else:
      unresolved_tc[t] += 1
      if unresolved_tc[t] > 1000:
        # Take a similar approach to the resolution of identities. If we have
        # a typedef that has a type in it that is not found after many
        # iterations then we should bail.
        error_ids.append(t)
        sys.stderr.write("could not find a match for %s type -> %s\n" %
                         (t, [i.arg for i in subtypes]))
      else:
        unresolved_t.append(t)
  if error_ids:
    raise TypeError("could not resolve typedefs %s" % error_ids)

  # Process the types that we built above.
  for i_tuple in process_typedefs_ordered:
    item = i_tuple[1]
    type_name = i_tuple[0]
    # Copy the class_map entry - this is done so that we do not alter the
    # existing instance in memory as we add to it.
    cls, elemtype = copy.deepcopy(build_elemtype(ctx, item.search_one('type')))
    known_types = class_map.keys()
    # Enumeration is a native type, but is not natively supported
    # in the class_map, and hence we append it here.
    known_types.append("enumeration")
    known_types.append("leafref")
    # Don't allow duplicate definitions of types
    if type_name in known_types:
      raise TypeError("Duplicate definition of %s" % type_name)
    default_stmt = item.search_one('default')
    # 'elemtype' is a list when the type includes a union, so we need to go
    # through and build a type definition that supports multiple types.
    if not isinstance(elemtype, list):
      # Map the original type to the new type, parsing the additional
      # arguments that may be specified, for example, a new default, a
      # pattern that must be matched, or a length (stored in the
      # restriction_argument, and restriction_type class_map variables).
      class_map[type_name] = {"base_type": False,}
      class_map[type_name]["native_type"] = elemtype["native_type"]
      if "parent_type" in elemtype:
        class_map[type_name]["parent_type"] = elemtype["parent_type"]
      else:
        yang_type = item.search_one('type').arg
        if not yang_type in known_types:
          raise TypeError("typedef specified a native type that was not " +
                          "supported")
        class_map[type_name]["parent_type"] = yang_type
      if default_stmt is not None:
        class_map[type_name]["default"] = default_stmt.arg
      if "referenced_path" in elemtype:
        class_map[type_name]["referenced_path"] = elemtype["referenced_path"]
        class_map[type_name]["class_override"] = "leafref"
      if "require_instance" in elemtype:
        class_map[type_name]["require_instance"] = elemtype["require_instance"]
      if "restriction_type" in elemtype:
        class_map[type_name]["restriction_type"] = \
            elemtype["restriction_type"]
        class_map[type_name]["restriction_argument"] = \
            elemtype["restriction_argument"]
      if "quote_arg" in elemtype:
        class_map[type_name]["quote_arg"] = elemtype["quote_arg"]
    else:
      # Handle a typedef that is a union - extended the class_map arguments
      # to be a list that is parsed by the relevant dynamic type generation
      # function.
      native_type = []
      parent_type = []
      default = False if default_stmt is None else default_stmt.arg
      for i in elemtype:
        if isinstance(i[1]["native_type"], list):
          native_type.extend(i[1]["native_type"])
        else:
          native_type.append(i[1]["native_type"])
        if i[1]["yang_type"] in known_types:
          parent_type.append(i[1]["yang_type"])
        else:
          # BUGFIX: the two message fragments previously joined with no
          # space, producing "...was notsupported...".
          msg = "typedef in a union specified a native type that was not "
          msg += "supported (%s in %s)" % (i[1]["yang_type"], item.arg)
          raise TypeError(msg)
        if "default" in i[1] and not default:
          # When multiple 'default' values are specified within a union that
          # is within a typedef, then pyangbind will choose the first one.
          q = True if "quote_arg" in i[1] else False
          default = (i[1]["default"], q)
      class_map[type_name] = {"native_type": native_type, "base_type": False,
                              "parent_type": parent_type,}
      if default:
        class_map[type_name]["default"] = default[0]
        class_map[type_name]["quote_default"] = default[1]
def find_definitions(defn, ctx, module, prefix):
  """Find the statements within a module that map to a particular type of
  statement - for instance, find typedefs or identities - and return them
  as a dictionary to the calling function.

  Args:
    defn: the statement keyword to search for (e.g., 'typedef', 'identity').
    ctx: the pyang Context used to (re)load the module.
    module: the pyang module statement to search within.
    prefix: the prefix associated with this module, used to also key each
      definition as "prefix:name".

  Returns:
    dict mapping both "prefix:name" and "name" to the matching statement.

  Raises:
    AttributeError: the module could not be retrieved from the context.
  """
  mod = ctx.get_module(module.arg)
  if mod is None:
    raise AttributeError("expected to be able to find module %s, " % \
        (module.arg) + "but could not")
  type_definitions = {}
  for i in mod.search(defn):
    if i.arg in type_definitions:
      # BUGFIX: the warning previously had no trailing newline, so it ran
      # into whatever was written to stderr next.
      sys.stderr.write("WARNING: duplicate definition of %s\n" % i.arg)
    else:
      # Each definition is keyed both with and without its prefix so that
      # references in either form resolve to the same statement.
      type_definitions["%s:%s" % (prefix, i.arg)] = i
      type_definitions[i.arg] = i
  return type_definitions
def get_children(ctx, fd, i_children, module, parent, path=str(), \
parent_cfg=True, choice=False):
# Iterative function that is called for all elements that have childen
# data nodes in the tree. This function resolves those nodes into the
# relevant leaf, or container/list configuration and outputs the python
# code that corresponds to it to the relevant file. parent_cfg is used to
# ensure that where a parent container was set to config false, this is
# inherited by all elements below it; and choice is used to store whether
# these leaves are within a choice or not.
used_types,elements = [],[]
choices = False
# When pyangbind was asked to split classes, then we need to create the
# relevant directories for the modules to be created into. In this case
# even though fd might be a valid file handle, we ignore it.
if ctx.opts.split_class_dir:
if path == "":
fpath = ctx.pybind_split_basepath + "/__init__.py"
else:
pparts = path.split("/")
npath = "/"
for pp in pparts:
npath += safe_name(pp) + "/"
bpath = ctx.pybind_split_basepath + npath
if not os.path.exists(bpath):
os.makedirs(bpath)
fpath = bpath + "/__init__.py"
if not os.path.exists(fpath):
try:
nfd = open(fpath, 'w')
except IOError, m:
raise IOError, "could not open pyangbind output file (%s)" % m
nfd.write(ctx.pybind_common_hdr)
else:
try:
nfd = open(fpath, 'a')
except IOError, w:
raise IOError, "could not open pyangbind output file (%s)" % m
else:
# If we weren't asked to split the files, then just use the file handle
# provided.
nfd = fd
if parent_cfg:
# The first time we find a container that has config false set on it
# then we need to hand this down the tree - we don't need to look if
# parent_cfg has already been set to False as we need to inherit.
parent_config = parent.search_one('config')
if parent_config is not None:
parent_config = parent_config.arg
if parent_config.upper() == "FALSE":
# this container is config false
parent_cfg = False
# When we are asked to split the classes into modules, then we need to find
# all elements that have their own class within this container, and make sure
# that they are imported. Additionally, we need to find the elements that are
# within a case, and ensure that these are built with the corresponding
# choice specified.
if ctx.opts.split_class_dir:
import_req = []
for ch in i_children:
if ch.keyword == "choice":
for choice_ch in ch.i_children:
# these are case statements
for case_ch in choice_ch.i_children:
elements += get_element(ctx, fd, case_ch, module, parent, \
path+"/"+ch.arg, parent_cfg=parent_cfg, \
choice=(ch.arg,choice_ch.arg))
else:
elements += get_element(ctx, fd, ch, module, parent, path+"/"+ch.arg,\
parent_cfg=parent_cfg, choice=choice)
if ctx.opts.split_class_dir:
if hasattr(ch, "i_children") and len(ch.i_children):
import_req.append(ch.arg)
# Write out the import statements if needed.
if ctx.opts.split_class_dir:
if len(import_req):
for im in import_req:
nfd.write("""import %s\n""" % safe_name(im))
# 'container', 'module', 'list' and 'submodule' all have their own classes
# generated.
if parent.keyword in ["container", "module", "list", "submodule"]:
if ctx.opts.split_class_dir:
nfd.write("class %s(PybindBase):\n" % safe_name(parent.arg))
else:
if not path == "":
nfd.write("class yc_%s_%s_%s(PybindBase):\n" % (safe_name(parent.arg), \
safe_name(module.arg), safe_name(path.replace("/", "_"))))
else:
nfd.write("class %s(PybindBase):\n" % safe_name(parent.arg))
# If the container is actually a list, then determine what the key value
# is and store this such that we can give a hint.
keyval = False
if parent.keyword == "list":
keyval = parent.search_one('key').arg if parent.search_one('key') \
is not None else False
if keyval and " " in keyval:
keyval = keyval.split(" ")
else:
keyval = [keyval,]
# Auto-generate a docstring based on the description that is provided in
# the YANG module. This aims to provide readability to someone perusing the
# code that is generated.
parent_descr = parent.search_one('description')
if parent_descr is not None:
parent_descr = "\n\n YANG Description: %s" % \
parent_descr.arg.decode('utf8').encode('ascii', 'ignore')
else:
parent_descr = ""
# Add more helper text.
nfd.write(""" \"\"\"
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module %s - based on the path %s. Each member element of
the container is represented as a class variable - with a specific
YANG type.%s
\"\"\"\n""" % (module.arg, (path if not path == "" else "/%s" % parent.arg), \
parent_descr))
else:
raise TypeError("unhandled keyword with children %s" % parent.keyword)
elements_str = ""
if len(elements) == 0:
nfd.write(" pass\n")
else:
# We want to prevent a user from creating new attributes on a class that
# are not allowed within the data model - this uses the __slots__ magic
# variable of the class to restrict anyone from adding to these classes.
# Doing so gives an AttributeError when a user tries to specify something
# that was not in the model.
elements_str = "_pyangbind_elements = {"
slots_str = " __slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', "
for i in elements:
slots_str += "'__%s'," % i["name"]
elements_str += "'%s': %s, " % (i["name"], i["name"])
slots_str += ")\n"
elements_str += "}\n"
nfd.write(slots_str + "\n")
# Store the real name of the element - since we often get values that are
# not allowed in python as identifiers, but we need the real-name when
# creating instance documents (e.g., peer-group is not valid due to '-').
nfd.write(" _yang_name = '%s'\n" % (parent.arg))
choices = {}
choice_attrs = []
classes = {}
for i in elements:
# Loop through the elements and build a string that corresponds to the
# class that is going to be created. In all cases (thus far) this uses
# the YANGDynClass helper function to generate a dynamic type. This
# can extend the base type that is provided, and does this to give us
# some attributes that base classes such as int(), or str() don't have -
# but YANG needs (such as a default value, the original YANG name, any
# extension that were provided with the leaf, etc.).
class_str = {}
if "default" in i and not i["default"] is None:
default_arg = repr(i["default"]) if i["quote_arg"] else "%s" \
% i["default"]
if i["class"] == "leaf-list":
# Map a leaf-list to the type specified in the class map. This is a
# TypedList (see lib.yangtypes) with a particular set of types allowed.
class_str["name"] = "__%s" % (i["name"])
class_str["type"] = "YANGDynClass"
class_str["arg"] = "base="
if isinstance(i["type"]["native_type"][1], list):
allowed_type = "["
for subtype in i["type"]["native_type"][1]:
allowed_type += "%s," % subtype
allowed_type += "]"
else:
allowed_type = "%s" % (i["type"]["native_type"][1])
class_str["arg"] += "%s(allowed_type=%s)" % \
(i["type"]["native_type"][0],allowed_type)
if "default" in i and not i["default"] is None:
class_str["arg"] += ", default=%s(%s)" % (i["defaulttype"], \
default_arg)
elif i["class"] == "list":
# Map a list to YANGList class - this is dynamically derived by the
# YANGListType function to have the relevant characteristics, such as
# whether it is ordered by the user.
class_str["name"] = "__%s" % (i["name"])
class_str["type"] = "YANGDynClass"
class_str["arg"] = "base=YANGListType("
class_str["arg"] += "%s,%s" % ("\"%s\"" % i["key"] if i["key"] \
else False, i["type"])
class_str["arg"] += ", yang_name=\"%s\", parent=self" % (i["yang_name"])
class_str["arg"] += ", is_container='list', user_ordered=%s" \
% i["user_ordered"]
class_str["arg"] += ", path_helper=self._path_helper"
if i["choice"]:
class_str["arg"] += ", choice=%s" % repr(choice)
class_str["arg"] += ")"
elif i["class"] == "union" or i["class"] == "leaf-union":
# A special mapped type where there is a union that just includes leaves
# this is mapped to a particular Union type, and valid types within it
# provided. The dynamically generated class will determine whether the
# input can be mapped to the types included in the union.
class_str["name"] = "__%s" % (i["name"])
class_str["type"] = "YANGDynClass"
class_str["arg"] = "base=["
for u in i["type"][1]:
if isinstance(u[1]["native_type"], list):
for su_native_type in u[1]["native_type"]:
class_str["arg"] += "%s," % su_native_type
else:
class_str["arg"] += "%s," % u[1]["native_type"]
class_str["arg"] += "]"
if "default" in i and not i["default"] is None:
class_str["arg"] += ", default=%s(%s)" % (i["defaulttype"], \
default_arg)
elif i["class"] == "leafref":
# A leafref, pyangbind uses the special ReferenceType which performs a
# lookup against the path_helper class provided.
class_str["name"] = "__%s" % (i["name"])
class_str["type"] = "YANGDynClass"
class_str["arg"] = "base=%s" % i["type"]
class_str["arg"] += "(referenced_path='%s'" % i["referenced_path"]
class_str["arg"] += ", caller=self._path() + ['%s'], " \
% (i["yang_name"])
class_str["arg"] += "path_helper=self._path_helper, "
class_str["arg"] += "require_instance=%s)" % (i["require_instance"])
elif i["class"] == "leafref-list":
# Deal with the special case of a list of leafrefs, since the
# ReferenceType has different arguments that need to be provided to the
# class to properly initialise.
class_str["name"] = "__%s"% (i["name"])
class_str["type"] = "YANGDynClass"
class_str["arg"] = "base=%s" % i["type"]["native_type"][0]
class_str["arg"] += "(allowed_type=%s(referenced_path='%s'," \
% (i["type"]["native_type"][1]["native_type"], \
i["type"]["native_type"][1]["referenced_path"])
class_str["arg"] += "caller=self._path() + ['%s'], " % i["yang_name"]
class_str["arg"] += "path_helper=self._path_helper, "
class_str["arg"] += "require_instance=%s))" % \
(i["type"]["native_type"][1]["require_instance"])
else:
# Generically handle all other classes with the 'standard' mappings.
class_str["name"] = "__%s" % (i["name"])
class_str["type"] = "YANGDynClass"
if isinstance(i["type"],list):
class_str["arg"] = "base=["
for u in i["type"]:
class_str["arg"] += "%s," % u
class_str["arg"] += "]"
else:
class_str["arg"] = "base=%s" % i["type"]
if "default" in i and not i["default"] is None:
class_str["arg"] += ", default=%s(%s)" % (i["defaulttype"], \
default_arg)
if i["class"] == "container":
class_str["arg"] += ", is_container='container'"
elif i["class"] == "list":
class_str["arg"] += ", is_container='list'"
elif i["class"] == "leaf-list":
class_str["arg"] += ", is_leaf=False"
else:
class_str["arg"] += ", is_leaf=True"
if class_str["arg"]:
class_str["arg"] += ", yang_name=\"%s\"" % i["yang_name"]
class_str["arg"] += ", parent=self"
if i["choice"]:
class_str["arg"] += ", choice=%s" % repr(i["choice"])
choice_attrs.append(i["name"])
if not i["choice"][0] in choices:
choices[i["choice"][0]] = {}
if not i["choice"][1] in choices[i["choice"][0]]:
choices[i["choice"][0]][i["choice"][1]] = []
choices[i["choice"][0]][i["choice"][1]].append(i["name"])
class_str["arg"] += ", path_helper=self._path_helper"
class_str["arg"] += ", extmethods=self._extmethods"
if "extensions" in i:
class_str["arg"] += ", extensions=%s" % i["extensions"]
if keyval and i["yang_name"] in keyval:
class_str["arg"] += ", is_keyval=True"
classes[i["name"]] = class_str
  # TODO: get and set methods currently have errors that are reported that
  # are a bit ugly. The intention here is to act like an immutable type - such
  # that new class instances are created each time that the value is set.
  # Generic class __init__, set up the path_helper if asked to.
  nfd.write("""
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):\n""")
  # Generated __init__ resolves the path helper in priority order: an
  # explicit "path_helper" kwarg, then inheritance from the parent object,
  # finally False (disabled).
  if ctx.opts.use_xpathhelper:
    nfd.write("""
    helper = kwargs.pop("path_helper", None)
    if helper is False:
      self._path_helper = False
    elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
      self._path_helper = helper
    elif hasattr(self, "_parent"):
      helper = getattr(self._parent, "_path_helper", False)
      self._path_helper = helper
    else:
      self._path_helper = False\n""")
  else:
    nfd.write("""
    self._path_helper = False\n""")
  # Same resolution order for extmethods: kwarg, then parent, then disabled.
  if ctx.opts.use_extmethods:
    nfd.write("""
    extmethods = kwargs.pop("extmethods", None)
    if extmethods is False:
      self._extmethods = False
    elif extmethods is not None and isinstance(extmethods, dict):
      self._extmethods = extmethods
    elif hasattr(self, "_parent"):
      extmethods = getattr(self._parent, "_extmethods", None)
      self._extmethods = extmethods
    else:
      self._extmethods = False\n""")
  else:
    nfd.write("""
    self._extmethods = False\n""")
  # Write out the classes that are stored locally as self.__foo where
  # foo is the safe YANG name.
  for c in classes:
    nfd.write("    self.%s = %s(%s)\n" % (classes[c]["name"], \
        classes[c]["type"], classes[c]["arg"]))
  # Don't accept arguments to a container/list/submodule class
  # (one positional argument is allowed: an object carrying all of this
  # class's elements, which is copied attribute-by-attribute).
  nfd.write("""
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        setmethod = getattr(self, "_set_%s" % e)
        setmethod(getattr(args[0], e))\n""")
  # A generic method to provide a path() method on each container, that gives
  # a path in the form of a list that describes the nodes in the hierarchy.
  nfd.write("""
  def _path(self):
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return %s\n""" % path.split("/")[1:])
  node = {}
  # For each element, write out a getter and setter method - with the doc
  # string of the element within the model.
  for i in elements:
    c_str = classes[i["name"]]
    description_str = ""
    if i["description"]:
      # decode/encode round-trip strips non-ASCII characters so the YANG
      # description can be embedded safely in the generated docstring.
      description_str = "\n\n    YANG Description: %s" \
          % i["description"].decode('utf-8').encode('ascii', 'ignore')
    nfd.write("""
  def _get_%s(self):
    \"\"\"
    Getter method for %s, mapped from YANG variable %s (%s)%s
    \"\"\"
    return self.__%s
      """ % (i["name"], i["name"], i["path"], i["origtype"],
          description_str, i["name"]))
    nfd.write("""
  def _set_%s(self,v):
    \"\"\"
    Setter method for %s, mapped from YANG variable %s (%s)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_%s is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_%s() directly.%s
    \"\"\"""" % (i["name"], i["name"], i["path"], \
        i["origtype"], i["name"], i["name"], description_str,))
    # The generated setter rebuilds the stored object from the supplied
    # value, so the attribute acts like an immutable type; incompatible
    # values surface as a ValueError naming the expected constructor args.
    nfd.write("""
    try:
      t = %s(v,%s)""" % (c_str["type"], c_str["arg"]))
    nfd.write("""
    except (TypeError, ValueError):
      raise ValueError(\"\"\"%s must be of a type compatible with %s\"\"\")
    self.__%s = t\n""" % (i["name"], c_str["arg"], i["name"]))
    nfd.write("    if hasattr(self, '_set'):\n")
    nfd.write("      self._set()\n")
    # When we want to return a value to its default, the unset method can
    # be used. Generally, this is done in a choice where one branch needs to
    # be set to the default, but may be used wherever re-initialization of
    # the object is required.
    nfd.write("""
  def _unset_%s(self):
    self.__%s = %s(%s)\n\n""" % (i["name"], i["name"], c_str["type"], c_str["arg"],))
  # When an element is read-only, write out the _set and _get methods, but
  # we don't actually make the property object accessible. This ensures that
  # where backends are populating the model, then they can do so via the
  # _set_X method - but a 'normal' user can't just do container.X = 10.
  for i in elements:
    # An element is writeable only when it is config true, its parent is
    # config true, and it is not a list key.
    rw = True
    if not i["config"]:
      rw = False
    elif not parent_cfg:
      rw = False
    elif keyval and i["yang_name"] in keyval:
      rw = False
    if not rw:
      nfd.write("""  %s = property(_get_%s)\n""" % (i["name"], i["name"]))
    else:
      nfd.write("""  %s = property(_get_%s, _set_%s)\n""" % \
          (i["name"], i["name"], i["name"]))
  nfd.write("\n")
  # Store a list of the choices that are included within this module such that
  # we can enforce each branch.
  if choices:
    nfd.write("  __choices__ = %s" % repr(choices))
  nfd.write("""\n  %s\n""" % elements_str)
  nfd.write("\n")
  # In split-class-dir mode each generated class lives in its own file, so
  # the per-class file handle is closed here.
  if ctx.opts.split_class_dir:
    nfd.close()
  return None
def build_elemtype(ctx, et, prefix=False):
  # Build a dictionary which defines the type for the element. This is used
  # both in the case that a typedef needs to be built, as well as on per-list
  # basis.
  cls = None
  # Pull out any restriction substatements of the type statement. Each is
  # the pyang statement object when present, or False when absent (note:
  # each search_one() is evaluated twice - once for the check, once for
  # the value).
  pattern_stmt = et.search_one('pattern') if not et.search_one('pattern') \
      is None else False
  range_stmt = et.search_one('range') if not et.search_one('range') \
      is None else False
  length_stmt = et.search_one('length') if not et.search_one('length') \
      is None else False
  # Determine whether there are any restrictions that are placed on this leaf,
  # and build a dictionary of the different restrictions to be placed on the
  # type.
  restrictions = {}
  if pattern_stmt:
    restrictions['pattern'] = pattern_stmt.arg
  if length_stmt:
    restrictions['length'] = length_stmt.arg
  if range_stmt:
    restrictions['range'] = range_stmt.arg
  # Build RestrictedClassTypes based on the compiled dictionary and the