def load_json_as_dict(file_name):
    with open(file_name, "r") as f:
        return json.load(f)
-def special_flags_processing(json_dict, args={}, *, base_folder=None, base_dict={}, object_route=""):
+def extends(json_dict, *, base_folder=None, base_dict={}, object_route=""):
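+    # Resolves the "extends" special field: pulls attributes either from another
+    # object in the current document (dotted route looked up in base_dict) or from
+    # an external JSON file, applies the optional includes/excludes filters, and
+    # merges the result into json_dict.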
+    extends_from = json_dict[SPECIAL_FIELD_FLAG + "extends"][SPECIAL_FIELD_FLAG + "from"].split(".")
+    attr_build = {}
+    if len(extends_from) == 1:
+        new_route = ".".join(object_route.split(".") + [extends_from[0]])
+        if extends_from[0] in base_dict:
+            attr_build = special_flags_processing(base_dict[extends_from[0]], base_dict=base_dict, object_route=new_route)
+        else:
+            CustomLogging.error(f"Attribute {extends_from[0]} not found \n{base_dict}")
+    else:
+        attr_file_name = search_json(json_dict[SPECIAL_FIELD_FLAG + "extends"][SPECIAL_FIELD_FLAG + "from"], base_folder=base_folder)
+        if not attr_file_name:
+            CustomLogging.error(f"{json_dict[SPECIAL_FIELD_FLAG + 'extends'][SPECIAL_FIELD_FLAG + 'from']} path does not exists in")
+        attr_json = load_json_as_dict(attr_file_name)
+        attr_build = json_global_compile(
+            attr_json,
+            args=json_dict[SPECIAL_FIELD_FLAG + "extends"],
+            base_folder=os.path.dirname(attr_file_name),
+            object_route=json_dict[SPECIAL_FIELD_FLAG + "extends"][SPECIAL_FIELD_FLAG + "from"]
+        )
+    is_excluding = SPECIAL_FIELD_FLAG + "excludes" in json_dict[SPECIAL_FIELD_FLAG + "extends"]
+    is_including = SPECIAL_FIELD_FLAG + "includes" in json_dict[SPECIAL_FIELD_FLAG + "extends"]
+    if is_including and is_excluding:
+        CustomLogging.error("can not use excludes and includes in a same block")
+    if is_including:
+        new_attr_build = {}
+        for include in json_dict[SPECIAL_FIELD_FLAG + "extends"][SPECIAL_FIELD_FLAG + "includes"]:
+            if include not in attr_build:
+                CustomLogging.error(f"{object_route} include error: attribute {include} not in {json_dict[SPECIAL_FIELD_FLAG + 'extends'][SPECIAL_FIELD_FLAG + 'from']}")
+            new_attr_build[include] = attr_build[include]
+        attr_build = new_attr_build
+    if is_excluding:
+        for exclude in json_dict[SPECIAL_FIELD_FLAG + "extends"][SPECIAL_FIELD_FLAG + "excludes"]:
+            if exclude in attr_build:
+                attr_build.pop(exclude)
+            else:
+                CustomLogging.warning(f"exclude error: attribute {exclude} not in {json_dict[SPECIAL_FIELD_FLAG + 'extends'][SPECIAL_FIELD_FLAG + 'from']}")
+    json_dict.update(attr_build)
+    json_dict.pop(SPECIAL_FIELD_FLAG + "extends")
+    return json_dict
+def cosntruct_replace(main_object, arg_replace, value):
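+    # Recursively substitutes arg_replace with value in attribute names and in
+    # string, dict, and list values of the given object.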
+    if type(main_object) == str:
+        return main_object.replace(arg_replace, value)
+    response_json = {}
+    for attribute in main_object:
+        attribute_new_name = attribute.replace(arg_replace, value)
+        attribute_new_value = main_object[attribute]
+        if type(attribute_new_value) == dict:
+            attribute_new_value = cosntruct_replace(attribute_new_value, arg_replace, value)
+        elif type(attribute_new_value) == list:
+            attribute_new_value = [cosntruct_replace(element, arg_replace, value) for element in attribute_new_value]
+        response_json[attribute_new_name] = attribute_new_value
+    return response_json
+def cosntructor(json_dict, *, args={}, object_route=""):
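+    # Drops the "constructor" special field and returns a deep copy of the object
+    # with each entry of args substituted via cosntruct_replace.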
+    json_dict.pop(SPECIAL_FIELD_FLAG + "constructor")
+    response_json = copy.deepcopy(json_dict)
+    args_to_check = copy.deepcopy(args)
+    args_to_check.pop(SPECIAL_FIELD_FLAG + "from")
+    if SPECIAL_FIELD_FLAG + "excludes" in args_to_check:
+        args_to_check.pop(SPECIAL_FIELD_FLAG + "excludes")
+    if SPECIAL_FIELD_FLAG + "includes" in args_to_check:
+        args_to_check.pop(SPECIAL_FIELD_FLAG + "includes")
+    for arg in args:
+        new_value = cosntruct_replace(response_json, SPECIAL_FIELD_FLAG + arg, args[arg])
+        response_json = new_value
+    return response_json
+def special_flags_processing(json_dict, *, args={}, base_folder=None, base_dict={}, object_route=""):
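+    # Entry point for the special fields: runs the constructor first (the
+    # substituted result becomes the new base_dict), then extends, then recurses
+    # into nested dict attributes.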
    if SPECIAL_FIELD_FLAG + "constructor" in json_dict:
-        json_dict.pop(SPECIAL_FIELD_FLAG + "constructor")
+        json_dict = cosntructor(json_dict, args=args)
+        base_dict = copy.deepcopy(json_dict)
    if SPECIAL_FIELD_FLAG + "extends" in json_dict:
-        extends_from = json_dict[SPECIAL_FIELD_FLAG + "extends"]["from"].split(".")
-        attr_build = {}
-        if len(extends_from) == 1:
-            if extends_from[0] in base_dict:
-                attr_build = special_flags_processing(base_dict[extends_from[0]], base_dict=base_dict, object_route=object_route + "." + extends_from[0])
-            else:
-                CustomLogging.error(f"Attribute not found {object_route}.{extends_from[0]} \n{base_dict}")
-        else:
-            attr_file_name = search_json(json_dict[SPECIAL_FIELD_FLAG + "extends"]["from"], base_folder=base_folder)
-            if not attr_file_name:
-                CustomLogging.error(f"{json_dict[SPECIAL_FIELD_FLAG + 'extends']['from']} path does not exists in")
-            attr_json = load_json_as_dict(attr_file_name)
-            attr_build = json_global_compile(attr_json, base_folder=os.path.dirname(attr_file_name), object_route=json_dict[SPECIAL_FIELD_FLAG + "extends"]["from"])
-        is_excluding = SPECIAL_FIELD_FLAG + "excludes" in json_dict[SPECIAL_FIELD_FLAG + "extends"]
-        is_including = SPECIAL_FIELD_FLAG + "includes" in json_dict[SPECIAL_FIELD_FLAG + "extends"]
-        if is_including and is_excluding:
-            CustomLogging.error("can not use excludes and includes in a same block")
-        if is_including:
-            new_attr_build = {}
-            for include in json_dict[SPECIAL_FIELD_FLAG + "extends"][SPECIAL_FIELD_FLAG + "includes"]:
-                if include not in attr_build:
-                    CustomLogging.error(f"include error: attribute {include} not in {json_dict[SPECIAL_FIELD_FLAG + 'extends']['from']}")
-                new_attr_build[include] = attr_build[include]
-            attr_build = new_attr_build
-        if is_excluding:
-            for exclude in json_dict[SPECIAL_FIELD_FLAG + "extends"][SPECIAL_FIELD_FLAG + "excludes"]:
-                if exclude in attr_build:
-                    attr_build.pop(exclude)
-                else:
-                    CustomLogging.warning(f"exclude error: attribute {exclude} not in {json_dict[SPECIAL_FIELD_FLAG + 'extends']['from']}")
-        json_dict.update(attr_build)
-        json_dict.pop(SPECIAL_FIELD_FLAG + "extends")
+        json_dict = extends(json_dict, base_folder=base_folder, base_dict=base_dict, object_route=object_route)
    for attribute in json_dict:
        if type(json_dict[attribute]) == dict:
-            json_dict[attribute] = special_flags_processing(json_dict[attribute], args, base_folder=base_folder, base_dict=base_dict, object_route=f"{object_route}.{attribute}")
+            json_dict[attribute] = special_flags_processing(json_dict[attribute], args=args, base_folder=base_folder, base_dict=base_dict, object_route=f"{object_route}.{attribute}")
    return copy.deepcopy(json_dict)

-def json_global_compile(json_dict, args={}, *, base_folder=None, base_dict={}, object_route=""):
-    data = special_flags_processing(json_dict, args, base_folder=base_folder, base_dict=json_dict, object_route=object_route)
+def json_global_compile(json_dict, *, args={}, base_folder=None, base_dict={}, object_route=""):
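+    # Compiles a full JSON document; the document itself serves as base_dict for
+    # in-document "extends" lookups.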
+    data = special_flags_processing(json_dict, args=args, base_folder=base_folder, base_dict=json_dict, object_route=object_route)
    return data
class Compiler:
    blueprint: dict = {}
@@ -68,18 +104,20 @@ def __init__(self, main_file) -> None:
    def compile_models(self):
        if MODELS_FIELD not in self.blueprint and EXTENDS_FIELD not in self.blueprint:
            CustomLogging.error("models is not defined")
-        build = json_global_compile(self.blueprint)
+        build = json_global_compile(self.blueprint, base_folder=self.main_folder)
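+        # Pass the blueprint's folder so file references inside "extends" blocks
+        # resolve relative to the main file.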
        for model in build["models"].copy():
            model_file_name = self.main_file
-            if type(model) == str:
+            if type(build["models"][model]) == str:
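+            # A model entry can be a path to a JSON file or an inline object.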
                model_file_name = search_json(
                    build["models"][model], base_folder=self.main_folder)
                if not model_file_name:
                    CustomLogging.error(build["models"][model], "path does not exists in")
                    continue
                model_json = load_json_as_dict(model_file_name)
-            elif type(model) == dict:
-                model_json = model
+            elif type(build["models"][model]) == dict:
+                model_json = build["models"][model]
+            else:
+                CustomLogging.error(f"invalid model {model}")
            model_build = json_global_compile(model_json, base_folder=os.path.dirname(model_file_name), object_route=model)
            build["models"][model] = model_build
        pp = pprint.PrettyPrinter(indent=2)