@@ -353,6 +353,9 @@ class ParseJsonSchema(ParseDbSchema):
         del schema['default_values']
 
         return schema
+        assert(isinstance(schema, dict)),\
+            "Parameter 'schema' must be a dictionary type"
+
         # Need to parse schema for importing to mongo db
         # Reason:
         # We need to drop default values since MongoDB can't handle them
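The comments above spell out why default values are stripped before a MongoDB import. As a minimal, hypothetical sketch (not necessarily how `_remove_defaults` is implemented in this class), such a recursive cleanup could look like:

```python
def strip_defaults(schema: dict) -> dict:
    """Recursively drop 'default' and 'default_values' keys from a JSON schema."""
    for key in ('default', 'default_values'):
        schema.pop(key, None)          # remove the key if present, ignore otherwise
    for value in schema.values():
        if isinstance(value, dict):    # descend into nested schema objects
            strip_defaults(value)
    return schema
```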
@@ -368,29 +371,31 @@ class ParseJsonSchema(ParseDbSchema):
         with open(schema_path) as json_file:
             schema = json.load(json_file)
 
-            flags = self._analyze_schema(schema)
+            definitions_flag = self._analyze_schema(schema)
 
-            if flags['ref_flag'] == 1:
-                schema = self._dereference_schema(schema)
-
-            if flags['default_flag'] == 1:
-                schema = self._remove_defaults(schema)
+            if definitions_flag:
+                schema = self._dereference_schema(schema)
 
             return schema
 
-    def _analyze_schema(self, schema: dict,
-                        flags: dict = {'ref_flag':False, 'default_flag':False}) -> dict:
+    def _analyze_schema(self, schema: dict, definitions_flag: bool = False) -> bool:
 
         for key in schema:
-            if key == '$ref':
-                flags['ref_flag'] = True
-
+            if key == 'definitions':
+                definitions_flag = True
+                return definitions_flag
+
             if key == 'default' or key == 'default_values':
-                flags['default_flag'] = True
-
+                self._remove_defaults(schema)
+                return definitions_flag
+
             if type(schema[key]) == dict:
-                flags = self._analyze_schema(schema[key], flags)
-
-        return flags
+                definitions_flag = self._analyze_schema(schema[key], definitions_flag)
+
+        return definitions_flag
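For comparison, a self-contained sketch of a scan that only reports whether a `definitions` block exists, keeping the return type strictly boolean; the names below are illustrative and not part of `ParseJsonSchema`:

```python
def has_definitions(schema: dict) -> bool:
    """Return True if the schema or any nested object contains a 'definitions' key."""
    if 'definitions' in schema:
        return True
    return any(isinstance(value, dict) and has_definitions(value)
               for value in schema.values())

# Toy usage: only dereference when definitions are actually present.
schema = {"definitions": {}, "properties": {"a": {"type": "string"}}}
assert has_definitions(schema) is True
```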
@@ -400,22 +405,27 @@ if __name__ == "__main__":
 
     schema_path = os.path.join(".", "mongo_schema", "schema_wheelsets.json")
 
-    if os.path.isfile(schema_path):
-
-        parse_obj = ParseJsonSchema(schema_paths=schema_path)
-
-        fields = parse_obj.get_fields()
-
-        required_fileds = parse_obj.get_required_fields()
-
-        patterns = parse_obj.get_patterns()
-
-        mongo_types = parse_obj.get_mongo_types()
-
-        python_types_except_dates = parse_obj.get_python_types()
-
-        datetime_fields = parse_obj.get_datetime_fields()
-
-        allowed_values = parse_obj.get_allowed_values()
-
-        descriptions = parse_obj.get_field_descriptions()
+    parse_obj = ParseJsonSchema(schema_paths=schema_path)
+
+
+    test = parse_obj.read_schema_and_parse_for_mongodb(schema_path)
+
+#    if os.path.isfile(schema_path):
+#
+#        parse_obj = ParseJsonSchema(schema_paths=schema_path)
+#
+#        fields = parse_obj.get_fields()
+#
+#        required_fileds = parse_obj.get_required_fields()
+#
+#        patterns = parse_obj.get_patterns()
+#
+#        mongo_types = parse_obj.get_mongo_types()
+#
+#        python_types_except_dates = parse_obj.get_python_types()
+#
+#        datetime_fields = parse_obj.get_datetime_fields()
+#
+#        allowed_values = parse_obj.get_allowed_values()
+#
+#        descriptions = parse_obj.get_field_descriptions()
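Assuming `read_schema_and_parse_for_mongodb` returns a plain dict that MongoDB accepts as `$jsonSchema`, the parsed result would typically be attached to a collection as a validator. The pymongo call below is shown purely for illustration and is not part of this module:

```python
from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017")   # assumed local MongoDB instance
db = client["test_db"]                               # hypothetical database name

# Create the collection with the parsed schema as its document validator.
db.create_collection(
    "wheelsets",
    validator={"$jsonSchema": test},   # 'test' as produced in the __main__ block above
)
```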