@@ -24,46 +24,46 @@ class ParseMapping:
'''
self._log = Log('Parse Mapping')
-
+
assert(isinstance(mapping_paths, (list, str))),\
"Mapping_paths must be either str or lists"
-
+
if isinstance(mapping_paths, str):
mapping_paths = [mapping_paths]
-
- self._mapping_paths = mapping_paths
+
+ self._mapping_paths = mapping_paths
self._source = source
self._target = target
self._target_collections = target_collections
self._update_mapping()
-
+
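A minimal usage sketch of the str-to-list normalization above; the keyword arguments mirror the __main__ block at the end of this file, and the second file name is hypothetical:

# A single path string is wrapped into a one-element list before
# _update_mapping() runs, so both call styles below are accepted.
# "rs1_mapping.json" is a hypothetical second mapping file.
parser = ParseMapping("rs0_mapping.json",
source="internal_name", target="mongo_name")
parser = ParseMapping(["rs0_mapping.json", "rs1_mapping.json"],
source="internal_name", target="mongo_name")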
def _update_mapping(self):
'''
Since we can have multiple mappings per table we need to add them to
- the object. I concatenated the mapping so that we don't have to adjust
+ the object. I concatenated the mappings so that we don't have to adjust
all function of the class to accept also list input. The class could
- be adjusted to accept list or even a dictornary with the key name as
- name of the mapping and value the json mapping.
- !!! WARNING !!!!
- Since the mapping are just concatenated there is right now
+ be adjusted to accept a list or even a dictionary with the key as the
+ name of the mapping and the value as the json mapping.
+ !!! WARNING !!!
+ Since the mappings are just concatenated there is right now
no way to ditinguish from the object itself which item belongs to which
mapping file.
'''
mappings = []
-
+
for mapping_path in self._mapping_paths:
try:
with open(mapping_path, "r") as f:
- mapping = json.load(f)
+ mapping = json.load(f)
mappings.append(mapping)
-
+
except Exception as e:
err = ("Could not load json schema:{1} , "
"Obtained error {0}".format(e, mapping_path))

self._log.error(err)
raise Exception(err)
-
+
if len(mappings) > 1:
concatenate_mapping = []
for mapping in mappings:
@@ -74,7 +74,7 @@ class ParseMapping:
self._mapping = concatenate_mapping
else:
self._mapping = mappings[0]
-
+
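To illustrate the concatenation the WARNING above describes, a hedged sketch assuming each mapping file holds a json list of column objects (file names and fields are invented):

# Hypothetical mapping files:
#   a.json -> [{"internal_name": "id", "type": "INTEGER"}]
#   b.json -> [{"internal_name": "ts", "type": "DATETIME"}]
parser = ParseMapping(["a.json", "b.json"],
source="internal_name", target="mongo_name")
# self._mapping is now the flat list
#   [{"internal_name": "id", ...}, {"internal_name": "ts", ...}]
# and, as the docstring warns, nothing records which entry came
# from a.json and which from b.json.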
def get_field_mapping(self) -> dict:
'''
@@ -111,43 +111,83 @@ class ParseMapping:
return self._get_fields_satistisfying_condition(key=self._target_collections,
value=collection_name)

- def _get_info(self, key: str, value=None) -> dict:
+ def _get_property_from_mapping(self, property_names: list) -> dict:
'''
+ Get the specified properties from the migration mapping json.
'''
+ assert(isinstance(property_names, list)),\
+ "Parameter 'property_names' is not a list"
+
assert(all([self._source in d for d in self._mapping])),\
- "Invalid from field"
+ "Not all objects in the mapping json contain the property tag " + self._source

result = {}
-
for column_mapping in self._mapping:
+ for property_name in property_names:
+ if property_name in column_mapping and column_mapping[property_name]:
+ result.update({column_mapping[self._source]: column_mapping[property_name]})

- if (key in column_mapping and column_mapping[key] is not None\
- and column_mapping[key] and (column_mapping[key] == value or value is None))\
- or (column_mapping[key] == value):
-
- result.update({column_mapping[self._source]: column_mapping[key]})
-
return result
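To make the helper's contract concrete, a sketch on invented data: for each column object, every requested property that is present and truthy is collected under the object's source name.

# Assuming source="internal_name" and a mapping of
#   [{"internal_name": "id", "type": "INTEGER"},
#    {"internal_name": "status", "type": "VARCHAR",
#     "value_mapping": {"0": "off", "1": "on"}}]
# _get_property_from_mapping(["type"]) returns
#   {"id": "INTEGER", "status": "VARCHAR"}
# and _get_property_from_mapping(["value_mapping"]) returns
#   {"status": {"0": "off", "1": "on"}}
# Entries whose property is missing, None, or otherwise falsy are skipped.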
def get_default_values(self) -> dict:
'''
+ Get default values from the migration mapping json. If more property names
+ are added, also add them to the unit test.
+ '''
+ standard_default_names = ["default_values"]
+
+ return self._get_property_from_mapping(standard_default_names)
+
+ def get_types(self) -> dict:
'''
- return self._get_info(key="default_values")
+ Get types from the migration mapping json. If more property names
+ are added, also add them to the unit test.
+ '''
+
+ standard_type_names = ["type"]
+
+ return self._get_property_from_mapping(standard_type_names)
+
+ def get_value_mappings(self) -> dict:
+ '''
+ Get value mappings from the migration mapping json. If more property names
+ are added, also add them to the unit test.
+ '''
+
+ standard_value_mapping_names = ["value_mapping"]
+
+ return self._get_property_from_mapping(standard_value_mapping_names)
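A usage sketch for the three wrappers above, reusing the mapping file from the __main__ block below:

parser = ParseMapping(os.path.join(".", "migration_mappings",
"unit_test_migration_mapping.json"),
source="internal_name", target="mongo_name")
default_values = parser.get_default_values()  # {source: default_values, ...}
types = parser.get_types()                    # {source: type, ...}
value_mappings = parser.get_value_mappings()  # {source: value_mapping, ...}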
def get_date_formats(self) -> dict:
'''
+ Get date formats from the migration mapping json. If more property names
+ or values are added, also add them to the unit test.
'''
- return self._get_info(key="type", value="DATETIME")
-
+ assert(all([self._source in d for d in self._mapping])),\
+ "Not all objects in the mapping json contain the property tag " + self._source
+
+ standard_property_names = ["type"]
+ standard_property_values = ["DATETIME"]
+
+ date_formats = {}
+ for column_mapping in self._mapping:
+ for property_name in standard_property_names:
+ if property_name in column_mapping and column_mapping[property_name]:
+ for value in standard_property_values:
+ if column_mapping[property_name] == value:
+ date_formats.update({column_mapping[self._source]: column_mapping[property_name]})
+
+ return date_formats
+
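A condensed, hedged equivalent of the loop above on invented data, highlighting that, unlike _get_property_from_mapping, this method also filters by value:

# Hypothetical mapping with one DATETIME and one non-DATETIME column.
mapping = [{"internal_name": "ts", "type": "DATETIME"},
{"internal_name": "id", "type": "INTEGER"}]
# Only entries whose "type" equals one of standard_property_values survive:
date_formats = {d["internal_name"]: d["type"]
for d in mapping if d.get("type") == "DATETIME"}
assert date_formats == {"ts": "DATETIME"}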
def get_internal_names(self) -> dict:
'''
'''
-
+
if all(["internal_name" in d for d in self._mapping]):
internal_names = [d["internal_name"] for d in self._mapping]
-
+
elif all(["internal_name" not in d for d in self._mapping]):
internal_names = list(range(len(self._mapping)))
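Sketch of the two branches above on invented mappings (the method's tail lies outside this hunk):

# Every object carries "internal_name" -> those names are used:
#   [{"internal_name": "id"}, {"internal_name": "ts"}] -> ["id", "ts"]
# No object carries it -> positional indices are used instead:
#   [{"mongo_name": "a"}, {"mongo_name": "b"}] -> [0, 1]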
@@ -177,10 +217,7 @@ class ParseMapping:
return mongo_names

- def get_types(self) -> dict:
- '''
- '''
- return self._get_info(key="type")
+

def get_python_types(self) -> dict:
'''
@@ -196,10 +233,7 @@ class ParseMapping:
return {k: sql_to_python_dtypes[v] for k, v in sql_types.items()}

- def get_value_mappings(self) -> dict:
- '''
- '''
- return self._get_info(key="value_mapping")
+

def get_column_numbers(self) -> list:
'''
@@ -221,7 +255,7 @@ class ParseMapping:
if __name__ == "__main__":

- mapping_path = os.path.join(".", "migration_mappings", "rs0_mapping.json")
+ mapping_path = os.path.join(".", "migration_mappings", "unit_test_migration_mapping.json")

if os.path.isfile(mapping_path):
@@ -230,14 +264,19 @@ if __name__ == "__main__":
parser = ParseMapping(mapping_path, source="internal_name",
target="mongo_name")

- internal_to_mongo_mapping = parser.get_field_mapping()
-
- original_to_internal_mapping = parser.get_field_mapping()
-
default_values = parser.get_default_values()
-
+ print(default_values)
+ date_formats = parser.get_date_formats()
+ print(date_formats)
+ mongo_names = parser.get_mongo_names()
+ print(mongo_names)
types = parser.get_types()
-
+ print(types)
column_numbers = parser.get_column_numbers()
+ print(column_numbers)
+ value_mappings = parser.get_value_mappings()
+ print(value_mappings)
+ sys.exit()
+

print("Done testing!")