Skip to content

Commit

Permalink
Fix metadata_reader
Browse files Browse the repository at this point in the history
  • Loading branch information
nicolasblumenroehr committed Sep 21, 2023
1 parent a4dc838 commit d5a7929
Show file tree
Hide file tree
Showing 4 changed files with 18 additions and 17 deletions.
Binary file modified .DS_Store
Binary file not shown.
29 changes: 15 additions & 14 deletions jammato/attribute_inserter.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import logging
from .attribute_mapper import Attribute_Mapper


class Attribute_Inserter():

def __init__(self, schema_skeleton: dict, key_list: list, map: object) -> None:
Expand Down Expand Up @@ -35,9 +36,9 @@ def fill_json_object(self, json_object: dict, key_list: list, attributes_object:
for key in key_list:
if (isinstance(json_object[key], str)) or (isinstance(json_object[key], tuple)):
if key in attributes_object.keys():
new_dict[key] = self.get_json_type(json_object[key], attributes_object[key])
new_dict[key] = self.get_json_type(json_object[key], attributes_object[key], key)
else:
new_dict[key] = self.get_json_type(json_object[key], json_object[key])
new_dict[key] = self.get_json_type(json_object[key], json_object[key], key)
elif isinstance(json_object[key], dict):
if key in attributes_object:
if isinstance(attributes_object[key], Attribute_Mapper):
Expand Down Expand Up @@ -75,7 +76,7 @@ def fill_json_array(self, json_object: dict, json_object_property: str, json_arr
Returns:
list: The list that represents the filled json array.
"""
if isinstance(attributes, list) == False:
if not isinstance(attributes, list):
if json_object_property in attributes.__dict__.keys():
attributes = attributes.__dict__[json_object_property]
try:
Expand All @@ -89,7 +90,7 @@ def fill_json_array(self, json_object: dict, json_object_property: str, json_arr
for list_item, list_index in zip(json_array, range(0, len(json_array))):
if (isinstance(list_item, str)) or (isinstance(list_item, tuple)):
try:
new_list.append(self.get_json_type(list_item, attributes[list_index]))
new_list.append(self.get_json_type(list_item, attributes[list_index], None))
except TypeError as e:
logging.warning(e)
pass
Expand All @@ -106,7 +107,7 @@ def fill_json_array(self, json_object: dict, json_object_property: str, json_arr
pass
return new_list

def get_json_type(self, data_type: str, attribute: str) -> Any:
def get_json_type(self, data_type: str, attribute: str, key) -> Any:
"""Takes an attribute and its data type. Confirms the primitive data types of the mapped attribute values and assigns these values to the schema attribute. The correct hierarchical
position has been reached through the methods above.
Expand All @@ -118,19 +119,19 @@ def get_json_type(self, data_type: str, attribute: str) -> Any:
Any: The value of the mapped attribute as the correct data type.
"""
try:
if (isinstance(data_type, tuple) and (isinstance(attribute, tuple)==False)):
if (isinstance(data_type, tuple) and not (isinstance(attribute, tuple))):
if (isinstance(attribute, list)) and ("<class 'list'>" in data_type):
return attribute
else:
for element in data_type:
if element in ["<class 'int'>", "<class 'bool'>", "<class 'None'>", "<class 'float'>", "<class 'list'>"]:
return (self.get_json_type(element, attribute))
return (self.get_json_type(element, attribute, None))
else:
pass
logging.warning(f'incorrect type provided for {attribute}, expected {data_type} but received {type(attribute)}, returning original value.')
logging.warning(f'incorrect type provided for property \"{key}\", expected {data_type} but received {type(attribute)}, returning original value.')
return attribute
elif isinstance(attribute, tuple):
logging.warning(f'No value provided for {attribute}, returning original value.')
logging.warning(f'No value provided for property \"{key}\", returning original value.')
return attribute
elif data_type == "<class 'int'>":
return int(attribute)
Expand All @@ -143,11 +144,11 @@ def get_json_type(self, data_type: str, attribute: str) -> Any:
elif data_type == "<class 'str'>":
return str(attribute)
else:
logging.warning(f'incorrect type provided for {attribute}, expected {data_type} but received {type(attribute)}, returning original value.')
logging.warning(f'incorrect type provided for property \"{key}\", expected {data_type} but received {type(attribute)}, returning original value.')
return attribute
except TypeError as e:
logging.warning(f'incorrect type provided for {attribute}, expected {data_type} but received {type(attribute)}, returning original value.')
except TypeError:
logging.warning(f'incorrect type provided for property \"{key}\", expected {data_type} but received {type(attribute)}, returning original value.')
return attribute
except ValueError as e:
logging.warning(f'incorrect value provided for {attribute}, expected {data_type} but received {type(attribute)}, returning original value.')
except ValueError:
logging.warning(f'incorrect value provided for property \"{key}\", expected {data_type} but received {type(attribute)}, returning original value.')
return attribute
3 changes: 1 addition & 2 deletions jammato/dicom_mapping.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import json
import os
import logging
from typing import Any
from .schema_reader import Schema_Reader
from .dicom_reader import Dicom_Reader
from .cache_schemas import Cache_Schemas
Expand Down Expand Up @@ -165,4 +164,4 @@ def series_extension(self, map_dict: dict, map_attribute: str, series: Dicom_Rea
else:
merged_series_map=assess_type.merge_mapped_attributes(series_map, element_series_attribute, list(map_dict.keys())[3])

return merged_series_map
return merged_series_map
3 changes: 2 additions & 1 deletion jammato/metadata_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,8 @@ def __init__(self, metadata_document_directory: str, config_dicom_file_validatio
self.all_dicom_series.extend(self.post_read_processing(value, flag="single"))

elif type(file_extension) == type(str()):
self.evaluate_file_type(None, metadata_document_directory, file_extension)
self.evaluate_file_type(metadata_document_directory, file_extension)
self.all_dicom_series.extend(self.post_read_processing(list(self.all_dicom_series_dict.values())[0], flag="single"))
else:
logging.error("No valid metadata file path.")
raise FileNotFoundError("No valid metadata file path.")
Expand Down

0 comments on commit d5a7929

Please sign in to comment.