tsteuer 5 years ago
parent
commit
122e9f9981
4 changed files with 268 additions and 2 deletions
  1. 207 0
      cdplib/configuration.py
  2. 2 2
      cdplib/db_handlers/MongodbHandler.py
  3. 58 0
      cdplib/log.py
  4. 1 0
      setup.py

+ 207 - 0
cdplib/configuration.py

@@ -0,0 +1,207 @@
+"""
+@author: Jürgen Pannosch (welser project), modified by Tanja Zolotareva
+
+@description: Here we define a data-structure that contains arguments
+used throughout the project. Arguments (like data locations) that can differ
+from person to person are loaded from the ./.env file, arguments that should
+be the same for everyone are defined directly in the data structure. All
+changes in this script should be committed to git.
+"""
+
+# -*- coding: utf-8 -*-
+import os
+import configparser
+
+
class Configuration:
    """Central data structure holding arguments used throughout the project.

    Person-specific values (e.g. data locations) are read from an
    ini-style config file (``.env`` in the working directory by default);
    values that are the same for everyone are hard-coded in the
    properties below.
    """

    def __init__(self,
                 config_file: str = os.path.join(os.getcwd(), ".env")):
        '''
        :param config_file: path to an ini-style configuration file
        :raises AssertionError: if config_file is not a string or the
            file does not exist
        '''
        assert isinstance(config_file, str), "the config_file must be a string"

        assert os.path.isfile(config_file), "config file was not found"

        self._parse_ini_file(config_file)

    def __getitem__(self, item):
        '''
        Return the parsed section named *item*, or None when the section
        is absent (instead of raising a KeyError).
        '''
        if item in self._config:
            return self._config[item]
        else:
            return None

    def _parse_ini_file(self, config_file: str):
        '''
        Parse *config_file* into the nested dict ``self._config``
        (section name -> {OPTION_NAME: value}). Option names are
        upper-cased; empty values are stored as None.
        '''
        self._config = dict()

        config = configparser.ConfigParser()
        config.read(config_file)

        for key in config:
            self._config[key] = {}
            sub_config = config[key]

            for sub_key in sub_config:
                name = sub_key.upper()
                value = sub_config[sub_key]

                # an empty string in the ini file means "not set"
                self._config[key][name] = value if (value != '') else None

    @property
    def labeled_history_folder(self):
        '''
        Folder with the 2018 extract of serviceable (labeled) wheelsets.
        '''
        return os.path.join(self._config["LOCATIONS"]["DATA_DIR"],
                            "Aufarbeitungsdaten/2018/Datenextrakt einsatzfähige Radsätze 2018")

    @property
    def unlabeled_history_yearly_folders(self):
        '''
        Folders with the yearly extracts of all wheelsets passing
        station 110 (unlabeled data), one folder per year.
        '''
        folders = []

        for year in ["2016", "2017", "2018"]:
            # BUGFIX: ``.format(year)`` was previously applied to the
            # result of os.path.join() instead of to the folder-name
            # template; apply it to the template so a literal "{" or "}"
            # in DATA_DIR can never break the substitution.
            folder = ("Datenextrakt alle Radsätze {} "
                      "ausgehend von der Station 110").format(year)

            folders.append(os.path.join(self._config["LOCATIONS"]["DATA_DIR"],
                                        "Aufarbeitungsdaten",
                                        year,
                                        folder))

        return folders

    @property
    def additional_data_folder(self):
        '''
        Folder with additionally exchanged data.
        '''
        return os.path.join(self._config["LOCATIONS"]["DATA_DIR"],
                            "Info-Austausch")

    @property
    def columns_rs516(self):
        '''
        Column-position -> column-name mapping for the rs516 table.
        '''
        return {0: "radsatznummer",
                1: "positionsnummer",
                2: "status",
                3: "taetigkeitsname",
                4: "datum",
                5: "presskrafdiagram_min",
                6: "presskrafdiagram_max",
                7: "presskrafdiagram_wert"}

    @property
    def ihs_labels(self):
        '''
        For analysis we replace the IHS string by an integer value,
        which can be useful for comparing the IHS of two wheelsets.
        '''

        ihs_labels = {"null": -1,
                      "IS1": 0,
                      "IS1L": 1,
                      "IS2": 2,
                      "IS3": 3}

        return ihs_labels

    @property
    def schrott_schadcodes(self):
        '''
        If during the process one of the following schadcodes is assigned,
         then the wheelset is scrap and is removed from the process.
         This should correspond to aufarbeitungstyp = 2 in rs0, but if there
         was a delay (or a mistake) in the maintenance of the table
         rs0, this might not be the case. Count as scrap anyway.
        '''
        schadcodes_schrott = ["RSAUS"]

        return schadcodes_schrott

    @property
    def schrott_taetigkeiten(self):
        '''
        If during the process one of the following activities is assigned,
         then the wheelset is scrap and is removed from the process.
         This should correspond to aufarbeitungstyp = 2 in rs0 and (or)
         to assignment of a corresponding schadcode. Data might contain
         inconsistencies. If such an activity is assigned, count as scrap.
        '''
        taetigkeiten_schrott = ["RADSATZ AUSSCHEIDEN"]

        return taetigkeiten_schrott

    @property
    def status_labels(self):
        '''
        Used to uniformize the column "Status" in the table rs1;
         integer values are convenient for analysis.
        '''
        status_labels = {"Scheiden": 2,
                         "Schlecht": 1,
                         "Fertig": 0,
                         "Gut": 0}

        return status_labels

    @property
    def process_stages(self):
        '''
        For machine learning predictions we divide the process into
         big stages; stages can be skipped depending on the IHS of the
         wheelset. We use all information gathered during the previous
         process stages to make predictions for the next stage.

        :return: a networkx.DiGraph whose nodes are stage names carrying
            a "stations" attribute and whose edges give the stage order.
        '''
        # imported locally so the module does not require networkx
        # unless this property is actually used
        import networkx as nx

        critical_stations = {"A": [421, 110],
                             "B": [130, 131],
                             "C": [140, 141, 142, 150],
                             "D": [410, 420],
                             "E": [510, 511, 520, 521, 535,
                                   530, 531, 516, 550],
                             "F": [490, 480, 430, 170],
                             "G": [595, 190, 630],
                             "H": [640, 641],
                             "I": [650, 560],
                             "J": [675],
                             "K": [690],
                             "L": [710, 670]}

        stages_graph = nx.DiGraph()

        for stage, stations in critical_stations.items():
            stages_graph.add_node(stage, stations=stations)

        stages_graph.add_edges_from([("A", "B"), ("B", "C"), ("C", "D"),
                                     ("D", "E"), ("D", "F"), ("E", "G"),
                                     ("F", "G"), ("G", "H"), ("H", "I"),
                                     ("I", "J"), ("J", "K"), ("K", "L")])

        return stages_graph
+
+
# Module-level singleton: importing this module constructs a
# Configuration from ./.env in the current working directory
# (import fails with AssertionError if that file is missing).
default = Configuration()

+ 2 - 2
cdplib/db_handlers/MongodbHandler.py

@@ -21,8 +21,8 @@ import pandas as pd
 import numpy as np
 
 sys.path.append(os.getcwd())
-from libraries.log import Log
-from libraries.configuration import default as cfg
+from cdplib.log import Log
+from cdplib.configuration import default as cfg
 
 class MongodbHandler:
 

+ 58 - 0
cdplib/log.py

@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+"""
+@author: jürgen.pannosch, tanja.zolotareva
+"""
+
+import sys
+import os
+import logging
+
+
class Log:
    """Thin wrapper around :mod:`logging` that writes formatted messages
    to a log file and, optionally, echoes them to stdout."""

    def __init__(self, name: str = None,
                 log_file: str = None,
                 log_level: str = "INFO",
                 print_to_stdout: bool = True):
        """Sets the log level and the path where the log file is stored.

        :param name: logger name; None selects the root logger.
        :param log_file: Path to the log file (defaults to ./all.log).
        :param log_level: Log level name, e.g. "INFO" or "DEBUG".
        :param print_to_stdout: also echo messages to stdout."""

        if name is None:
            name = ''

        self._logger = logging.getLogger(name)

        # drop handlers left over from a previous Log instance with the
        # same name, so messages are not emitted twice
        if (self._logger.hasHandlers()):
            self._logger.handlers.clear()

        if log_file is None:
            log_file = os.path.join(".", "all.log")

        # NOTE: message used to say 'log_path', which is not the
        # parameter's name
        assert(isinstance(log_file, str)),\
            "Parameter 'log_file' must be of string type"

        formatter = logging.Formatter(
                '\n %(name)s %(asctime)s %(levelname)s %(message)s')

        # BUGFIX: os.path.dirname() returns '' for a bare filename such
        # as "all.log", and os.makedirs('') raises FileNotFoundError;
        # only create the directory when there actually is one.
        log_dir = os.path.dirname(log_file)
        if log_dir:
            os.makedirs(log_dir, exist_ok=True)

        file_handler = logging.FileHandler(log_file)
        file_handler.setFormatter(formatter)
        self._logger.addHandler(file_handler)

        if print_to_stdout:
            stream_handler = logging.StreamHandler(sys.stdout)
            stream_handler.setFormatter(formatter)
            self._logger.addHandler(stream_handler)

        self._logger.setLevel(log_level)

    def info(self, message: str):
        """Log *message* at INFO level."""
        self._logger.info(message)

    def warning(self, message: str):
        """Log *message* at WARNING level."""
        self._logger.warning(message)

    def error(self, message: str):
        """Log *message* at ERROR level."""
        self._logger.error(message)

+ 1 - 0
setup.py

@@ -5,6 +5,7 @@ INSTALL_REQUIRES = [
     'simplejson',
     'jsonref',
     'pymongo',
+    'pandas'
 ]