Browse Source

Dateien hochladen nach 'GV12/DB'

This commit includes all files required to set up the database. For the CSV data, please refer to the link in the technical report.
phamann 5 years ago
parent
commit
655116ba35
4 changed files with 649 additions and 0 deletions
  1. 468 0
      GV12/DB/BI_ReadLog.py
  2. 60 0
      GV12/DB/Connection.py
  3. 35 0
      GV12/DB/Support.py
  4. 86 0
      GV12/DB/main.py

+ 468 - 0
GV12/DB/BI_ReadLog.py

@@ -0,0 +1,468 @@
+import sys
+import os
+import yaml
+import shutil
+import pprint
+import lxml.etree as ET
+import lxml.builder
+import datetime
+from datetime import datetime as DT
+import time as t
+import json
+import sqlite3
+from sqlite3 import Error
+
def create_connection(db):
    """Open a connection to the SQLite file *db*.

    Returns the connection on success; prints the error and returns None
    when the database cannot be opened.
    """
    connection = None
    try:
        connection = sqlite3.connect(db)
    except Error as err:
        print(err)
    return connection
+
+
def check_folder(target_path, folder):
    """Return the path ``target_path/folder``, creating the directory if needed.

    Uses ``exist_ok=True`` instead of the original exists-then-create pair,
    which had a check-then-act race and computed the join three times.
    """
    path = os.path.join(target_path, folder)
    os.makedirs(path, exist_ok=True)
    return path
+
def get_target_path(prod_step, root_path):
    """Map a CPEE production-step name to its sorted-template folder.

    The folder (task3/sortedTemplates/level1..4) is created on demand via
    check_folder.  Raises KeyError for an unknown *prod_step*.
    """
    level_by_step = {
        "GV12 Order Processing": "level1",
        "GV12 Production": "level2",
        "GV12 Turn Production": "level3",
        "GV12 Turn Machining": "level4",
    }
    base = os.path.join(root_path, "task3/sortedTemplates")
    return check_folder(base, level_by_step[prod_step])
+
+
def check_timestamp(time, rootNode, search):
    """Return the <search> nodes whose @time lies within +/-2s of *time*.

    *time* is an ISO timestamp with a "+02:00" offset; the timestamps found
    under *search* are scanned in sorted order and the first one inside the
    window selects the nodes.  Returns [] when nothing matches.
    """
    window = datetime.timedelta(0, 2)
    parsed = DT.strptime(time.split("+")[0], '%Y-%m-%dT%H:%M:%S')
    # bounds keep the ISO "dateTtime+02:00" shape so lexical compare works
    lower = str(parsed - window).replace(" ", "T") + "+02:00"
    upper = str(parsed + window).replace(" ", "T") + "+02:00"

    for stamp in sorted(rootNode.xpath("//" + search + "/@time")):
        if lower <= stamp <= upper:
            return rootNode.xpath("//" + search + "[@time = '" + stamp + "']")
        if stamp > upper:
            break
    return []
+
def create_json(root_path, list, file):
    """Serialize *list* as pretty-printed JSON to <root_path>/task3/<file>.

    NOTE(review): the parameter names shadow the builtins ``list`` and
    ``file``; kept unchanged for caller compatibility.
    """
    with open(os.path.join(root_path, 'task3/'+file), 'w') as fp:
        json.dump(list, fp, indent=4)
    # the original called fp.close() after the with-block; the context
    # manager already closes the file, so the call was removed
+
+
def categorize_logs(root_path, logs_path):
    """Sort raw CPEE log files into task3/sortedTemplates/level1..4.

    Each YAML log stream under *logs_path* is scanned for its
    'cpee:name' production step; the file is then moved into the folder
    for that step (created on demand by get_target_path).

    Fixes over the original:
    - yaml.SafeLoader: the logs are plain YAML and must not be able to
      instantiate arbitrary Python objects (yaml.load_all without a
      Loader is deprecated and unsafe).
    - the move happens after the file is closed (moving an open file
      fails on some platforms).
    - files whose step cannot be determined are left in place; the
      original moved them into the current working directory because
      target_path stayed "".
    """
    origin_path = logs_path
    print(origin_path)
    for root, dirs, files in os.walk(origin_path):
        for file in files:
            target_path = ""
            with open(os.path.join(root, file), "r") as input_file:
                for value in yaml.load_all(input_file, Loader=yaml.SafeLoader):
                    if 'log' in value:
                        try:
                            val = value['log']['trace']['cpee:name']
                            target_path = get_target_path(val, root_path)
                            print(val)
                            break
                        except (KeyError, AttributeError):
                            continue
            if target_path:
                shutil.move(os.path.join(root, file), os.path.join(target_path, file))
+
+
def xml_check(xml_file):
    """Open (or initialise) the ProcessInstances XML document.

    Returns [tree, rootNode, level1], where level1 is the existing or a
    freshly created <level1> element under the root.
    """
    if os.path.isfile(xml_file):
        tree = ET.parse(xml_file, ET.XMLParser(remove_blank_text=True))
        rootNode = tree.getroot()
        level1 = tree.find('level1')
        if level1 is None:
            level1 = ET.SubElement(rootNode, "level1")
    else:
        # no file yet: build a fresh document with an empty <level1>
        rootNode = ET.Element("ProcessInstances")
        tree = ET.ElementTree(rootNode)
        level1 = ET.SubElement(rootNode, "level1")
    return [tree, rootNode, level1]
+
+
+
def recreate_process(root_path, conn, level):
    """Replay the sorted logs of ``level`` (2-4) into the SQLite database.

    Walks ``task3/sortedTemplates/level<level>``, parses each YAML log
    stream and fills the LogEntries, Instances, Machining, MeasureTot and
    MeasureDet tables.  A log's parent instance is resolved either through
    the Instances table or, failing that, by matching an
    'activity/calling' LogEntries row of the level above within a +/-2
    second window.  Diagnostics and failed statements go to 'taska3.txt'.

    NOTE(review): every SQL statement is built by string concatenation;
    this breaks as soon as a logged value contains a quote — consider
    parameterized queries.
    NOTE(review): the diagnostics handle ``f`` is opened in append mode
    and never closed — confirm this is acceptable for a one-shot import.
    """
    # variables
    f = open('taska3.txt', 'a')
    cur = conn.cursor()
    origin_path = os.path.join(root_path, "task3/sortedTemplates/level"+str(level))
    counter = 0;
    oknok = {'ok':0, 'nok':0}
    ok = 0
    nok = 0
    # traverse through all logs
    for root, dirs, files in os.walk(origin_path):
        for file in files:
            added = False;
            print(file)
            with open(os.path.join(root, file), "r") as input_file:
                results = yaml.load_all(input_file)
                # traverse through a single log
                measured_result = 'NULL'
                for value in results:
                    #try:
                    # header record: remember which instance this log belongs to
                    if 'log' in value:
                        logname = value['log']['trace']['cpee:name']
                        uuid = value['log']['trace']['cpee:uuid']
                        instance = value['log']['trace']['concept:name']

                    # external data-change events carry the scanned QR code,
                    # which encodes "<charge> <part>"
                    if 'event' in value and value['event']['id:id'] == "external" and value['event']['cpee:lifecycle:transition'] == 'dataelements/change':
                        for listOut in value['event']['list']['data_values']:
                            if listOut=='qr' and len(value['event']['list']['data_values'][listOut])>0:
                                charge = value['event']['list']['data_values'][listOut].split(' ')[0]
                                part = value['event']['list']['data_values'][listOut].split(' ')[1]
                                #print("Charge: "+ charge + " Part: " + part)


                    # all non-external events become LogEntries rows (plus
                    # measurement / instance bookkeeping below)
                    if 'event' in value and not value['event']['id:id']=="external":
                        try:

                            time = value['event']['time:timestamp']
                            activity = value['event']['cpee:lifecycle:transition']
                            name = value['event']['concept:name']
                            step_id = value['event']['id:id']
                            val="";
                            if value['event']['lifecycle:transition'] == "start":
                                val = json.dumps(value['event']['list']['data_send'])

                            # MicroVu measurement status: update the MeasureTot
                            # rows of the part scanned above (skip if the event's
                            # QR belongs to a different part)
                            if 'data_values' in value['event']['list'].keys() and name == 'Measure with MicroVu' and activity == 'dataelements/change' :
                                # print(value['event']['list']['data_values']['status'])
                                #print("PART: " + str(part))
                                #print("AKT; " + str(value['event']['list']['data_values']['qr'].split(' ')[1]))

                                if value['event']['list']['data_values']['qr'].split(' ')[1]!=part:
                                    print("NOTPART: " + str(part))
                                    print("NOTAKT; " + str(value['event']['list']['data_values']['qr'].split(' ')[1]))
                                    continue
                                measured_result = 'ok'
                                if value['event']['list']['data_values']['status'] != 'ok':
                                    measured_result = 'nok'
                                query = "Update MeasureTot SET status = '" + measured_result + "' where part = '" + part + "'" # and status = 'NULL'"
                                #print(query)
                                cur.execute(query)
                                conn.commit()


                            # received measurement payloads: one MeasureTot row
                            # per measured attribute, one MeasureDet row per
                            # individual measurement within it
                            if 'data_receiver' in value['event']['list'].keys() and name!='Fetch':
                                #print("ICHICHICH")
                                for listIn in value['event']['list']['data_receiver']:
                                    #print(listIn.keys())
                                    #print(listIn['name'])
                                    #print(listIn['data'])

                                    if 'results' in listIn['data']:
                                        if 'raw' in listIn['data'] and len(listIn['data']['raw']) > 0:
                                            # print("DOOF")
                                            # print(listIn['data']['raw']['Aufford QR-Code-Eingabe']['Eingabe'][0])
                                            if listIn['data']['raw']['Aufford QR-Code-Eingabe']['Eingabe'][0].split(" ")[1] != part:
                                                continue
                                       #print(listIn['data']['results'])
                                        for entry in listIn['data']['results']:



                                            messungen = listIn['data']['results'][entry]

                                            measured_attr = entry

                                            query = "Insert into MeasureTot('charge','part', 'status', 'measuredAttr', 'instance', 'timestamp') VALUES ('" + charge + "','" + part + "', '" + measured_result + "','" + measured_attr + "', '" + instance + "','" + time + "' )"
                                            #print(query)
                                            cur.execute(query)
                                            conn.commit()
                                            for messung in messungen:
                                                measured_det = messung
                                                measured_value = messungen[messung]['on_scale_from_zero_to_one']
                                                status = messungen[messung]['status']
                                                query = "Insert into MeasureDet('measuredDet','measuredVal', 'status', 'measuredAttr', 'timestamp') VALUES ('" + measured_det + "','" + str(measured_value) + "', '" + status + "','" + measured_attr + "', '" + time + "' )"
                                                cur.execute(query)
                                            conn.commit()
                                        #qrcode=listIn['data']['raw']['Aufford QR-Code-Eingabe']

                                val = json.dumps(value['event']['list']['data_receiver'])


                            # register this instance once per log: link it to a
                            # parent via the Instances table or, failing that, by
                            # matching the parent level's 'activity/calling'
                            # timestamp within +/-2 seconds
                            if not added:
                                cur.execute("Select called_by from instances where instance = '" + instance + "'")
                                parent_inst = cur.fetchone()
                                if parent_inst is None:
                                    print(time)
                                    #print(len(time))
                                    time = time.split('+')[0]
                                    print(len(time))
                                    # strip fractional seconds so strptime matches
                                    if len(time)>19:
                                        time = time [:-4]
                                        print(time)
                                    datetime_object = DT.strptime(time, '%Y-%m-%d' + 'T' + '%H:%M:%S')
                                    time_upper = str(datetime_object + datetime.timedelta(0, 2)) + "+02:00"
                                    time_lower = str(datetime_object - datetime.timedelta(0, 2)) + "+02:00"
                                    time_lower = time_lower.split(" ")[0] + "T" + time_lower.split(" ")[1]
                                    time_upper = time_upper.split(" ")[0] + "T" + time_upper.split(" ")[1]
                                    #print(time)
                                    #print(time_upper)
                                    #print(time_lower)
                                   # cur.execute("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '2','Null')")
                                    query = "Select log.instance from LogEntries log join instances i on i.instance = log.instance where i.level='" + str(level-1) +"' and log.activity ='activity/calling' and log.timestamp>='" + time_lower + "' and log.timestamp<='" + time_upper + "'"
                                    #print(query)
                                    cur.execute(query)
                                    parent_inst = cur.fetchone()
                                    #print(parent_inst)
                                    if parent_inst is None:
                                        #print(parent_inst)
                                        #print(val)
                                        #print("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '3','Null')")
                                        cur.execute("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '" + str(level) +"','Null')")
                                        #conn.commit()
                                    else:
                                        cur.execute("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '" + str(level) +"','" + parent_inst[0] + "')")
                                else:
                                    #print(instance)
                                    #print(parent_inst)
                                    query = "Update Instances set uuid = '" + uuid + "', name = '" + logname + "' where called_by = '" + parent_inst[0] + "' and instance='" + instance + "'"
                                    #print(query)
                                    cur.execute(query)
                                    #conn.commit()
                                #con.commit()
                                added = True

                            #print("Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )")

                            #to add calling instances
                            if 'data_receiver' in value['event']['list'].keys() and activity == 'activity/receiving':
                                # if step_id=='a1':
                                for attr in value['event']['list']['data_receiver']:
                                    # print('addNEw')
                                    if type(attr['data']) is dict and 'CPEE-INSTANCE' in attr['data']:
                                        # print('addNEw1')
                                        c_instance = attr['data']['CPEE-INSTANCE'].split('/')[-1]
                                        query = "Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '" + str(
                                            level + 1) + "','" + instance + "')"
                                        cur.execute(query)
                                        conn.commit()
                                        break
                                    elif attr['name'] == 'instance':
                                        # print('addNEw2')
                                        c_instance = attr['data']
                                        query = "Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '" + str(
                                            level + 1) + "','" + instance + "')"
                                        # print(query)
                                        cur.execute(query)
                                        conn.commit()
                                        break
                                    # print("Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '2','"+instance+"')")
                                    # conn.commit()


                            #To avoid adding machine_logs after receiving status, continue
                            if step_id == 'a3' and name== 'Status' and activity == 'dataelements/change':
                                    #print("BR")
                                    #print(value['event']['list']['data_values'].keys())
                                    #print(value['event']['list']['data_values']['lets_continue'])
                                    if value['event']['list']['data_values']['lets_continue']==False:
                                        #print("EAK")
                                        query = "Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )"
                                        cur.execute(query)
                                        conn.commit()
                                        break
                            query="Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )"
                            if instance in ('446', '447', '448'):
                                print(query, file = f)
                            cur.execute(query)
                            conn.commit()




                        except(KeyError) as e:
                            print(e, file = f)
                            #print(query)
                            print(activity, file = f)
                            print(time, file = f)
                            counter+=1

                        #except sqlite3.Error as qe:
                        except sqlite3.IntegrityError as qe:
                            #if instance in ('446','447','448'):
                                #print(query, file = f)
                                #print(qe, file = f)
                            print(qe, file = f)
                            counter += 1
                            pass
                            #exit()
                        except sqlite3.Error as qe_all:
                            print("Error_GEN: " + str(query), file=f)
                            print("Error_GEN: " + str(qe_all), file=f)
                        except Exception as e:
                            counter += 1
                            #message = template.format(type(ex).__name__, ex.args)
                            print("Error_GEN" + str(e), file = f)
                            print("Unexpected error!!", file = f)


                        try:
                            # to handle machining: each Fetch receive event carries
                            # a list of machine readings that become Machining rows
                            if step_id == 'a1' and name == 'Fetch' and activity == 'activity/receiving':
                                for attr in value['event']['list']['data_receiver']:
                                    for entry in attr['data']:
                                        try:
                                            # NOTE(review): the chained assignment to
                                            # ``name`` is immediately overwritten by the
                                            # next line — presumably a leftover typo
                                            m_id = name = entry['ID']
                                            name = entry['name']
                                            val = entry['value']
                                            clientHandle = entry['meta']['ClientHandle']
                                            statusCode = entry['meta']['StatusCode']
                                            server_timestamp = entry['meta']['ServerTimestamp']
                                            query = "insert into Machining (timestamp, clientHandle, m_id, status, name, value, level4_step_id, level4_activity, level4_timestamp, level4_instance) VALUES('" + server_timestamp + "','" + clientHandle + "','" + m_id + "','" + statusCode + "','" + name + "','" + val + "','" + step_id + "','" + activity + "','" + time + "','" + instance + "')"
                                            #if instance in ('446', '447', '448', '449', '450', '451','452'):
                                                #print(query, file = f)
                                            #if time == '2018-10-17T14:37:40+02:00':
                                                #print(query, file = f)
                                            print(query, file=f)
                                            cur.execute(query)
                                        except(KeyError) as e:
                                            print("ERROR IN ENTRY: " + str(e), file=f)
                                            # print(query)
                                            print(activity, file=f)
                                            print(time, file=f)
                                            counter += 1
                                            # except sqlite3.Error as qe:
                                        except sqlite3.IntegrityError as qe:
                                            # if instance in ('446', '447', '448'):
                                            # print(query, file = f)
                                            # print(qe, file =
                                            print("Error: " + str(query), file=f)
                                            print("Error: " + str(qe), file=f)
                                            counter += 1
                                            pass
                                            # exit()
                                        except sqlite3.Error as qe_all:
                                            print("Error_GEN: " + str(query), file=f)
                                            print("Error_GEN: " + str(qe_all), file=f)
                                        except Exception as e:
                                            counter += 1
                                            # message = template.format(type(ex).__name__, ex.args)
                                            print("Error_GEN" + str(e), file=f)
                                            print("Unexpected error!!", file=f)
                                        conn.commit()

                        except(KeyError) as e:
                            print("ERROR IN ATTR: " + str(e), file=f)
                            #print(query)
                            print(activity, file = f)
                            print(time, file = f)
                            counter += 1
                            continue
                            # except sqlite3.Error as qe:
                        except Exception as e:
                            counter += 1
                            #message = template.format(type(ex).__name__, ex.args)
                            print("Error_GEN" + str(e), file = f)
                            print("Unexpected error!!", file = f)
                #oknok['ok']=oknok['ok']+ok
                #oknok['nok'] = oknok['nok'] + nok
            #print(oknok)
            #conn.commit()
            #print(counter)
            counter = 0
            input_file.close()
            target_path = check_folder(os.path.join(root_path, 'task3/processed'), ('level'+str(level)))
            #shutil.move(os.path.join(origin_path,file), target_path)
+
+
+
def recreate_process_level1(root_path,conn):
    """Replay the level-1 (order processing) logs into the database.

    For each log under task3/sortedTemplates/level1 this inserts one
    Instances row (level '1', no parent), one LogEntries row per event,
    and a child Instances row (level '2') for every 'url' entry found in
    an event's data_receiver list.

    NOTE(review): all SQL is string-concatenated from log values — safe
    only while those values contain no quotes.
    """
    #variables
    cur = conn.cursor()
    origin_path = os.path.join(root_path,"task3/sortedTemplates/level1")

    #traverse through all logs
    for root, dirs, files in os.walk(origin_path):
        for file in files:
            # NOTE(review): ``first`` is never read afterwards — leftover flag
            first = True;
            print(file)
            with open(os.path.join(root, file), "r") as input_file:
                results = yaml.load_all(input_file)
                #traverse through a single log
                for value in results:
                    try:
                        # header record: register the instance itself
                        if 'log' in value:
                            logname = value['log']['trace']['cpee:name']
                            uuid = value['log']['trace']['cpee:uuid']
                            instance = value['log']['trace']['concept:name']
                            #print("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '1','Null')")
                            query = "Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '1','Null')"
                            cur.execute(query)
                            #conn.commit()
                        if 'event' in value:
                            time = value['event']['time:timestamp']

                            # payload: sent data on "start", received data otherwise
                            val=""
                            if value['event']['lifecycle:transition']=="start":
                                val = json.dumps(value['event']['list']['data_send'])

                            if 'data_receiver' in value['event']['list'].keys():
                                val = json.dumps(value['event']['list']['data_receiver'])


                            step_id = value['event']['id:id']
                            activity = value['event']['cpee:lifecycle:transition']
                            name = value['event']['concept:name']
                            query = "Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )"
                            #print("Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )")
                            cur.execute(query)
                            #conn.commit()
                            # a 'url' receiver means this event spawned a level-2
                            # child instance (id is the last URL path segment)
                            for attr in value['event']['list']['data_receiver']:
                                if attr['name'] == "url":
                                    c_instance = attr['data'].split('/')[-1]
                                    query="Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '2','"+instance+"')"
                                    cur.execute(query)
                                    #print("Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '2','"+instance+"')")
                                    #conn.commit()
                        conn.commit()
                    except(KeyError) as e:
                        # events without the expected keys are skipped silently
                        #print(e)
                        #print(time)
                        continue
            input_file.close()
            target_path = check_folder(os.path.join(root_path,'task3/processed'),'level1')
            #shutil.move(os.path.join(origin_path,file), target_path)
+
+
# --- one-shot import script -------------------------------------------------
# Rebuilds BIII.db from the raw CPEE logs in ./logs: empty all tables,
# sort the log files by production level, then replay each level in order
# (level 1 first, since deeper levels look up their parent instances).
root_path = os.getcwd()
db = os.path.join(root_path,"BIII.db")
conn = create_connection(db)
# NOTE(review): create_connection returns None when the DB cannot be opened;
# the statements below would then raise AttributeError — confirm a hard
# crash is the intended failure mode here.
conn.execute('delete from Machining')
conn.execute('delete from LogEntries')
conn.execute('delete from Instances')
conn.execute('delete from MeasureTot')
conn.execute('delete from MeasureDet')
logs_path = os.path.join(root_path, "logs")
#print(logs_path)
categorize_logs(root_path, logs_path)
recreate_process_level1(root_path,conn)
recreate_process(root_path,conn,2)
recreate_process(root_path,conn,3)
recreate_process(root_path,conn,4)

#recreate_process_level3(root_path,conn,"level3")
#recreate_process_level4(root_path,conn,"level4")
conn.close()
+

+ 60 - 0
GV12/DB/Connection.py

@@ -0,0 +1,60 @@
+import sqlite3
+from sqlite3 import Error
+import datetime as DT
+import numpy as np
+
+
+#create ConnectionToDB
def create_connection(db):
    """Return a sqlite3 connection to *db*, or None when connecting fails."""
    try:
        return sqlite3.connect(db)
    except Error as exc:
        print(exc)
        return None
+
def write_result_to_DB(category, value, conn):
    """Insert one (category, UTC timestamp, value) row into Results.

    Uses a parameterized statement instead of the original string
    concatenation, so quotes in *category* or *value* can neither break
    the SQL nor inject into it.  *value* is stored via str(), as before.
    """
    timestamp = DT.datetime.utcnow()
    cur = conn.cursor()
    query = "Insert into Results('Category', 'timestamp', 'Value') VALUES (?, ?, ?)"
    cur.execute(query, (category, str(timestamp), str(value)))
    conn.commit()
+
+
def get_result_from_db(query, target_array, conn):
    """Fetch one stored result row via *query* and parse it into records.

    The stored value has the shape "(key, value);(key, value);..."; each
    pair is appended to *target_array* as an (int, float) record using the
    array's own dtype.  Prints "NO ENTRY" and returns the array unchanged
    when the query yields no row.
    """
    cur = conn.cursor()
    cur.execute(query)
    print(query)
    row = cur.fetchone()
    if row is None:
        print("NO ENTRY")
        return target_array
    for chunk in row[0].split(';'):
        parts = str(chunk.strip('()')).split(', ')
        record = np.array([(int(parts[0]), float(parts[1]))], dtype=target_array.dtype)
        target_array = np.append(target_array, record)
    return target_array
+
+
def get_third_quartile(conn):
    """Return the most recent 'third_quartile' value from results, or 0 if none."""
    query = "select value from results where category ='third_quartile' order by timestamp desc limit 1"
    cur = conn.cursor()
    cur.execute(query)
    row = cur.fetchone()
    if row is None:
        print("NO ENTRY")
        return 0
    return row[0]

+ 35 - 0
GV12/DB/Support.py

@@ -0,0 +1,35 @@
+import numpy as np
+import Level_4 as LV4
+import Level_2 as LV2
+
def list_to_string(list):
    """Join items into a single-quoted, comma-separated SQL value list.

    Example: [1, 'a'] -> "'1', 'a'".  Returns '' for an empty sequence;
    the original raised UnboundLocalError in that case.

    NOTE(review): the parameter name shadows the builtin ``list``; kept
    unchanged for backward compatibility with existing callers.
    """
    return ", ".join("'" + str(entry) + "'" for entry in list)
+
+
def map_lv4_to_part(level4_instances, conn):
    """Resolve each level-4 instance to the measured part two levels up.

    Follows the called_by chain twice (lv4 -> lv3 -> lv2) and looks up the
    part measured for the lv2 instance.  Returns a structured array with
    fields ('lv4', 'lv2'); unresolvable instances are skipped.
    """
    cur = conn.cursor()
    pairs = np.array([], dtype=[('lv4', int), ('lv2', int)])
    for lv4 in level4_instances:
        query = "select distinct mt.part from measureTot mt join instances i on i.instance=mt.instance where i.instance in  (select distinct i.called_by from instances i where i.instance in (select called_by from instances i where i.instance = '" + str(lv4) + "'))"
        cur.execute(query)
        row = cur.fetchone()
        if row is not None:
            record = np.array([(lv4, int(row[0]))], dtype=pairs.dtype)
            pairs = np.append(pairs, record)
    return pairs
+
+
def convert_part_to_lv4(conn):
    """Build a {level4_instance: part} dict for all known level-4 instances.

    NOTE(review): values are the parts cast to float although the upstream
    field is named 'lv2' — TODO confirm the intended semantics.
    """
    lv4_instances = LV4.get_level4Instances(conn)
    mapping = {}
    for lv4_key, part_val in map_lv4_to_part(lv4_instances, conn):
        mapping[int(lv4_key)] = float(part_val)
    return mapping

+ 86 - 0
GV12/DB/main.py

@@ -0,0 +1,86 @@
+import os
+import Operations as op
+import Graph as gr
+import Connection as connect
+import numpy as np
+
+
#***********************************main********************************************
# Entry-point script: connects to BIII.db, derives the Zylinder duration
# statistics and plots them.  The commented sections below are alternative
# analyses that can be re-enabled individually.
#--------connect To DB -------------------------
root_path = os.getcwd()
db = os.path.join(root_path,"BIII.db")
conn = connect.create_connection(db)

#Operations
#-----------------------get duration of parts------------------------------------------
#op.get_all_parts(conn)
#op.get_nok_parts(conn)
#op.get_durations_lv4(conn)

#----------calculate quantiles of operation duration ------------
#op.get_break_length_stat(conn)

#----------- get stored quantile ---------------------------
#t_quartile = connect.get_third_quartile(conn)
#t_quartile = float(t_quartile)*5
#t_quartile = float(3)


#------------- calculate duration of all parts with regards to quantile*2 -----------------------------
#durations = op.get_durations_part(t_quartile, conn)
#print(durations)
#-------------get calculated part duration from db--------------------------
#query = "select value from results where category ='Duration_Part_Breakl_" + str(t_quartile) + "' order by timestamp desc limit 1"
#result = np.array([], dtype=[('keys', int), ('data', float)])
#durations = connect.get_result_from_db(query, result, conn)


#------------calculate aggregatred duration  -------------------------
#aggr_dur = op.aggregate_duration(durations)


#-------------calculate break --------------------------
#breaks = op.get_breaks(480, conn)
#breaks = op.get_breaks(t_quartile,conn)


# ------------- get breaks from db ------------------------
#query = "select value from results where category ='Duration_BreaksWithin"+str(t_quartile) +"' order by timestamp desc limit 1"
#result = np.array([], dtype=[('keys', int), ('data', float)])
#breaks = connect.get_result_from_db(query, result, conn)


#--------------- get measures-----------------
#measures = op.get_all_measures(conn)


#---------------plot results --------------------------
#gr.plot_all_measures(measures)
#gr.plot_duration_part(durations)
#gr.plot_duration_part(aggr_dur)
#gr.plot_duration_part(breaks)


#------Get durations Zylinder-----------
final_result = op.get_durations_zylinder(conn)
# count the values outside the normalized [0, 1] zylinder range
greater_one = np.where(final_result['data'] > 1.0)[0]
smaller_zero = np.where(final_result['data'] < 0.0)[0]
#result = np.append(greater_one, smaller_zero)


# renamed from 'sum' — the original name shadowed the builtin sum()
outlier_count = greater_one.size + smaller_zero.size
print("-----here comes the sum -----------")
print(outlier_count)
gr.plot_zylinder_part(final_result)


#------------------calculate longer durations (outsiders)---------------
#op.outsiders(durations, conn)

#-----------close connection---------------
conn.close()