
Upload files to 'GV12'

This commit adds all the Python scripts needed to reproduce the data analysis described in the technical report "Time-Data-Driven Production Optimisation and Failure Prediction in Manufacturing Processes". Use them together with the files included in the "DB" folder.
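
For orientation, here is a minimal sketch of how the scripts might be wired together. It assumes the "DB" folder contains a SQLite database reachable via Python's DB-API (the file name below is a placeholder; the Connection module that creates the real connection is not part of this commit):

    import sqlite3
    import Graph
    import Operations

    # Hypothetical path; point this at the actual database file in the "DB" folder.
    conn = sqlite3.connect("DB/production.db")

    parts = Operations.get_all_parts(conn)              # chronologically ordered part numbers
    durations = Operations.get_durations_part(1, conn)  # per-part durations, gaps > 1 min capped
    Graph.plot_duration_part(durations)                 # duration curve over the ordered parts
    conn.close()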
phamann committed 4 years ago
commit 57daf51910
5 changed files with 592 additions and 0 deletions:

  1. GV12/Graph.py (+124, -0)
  2. GV12/Level_2.py (+37, -0)
  3. GV12/Level_4.py (+33, -0)
  4. GV12/Operations.py (+358, -0)
  5. GV12/Parts.py (+40, -0)

GV12/Graph.py (+124, -0)

@@ -0,0 +1,124 @@
+import matplotlib.pyplot as plt
+import numpy as np
+
+
+def plot_duration_part(dataset):
+    plt.figure(figsize=(18, 7))
+    plt.xlabel("Parts chronologically ordered")
+    plt.ylabel("Duration Minutes")
+    # Use string labels so the part IDs are drawn as categorical x-ticks.
+    x_str = [str(entry) for entry in dataset['keys']]
+    y = dataset['data']
+    plt.margins(x=0)
+    plt.xticks(rotation=70, fontsize=9)
+    plt.subplots_adjust(left=0.05, right=1, bottom=0.15)
+    plt.plot(x_str, y, label="OverallDuration")
+
+    plt.legend()
+    plt.title("MeasureResultsPPart")
+    plt.show()
+
+
+def plot_duration_part_cap(dataset):
+    plt.figure(figsize=(18, 7))
+    plt.xlabel("Parts")
+    plt.ylabel("Duration Minutes")
+    # Same as plot_duration_part, but the y-axis is capped to [2, 6] minutes below.
+    x_str = [str(entry) for entry in dataset['keys']]
+    y = dataset['data']
+    plt.margins(x=0)
+    plt.xticks(rotation=70, fontsize=9)
+    plt.subplots_adjust(left=0.05, right=1, bottom=0.15)
+    plt.plot(x_str, y, label="OverallDuration")
+    plt.ylim([2,6])
+    plt.legend()
+    plt.title("MeasureResultsPPart")
+    plt.show()
+
+def plot_all_measures(measures_dict):
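+    # Plots the selected measure series (values normalised to [0, 1]) for
+    # every part on one figure, with dashed reference lines at 0 and 1.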
+    f = open('jbg.txt', 'a')
+    datasets = {}
+    if measures_dict:
+        # Initialise one (n+1, 1) array per measure, using the measure names
+        # of the first part (all parts share the same set of measures).
+        first_part = next(iter(measures_dict))
+        for key in measures_dict[first_part]:
+            datasets[key] = np.zeros((len(measures_dict) + 1, 1),
+                                     dtype=[('keys', int), ('data', float)])
+
+    for entry in measures_dict:
+        print("------------", file=f)
+        print("------------", file=f)
+        print(entry, file=f)
+        print("------------", file=f)
+        for measure in measures_dict[entry]:
+            dataset = datasets[measure]
+            print(dataset, file=f)
+            # Row index is the part number; the stored key is shifted by one.
+            dataset[entry][0] = entry + 1, measures_dict[entry][measure]
+
+
+    print("------------", file=f)
+    print("------------", file=f)
+    plt.figure(figsize=(18, 7))
+    plt.xlabel("Parts")
+    plt.ylabel("0 to 1")
+    for measure in datasets:
+        if measure not in ("Kreis Ø19,2-1-Mitte Z", "Kreis Ø19,2-1-Durchmesser", "Kreis Ø19,2-2-Mitte Z", "Kreis Ø19,2-2-Durchmesser", "Zylinder Ø4,5-B-Durchmesser", "Zylinder Ø4,5-B-Zylinderform", "Distanz Z4,8-Distanz Z"):
+            continue
+        dataset = datasets[measure]
+        # Drop the all-zero rows left over from initialisation.
+        dataset = dataset[~np.all(dataset['keys'] == 0, axis=1)]
+        part_ids = dataset['keys']
+        values = dataset['data']
+        plt.ylim(-0.5, 2.5)
+        plt.xticks(np.arange(0, len(values), 2))
+        plt.xticks(rotation=70, fontsize=10)
+        plt.subplots_adjust(left=0.05, right=1, bottom=0.15)
+        print(measure)
+        plt.plot(part_ids, values, label=str(measure))
+        plt.legend()
+
+    # Dashed red reference lines mark the normalised tolerance band [0, 1];
+    # len(values) still refers to the last measure plotted above.
+    x = np.arange(1, len(values))
+    plt.plot(x, [1] * len(x), color='red', linestyle='-.')
+    plt.plot(x, [0] * len(x), color='red', linestyle='-.')
+    plt.title("MeasureResultsPPart")
+    plt.show()
+
+def plot_zylinder_part(dataset):
+    plt.figure(figsize=(18, 7))
+    plt.xlabel("Parts")
+    plt.ylabel("Zylinder Values")
+    x = dataset['keys']
+    x_str = [str(entry) for entry in x]
+    y = dataset['data']
+    plt.margins(x=0)
+    plt.xticks(rotation=70, fontsize=9)
+    plt.subplots_adjust(left=0.05, right=1, bottom=0.15)
+    # Dashed reference lines at the normalised tolerance limits 0 and 1.
+    plt.hlines(y=1.0, xmin=1, xmax=100, linewidth=1, color='r', linestyles='dashed')
+    plt.hlines(y=0.0, xmin=1, xmax=100, linewidth=1, color='r', linestyles='dashed')
+    plt.plot(x_str, y, label="Zylinder Values Change")
+
+    plt.legend()
+    plt.title("MeasureResultsZylinder")
+    plt.show()

GV12/Level_2.py (+37, -0)

@@ -0,0 +1,37 @@
+import Support as sp
+import numpy as np
+
+def get_level2Instance_by_LV4Instance(lv4_instances, conn):
+    # Walk two levels up the called_by chain: level 4 -> level 3 -> level 2.
+    cur = conn.cursor()
+    liststring = sp.list_to_string(lv4_instances)
+    query = "select i.called_by from instances i where i.instance in (select called_by from instances i where i.instance in(" + liststring + "))"
+    cur.execute(query)
+    sqlResult = cur.fetchall()
+    lv2_instances = np.array([int(entry[0]) for entry in sqlResult], dtype=int)
+    return lv2_instances
+
+
+def orderLV2InstancesChronoligically(lv2_instances, conn):
+    cur = conn.cursor()
+    liststring = sp.list_to_string(lv2_instances)
+    query = "select distinct instance from logEntries where instance in (" + liststring + ") order by timestamp asc"
+    cur.execute(query)
+    sqlResult = cur.fetchall()
+    # Re-order the instances by their first appearance in the event log.
+    lv2_instances = np.array([int(entry[0]) for entry in sqlResult], dtype=int)
+    return lv2_instances

GV12/Level_4.py (+33, -0)

@@ -0,0 +1,33 @@
+import Support as sp
+import numpy as np
+
+def get_level4Instances(conn):
+    # All distinct level-4 instances that have a matching log entry.
+    cur = conn.cursor()
+    query = "select distinct m.level4_instance from machining m join logEntries log on (log.timestamp=m.level4_timestamp and log.activity = m.level4_activity and log.step_id=m.level4_step_id and log.instance=m.level4_instance) order by m.level4_instance"
+    cur.execute(query)
+    sqlResult = cur.fetchall()
+    lv4_instances = np.array([int(entry[0]) for entry in sqlResult], dtype=int)
+    return lv4_instances
+
+
+
+def orderLV4InstancesChronoligically(lv4_instances, conn):
+    cur = conn.cursor()
+    liststring = sp.list_to_string(lv4_instances)
+    query = "select distinct m.level4_instance from machining m join logEntries log on (log.timestamp=m.level4_timestamp and log.activity = m.level4_activity and log.step_id=m.level4_step_id and log.instance=m.level4_instance) where level4_instance in (" + liststring + ") order by m.timestamp asc"
+    cur.execute(query)
+    sqlResult = cur.fetchall()
+    # Re-order the instances by their machining timestamps.
+    lv4_instances = np.array([int(entry[0]) for entry in sqlResult], dtype=int)
+    return lv4_instances

GV12/Operations.py (+358, -0)

@@ -0,0 +1,358 @@
+import Support as sp
+import numpy as np
+import Level_2 as LV2
+import Level_4 as LV4
+import Parts as part
+import Connection as connect
+import Graph as gr
+from datetime import datetime as DT
+
+def get_duration_by_break(lv4_instances, dur_break, conn):
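+    # Per level-4 instance, sums the gaps between consecutive machining
+    # timestamps; gaps longer than dur_break minutes count as breaks and are
+    # replaced by the precomputed third-quartile gap length.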
+    # TODO: collect the individual gap durations in a list to compute the quantile directly.
+    t_quartile = connect.get_third_quartile(conn)
+    dataset = np.array([], dtype=[('keys', int), ('data', float)])
+    cur = conn.cursor()
+    liststring = sp.list_to_string(lv4_instances)
+    query = "select m.timestamp, m.name, m.value, m.level4_instance from machining m join logEntries log on (log.timestamp=m.level4_timestamp and log.activity = m.level4_activity and log.step_id=m.level4_step_id and log.instance=m.level4_instance) where m.level4_instance in (" + liststring + ") order by m.timestamp"
+    cur.execute(query)
+    time = cur.fetchall()
+    first = True
+    test_counter = 0
+    for timestamp in time:
+        if first:
+            begin_time = timestamp[0]
+            first = False
+        end_time = timestamp[0]
+        # Timestamps look like 'YYYY-MM-DD HH:MM:SS.ffffff'; drop the fractional part.
+        begin_time = DT.strptime(begin_time.split(".")[0], '%Y-%m-%d %H:%M:%S')
+        end_time = DT.strptime(end_time.split(".")[0], '%Y-%m-%d %H:%M:%S')
+        diff = (end_time - begin_time).total_seconds() / 60
+        if diff <= dur_break:
+            # Accumulate the gap on this level-4 instance, or start a new entry.
+            if int(timestamp[3]) in dataset['keys']:
+                index = int(np.where(dataset['keys'] == int(timestamp[3]))[0])
+                dataset[index]['data'] += diff
+            else:
+                dataset = np.append(dataset, np.array([(int(timestamp[3]), float(diff))], dtype=dataset.dtype))
+        else:
+            # Gap counts as a break: add the capped third-quartile duration
+            # instead of the raw gap.
+            test_counter += 1
+
+            if int(timestamp[3]) in dataset['keys']:
+                index = int(np.where(dataset['keys'] == int(timestamp[3]))[0])
+                dataset[index]['data'] += float(t_quartile)
+            else:
+                dataset = np.append(dataset, np.array([(int(timestamp[3]), float(t_quartile))], dtype=dataset.dtype))
+
+        begin_time = timestamp[0]
+
+    return dataset
+
+
+
+def get_breaks(dur_break, conn):
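+    # Collects all gaps longer than dur_break minutes, split into breaks
+    # between two different level-4 instances and breaks within a single
+    # instance, and writes both series (keyed by part number) to the DB.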
+    level4_instances = LV4.get_level4Instances(conn)
+    sortedlevel4_instances = LV4.orderLV4InstancesChronoligically(level4_instances, conn)
+    converter = sp.convert_part_to_lv4(conn)
+    breakswithin = np.array([], dtype=[('keys', int), ('data', float)])
+    breaksinbetween = np.array([], dtype=[('keys', int), ('data', float)])
+    cur = conn.cursor()
+    liststring = sp.list_to_string(sortedlevel4_instances)
+    query = "select m.timestamp, m.name, m.value, m.level4_instance from machining m join logEntries log on (log.timestamp=m.level4_timestamp and log.activity = m.level4_activity and log.step_id=m.level4_step_id and log.instance=m.level4_instance) where m.level4_instance in (" + liststring + ") order by m.timestamp"
+    cur.execute(query)
+    time = cur.fetchall()
+    first = True
+    for timestamp in time:
+        if first:
+            begin_time = timestamp[0]
+            instance_old = timestamp[3]
+            first = False
+        end_time = timestamp[0]
+        instance_new = timestamp[3]
+        begin_time = DT.strptime(begin_time.split(".")[0], '%Y-%m-%d %H:%M:%S')
+        end_time = DT.strptime(end_time.split(".")[0], '%Y-%m-%d %H:%M:%S')
+        diff = (end_time - begin_time).total_seconds() / 60
+        if diff > dur_break:
+            # A gap between two different instances is a break in between parts;
+            # a gap inside one instance is a break within a part. Accumulate the
+            # duration on the existing entry instead of appending a duplicate row.
+            if instance_new != instance_old:
+                if int(timestamp[3]) in breaksinbetween['keys']:
+                    index = int(np.where(breaksinbetween['keys'] == int(timestamp[3]))[0])
+                    breaksinbetween[index]['data'] += float(diff)
+                else:
+                    breaksinbetween = np.append(breaksinbetween, np.array([(int(timestamp[3]), float(diff))], dtype=breaksinbetween.dtype))
+            else:
+                if int(timestamp[3]) in breakswithin['keys']:
+                    index = int(np.where(breakswithin['keys'] == int(timestamp[3]))[0])
+                    breakswithin[index]['data'] += float(diff)
+                else:
+                    breakswithin = np.append(breakswithin, np.array([(int(timestamp[3]), float(diff))], dtype=breakswithin.dtype))
+        begin_time = timestamp[0]
+        instance_old = instance_new
+    print("BINBETWEEN")
+    print(breaksinbetween)
+    print("BWITHIN")
+    print(breakswithin)
+    # Map level-4 instance IDs to part numbers; drop entries without a part.
+    indexToRemove = []
+    for i, entry in enumerate(breaksinbetween):
+        if entry['keys'] in converter:
+            breaksinbetween[i]['keys'] = converter[entry['keys']]
+        else:
+            indexToRemove.append(i)
+    breaksinbetween = np.delete(breaksinbetween, indexToRemove, axis=0)
+    dbstring = str(breaksinbetween.tolist()).strip('[]').replace("), (", ");(")
+    connect.write_result_to_DB("Duration_BreaksInbetween" + str(dur_break), dbstring, conn)
+    indexToRemove = []
+    for i, entry in enumerate(breakswithin):
+        if entry['keys'] in converter:
+            breakswithin[i]['keys'] = converter[entry['keys']]
+        else:
+            indexToRemove.append(i)
+    breakswithin = np.delete(breakswithin, indexToRemove, axis=0)
+    dbstring = str(breakswithin.tolist()).strip('[]').replace("), (", ");(")
+    connect.write_result_to_DB("Duration_BreaksWithin" + str(dur_break), dbstring, conn)
+    dataset = {"BreakWithin": breakswithin, "BreakInbetween": breaksinbetween}
+    return dataset
+
+
+def get_all_parts(conn):
+    level4_instances = LV4.get_level4Instances(conn)
+    sortedlevel4_instances = LV4.orderLV4InstancesChronoligically(level4_instances, conn)
+    level2_instances = LV2.get_level2Instance_by_LV4Instance(sortedlevel4_instances, conn)
+    sortedlevel2_instances = LV2.orderLV2InstancesChronoligically(level2_instances, conn)
+    parts = part.get_part_by_lv2Instance(sortedlevel2_instances, 'all', conn)
+    sortedParts = part.orderPartsChronoligically(parts, conn)
+    connect.write_result_to_DB("get_all_parts", sortedParts, conn)
+    print(sortedParts)
+    return sortedParts
+
+
+def get_nok_parts(conn):
+    level4_instances = LV4.get_level4Instances(conn)
+    sortedlevel4_instances = LV4.orderLV4InstancesChronoligically(level4_instances, conn)
+    level2_instances = LV2.get_level2Instance_by_LV4Instance(sortedlevel4_instances, conn)
+    sortedlevel2_instances = LV2.orderLV2InstancesChronoligically(level2_instances, conn)
+    parts = part.get_part_by_lv2Instance(sortedlevel2_instances, 'nok', conn)
+    sortedParts = part.orderPartsChronoligically(parts, conn)
+    print(sortedParts)
+    return sortedParts
+
+def get_durations_lv4(conn):
+    level4_instances = LV4.get_level4Instances(conn)
+    sortedlevel4_instances = LV4.orderLV4InstancesChronoligically(level4_instances, conn)
+    #durationsWOLunch = get_duration_by_break(sortedlevel4_instances, 45, conn)
+    #durationWOWeekend = get_duration_by_break(sortedlevel4_instances, 480, conn)
+    durationWOBreaks = get_duration_by_break(sortedlevel4_instances, 1, conn)
+    print(durationWOBreaks)
+    return durationWOBreaks
+
+
+def get_durations_part(break_dur, conn):
+    level4_instances = LV4.get_level4Instances(conn)
+    sortedlevel4_instances = LV4.orderLV4InstancesChronoligically(level4_instances, conn)
+    converter = sp.convert_part_to_lv4(conn)
+    durationWOBreaks = get_duration_by_break(sortedlevel4_instances, break_dur, conn)
+
+    # Map level-4 instance IDs to part numbers; drop entries without a part.
+    indexToRemove = []
+    for i, entry in enumerate(durationWOBreaks):
+        if entry['keys'] in converter:
+            durationWOBreaks[i]['keys'] = converter[entry['keys']]
+        else:
+            indexToRemove.append(i)
+    durationWOBreaks = np.delete(durationWOBreaks, indexToRemove, axis=0)
+    dbstring = str(durationWOBreaks.tolist()).strip('[]').replace("), (", ");(")
+    connect.write_result_to_DB("Duration_Part_Breakl_" + str(break_dur), dbstring, conn)
+    return durationWOBreaks
+
+
+def aggregate_duration(durations):
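+    # Turns the per-part durations into a cumulative (running-total) series,
+    # preserving the input order of the parts.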
+    dur_aggr = 0
+    result = np.array([], dtype=[('keys', int), ('data', float)])
+    for entry in durations:
+        dur_aggr = dur_aggr + entry['data']
+        result = np.append(result, np.array([(entry['keys'], dur_aggr)], dtype=result.dtype))
+    print(result)
+    return result
+
+
+def get_break_length_stat(conn):
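+    # Computes the distribution of gaps between consecutive machining
+    # timestamps; the printed percentiles help choose the break thresholds
+    # used elsewhere (e.g. the capped third quartile).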
+    dataset = np.array([], dtype=float)
+    cur = conn.cursor()
+    query = "select m.timestamp from machining m join logEntries log on (log.timestamp=m.level4_timestamp and log.activity = m.level4_activity and log.step_id=m.level4_step_id and log.instance=m.level4_instance) order by m.timestamp"
+    cur.execute(query)
+    time = cur.fetchall()
+    test_counter = 0
+    cool_counter = 0
+    if time is not None:
+        first = True
+        for timestamp in time:
+            if first:
+                begin_time = timestamp[0]
+                first = False
+            end_time = timestamp[0]
+            begin_time = DT.strptime(begin_time.split(".")[0], '%Y-%m-%d %H:%M:%S')
+            end_time = DT.strptime(end_time.split(".")[0], '%Y-%m-%d %H:%M:%S')
+            diff = (end_time - begin_time).total_seconds() / 60
+            if diff > 0.0167:  # gaps longer than roughly one second
+                test_counter += 1
+            if diff > 0:
+                cool_counter += 1
+                dataset = np.append(dataset, np.array([float(diff)], dtype=dataset.dtype))
+            begin_time = timestamp[0]
+    # Despite the variable names, these are the 80th/90th/95th/99th percentiles.
+    mid = np.percentile(dataset, 80)
+    quart = np.percentile(dataset, 90)
+    quart_3 = np.percentile(dataset, 95)
+    limit = np.percentile(dataset, 99)
+
+    print(test_counter)
+    print("------vs------")
+    print(cool_counter)
+    print(mid)
+    print(quart)
+    print(quart_3)
+
+    #connect.write_result_to_DB("first_quartile", quart,conn)
+    #connect.write_result_to_DB("second_quartile", mid, conn)
+    #connect.write_result_to_DB("third_quartile", quart_3, conn)
+
+def get_all_measures(conn):
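+    # Returns {part: {"<attr>-<detail>": measured value}} for every part and
+    # appends the complete dict to all_measures.txt.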
+    f = open('all_measures.txt', 'a')
+    cur = conn.cursor()
+    measures_dict = {}
+
+    cur.execute("select mt.part, mt.measuredAttr, md.measuredDet, md.measuredval, md.status from MeasureTot mt join MeasureDet md on (md.timestamp=mt.timestamp and md.MeasuredAttr=mt.MeasuredAttr) order by mt.timestamp asc")
+    measures = cur.fetchall()
+
+    # Rows arrive ordered by timestamp, so a change of part number starts a
+    # fresh inner dict for that part.
+    measure_dict = {}
+    part_no = ""
+    for measure in measures:
+        if part_no != measure[0]:
+            measure_dict = {}
+        part_no = measure[0]
+        measure_dict[str(measure[1]) + "-" + str(measure[2])] = measure[3]
+        measures_dict[measure[0]] = measure_dict
+    print(measures_dict, file=f)
+    f.close()
+    return measures_dict
+
+
+def get_durations_zylinder(conn):
+    # ------------- get breaks from db ------------------------
+    query = "select value from results where category ='Duration_Part_Breakl_0.08333333333333333' order by timestamp desc limit 1"
+    result = np.array([], dtype=[('keys', int), ('data', float)])
+    breaks = connect.get_result_from_db(query, result, conn)
+
+    print('-----------------------------------------')
+    # Threshold chosen from the boxplot of break durations: breaks longer than
+    # roughly 3.5 minutes indicate bar changes.
+    print(np.percentile(breaks['data'], 75))
+    print('-----------------------------------------')
+    greater_values_i = np.where(breaks['data'] > 3.5)[0]
+
+    # filter all instances with values above 3.5 minutes and make a new array
+    filtered_result = np.array([], dtype=[('keys', int), ('data', float)])
+
+    for number in greater_values_i:
+        item = breaks[number]
+        filtered_result = np.append(filtered_result,
+                                    np.array([(int(item['keys']), float(item['data']))], dtype=filtered_result.dtype))
+
+    print(filtered_result)
+    gr.plot_duration_part(filtered_result)
+    print('-----------------------------------------')
+
+    # retrieve the corresponding z-values from the part
+    final_result = np.array([], dtype=[('keys', int), ('data', float)])
+    cur = conn.cursor()
+    for key in filtered_result:
+        if key['keys'] < 300:
+            cur.execute(
+                "Select distinct md.measuredVal from MeasureDet md join MeasureTot mt on(md.timestamp = mt.timestamp) where md.measuredAttr Like 'Zylinder%' and md.measuredDet='Durchmesser' and mt.part='" + str(
+                    key['keys']) + "'")
+            attr_value = cur.fetchone()
+            final_result = np.append(final_result,
+                                     np.array([(int(key['keys']), float(attr_value[0]))], dtype=final_result.dtype))
+    return final_result
+
+def outsiders(durations, conn):
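+    # Flags parts whose duration lies above the 90th percentile and labels
+    # each one 'ok' or 'nok' using the part status from the DB.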
+    print("DURATION")
+    print(durations)
+    outsiders = np.array([], dtype=[('keys', int), ('data', float)])
+    # Keep the parts whose duration lies above the 90th percentile.
+    threshold_upper = np.percentile(durations['data'], 90)
+    #threshold_lower = np.percentile(durations['data'], 10)
+    print(threshold_upper)
+    for entry in durations:
+        if entry['data'] > threshold_upper:
+            #if entry['data'] > threshold_lower and entry['data'] < threshold_upper:
+            outsiders = np.append(outsiders, np.array([(int(entry['keys']), float(entry['data']))], dtype=outsiders.dtype))
+    print("Outsiders")
+    print(outsiders)
+    #
+    noks = get_nok_parts(conn)
+    statusArr = np.array([], dtype=[('keys', int), ('dur', float), ('status', object)])  # plain object: the np.object alias was removed from NumPy
+    minimum = min(outsiders['data'])
+    counter = 0
+    for entry in outsiders:
+        diff = entry['data']-minimum
+        if entry['keys'] in noks:
+            statusArr = np.append(statusArr, np.array([(int(entry['keys']), float(diff), 'nok')], dtype=statusArr.dtype))
+            counter = counter + 1
+        else:
+            statusArr = np.append(statusArr, np.array([(int(entry['keys']), float(diff), 'ok')], dtype=statusArr.dtype))
+    print("Top 10%")
+    print(statusArr)
+    print(counter)

GV12/Parts.py (+40, -0)

@@ -0,0 +1,40 @@
+import Support as sp
+import numpy as np
+
+def get_part_by_lv2Instance(lv2_instances, condition, conn):
+    # Map the condition keyword ('all', 'nok', 'ok') to an optional status filter.
+    switchCase = {"all": "", "nok": " and status = 'nok' ", "ok": " and status = 'ok' "}
+    condition = switchCase[condition]
+    cur = conn.cursor()
+    liststring = sp.list_to_string(lv2_instances)
+    query = "select distinct mt.part from measureTot mt join instances i on mt.instance=i.instance where i.instance in (" + liststring + ")" + condition
+    cur.execute(query)
+    sqlResult = cur.fetchall()
+    parts = np.array([int(entry[0]) for entry in sqlResult], dtype=int)
+    return parts
+
+
+def orderPartsChronoligically(parts, conn):
+    cur = conn.cursor()
+    liststring = sp.list_to_string(parts)
+    query = "select distinct mt.part from measureTot mt join instances i on i.instance=mt.instance join logEntries log on log.instance=i.instance where mt.part in (" + liststring + ") order by log.timestamp asc"
+    cur.execute(query)
+    sqlResult = cur.fetchall()
+    # Re-order the parts by their first appearance in the event log.
+    parts = np.array([int(entry[0]) for entry in sqlResult], dtype=int)
+    return parts