# Operations.py
  1. import Support as sp
  2. import numpy as np
  3. import Level_2 as LV2
  4. import Level_4 as LV4
  5. import Parts as part
  6. import Connection as connect
  7. import Graph as gr
  8. from datetime import datetime as DT
  9. def get_duration_by_break(lv4_instances, dur_break, conn):
  10. #TODO CREATE LIST WITH SINGLE Durations to calc quantile
  11. t_quartile = connect.get_third_quartile(conn)
  12. dataset = np.array([], dtype=[('keys', int), ('data', float)])
  13. cur = conn.cursor()
  14. liststring = sp.list_to_string(lv4_instances)
  15. query = "select m.timestamp, m.name, m.value, m.level4_instance from machining m join logEntries log on (log.timestamp=m.level4_timestamp and log.activity = m.level4_activity and log.step_id=m.level4_step_id and log.instance=m.level4_instance) where m.level4_instance in (" + liststring + ") order by m.timestamp"
  16. cur.execute(query)
  17. time = cur.fetchall()
  18. first = True;
  19. test_counter = 0
  20. for timestamp in time:
  21. if first:
  22. begin_time = timestamp[0]
  23. instance_old = timestamp[3]
  24. first = False
  25. end_time = timestamp[0]
  26. instance_new = timestamp[3]
  27. begin_time = DT.strptime(begin_time.split(".")[0], '%Y-%m-%d' + ' ' + '%H:%M:%S')
  28. end_time = DT.strptime(end_time.split(".")[0], '%Y-%m-%d' + ' ' + '%H:%M:%S')
  29. diff = (end_time - begin_time).total_seconds() / 60
  30. # if diff > dur_break:
  31. # if instance_new != instance_old:
  32. # f = open('break_inbetween.txt', 'a')
  33. # print(str(timestamp[0]) + str(", ") + str(timestamp[1]) + str(", ") + str(begin_time) + str(", ") + str(
  34. # end_time) + str(", ") + str(diff), file=f)
  35. # f.close()
  36. # else:
  37. # f = open('break_within.txt', 'a')
  38. # print(str(timestamp[3]) + str(timestamp[0]) + str(", ") + str(timestamp[1]) + str(", ") + str(begin_time) + str(", ") + str(end_time) + str(", ") + str(diff), file=f)
  39. # f.close()
  40. # else:
  41. if diff <= dur_break:
  42. #print({int(timestamp[3]):float(diff)})
  43. if int(timestamp[3]) in dataset['keys']:
  44. index = int(np.where(dataset['keys'] == int(timestamp[3]))[0])
  45. old_value = dataset[index]['data']
  46. new_value = old_value + diff
  47. dataset[index]['data'] = new_value
  48. else:
  49. dataset = np.append(dataset, np.array([(int(timestamp[3]), float(diff))], dtype=dataset.dtype))
  50. else:
  51. test_counter += 1
  52. #print("I am again bigger for the " + str(test_counter) + "thats the value i tried to insert: " + str(diff))
  53. if int(timestamp[3]) in dataset['keys']:
  54. index = int(np.where(dataset['keys'] == int(timestamp[3]))[0])
  55. old_value = dataset[index]['data']
  56. new_value = old_value + float(t_quartile)
  57. dataset[index]['data'] = new_value
  58. else:
  59. dataset = np.append(dataset, np.array([(int(timestamp[3]), float(t_quartile))], dtype=dataset.dtype))
  60. begin_time = timestamp[0]
  61. instance_old = instance_new
  62. return dataset
  63. def get_breaks(dur_break, conn):
  64. level4_instances = LV4.get_level4Instances(conn)
  65. sortedlevel4_instances = LV4.orderLV4InstancesChronoligically(level4_instances, conn)
  66. converter = sp.convert_part_to_lv4(conn)
  67. breakswithin = np.array([], dtype=[('keys', int), ('data', float)])
  68. breaksinbetween = np.array([], dtype=[('keys', int), ('data', float)])
  69. cur = conn.cursor()
  70. liststring = sp.list_to_string(sortedlevel4_instances)
  71. query = "select m.timestamp, m.name, m.value, m.level4_instance from machining m join logEntries log on (log.timestamp=m.level4_timestamp and log.activity = m.level4_activity and log.step_id=m.level4_step_id and log.instance=m.level4_instance) where m.level4_instance in (" + liststring + ") order by m.timestamp"
  72. cur.execute(query)
  73. time = cur.fetchall()
  74. first = True;
  75. for timestamp in time:
  76. if first:
  77. begin_time = timestamp[0]
  78. instance_old = timestamp[3]
  79. first = False
  80. end_time = timestamp[0]
  81. instance_new = timestamp[3]
  82. begin_time = DT.strptime(begin_time.split(".")[0], '%Y-%m-%d' + ' ' + '%H:%M:%S')
  83. end_time = DT.strptime(end_time.split(".")[0], '%Y-%m-%d' + ' ' + '%H:%M:%S')
  84. diff = (end_time - begin_time).total_seconds() / 60
  85. if diff > dur_break:
  86. #print("DIFF: " + str(diff))
  87. if instance_new != instance_old:
  88. if int(timestamp[3]) in breaksinbetween['keys']:
  89. #print("EXISTS1")
  90. for entry in breaksinbetween:
  91. if int(entry['keys'])==int(timestamp[3]):
  92. dur_new = float(diff) + float(entry['data'])
  93. else:
  94. dur_new = float(diff)
  95. breaksinbetween = np.append(breaksinbetween, np.array([(int(timestamp[3]), float(dur_new))], dtype=breaksinbetween.dtype))
  96. else:
  97. if int(timestamp[3]) in breakswithin['keys']:
  98. #print("EXISTS2")
  99. for entry in breakswithin:
  100. if int(entry['keys'])==int(timestamp[3]):
  101. dur_new = float(diff) + float(entry['data'])
  102. else:
  103. dur_new = float(diff)
  104. breakswithin= np.append(breakswithin, np.array([(int(timestamp[3]), float(dur_new))], dtype=breakswithin.dtype))
  105. #print("DUR_NEW: " + str(dur_new))
  106. begin_time = timestamp[0]
  107. instance_old = instance_new
  108. print("BINBETWEEN")
  109. print(breaksinbetween)
  110. print("BWITHIN")
  111. print(breakswithin)
  112. i = 0
  113. indexToRemove = []
  114. for entry in breaksinbetween:
  115. if entry['keys'] in converter:
  116. breaksinbetween[i]['keys'] = converter[entry['keys']]
  117. else:
  118. indexToRemove.append(i)
  119. i += 1
  120. durationWOBreaks = np.delete(breaksinbetween, indexToRemove, axis=0)
  121. dbstring = breaksinbetween.tolist()
  122. dbstring = str(dbstring)
  123. dbstring = dbstring.strip('[]')
  124. dbstring = dbstring.replace("), (", ");(")
  125. connect.write_result_to_DB(str("Duration_BreaksInbetween" + str(dur_break)), dbstring, conn)
  126. i = 0
  127. indexToRemove = []
  128. for entry in breakswithin:
  129. if entry['keys'] in converter:
  130. breakswithin[i]['keys'] = converter[entry['keys']]
  131. else:
  132. indexToRemove.append(i)
  133. i += 1
  134. durationWOBreaks = np.delete(breakswithin, indexToRemove, axis=0)
  135. dbstring = breakswithin.tolist()
  136. dbstring = str(dbstring)
  137. dbstring = dbstring.strip('[]')
  138. dbstring = dbstring.replace("), (", ");(")
  139. connect.write_result_to_DB(str("Duration_BreaksWithin" + str(dur_break)), dbstring, conn)
  140. dataset={"BreakWithin": breakswithin, "BreakInbetween": breaksinbetween}
  141. return dataset
  142. def get_all_parts(conn):
  143. level4_instances = LV4.get_level4Instances(conn)
  144. sortedlevel4_instances = LV4.orderLV4InstancesChronoligically(level4_instances, conn)
  145. level2_instances = LV2.get_level2Instance_by_LV4Instance(sortedlevel4_instances, conn)
  146. sortedlevel2_instances = LV2.orderLV2InstancesChronoligically(level2_instances, conn)
  147. parts = part.get_part_by_lv2Instance(sortedlevel2_instances, 'all', conn)
  148. sortedParts = part.orderPartsChronoligically(parts, conn)
  149. connect.write_result_to_DB("get_all_parts", sortedParts, conn)
  150. print(sortedParts)
  151. return sortedParts
  152. def get_nok_parts(conn):
  153. level4_instances = LV4.get_level4Instances(conn)
  154. sortedlevel4_instances = LV4.orderLV4InstancesChronoligically(level4_instances, conn)
  155. level2_instances = LV2.get_level2Instance_by_LV4Instance(sortedlevel4_instances, conn)
  156. sortedlevel2_instances = LV2.orderLV2InstancesChronoligically(level2_instances, conn)
  157. parts = part.get_part_by_lv2Instance(sortedlevel2_instances, 'nok', conn)
  158. sortedParts = part.orderPartsChronoligically(parts, conn)
  159. print(sortedParts)
  160. return sortedParts
  161. def get_durations_lv4(conn):
  162. level4_instances = LV4.get_level4Instances(conn)
  163. sortedlevel4_instances = LV4.orderLV4InstancesChronoligically(level4_instances, conn)
  164. #durationsWOLunch = get_duration_by_break(sortedlevel4_instances, 45, conn)
  165. #durationWOWeekend = get_duration_by_break(sortedlevel4_instances, 480, conn)
  166. durationWOBreaks = get_duration_by_break(sortedlevel4_instances, 1, conn)
  167. print(durationWOBreaks)
  168. return durationWOBreaks
  169. def get_durations_part(break_dur, conn):
  170. level4_instances = LV4.get_level4Instances(conn)
  171. sortedlevel4_instances = LV4.orderLV4InstancesChronoligically(level4_instances, conn)
  172. converter = sp.convert_part_to_lv4(conn)
  173. #print(converter)
  174. durationWOBreaks = get_duration_by_break(sortedlevel4_instances, break_dur, conn)
  175. #durationWOBreaks = np.array([(int(838),float(12.4)),(int(116),float(12.4))], dtype=[('keys', int), ('data', float)])
  176. i = 0
  177. #print(durationWOBreaks)
  178. indexToRemove = []
  179. for entry in durationWOBreaks:
  180. if entry['keys']in converter:
  181. durationWOBreaks[i]['keys']=converter[entry['keys']]
  182. else:
  183. indexToRemove.append(i)
  184. i += 1
  185. durationWOBreaks = np.delete(durationWOBreaks,indexToRemove, axis=0)
  186. #print(durationWOBreaks)
  187. dbstring = durationWOBreaks.tolist()
  188. dbstring = str(dbstring)
  189. dbstring = dbstring.strip('[]')
  190. dbstring = dbstring.replace("), (", ");(")
  191. #print("string")
  192. #print(dbstring)
  193. connect.write_result_to_DB(str("Duration_Part_Breakl_" + str(break_dur)),dbstring,conn)
  194. return durationWOBreaks
  195. def aggregate_duration(durations):
  196. dur_aggr = 0
  197. result = np.array([], dtype=[('keys', int), ('data', float)])
  198. #print(durations)
  199. for entry in durations:
  200. dur_aggr = dur_aggr + entry['data']
  201. result = np.append(result, np.array([(entry['keys'],dur_aggr)],dtype=result.dtype))
  202. print(result)
  203. return result
  204. def get_break_length_stat(conn):
  205. dataset = np.array([], dtype=float)
  206. cur = conn.cursor()
  207. query = "select m.timestamp from machining m join logEntries log on (log.timestamp=m.level4_timestamp and log.activity = m.level4_activity and log.step_id=m.level4_step_id and log.instance=m.level4_instance) order by m.timestamp"
  208. cur.execute(query)
  209. time = cur.fetchall()
  210. test_counter = 0
  211. cool_counter = 0
  212. if not time is None:
  213. #print(time)
  214. first = True;
  215. for timestamp in time:
  216. if first:
  217. begin_time = timestamp[0]
  218. first = False
  219. end_time = timestamp[0]
  220. begin_time = DT.strptime(begin_time.split(".")[0], '%Y-%m-%d' + ' ' + '%H:%M:%S')
  221. end_time = DT.strptime(end_time.split(".")[0], '%Y-%m-%d' + ' ' + '%H:%M:%S')
  222. diff = (end_time - begin_time).total_seconds() / 60
  223. if diff>0.0167:
  224. test_counter += 1
  225. if diff>0:
  226. cool_counter += 1
  227. dataset = np.append(dataset,np.array([float(diff)],dtype=dataset.dtype))
  228. begin_time = timestamp[0]
  229. #print(dataset)
  230. mid = np.percentile(dataset,80)
  231. quart = np.percentile(dataset,90)
  232. quart_3 = np.percentile(dataset,95)
  233. limit = (np.percentile(dataset,99))
  234. print(test_counter)
  235. print("------vs------")
  236. print(cool_counter)
  237. print(mid)
  238. print(quart)
  239. print(quart_3)
  240. #connect.write_result_to_DB("first_quartile", quart,conn)
  241. #connect.write_result_to_DB("second_quartile", mid, conn)
  242. #connect.write_result_to_DB("third_quartile", quart_3, conn)
  243. def get_all_measures(conn):
  244. f = open('all_measures.txt', 'a')
  245. cur = conn.cursor()
  246. measures_dict = {}
  247. cur.execute("select mt.part, mt.measuredAttr, md.measuredDet, md.measuredval, md.status from MeasureTot mt join MeasureDet md on (md.timestamp=mt.timestamp and md.MeasuredAttr=mt.MeasuredAttr) order by mt.timestamp asc")
  248. measures = cur.fetchall()
  249. measure_dict = {}
  250. part_no = ""
  251. for measure in measures:
  252. if part_no != measure[0]:
  253. measure_dict = {}
  254. part_no=measure[0]
  255. part = {measure[0]:measure_dict}
  256. dict_entry = {str(measure[1])+"-"+str(measure[2]):measure[3]}
  257. part[measure[0]].update(dict_entry)
  258. measures_dict.update(part)
  259. print(measures_dict, file=f)
  260. return measures_dict
  261. def get_durations_zylinder(conn):
  262. # ------------- get breaks from db ------------------------
  263. query = "select value from results where category ='Duration_Part_Breakl_0.08333333333333333' order by timestamp desc limit 1"
  264. result = np.array([], dtype=[('keys', int), ('data', float)])
  265. breaks = connect.get_result_from_db(query, result, conn)
  266. print('-----------------------------------------')
  267. # check the boxplot in order to define a threshold to find bar changes which is approximately 3.5
  268. print(np.percentile(breaks['data'], 75))
  269. print('-----------------------------------------')
  270. greater_values_i = np.where(breaks['data'] > 3.5)[0]
  271. # filter all instances with values above 3.5 minutes and make a new array
  272. filtered_result = np.array([], dtype=[('keys', int), ('data', float)])
  273. for number in greater_values_i:
  274. item = breaks[number]
  275. filtered_result = np.append(filtered_result,
  276. np.array([(int(item['keys']), float(item['data']))], dtype=filtered_result.dtype))
  277. print(filtered_result)
  278. gr.plot_duration_part(filtered_result)
  279. print('-----------------------------------------')
  280. # retrieve the corresponding z-values from the part
  281. final_result = np.array([], dtype=[('keys', int), ('data', float)])
  282. cur = conn.cursor()
  283. for key in filtered_result:
  284. if key['keys'] < 300:
  285. cur.execute(
  286. "Select distinct md.measuredVal from MeasureDet md join MeasureTot mt on(md.timestamp = mt.timestamp) where md.measuredAttr Like 'Zylinder%' and md.measuredDet='Durchmesser' and mt.part='" + str(
  287. key['keys']) + "'")
  288. attr_value = cur.fetchone()
  289. final_result = np.append(final_result,
  290. np.array([(int(key['keys']), float(attr_value[0]))], dtype=final_result.dtype))
  291. return (final_result)
  292. def outsiders(durations, conn):
  293. print("DURATION")
  294. print(durations)
  295. outsiders = np.array([], dtype=[('keys', int), ('data', float)])
  296. treshold_upper = np.percentile(durations['data'], 90)
  297. #treshold_lower = np.percentile(durations['data'], 10)
  298. #
  299. # print(treshold_lower)
  300. print(treshold_upper)
  301. for entry in durations:
  302. if entry['data']>treshold_upper:
  303. #if entry['data']>treshold_lower and entry['data']<treshold_upper:
  304. outsiders=np.append(outsiders,np.array([(int(entry['keys']),float(entry['data']))],dtype=outsiders.dtype))
  305. print("Outsiders")
  306. print(outsiders)
  307. #
  308. noks = get_nok_parts(conn)
  309. statusArr = np.array([], dtype=[('keys', int), ('dur', float), ('status', np.object)])
  310. minimum = min(outsiders['data'])
  311. counter = 0
  312. for entry in outsiders:
  313. diff = entry['data']-minimum
  314. if entry['keys'] in noks:
  315. statusArr = np.append(statusArr, np.array([(int(entry['keys']), float(diff), 'nok')], dtype=statusArr.dtype))
  316. counter = counter + 1
  317. else:
  318. statusArr = np.append(statusArr, np.array([(int(entry['keys']), float(diff), 'ok')], dtype=statusArr.dtype))
  319. print("Top 10%")
  320. print(statusArr)
  321. print(counter)