|
@@ -0,0 +1,468 @@
|
|
|
+import sys
|
|
|
+import os
|
|
|
+import yaml
|
|
|
+import shutil
|
|
|
+import pprint
|
|
|
+import lxml.etree as ET
|
|
|
+import lxml.builder
|
|
|
+import datetime
|
|
|
+from datetime import datetime as DT
|
|
|
+import time as t
|
|
|
+import json
|
|
|
+import sqlite3
|
|
|
+from sqlite3 import Error
|
|
|
+
|
|
|
def create_connection(db):
    """Open a SQLite connection to *db*.

    Returns the connection object, or None when the connect attempt
    fails (the error is printed, matching the script's logging style).
    """
    conn = None
    try:
        conn = sqlite3.connect(db)
    except Error as err:
        # Signal failure to the caller via the None return value.
        print(err)
    return conn
|
|
|
+
|
|
|
+
|
|
|
def check_folder(target_path, folder):
    """Ensure that *folder* exists under *target_path* and return its full path.

    Fix: the original exists()/makedirs() pair had a check-then-create race;
    makedirs(exist_ok=True) is atomic with respect to a concurrent creator
    and is idempotent, so repeated calls are safe.
    """
    path = os.path.join(target_path, folder)
    os.makedirs(path, exist_ok=True)
    return path
|
|
|
+
|
|
|
def get_target_path(prod_step, root_path):
    """Map a CPEE production-step name to its sortedTemplates level folder.

    Creates the folder on demand (via check_folder) and returns its path.
    An unknown *prod_step* raises KeyError, exactly like the original
    dict-index lookup.
    """
    level_by_step = {
        "GV12 Order Processing": "level1",
        "GV12 Production": "level2",
        "GV12 Turn Production": "level3",
        "GV12 Turn Machining": "level4",
    }
    level = level_by_step[prod_step]
    base = os.path.join(root_path, "task3/sortedTemplates")
    return check_folder(base, level)
|
|
|
+
|
|
|
+
|
|
|
def check_timestamp(time, rootNode, search):
    """Return the *search* nodes whose @time lies within +/-2 s of *time*.

    *time* is a string like '2018-10-17T14:37:40+02:00'; the zone suffix is
    stripped before parsing and re-attached as the fixed '+02:00' used by
    the logs.  Comparison happens on the string form, which is valid because
    all timestamps share the same fixed-width layout.  Returns [] when no
    stamp falls inside the window.
    """
    matches = []
    naive = time.split("+")[0]
    anchor = DT.strptime(naive, '%Y-%m-%dT%H:%M:%S')
    window = datetime.timedelta(0, 2)
    upper = str(anchor + window) + "+02:00"
    lower = str(anchor - window) + "+02:00"
    # str(datetime) uses a space separator; restore the ISO 'T'.
    lower = lower.replace(" ", "T", 1)
    upper = upper.replace(" ", "T", 1)

    # Scan the sorted stamps; take the first one inside the window.
    candidates = rootNode.xpath("//" + search + "/@time")
    candidates.sort()
    for stamp in candidates:
        if lower <= stamp <= upper:
            matches = rootNode.xpath("//" + search + "[@time = '" + stamp + "']")
            break
        if stamp > upper:
            # Sorted input: everything after this is also too late.
            break
    return matches
|
|
|
+
|
|
|
def create_json(root_path, list, file):
    """Serialize *list* as pretty-printed JSON to task3/<file> under *root_path*.

    The task3 directory must already exist.  NOTE(review): the parameter name
    `list` shadows the builtin; it is kept unchanged so keyword callers are
    not broken.  Fix: dropped the redundant fp.close() -- the with-block
    already closes the file.
    """
    with open(os.path.join(root_path, 'task3/' + file), 'w') as fp:
        json.dump(list, fp, indent=4)
|
|
|
+
|
|
|
+
|
|
|
def categorize_logs(root_path, logs_path):
    """Sort raw CPEE log files into per-level task3/sortedTemplates folders.

    Each file under *logs_path* is a YAML stream; its trace's 'cpee:name'
    selects the destination level folder (see get_target_path), after which
    the file is moved there.
    """
    origin_path = logs_path
    print(origin_path)
    for root, dirs, files in os.walk(origin_path):
        for file in files:
            target_path = ""
            with open(os.path.join(root, file), "r") as input_file:
                # SECURITY NOTE: yaml.load_all without an explicit Loader can
                # construct arbitrary objects; acceptable only because these
                # logs are trusted local files -- use SafeLoader otherwise.
                results = yaml.load_all(input_file)
                for value in results:
                    if 'log' in value:
                        try:
                            val = value['log']['trace']['cpee:name']
                            target_path = get_target_path(val, root_path)
                            print(val)
                            break
                        except (KeyError, AttributeError):
                            continue
            # Fix: only move when a level folder was resolved; previously an
            # empty target_path moved unclassified files into the CWD.
            if target_path:
                shutil.move(os.path.join(root, file), os.path.join(target_path, file))
|
|
|
+
|
|
|
+
|
|
|
def xml_check(xml_file):
    """Load or lazily create the ProcessInstances XML document.

    Returns [tree, rootNode, level1]: the ElementTree, its root element, and
    the <level1> child (created whenever it is missing).
    """
    if os.path.isfile(xml_file):
        parser = ET.XMLParser(remove_blank_text=True)
        tree = ET.parse(xml_file, parser)
        rootNode = tree.getroot()
        level1 = tree.find('level1')
        if level1 is None:
            level1 = ET.SubElement(rootNode, "level1")
    else:
        # No file yet: build a fresh document with an empty <level1>.
        rootNode = ET.Element("ProcessInstances")
        tree = ET.ElementTree(rootNode)
        level1 = ET.SubElement(rootNode, "level1")
    return [tree, rootNode, level1]
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
def recreate_process(root_path, conn, level):
    """Replay the sorted level-*level* CPEE logs into the SQLite database.

    Walks task3/sortedTemplates/level<level> under *root_path*, parses each
    YAML log stream and fills the Instances, LogEntries, MeasureTot,
    MeasureDet and Machining tables through *conn*.  A parent instance is
    resolved either from the Instances table (called_by) or, failing that,
    by matching an 'activity/calling' LogEntries row of level-1 within a
    +/-2 s timestamp window.  Errors are appended to 'taska3.txt'.

    NOTE(review): all SQL is assembled by string concatenation -- acceptable
    only because the logs are trusted local files; parameterized queries
    would be the robust fix.  yaml.load_all without a Loader is likewise
    unsafe on untrusted input.
    """
    # variables
    f = open('taska3.txt', 'a')  # error/debug sink, append mode (never closed here)
    cur = conn.cursor()
    origin_path = os.path.join(root_path, "task3/sortedTemplates/level"+str(level))
    counter = 0;  # number of events skipped because of errors
    oknok = {'ok':0, 'nok':0}  # leftover tally from earlier experiments, unused
    ok = 0
    nok = 0
    # traverse through all logs
    for root, dirs, files in os.walk(origin_path):
        for file in files:
            added = False;  # True once this file's Instances row was inserted/updated
            print(file)
            with open(os.path.join(root, file), "r") as input_file:
                results = yaml.load_all(input_file)
                # traverse throug single log
                measured_result = 'NULL'  # last known MicroVu status in this file
                for value in results:
                    #try:
                    # Log header document: remember the trace identity.
                    if 'log' in value:
                        logname = value['log']['trace']['cpee:name']
                        uuid = value['log']['trace']['cpee:uuid']
                        instance = value['log']['trace']['concept:name']

                    # An external dataelements/change carries the scanned QR
                    # code, formatted "<charge> <part>".
                    if 'event' in value and value['event']['id:id'] == "external" and value['event']['cpee:lifecycle:transition'] == 'dataelements/change':
                        for listOut in value['event']['list']['data_values']:
                            if listOut=='qr' and len(value['event']['list']['data_values'][listOut])>0:
                                charge = value['event']['list']['data_values'][listOut].split(' ')[0]
                                part = value['event']['list']['data_values'][listOut].split(' ')[1]
                                #print("Charge: "+ charge + " Part: " + part)

                    # Ordinary engine events (everything not marked external).
                    if 'event' in value and not value['event']['id:id']=="external":
                        try:
                            time = value['event']['time:timestamp']
                            activity = value['event']['cpee:lifecycle:transition']
                            name = value['event']['concept:name']
                            step_id = value['event']['id:id']
                            val="";
                            if value['event']['lifecycle:transition'] == "start":
                                val = json.dumps(value['event']['list']['data_send'])

                            # MicroVu measurement status -> update MeasureTot.
                            if 'data_values' in value['event']['list'].keys() and name == 'Measure with MicroVu' and activity == 'dataelements/change' :
                                # print(value['event']['list']['data_values']['status'])
                                #print("PART: " + str(part))
                                #print("AKT; " + str(value['event']['list']['data_values']['qr'].split(' ')[1]))

                                # Skip measurements belonging to a different part.
                                if value['event']['list']['data_values']['qr'].split(' ')[1]!=part:
                                    print("NOTPART: " + str(part))
                                    print("NOTAKT; " + str(value['event']['list']['data_values']['qr'].split(' ')[1]))
                                    continue
                                measured_result = 'ok'
                                if value['event']['list']['data_values']['status'] != 'ok':
                                    measured_result = 'nok'
                                query = "Update MeasureTot SET status = '" + measured_result + "' where part = '" + part + "'" # and status = 'NULL'"
                                #print(query)
                                cur.execute(query)
                                conn.commit()

                            # Measurement details received -> MeasureTot +
                            # MeasureDet rows (one Tot per measured attribute,
                            # one Det per single reading).
                            if 'data_receiver' in value['event']['list'].keys() and name!='Fetch':
                                #print("ICHICHICH")
                                for listIn in value['event']['list']['data_receiver']:
                                    #print(listIn.keys())
                                    #print(listIn['name'])
                                    #print(listIn['data'])

                                    if 'results' in listIn['data']:
                                        if 'raw' in listIn['data'] and len(listIn['data']['raw']) > 0:
                                            # print("DOOF")
                                            # print(listIn['data']['raw']['Aufford QR-Code-Eingabe']['Eingabe'][0])
                                            # Skip results captured for another part.
                                            if listIn['data']['raw']['Aufford QR-Code-Eingabe']['Eingabe'][0].split(" ")[1] != part:
                                                continue
                                        #print(listIn['data']['results'])
                                        for entry in listIn['data']['results']:
                                            messungen = listIn['data']['results'][entry]
                                            measured_attr = entry
                                            query = "Insert into MeasureTot('charge','part', 'status', 'measuredAttr', 'instance', 'timestamp') VALUES ('" + charge + "','" + part + "', '" + measured_result + "','" + measured_attr + "', '" + instance + "','" + time + "' )"
                                            #print(query)
                                            cur.execute(query)
                                            conn.commit()
                                            for messung in messungen:
                                                measured_det = messung
                                                measured_value = messungen[messung]['on_scale_from_zero_to_one']
                                                status = messungen[messung]['status']
                                                query = "Insert into MeasureDet('measuredDet','measuredVal', 'status', 'measuredAttr', 'timestamp') VALUES ('" + measured_det + "','" + str(measured_value) + "', '" + status + "','" + measured_attr + "', '" + time + "' )"
                                                cur.execute(query)
                                                conn.commit()
                                    #qrcode=listIn['data']['raw']['Aufford QR-Code-Eingabe']

                                val = json.dumps(value['event']['list']['data_receiver'])

                            # First event of the file: insert or patch this
                            # instance's row, resolving its parent.
                            if not added:
                                cur.execute("Select called_by from instances where instance = '" + instance + "'")
                                parent_inst = cur.fetchone()
                                if parent_inst is None:
                                    print(time)
                                    #print(len(time))
                                    time = time.split('+')[0]
                                    print(len(time))
                                    if len(time)>19:
                                        # Strip fractional seconds so strptime accepts it.
                                        time = time[:-4]
                                    print(time)
                                    datetime_object = DT.strptime(time, '%Y-%m-%d' + 'T' + '%H:%M:%S')
                                    time_upper = str(datetime_object + datetime.timedelta(0, 2)) + "+02:00"
                                    time_lower = str(datetime_object - datetime.timedelta(0, 2)) + "+02:00"
                                    time_lower = time_lower.split(" ")[0] + "T" + time_lower.split(" ")[1]
                                    time_upper = time_upper.split(" ")[0] + "T" + time_upper.split(" ")[1]
                                    #print(time)
                                    #print(time_upper)
                                    #print(time_lower)
                                    # cur.execute("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '2','Null')")
                                    # Fallback: find the parent via a 'calling'
                                    # log entry of the level above within +/-2 s.
                                    query = "Select log.instance from LogEntries log join instances i on i.instance = log.instance where i.level='" + str(level-1) +"' and log.activity ='activity/calling' and log.timestamp>='" + time_lower + "' and log.timestamp<='" + time_upper + "'"
                                    #print(query)
                                    cur.execute(query)
                                    parent_inst = cur.fetchone()
                                    #print(parent_inst)
                                    if parent_inst is None:
                                        #print(parent_inst)
                                        #print(val)
                                        #print("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '3','Null')")
                                        cur.execute("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '" + str(level) +"','Null')")
                                        #conn.commit()
                                    else:
                                        cur.execute("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '" + str(level) +"','" + parent_inst[0] + "')")
                                else:
                                    #print(instance)
                                    #print(parent_inst)
                                    # Placeholder row already exists: fill in
                                    # uuid and name.
                                    query = "Update Instances set uuid = '" + uuid + "', name = '" + logname + "' where called_by = '" + parent_inst[0] + "' and instance='" + instance + "'"
                                    #print(query)
                                    cur.execute(query)
                                    #conn.commit()
                                #con.commit()
                                added = True

                            #print("Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )")

                            #to add calling instances
                            if 'data_receiver' in value['event']['list'].keys() and activity == 'activity/receiving':
                                # if step_id=='a1':
                                for attr in value['event']['list']['data_receiver']:
                                    # print('addNEw')
                                    if type(attr['data']) is dict and 'CPEE-INSTANCE' in attr['data']:
                                        # print('addNEw1')
                                        c_instance = attr['data']['CPEE-INSTANCE'].split('/')[-1]
                                        query = "Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '" + str(
                                            level + 1) + "','" + instance + "')"
                                        cur.execute(query)
                                        conn.commit()
                                        break
                                    elif attr['name'] == 'instance':
                                        # print('addNEw2')
                                        c_instance = attr['data']
                                        query = "Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '" + str(
                                            level + 1) + "','" + instance + "')"
                                        # print(query)
                                        cur.execute(query)
                                        conn.commit()
                                        break
                                    # print("Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '2','"+instance+"')")
                                    # conn.commit()

                            #To avoid adding machine_logs afters receiving status continue
                            if step_id == 'a3' and name== 'Status' and activity == 'dataelements/change':
                                #print("BR")
                                #print(value['event']['list']['data_values'].keys())
                                #print(value['event']['list']['data_values']['lets_continue'])
                                if value['event']['list']['data_values']['lets_continue']==False:
                                    #print("EAK")
                                    query = "Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )"
                                    cur.execute(query)
                                    conn.commit()
                                    break
                            query="Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )"
                            if instance in ('446', '447', '448'):
                                # Debug tracing for a few known instances.
                                print(query, file = f)
                            cur.execute(query)
                            conn.commit()

                        except(KeyError) as e:
                            # Event lacked an expected field: log and count it.
                            print(e, file = f)
                            #print(query)
                            print(activity, file = f)
                            print(time, file = f)
                            counter+=1

                        #except sqlite3.Error as qe:
                        except sqlite3.IntegrityError as qe:
                            #if instance in ('446','447','448'):
                            #print(query, file = f)
                            #print(qe, file = f)
                            print(qe, file = f)
                            counter += 1
                            pass
                            #exit()
                        except sqlite3.Error as qe_all:
                            print("Error_GEN: " + str(query), file=f)
                            print("Error_GEN: " + str(qe_all), file=f)
                        except Exception as e:
                            counter += 1
                            #message = template.format(type(ex).__name__, ex.args)
                            print("Error_GEN" + str(e), file = f)
                            print("Unexpected error!!", file = f)

                        try:
                            # to handle machining: a level-4 Fetch receive
                            # carries a list of OPC-UA samples per receiver.
                            if step_id == 'a1' and name == 'Fetch' and activity == 'activity/receiving':
                                for attr in value['event']['list']['data_receiver']:
                                    for entry in attr['data']:
                                        try:
                                            # NOTE(review): m_id and name both
                                            # bound to entry['ID'] here; name is
                                            # overwritten on the next line.
                                            m_id = name = entry['ID']
                                            name = entry['name']
                                            val = entry['value']
                                            clientHandle = entry['meta']['ClientHandle']
                                            statusCode = entry['meta']['StatusCode']
                                            server_timestamp = entry['meta']['ServerTimestamp']
                                            query = "insert into Machining (timestamp, clientHandle, m_id, status, name, value, level4_step_id, level4_activity, level4_timestamp, level4_instance) VALUES('" + server_timestamp + "','" + clientHandle + "','" + m_id + "','" + statusCode + "','" + name + "','" + val + "','" + step_id + "','" + activity + "','" + time + "','" + instance + "')"
                                            #if instance in ('446', '447', '448', '449', '450', '451','452'):
                                            #print(query, file = f)
                                            #if time == '2018-10-17T14:37:40+02:00':
                                            #print(query, file = f)
                                            print(query, file=f)
                                            cur.execute(query)
                                        except(KeyError) as e:
                                            print("ERROR IN ENTRY: " + str(e), file=f)
                                            # print(query)
                                            print(activity, file=f)
                                            print(time, file=f)
                                            counter += 1
                                        # except sqlite3.Error as qe:
                                        except sqlite3.IntegrityError as qe:
                                            # if instance in ('446', '447', '448'):
                                            # print(query, file = f)
                                            # print(qe, file =
                                            print("Error: " + str(query), file=f)
                                            print("Error: " + str(qe), file=f)
                                            counter += 1
                                            pass
                                            # exit()
                                        except sqlite3.Error as qe_all:
                                            print("Error_GEN: " + str(query), file=f)
                                            print("Error_GEN: " + str(qe_all), file=f)
                                        except Exception as e:
                                            counter += 1
                                            # message = template.format(type(ex).__name__, ex.args)
                                            print("Error_GEN" + str(e), file=f)
                                            print("Unexpected error!!", file=f)
                                    conn.commit()

                        except(KeyError) as e:
                            print("ERROR IN ATTR: " + str(e), file=f)
                            #print(query)
                            print(activity, file = f)
                            print(time, file = f)
                            counter += 1
                            continue
                        # except sqlite3.Error as qe:
                        except Exception as e:
                            counter += 1
                            #message = template.format(type(ex).__name__, ex.args)
                            print("Error_GEN" + str(e), file = f)
                            print("Unexpected error!!", file = f)
                #oknok['ok']=oknok['ok']+ok
                #oknok['nok'] = oknok['nok'] + nok
                #print(oknok)
                #conn.commit()
                #print(counter)
                counter = 0
                input_file.close()  # redundant: the with-block closes the file
            target_path = check_folder(os.path.join(root_path, 'task3/processed'), ('level'+str(level)))
            #shutil.move(os.path.join(origin_path,file), target_path)
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
def recreate_process_level1(root_path,conn):
    """Replay the level-1 (order processing) CPEE logs into the database.

    For each log file: inserts one Instances row (level 1, no parent), one
    LogEntries row per event, and -- for receiver entries named 'url' -- a
    placeholder level-2 Instances row linked to this instance.  Events
    lacking the expected fields raise KeyError and are skipped.
    """
    #variables
    cur = conn.cursor()
    origin_path = os.path.join(root_path,"task3/sortedTemplates/level1")

    #traverse through all logs
    for root, dirs, files in os.walk(origin_path):
        for file in files:
            first = True;  # NOTE(review): set but never read
            print(file)
            with open(os.path.join(root, file), "r") as input_file:
                results = yaml.load_all(input_file)
                #traverse throug single log
                for value in results:
                    try:
                        # Log header: register this trace as a level-1 instance.
                        if 'log' in value:
                            logname = value['log']['trace']['cpee:name']
                            uuid = value['log']['trace']['cpee:uuid']
                            instance = value['log']['trace']['concept:name']
                            #print("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '1','Null')")
                            query = "Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '1','Null')"
                            cur.execute(query)
                            #conn.commit()
                        if 'event' in value:
                            time = value['event']['time:timestamp']

                            # Payload column: data_send on start, otherwise
                            # data_receiver when present.
                            val=""
                            if value['event']['lifecycle:transition']=="start":
                                val = json.dumps(value['event']['list']['data_send'])

                            if 'data_receiver' in value['event']['list'].keys():
                                val = json.dumps(value['event']['list']['data_receiver'])

                            step_id = value['event']['id:id']
                            activity = value['event']['cpee:lifecycle:transition']
                            name = value['event']['concept:name']
                            query = "Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )"
                            #print("Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )")
                            cur.execute(query)
                            #conn.commit()
                            # NOTE(review): raises KeyError when the event has
                            # no data_receiver; the handler below then skips
                            # the commit for this event -- looks intentional,
                            # but verify.
                            for attr in value['event']['list']['data_receiver']:
                                if attr['name'] == "url":
                                    # Spawned child -> provisional level-2 row.
                                    c_instance = attr['data'].split('/')[-1]
                                    query="Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '2','"+instance+"')"
                                    cur.execute(query)
                                    #print("Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '2','"+instance+"')")
                                    #conn.commit()
                            conn.commit()
                    except(KeyError) as e:
                        #print(e)
                        #print(time)
                        continue
                input_file.close()  # redundant: the with-block closes the file
            target_path = check_folder(os.path.join(root_path,'task3/processed'),'level1')
            #shutil.move(os.path.join(origin_path,file), target_path)
|
|
|
+
|
|
|
+
|
|
|
# --- script entry: rebuild BIII.db from the raw CPEE logs in ./logs ---
root_path = os.getcwd()
db = os.path.join(root_path, "BIII.db")
conn = create_connection(db)
# Fix: create_connection returns None on failure; previously the script
# crashed with AttributeError on the first execute() call.
if conn is None:
    sys.exit("Could not open database: " + db)
# Start from a clean slate; delete order is irrelevant (no FK constraints
# are declared in these statements).
for table in ('Machining', 'LogEntries', 'Instances', 'MeasureTot', 'MeasureDet'):
    conn.execute('delete from ' + table)
logs_path = os.path.join(root_path, "logs")
#print(logs_path)
categorize_logs(root_path, logs_path)
recreate_process_level1(root_path, conn)
recreate_process(root_path, conn, 2)
recreate_process(root_path, conn, 3)
recreate_process(root_path, conn, 4)

#recreate_process_level3(root_path,conn,"level3")
#recreate_process_level4(root_path,conn,"level4")
conn.close()
|
|
|
+
|