# BI_ReadLog.py
  1. import sys
  2. import os
  3. import yaml
  4. import shutil
  5. import pprint
  6. import lxml.etree as ET
  7. import lxml.builder
  8. import datetime
  9. from datetime import datetime as DT
  10. import time as t
  11. import json
  12. import sqlite3
  13. from sqlite3 import Error
  14. def create_connection(db):
  15. try:
  16. conn = sqlite3.connect(db)
  17. return conn
  18. except Error as e:
  19. print(e)
  20. return None
  21. def check_folder(target_path,folder):
  22. if not os.path.exists(os.path.join(target_path, folder)):
  23. os.makedirs(os.path.join(target_path, folder))
  24. return os.path.join(target_path, folder)
  25. def get_target_path(prod_step, root_path):
  26. switch = {
  27. "Lowerhousing Order Processing": "level1",
  28. "Lowerhousing Production": "level2",
  29. "Lowerhousing Turn 1 Production": "level3",
  30. "Lowerhousing Turn 2 Production": "level3",
  31. "Lowerhousing Mill 1 Production": "level3",
  32. "Lowerhousing Mill 2 Production": "level3",
  33. "Lowerhousing Turn 1 Machining": "level4",
  34. "Lowerhousing Turn 2 Machining": "level4",
  35. "Lowerhousing Mill 1 Machining": "level4",
  36. "Lowerhousing Mill 2 Machining": "level4"
  37. }
  38. return check_folder(os.path.join(root_path, "task2/sortedTemplates"), switch[prod_step])
  39. def check_timestamp(time, rootNode,search):
  40. #convert time-string to date_time
  41. call_node_list = []
  42. time_conv = time.split("+")[0]
  43. datetime_object = DT.strptime(time_conv, '%Y-%m-%d' + 'T' + '%H:%M:%S')
  44. time_upper = str(datetime_object + datetime.timedelta(0, 2)) + "+02:00"
  45. time_lower = str(datetime_object - datetime.timedelta(0, 2)) + "+02:00"
  46. time_lower = time_lower.split(" ")[0] + "T" + time_lower.split(" ")[1]
  47. time_upper = time_upper.split(" ")[0] + "T" + time_upper.split(" ")[1]
  48. #find suitable timestamp among all timestamps for call level
  49. time_range = rootNode.xpath("//" + search + "/@time")
  50. time_range.sort()
  51. for time in time_range:
  52. if time <= time_upper and time >= time_lower:
  53. call_node_list = rootNode.xpath("//" + search + "[@time = '" + time + "']")
  54. # print(call_node_list)
  55. break
  56. if time > time_upper:
  57. break
  58. return call_node_list
  59. def create_json(root_path,list,file):
  60. with open(os.path.join(root_path, 'task2/'+file), 'w') as fp:
  61. json.dump(list, fp, indent=4)
  62. fp.close()
  63. def categorize_logs(root_path, logs_path):
  64. origin_path = logs_path
  65. for root, dirs, files in os.walk(origin_path):
  66. for file in files:
  67. with open(os.path.join(root, file), "r") as input_file:
  68. results = yaml.load_all(input_file)
  69. target_path=""
  70. for value in results:
  71. if 'event' in value:
  72. try:
  73. val = value['event']['list']['data_values']['info']
  74. target_path=get_target_path(val, root_path)
  75. break
  76. except (KeyError, AttributeError):
  77. continue
  78. input_file.close()
  79. shutil.move(os.path.join(root, file), os.path.join(target_path, file))
  80. def xml_check(xml_file):
  81. if not os.path.isfile(xml_file):
  82. rootNode = ET.Element("ProcessInstances")
  83. tree = ET.ElementTree(rootNode)
  84. level1 = ET.SubElement(rootNode, "level1")
  85. else:
  86. tree = ET.parse(xml_file, ET.XMLParser(remove_blank_text=True))
  87. rootNode = tree.getroot()
  88. if tree.find('level1') is None:
  89. level1 = ET.SubElement(rootNode, "level1")
  90. else:
  91. level1 = tree.find('level1')
  92. return [tree,rootNode,level1]
def recreate_process(root_path, conn, level):
    """Replay the sorted logs of *level* (2..4) into the SQLite db behind *conn*.

    For every YAML log in task2/sortedTemplates/level<level> this inserts
    Instances and LogEntries rows (and, for level-4 'Fetch' events,
    Machining rows), links each instance to its parent either via a
    pre-registered Instances row or via a +/-2 second window over the
    parent level's 'activity/calling' LogEntries, and finally moves the
    processed file to task2/processed/level<level>.

    NOTE(review): all SQL is built by string concatenation — acceptable for
    trusted local logs but not injection-safe; parameterized queries would
    be preferable.
    """
    # variables
    cur = conn.cursor()
    origin_path = os.path.join(root_path, "task2/sortedTemplates/level"+str(level))
    counter = 0;  # counts records skipped by the except handlers (debug aid)
    # traverse through all logs
    for root, dirs, files in os.walk(origin_path):
        for file in files:
            added = False;  # True once this file's Instances row is settled
            print(file)
            with open(os.path.join(root, file), "r") as input_file:
                results = yaml.load_all(input_file)
                # traverse throug single log
                for value in results:
                    #try:
                    # the 'log' document carries the trace metadata reused by
                    # every following 'event' document in the same file
                    if 'log' in value:
                        logname = value['log']['trace']['cpee:name']
                        uuid = value['log']['trace']['cpee:uuid']
                        instance = value['log']['trace']['concept:name']
                    if 'event' in value and not value['event']['id:id']=="external":
                        try:
                            time = value['event']['time:timestamp']
                            activity = value['event']['cpee:lifecycle:transition']
                            name = value['event']['concept:name']
                            step_id = value['event']['id:id']
                            val="";
                            if value['event']['lifecycle:transition'] == "start":
                                val = json.dumps(value['event']['list']['data_send'])
                            # 'Fetch' payloads are huge machining dumps; they are
                            # handled separately below and kept out of LogEntries
                            if 'data_receiver' in value['event']['list'].keys() and name!='Fetch':
                                val = json.dumps(value['event']['list']['data_receiver'])
                            # print(val)
                            if not added:
                                # resolve this instance's parent exactly once per file
                                cur.execute("Select called_by from instances where instance = '" + instance + "'")
                                parent_inst = cur.fetchone()
                                if parent_inst is None:
                                    # not pre-registered: look for a parent-level
                                    # 'activity/calling' entry within +/-2 seconds
                                    datetime_object = DT.strptime(time.split('+')[0], '%Y-%m-%d' + 'T' + '%H:%M:%S')
                                    time_upper = str(datetime_object + datetime.timedelta(0, 2)) + "+02:00"
                                    time_lower = str(datetime_object - datetime.timedelta(0, 2)) + "+02:00"
                                    time_lower = time_lower.split(" ")[0] + "T" + time_lower.split(" ")[1]
                                    time_upper = time_upper.split(" ")[0] + "T" + time_upper.split(" ")[1]
                                    #print(time)
                                    #print(time_upper)
                                    #print(time_lower)
                                    # cur.execute("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '2','Null')")
                                    query = "Select log.instance from LogEntries log join instances i on i.instance = log.instance where i.level='" + str(level-1) +"' and log.activity ='activity/calling' and log.timestamp>='" + time_lower + "' and log.timestamp<='" + time_upper + "'"
                                    #print(query)
                                    cur.execute(query)
                                    parent_inst = cur.fetchone()
                                    #print(parent_inst)
                                    if parent_inst is None:
                                        #print(parent_inst)
                                        #print(val)
                                        #print("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '3','Null')")
                                        # orphan: register the instance without a parent
                                        cur.execute("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '" + str(level) +"','Null')")
                                        #conn.commit()
                                    else:
                                        cur.execute("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '" + str(level) +"','" + parent_inst[0] + "')")
                                else:
                                    #print(instance)
                                    #print(parent_inst)
                                    # the parent pre-registered this instance with
                                    # placeholder values: fill in the real metadata
                                    query = "Update Instances set uuid = '" + uuid + "', name = '" + logname + "' where called_by = '" + parent_inst[0] + "' and instance='" + instance + "'"
                                    #print(query)
                                    cur.execute(query)
                                    #conn.commit()
                                #con.commit()
                                added = True
                            #print("Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )")
                            #to add calling instances
                            if 'data_receiver' in value['event']['list'].keys() and activity == 'activity/receiving':
                                # if step_id=='a1':
                                for attr in value['event']['list']['data_receiver']:
                                    # print('addNEw')
                                    # pre-register the called child (level+1) instance;
                                    # the child's id arrives either as a CPEE-INSTANCE
                                    # url or as a plain 'instance' attribute
                                    if type(attr['data']) is dict and 'CPEE-INSTANCE' in attr['data']:
                                        # print('addNEw1')
                                        c_instance = attr['data']['CPEE-INSTANCE'].split('/')[-1]
                                        query = "Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '" + str(
                                            level + 1) + "','" + instance + "')"
                                        cur.execute(query)
                                        conn.commit()
                                        break
                                    elif attr['name'] == 'instance':
                                        # print('addNEw2')
                                        c_instance = attr['data']
                                        query = "Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '" + str(
                                            level + 1) + "','" + instance + "')"
                                        # print(query)
                                        cur.execute(query)
                                        conn.commit()
                                        break
                                # print("Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '2','"+instance+"')")
                                # conn.commit()
                            #To avoid adding machine_logs afters receiving status continue
                            if step_id == 'a3' and name== 'Status' and activity == 'dataelements/change':
                                #print("BR")
                                #print(value['event']['list']['data_values'].keys())
                                #print(value['event']['list']['data_values']['lets_continue'])
                                if value['event']['list']['data_values']['lets_continue']==False:
                                    #print("EAK")
                                    query = "Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )"
                                    cur.execute(query)
                                    conn.commit()
                                    # final status reached: stop replaying this file
                                    break
                            query="Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )"
                            #print(query)
                            cur.execute(query)
                            conn.commit()
                        except(KeyError) as e:
                            # event lacks one of the expected fields: skip it
                            #print(e)
                            #print(activity)
                            #print(time)
                            counter+=1
                            continue
                        #except sqlite3.Error as qe:
                        except sqlite3.IntegrityError as qe:
                            # duplicate primary key (record already replayed)
                            #print(query)
                            #print(qe)
                            counter += 1
                            pass
                            #exit()
                        except Error as g:
                            counter += 1
                            print(g)
                            print("Unexpected error!!")
                        try:
                            # to handle machining:
                            # level-4 'Fetch' events carry OPC-UA samples that go
                            # into the Machining table instead of LogEntries
                            if step_id == 'a1' and name == 'Fetch' and activity == 'activity/receiving':
                                for attr in value['event']['list']['data_receiver']:
                                    for entry in attr['data']:
                                        # NOTE(review): 'name' (and 'val') are clobbered
                                        # here; safe only because every following use is
                                        # re-assigned, but fragile
                                        m_id = name = entry['ID']
                                        name = entry['name']
                                        val = entry['value']
                                        clientHandle = entry['meta']['ClientHandle']
                                        statusCode = entry['meta']['StatusCode']
                                        server_timestamp = entry['timestamp']
                                        query = "insert into Machining (timestamp, clientHandle, m_id, status, name, value, level4_step_id, level4_activity, level4_timestamp) VALUES('" + server_timestamp + "','" + clientHandle + "','" + m_id + "','" + statusCode + "','" + name + "','" + val + "','" + step_id + "','" + activity + "','" + time + "')"
                                        # print(query)
                                        cur.execute(query)
                                        conn.commit()
                        except(KeyError) as e:
                            # print(e)
                            # print(activity)
                            # print(time)
                            counter += 1
                            continue
                        # except sqlite3.Error as qe:
                        except sqlite3.IntegrityError as qe:
                            print(query)
                            # print(qe)
                            counter += 1
                            pass
                            # exit()
                        except Exception as e:
                            counter += 1
                            #message = template.format(type(ex).__name__, ex.args)
                            print(e)
                            print("Unexpected error!!")
                #conn.commit()
                #print(counter)
                counter = 0
                input_file.close()
            # archive the processed file so reruns do not replay it
            target_path = check_folder(os.path.join(root_path, 'task2/processed'), ('level'+str(level)))
            shutil.move(os.path.join(origin_path,file), target_path)
def recreate_process_level1(root_path,conn):
    """Replay the top-level (level 1) order-processing logs into the db.

    Inserts one Instances row per log (level '1', no parent) and one
    LogEntries row per event, and pre-registers every called level-2
    instance found in a 'url' data_receiver entry.  Processed files are
    moved to task2/processed/level1.

    NOTE(review): SQL is built by string concatenation — not injection-safe;
    acceptable only for trusted local logs.
    """
    #variables
    cur = conn.cursor()
    origin_path = os.path.join(root_path,"task2/sortedTemplates/level1")
    #traverse through all logs
    for root, dirs, files in os.walk(origin_path):
        for file in files:
            first = True;  # NOTE(review): assigned but never read afterwards
            print(file)
            with open(os.path.join(root, file), "r") as input_file:
                results = yaml.load_all(input_file)
                #traverse throug single log
                for value in results:
                    try:
                        # the 'log' document registers the level-1 instance itself
                        if 'log' in value:
                            logname = value['log']['trace']['cpee:name']
                            uuid = value['log']['trace']['cpee:uuid']
                            instance = value['log']['trace']['concept:name']
                            #print("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '1','Null')")
                            query = "Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '1','Null')"
                            cur.execute(query)
                            #conn.commit()
                        if 'event' in value:
                            time = value['event']['time:timestamp']
                            val=""
                            if value['event']['lifecycle:transition']=="start":
                                val = json.dumps(value['event']['list']['data_send'])
                            if 'data_receiver' in value['event']['list'].keys():
                                val = json.dumps(value['event']['list']['data_receiver'])
                            step_id = value['event']['id:id']
                            activity = value['event']['cpee:lifecycle:transition']
                            name = value['event']['concept:name']
                            query = "Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )"
                            #print("Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )")
                            cur.execute(query)
                            #conn.commit()
                            # a 'url' receiver names the level-2 instance this event
                            # calls; events without 'data_receiver' raise KeyError
                            # and fall through to the handler below
                            for attr in value['event']['list']['data_receiver']:
                                if attr['name'] == "url":
                                    c_instance = attr['data'].split('/')[-1]
                                    query="Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '2','"+instance+"')"
                                    cur.execute(query)
                                    #print("Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '2','"+instance+"')")
                                    #conn.commit()
                            conn.commit()
                    except(KeyError) as e:
                        # event lacks an expected field: skip it
                        #print(e)
                        #print(time)
                        continue
                input_file.close()
            # archive the processed file so reruns do not replay it
            target_path = check_folder(os.path.join(root_path,'task2/processed'),'level1')
            shutil.move(os.path.join(origin_path,file), target_path)
# --- script entry point --------------------------------------------------
# Runs immediately on import: there is no `if __name__ == "__main__"` guard.
# Expects BIII.db and lowerhousing/logs/production under the working dir.
root_path = os.getcwd()
db = os.path.join(root_path,"BIII.db")
# NOTE(review): create_connection returns None on failure, which would make
# the calls below (and conn.close()) raise — confirm the db always opens.
conn = create_connection(db)
# one-off cleanup statements, kept commented out for manual reruns
#conn.execute('delete from Machining')
#conn.execute('delete from LogEntries')
#conn.execute('delete from Instances')
logs_path = os.path.join(root_path, "lowerhousing/logs/production")
# 1) sort raw logs into level folders, 2) rebuild the process hierarchy
#    level by level so parents exist before their children are linked
categorize_logs(root_path, logs_path)
recreate_process_level1(root_path,conn)
recreate_process(root_path,conn,2)
recreate_process(root_path,conn,3)
recreate_process(root_path,conn,4)
#recreate_process_level3(root_path,conn,"level3")
#recreate_process_level4(root_path,conn,"level4")
conn.close()
# NOTE(review): dead code below — earlier per-level variants superseded by
# the generic recreate_process(); disabled inside a string literal so it is
# never executed.  Kept for reference; consider deleting.
'''
def recreate_process_level3(root_path,conn, level):
    # variables
    cur = conn.cursor()
    origin_path = os.path.join(root_path, "task2/sortedTemplates/"+level)
    # traverse through all logs
    for root, dirs, files in os.walk(origin_path):
        for file in files:
            added = False;
            print(file)
            with open(os.path.join(root, file), "r") as input_file:
                results = yaml.load_all(input_file)
                # traverse throug single log
                for value in results:
                    try:
                        if 'log' in value:
                            logname = value['log']['trace']['cpee:name']
                            uuid = value['log']['trace']['cpee:uuid']
                            instance = value['log']['trace']['concept:name']
                        if 'event' in value and not value['event']['id:id'] == "external":
                            time = value['event']['time:timestamp']
                            activity = value['event']['cpee:lifecycle:transition']
                            name = value['event']['concept:name']
                            step_id = value['event']['id:id']
                            val = "";
                            if value['event']['lifecycle:transition'] == "start":
                                val = json.dumps(value['event']['list']['data_send'])
                            if 'data_receiver' in value['event']['list'].keys():
                                val = json.dumps(value['event']['list']['data_receiver'])
                            # print(val)
                            if not added:
                                #print("Select called_by from instances where instance = '" + instance + "'")
                                cur.execute("Select called_by from instances where instance = '" + instance + "'")
                                parent_inst=cur.fetchone()
                                if parent_inst is None:
                                    datetime_object = DT.strptime(time.split('+')[0], '%Y-%m-%d' + 'T' + '%H:%M:%S')
                                    time_upper = str(datetime_object + datetime.timedelta(0, 2)) + "+02:00"
                                    time_lower = str(datetime_object - datetime.timedelta(0, 2)) + "+02:00"
                                    time_lower = time_lower.split(" ")[0] + "T" + time_lower.split(" ")[1]
                                    time_upper = time_upper.split(" ")[0] + "T" + time_upper.split(" ")[1]
                                    cur.execute("Select instance from LogEntries where timestamp>='" + time_lower + "' and timestamp<='" + time_upper + "'")
                                    if cur.fetchone() is None:
                                        #print("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '3','Null')")
                                        cur.execute("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '3','Null')")
                                        conn.commit()
                                        # print("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '1','Null')")
                                else:
                                    cur.execute("Update Instances set uuid = '" + uuid + "', name = '" + logname + "' where called_by = '" + parent_inst[0] + "'")
                                    conn.commit()
                                added=True
                            #print("Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )")
                            cur.execute("Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )")
                            conn.commit()
                            if step_id == 'a1':
                                for attr in value['event']['list']['data_receiver']:
                                    if type(attr['data']) is dict:
                                        c_instance = attr['data']['CPEE-INSTANCE'].split('/')[-1]
                                    else:
                                        c_instance = attr['data']
                                    cur.execute(
                                        "Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '4','" + instance + "')")
                                    # print("Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '2','"+instance+"')")
                                    conn.commit()
                                    break
                input_file.close()
            target_path = check_folder(os.path.join(root_path, 'task2/processed'), 'level3')
            #shutil.move(os.path.join(origin_path,file), target_path)
def recreate_process_level2(root_path, conn, level):
    # variables
    cur = conn.cursor()
    origin_path = os.path.join(root_path, "task2/sortedTemplates/"+level)
    # traverse through all logs
    for root, dirs, files in os.walk(origin_path):
        for file in files:
            added = False;
            print(file)
            with open(os.path.join(root, file), "r") as input_file:
                results = yaml.load_all(input_file)
                # traverse throug single log
                for value in results:
                    try:
                        if 'log' in value:
                            logname = value['log']['trace']['cpee:name']
                            uuid = value['log']['trace']['cpee:uuid']
                            instance = value['log']['trace']['concept:name']
                        if 'event' in value and not value['event']['id:id']=="external":
                            time = value['event']['time:timestamp']
                            activity = value['event']['cpee:lifecycle:transition']
                            name = value['event']['concept:name']
                            step_id = value['event']['id:id']
                            val="";
                            if value['event']['lifecycle:transition'] == "start":
                                val = json.dumps(value['event']['list']['data_send'])
                            if 'data_receiver' in value['event']['list'].keys():
                                val = json.dumps(value['event']['list']['data_receiver'])
                            # print(val)
                            if not added:
                                cur.execute("Select called_by from instances where instance = '" + instance + "'")
                                parent_inst = cur.fetchone()
                                if parent_inst is None:
                                    datetime_object = DT.strptime(time.split('+')[0], '%Y-%m-%d' + 'T' + '%H:%M:%S')
                                    time_upper = str(datetime_object + datetime.timedelta(0, 2)) + "+02:00"
                                    time_lower = str(datetime_object - datetime.timedelta(0, 2)) + "+02:00"
                                    time_lower = time_lower.split(" ")[0] + "T" + time_lower.split(" ")[1]
                                    time_upper = time_upper.split(" ")[0] + "T" + time_upper.split(" ")[1]
                                    #print(time)
                                    #print(time_upper)
                                    #print(time_lower)
                                    # cur.execute("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '2','Null')")
                                    query = "Select log.instance from LogEntries log join instances i on i.instance = log.instance where i.level='1' and log.activity ='activity/calling' and log.timestamp>='" + time_lower + "' and log.timestamp<='" + time_upper + "'"
                                    cur.execute(query)
                                    parent_inst = cur.fetchone()
                                    if parent_inst is None:
                                        #print(parent_inst)
                                        #print(val)
                                        # print("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '3','Null')")
                                        cur.execute("Insert into Instances VALUES ('" + instance + "','" + uuid + "', '" + logname + "', '2','Null')")
                                        #conn.commit()
                                else:
                                    #print(instance)
                                    #print(parent_inst)
                                    cur.execute("Update Instances set uuid = '" + uuid + "', name = '" + logname + "' where called_by = '" + parent_inst[0] + "' and instance='" + instance + "'")
                                    #conn.commit()
                                #con.commit()
                                added = True
                            #print("Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )")
                            cur.execute("Insert into LogEntries VALUES ('" + step_id + "','" + time + "', '" + name + "','" + activity + "', '" + val + "','" + instance + "' )")
                            conn.commit()
                            if 'data_receiver' in value['event']['list'].keys():
                                #if step_id=='a1':
                                for attr in value['event']['list']['data_receiver']:
                                    if type(attr['data']) is dict:
                                        c_instance = attr['data']['CPEE-INSTANCE'].split('/')[-1]
                                    else:
                                        c_instance = attr['data']
                                    cur.execute("Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '3','" + instance + "')")
                                    # print("Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '2','"+instance+"')")
                                    #conn.commit()
                                    break
                                conn.commit()
                            if step_id == 'a1':
                                for attr in value['event']['list']['data_receiver']:
                                    if type(attr['data']) is dict:
                                        c_instance = attr['data']['CPEE-INSTANCE'].split('/')[-1]
                                    else:
                                        c_instance = attr['data']
                                    cur.execute(
                                        "Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '4','" + instance + "')")
                                    # print("Insert into Instances VALUES ('" + c_instance + "','Null', 'Null', '2','"+instance+"')")
                                    conn.commit()
                                    break
                    except(KeyError) as e:
                        print(e)
                        print(activity)
                        print(time)
                        continue
                #conn.commit()
                input_file.close()
            target_path = check_folder(os.path.join(root_path, 'task2/processed'), 'level2')
            #shutil.move(os.path.join(origin_path,file), target_path)
'''