addToRegDB.py

# Process the benchmark regression results (regression.csv files) and add them
# to the regression benchmark database.
import argparse
import re
import TestScripts.NewParser as parse
import TestScripts.CodeGen
from collections import deque
import os.path
import numpy as np
import pandas as pd
import statsmodels.api as sm
import statsmodels.formula.api as smf
import csv
import TestScripts.Deprecate as d
import sqlite3
import datetime, time
# For sql table creation
MKSTRFIELD=['Regression']
MKBOOLFIELD=['HARDFP', 'FASTMATH', 'NEON', 'HELIUM','UNROLL', 'ROUNDING','OPTIMIZED']
MKINTFIELD=['ID','MAX']
MKREALFIELD=['MAXREGCOEF']
MKDATEFIELD=[]
MKKEYFIELD=['DATE','NAME','CATEGORY', 'PLATFORM', 'CORE', 'COMPILER','TYPE','RUN']
MKKEYFIELDID={'CATEGORY':'categoryid',
              'NAME':'testnameid',
              'DATE':'testdateid',
              'PLATFORM':'platformid',
              'CORE':'coreid',
              'COMPILER':'compilerid',
              'TYPE':'typeid',
              'RUN':'runid'}

# For csv table value extraction
VALSTRFIELD=['TESTNAME','VERSION','Regression']
VALBOOLFIELD=['HARDFP', 'FASTMATH', 'NEON', 'HELIUM','UNROLL', 'ROUNDING','OPTIMIZED']
VALINTFIELD=['ID', 'MAX']
VALREALFIELD=['MAXREGCOEF']
VALDATEFIELD=[]
# Some of those fields may be created by the parsing of other fields
VALKEYFIELD=['DATE','NAME','CATEGORY', 'PLATFORM', 'CORE', 'COMPILER','TYPE']

def joinit(iterable, delimiter):
    # Yield the items of iterable with delimiter interleaved between them
    # (a generator equivalent of str.join for arbitrary items).
    it = iter(iterable)
    yield next(it)
    for x in it:
        yield delimiter
        yield x
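
# Illustrative usage (not part of the original script):
#   "".join(joinit(["a", "b", "c"], ",")) == "a,b,c"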

def tableExists(c, tableName):
    # Check sqlite_master to see whether the table was already created.
    req = (tableName,)
    r = c.execute("SELECT name FROM sqlite_master WHERE type='table' AND name=?", req)
    return(r.fetchone() != None)

def diff(first, second):
    # Items of first which are not in second, keeping the order of first.
    second = set(second)
    return [item for item in first if item not in second]

def getColumns(elem, full):
    # Columns of the csv which must be kept as columns of the sql table.
    colsToKeep = []
    cols = list(full.columns)
    params = diff(elem.params.full, elem.params.summary)
    common = diff(cols + ["TYPE", "RUN"], ['OLDID'] + params)
    for field in common:
        if field in MKSTRFIELD:
            colsToKeep.append(field)
        if field in MKINTFIELD:
            colsToKeep.append(field)
        if field in MKREALFIELD:
            colsToKeep.append(field)
        if field in MKKEYFIELD:
            colsToKeep.append(field)
        if field in MKDATEFIELD:
            colsToKeep.append(field)
        if field in MKBOOLFIELD:
            colsToKeep.append(field)
    return(colsToKeep)

def createTableIfMissing(conn, elem, tableName, full):
    # Create the benchmark table (with its foreign keys) if it is not yet in the db.
    if not tableExists(conn, tableName):
        sql = "CREATE TABLE %s (" % tableName
        cols = list(full.columns)
        params = diff(elem.params.full, elem.params.summary)
        common = diff(cols + ["TYPE", "RUN"], ['OLDID'] + params)
        sql += "%sid INTEGER PRIMARY KEY" % (tableName)
        start = ","
        for field in params:
            sql += " %s\n %s INTEGER" % (start, field)
            start = ","
        for field in common:
            if field in MKSTRFIELD:
                sql += "%s\n %s TEXT" % (start, field)
            if field in MKINTFIELD:
                sql += "%s\n %s INTEGER" % (start, field)
            if field in MKREALFIELD:
                sql += "%s\n %s REAL" % (start, field)
            if field in MKKEYFIELD:
                sql += "%s\n %s INTEGER" % (start, MKKEYFIELDID[field])
            if field in MKDATEFIELD:
                sql += "%s\n %s TEXT" % (start, field)
            if field in MKBOOLFIELD:
                sql += "%s\n %s INTEGER" % (start, field)
            start = ","
        # Create foreign keys
        sql += "%sFOREIGN KEY(typeid) REFERENCES TYPE(typeid)," % start
        sql += "FOREIGN KEY(categoryid) REFERENCES CATEGORY(categoryid),"
        sql += "FOREIGN KEY(testnameid) REFERENCES TESTNAME(testnameid),"
        sql += "FOREIGN KEY(testdateid) REFERENCES TESTDATE(testdateid),"
        sql += "FOREIGN KEY(platformid) REFERENCES PLATFORM(platformid),"
        sql += "FOREIGN KEY(coreid) REFERENCES CORE(coreid),"
        sql += "FOREIGN KEY(compilerid) REFERENCES COMPILER(compilerid),"
        sql += "FOREIGN KEY(runid) REFERENCES RUN(runid)"
        sql += " )"
        conn.execute(sql)
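
# For illustration only (the table and parameter names below are hypothetical),
# the generated statement has roughly this shape:
#   CREATE TABLE BasicMaths (BasicMathsid INTEGER PRIMARY KEY,
#     NB INTEGER,
#     MAX INTEGER,
#     MAXREGCOEF REAL,
#     Regression TEXT,
#     testnameid INTEGER,
#     ...
#     FOREIGN KEY(typeid) REFERENCES TYPE(typeid),
#     ...
#     FOREIGN KEY(runid) REFERENCES RUN(runid) )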

# Find the key or add it in a table
def findInTable(conn, table, keystr, strv, key):
    r = conn.execute("select %s from %s where %s=?" % (key, table, keystr), (strv,))
    result = r.fetchone()
    if result != None:
        return(result[0])
    else:
        conn.execute("INSERT INTO %s(%s) VALUES(?)" % (table, keystr), (strv,))
        conn.commit()
        r = conn.execute("select %s from %s where %s=?" % (key, table, keystr), (strv,))
        result = r.fetchone()
        if result != None:
            #print(result)
            return(result[0])
        else:
            return(None)
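
# Illustrative call (argument values are hypothetical): findInTable(conn, "CORE",
# "coredef", "someCoreDescription", "coreid") returns the existing coreid for that
# core definition, or inserts a new CORE row and returns its id.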

def findInCompilerTable(conn, kind, version):
    # A compiler entry is identified by its kind and version; create it
    # (tagged with the current date) when it is not yet in the db.
    r = conn.execute("select compilerid from COMPILER where compilerkindid=? AND version=?", (kind, version))
    result = r.fetchone()
    if result != None:
        return(result[0])
    else:
        fullDate = datetime.datetime.now()
        dateid = findInTable(conn, "TESTDATE", "date", str(fullDate), "testdateid")
        conn.execute("INSERT INTO COMPILER(compilerkindid,version,testdateid) VALUES(?,?,?)", (kind, version, dateid))
        conn.commit()
        r = conn.execute("select compilerid from COMPILER where compilerkindid=? AND version=? AND testdateid=?", (kind, version, dateid))
        result = r.fetchone()
        if result != None:
            #print(result)
            return(result[0])
        else:
            return(None)

def addRows(conn, elem, tableName, full, runid=0):
    # List of columns we have in DB which is
    # different from the columns in the table
    compilerid = 0
    platformid = 0
    coreid = 0
    keep = getColumns(elem, full)
    cols = list(full.columns)
    params = diff(elem.params.full, elem.params.summary)
    common = diff(["TYPE"] + cols, ['OLDID'] + params)
    colNameList = []
    for c in params + keep:
        if c in MKKEYFIELD:
            colNameList.append(MKKEYFIELDID[c])
        else:
            colNameList.append(c)
    colNames = "".join(joinit(colNameList, ","))
    #print(colNameList)
    #print(colNames)
    #print(full)
    for index, row in full.iterrows():
        sql = "INSERT INTO %s(%s) VALUES(" % (tableName, colNames)
        keys = {}
        # Get data from columns
        for field in common:
            if field in VALSTRFIELD:
                keys[field] = row[field]
                if field == "NAME":
                    name = row[field]
                if field == "TESTNAME":
                    testname = row[field]
                    # Deduce the sample type from the suffix of the test function name
                    if re.match(r'^.*_f64', testname):
                        keys["TYPE"] = "f64"
                    if re.match(r'^.*_f32', testname):
                        keys["TYPE"] = "f32"
                    if re.match(r'^.*_f16', testname):
                        keys["TYPE"] = "f16"
                    if re.match(r'^.*_q31', testname):
                        keys["TYPE"] = "q31"
                    if re.match(r'^.*_q15', testname):
                        keys["TYPE"] = "q15"
                    if re.match(r'^.*_q7', testname):
                        keys["TYPE"] = "q7"
                    if re.match(r'^.*_s8', testname):
                        keys["TYPE"] = "s8"
                    if re.match(r'^.*_u8', testname):
                        keys["TYPE"] = "u8"
                    if re.match(r'^.*_s16', testname):
                        keys["TYPE"] = "s16"
                    if re.match(r'^.*_u16', testname):
                        keys["TYPE"] = "u16"
                    if re.match(r'^.*_s32', testname):
                        keys["TYPE"] = "s32"
                    if re.match(r'^.*_u32', testname):
                        keys["TYPE"] = "u32"
                    if re.match(r'^.*_s64', testname):
                        keys["TYPE"] = "s64"
                    if re.match(r'^.*_u64', testname):
                        keys["TYPE"] = "u64"
            if field in VALINTFIELD:
                keys[field] = row[field]
            if field in VALREALFIELD:
                keys[field] = row[field]
            if field in VALDATEFIELD:
                keys[field] = row[field]
            if field in VALBOOLFIELD:
                keys[field] = row[field]
        keys['RUN'] = runid
        # Get foreign keys and create missing data
        for field in common:
            if field in VALKEYFIELD:
                if field == "CATEGORY":
                    # Remove the type extension to get the category name, so that
                    # all types are mapped to the same category, which helps
                    # for post processing.
                    testField = re.sub(r'^(.*)[:]([^:]+)(F16|F32|F64|Q31|Q15|Q7)$', r'\1', row[field])
                    val = findInTable(conn, "CATEGORY", "category", testField, "categoryid")
                    keys[field] = val
                if field == "NAME":
                    val = findInTable(conn, "TESTNAME", "name", row[field], "testnameid")
                    keys[field] = val
                if field == "DATE":
                    val = findInTable(conn, "TESTDATE", "date", str(row[field]), "testdateid")
                    keys[field] = val
                if field == "CORE":
                    val = findInTable(conn, "CORE", "coredef", row[field], "coreid")
                    keys[field] = val
                    coreid = val
                if field == "PLATFORM":
                    val = findInTable(conn, "PLATFORM", "platform", row[field], "platformid")
                    keys[field] = val
                    platformid = val
                if field == "TYPE":
                    val = findInTable(conn, "TYPE", "type", keys["TYPE"], "typeid")
                    keys[field] = val
                if field == "COMPILER":
                    compilerkind = findInTable(conn, "COMPILERKIND", "compiler", row[field], "compilerkindid")
                    compiler = findInCompilerTable(conn, compilerkind, keys["VERSION"])
                    keys[field] = compiler
                    compilerid = compiler
        # Generate sql command
        start = ""
        for field in params:
            sql += " %s\n %d" % (start, row[field])
            start = ","
        for field in keep:
            if field in MKSTRFIELD or field in MKDATEFIELD:
                sql += " %s\n \"%s\"" % (start, keys[field])
            elif field in keep:
                if field in VALREALFIELD:
                    sql += " %s\n %f" % (start, keys[field])
                else:
                    sql += " %s\n %d" % (start, keys[field])
            start = ","
        sql += " )"
        #print(sql)
        conn.execute(sql)
    conn.commit()
    return({'compilerid': compilerid, 'platformid': platformid, 'coreid': coreid})
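
# For illustration only (column names and values are hypothetical), each csv row
# produces an INSERT of the form:
#   INSERT INTO BasicMaths(NB,ID,MAX,MAXREGCOEF,Regression,testnameid,typeid,runid)
#   VALUES ( 256, 3, 1024, 1.020000, "...", 12, 2, 0 )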

def addConfig(conn, config, fullDate):
    # Record the (compiler, platform, core, date) configuration used for this import.
    dateid = findInTable(conn, "TESTDATE", "date", str(fullDate), "testdateid")
    conn.execute("INSERT INTO CONFIG(compilerid,platformid,coreid,testdateid) VALUES(?,?,?,?)", (config['compilerid'], config['platformid'], config['coreid'], dateid))
    conn.commit()

def getGroup(a):
    # Strip the type suffix so that all type variants of a benchmark share one table.
    return(re.sub(r'^(.+)(F64|F32|F16|Q31|Q15|Q7|U32|U16|U8|S32|S16|S8)$', r'\1', a))
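
# Illustrative (the class names are hypothetical): getGroup("BasicMathsF32") and
# getGroup("BasicMathsQ15") both return "BasicMaths".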

def addOneBenchmark(elem, fullPath, db, group, runid):
    # Import one regression.csv file into the database.
    if os.path.isfile(fullPath):
        full = pd.read_csv(fullPath, dtype={'OLDID': str}, keep_default_na=False)
        fullDate = datetime.datetime.now()
        full['DATE'] = fullDate
        if group:
            tableName = getGroup(group)
        else:
            tableName = getGroup(elem.data["class"])
        conn = sqlite3.connect(db)
        createTableIfMissing(conn, elem, tableName, full)
        config = addRows(conn, elem, tableName, full, runid)
        addConfig(conn, config, fullDate)
        conn.close()

def addToDB(benchmark, dbpath, elem, group, runid):
    # Recursively walk the test tree and import the regression.csv of every
    # non-deprecated node which has parameters.
    if not elem.data["deprecated"]:
        if elem.params:
            benchPath = os.path.join(benchmark, elem.fullPath(), "regression.csv")
            print("Processing %s" % benchPath)
            addOneBenchmark(elem, benchPath, dbpath, group, runid)
        for c in elem.children:
            addToDB(benchmark, dbpath, c, group, runid)

parser = argparse.ArgumentParser(description='Generate summary benchmarks')

parser.add_argument('-f', nargs='?', type=str, default="Output.pickle", help="Pickle path")
parser.add_argument('-b', nargs='?', type=str, default="FullBenchmark", help="Full Benchmark dir path")
#parser.add_argument('-e', action='store_true', help="Embedded test")
parser.add_argument('-o', nargs='?', type=str, default="reg.db", help="Regression benchmark database")
parser.add_argument('-r', nargs='?', type=int, default=0, help="Run ID")
parser.add_argument('others', nargs=argparse.REMAINDER, help="Suite class")

args = parser.parse_args()

if args.f is not None:
    #p = parse.Parser()
    # Parse the test description file
    #root = p.parse(args.f)
    root = parse.loadRoot(args.f)
    d.deprecate(root, args.others)
    if args.others:
        group = args.others[0]
    else:
        group = None
    addToDB(args.b, args.o, root, group, args.r)
else:
    parser.print_help()
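
# Example invocation (arguments shown are the defaults or hypothetical):
#   python addToRegDB.py -f Output.pickle -b FullBenchmark -o reg.db -r 1 DSPBenchmarks
# This loads the parsed test description from Output.pickle, walks the benchmark tree
# under FullBenchmark and appends each regression.csv to reg.db; the trailing argument
# is used as the suite class / group name.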