nsdk_utils.py 54 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537
  1. #!/usr/bin/env python3
  2. import os
  3. import sys
  4. SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
  5. requirement_file = os.path.abspath(os.path.join(SCRIPT_DIR, "..", "requirements.txt"))
  6. try:
  7. import time
  8. import datetime
  9. import random
  10. import shutil
  11. import signal
  12. import psutil
  13. import re
  14. import copy
  15. import serial
  16. import serial.tools.list_ports
  17. import tempfile
  18. import collections
  19. from collections import OrderedDict
  20. from threading import Thread
  21. import subprocess
  22. import asyncio
  23. import glob
  24. import json
  25. import yaml
  26. import importlib.util
  27. if sys.platform != "win32":
  28. import fcntl
  29. import stat
  30. except Exception as exc:
  31. print("Import Error: %s" % (exc))
  32. print("Please install requried packages using: pip3 install -r %s" % (requirement_file))
  33. sys.exit(1)
  34. try:
  35. from collections.abc import Mapping
  36. except ImportError: # Python 2.7 compatibility
  37. from collections import Mapping
# Global defaults for the SDK test framework. Each entry can be overridden at
# runtime via an environment variable handled by the matching get_sdk_*
# helper below (e.g. SDK_CHECKTAG overrides "sdk_checktag").
SDK_GLOBAL_VARIABLES = {
    "sdk_checktag": "Nuclei SDK Build Time:",  # banner line that marks a fresh SDK boot
    "sdk_check": True,                # check serial output for the SDK banner
    "sdk_banner_tmout": 15,           # seconds to wait for the banner
    "sdk_copy_objects": "elf,map",    # object kinds to copy (comma separated)
    "sdk_copy_objects_flag": False,   # copy build objects by default?
    "sdk_ttyerr_maxcnt": 3,           # max tolerated tty errors
    "sdk_fpgaprog_maxcnt": 3,         # max tolerated FPGA-programming errors
    "sdk_gdberr_maxcnt": 10,          # max tolerated gdb errors
    "sdk_uploaderr_maxcnt": 10,       # max tolerated upload errors
    "sdk_bannertmout_maxcnt": 100,    # max tolerated banner timeouts
    "sdk_verb_buildmsg": True,        # verbose build messages
    "sdk_copy_failobj": True          # also copy objects of failed runs
}
# Placeholder meaning "no/unknown board serial number".
# NOTE(review): "INVAILD" spelling is historic — kept, other scripts may reference it.
INVAILD_SERNO = "xxxxx"
# Status strings used to classify serial-console failures.
BANNER_TMOUT = "banner_timeout"
TTY_OP_ERR = "tty_operate_error"
TTY_UNKNOWN_ERR = "tty_unknown_error"
# Lock file name — presumably used to serialize FPGA programming across jobs; confirm against callers.
FILE_LOCK_NAME = "fpga_program.lock"
# Timestamp format used for logs/reports ("FORMATE" spelling kept for compatibility).
DATE_FORMATE = "%Y-%m-%d %H:%M:%S"
  58. def get_tmpdir():
  59. tempdir = tempfile.gettempdir()
  60. if sys.platform == "win32":
  61. wintempdir = "C:\\Users\\Public\\Temp"
  62. if os.path.isdir(wintempdir) == False:
  63. os.makedirs(wintempdir)
  64. tempdir = wintempdir
  65. return tempdir
  66. # get ci url information
  67. def get_ci_info():
  68. cijoburl = os.environ.get("CI_JOB_URL")
  69. cipipelineurl = os.environ.get("CI_PIPELINE_URL")
  70. if cijoburl and cipipelineurl:
  71. return {"joburl": cijoburl, "pipelineurl": cipipelineurl}
  72. else:
  73. return {}
  74. def get_global_variables():
  75. return SDK_GLOBAL_VARIABLES
  76. def get_sdk_checktag():
  77. checktag = os.environ.get("SDK_CHECKTAG")
  78. if checktag is None:
  79. checktag = SDK_GLOBAL_VARIABLES.get("sdk_checktag")
  80. return checktag
  81. def get_sdk_copyobjects():
  82. cpobjs = os.environ.get("SDK_COPY_OBJECTS")
  83. if cpobjs is None:
  84. cpobjs = SDK_GLOBAL_VARIABLES.get("sdk_copy_objects")
  85. return cpobjs
  86. def get_env_flag(envar, deft=None):
  87. flag = os.environ.get(envar)
  88. if flag is None:
  89. return deft
  90. return flag.lower() in ('true', '1', 't')
  91. def get_sdk_check():
  92. check = get_env_flag("SDK_CHECK")
  93. if check is None:
  94. check = SDK_GLOBAL_VARIABLES.get("sdk_check")
  95. return check
  96. def get_sdk_verb_buildmsg():
  97. check = get_env_flag("SDK_VERB_BUILDMSG")
  98. if check is None:
  99. check = SDK_GLOBAL_VARIABLES.get("sdk_verb_buildmsg")
  100. return check
  101. def get_sdk_copyobjects_flag():
  102. cpflag = get_env_flag("SDK_COPY_OBJECTS_FLAG")
  103. if cpflag is None:
  104. cpflag = SDK_GLOBAL_VARIABLES.get("sdk_copy_objects_flag")
  105. return cpflag
  106. def get_sdk_need_copyobjects(appconfig):
  107. try:
  108. needed = appconfig.get("copy_objects")
  109. except:
  110. needed = False
  111. if needed != True:
  112. # use global flag
  113. needed = get_sdk_copyobjects_flag()
  114. return needed
  115. def get_sdk_copy_failobj():
  116. cpflag = get_env_flag("SDK_COPY_FAILOBJ")
  117. if cpflag is None:
  118. cpflag = SDK_GLOBAL_VARIABLES.get("sdk_copy_failobj")
  119. return cpflag
  120. def get_sdk_banner_tmout():
  121. tmout = os.environ.get("SDK_BANNER_TMOUT")
  122. if tmout is not None:
  123. tmout = int(tmout)
  124. else:
  125. tmout = SDK_GLOBAL_VARIABLES.get("sdk_banner_tmout")
  126. return tmout
  127. # some case may run more than default timeout in app.json
  128. def get_sdk_run_tmout():
  129. tmout = os.environ.get("SDK_RUN_TMOUT")
  130. if tmout is not None:
  131. tmout = int(tmout)
  132. return tmout
  133. def get_sdk_fpga_prog_tmout():
  134. tmout = os.environ.get("FPGA_PROG_TMOUT")
  135. return tmout
  136. def get_sdk_ttyerr_maxcnt():
  137. num = os.environ.get("SDK_TTYERR_MAXCNT")
  138. if num is not None:
  139. num = int(num)
  140. else:
  141. num = SDK_GLOBAL_VARIABLES.get("sdk_ttyerr_maxcnt")
  142. return num
  143. def get_sdk_fpgaprog_maxcnt():
  144. num = os.environ.get("SDK_FPGAPROG_MAXCNT")
  145. if num is not None:
  146. num = int(num)
  147. else:
  148. num = SDK_GLOBAL_VARIABLES.get("sdk_fpgaprog_maxcnt")
  149. return num
  150. def get_sdk_gdberr_maxcnt():
  151. num = os.environ.get("SDK_GDBERR_MAXCNT")
  152. if num is not None:
  153. num = int(num)
  154. else:
  155. num = SDK_GLOBAL_VARIABLES.get("sdk_gdberr_maxcnt")
  156. return num
  157. def get_sdk_bannertmout_maxcnt():
  158. num = os.environ.get("SDK_BANNERTMOUT_MAXCNT")
  159. if num is not None:
  160. num = int(num)
  161. else:
  162. num = SDK_GLOBAL_VARIABLES.get("sdk_bannertmout_maxcnt")
  163. return num
  164. def get_sdk_uploaderr_maxcnt():
  165. num = os.environ.get("SDK_UPLOADERR_MAXCNT")
  166. if num is not None:
  167. num = int(num)
  168. else:
  169. num = SDK_GLOBAL_VARIABLES.get("sdk_uploaderr_maxcnt")
  170. return num
  171. def parse_riscv_arch(arch_str):
  172. """Parse RISC-V architecture string to standardized format"""
  173. if not arch_str:
  174. return None
  175. arch_str = arch_str.lower()
  176. if not arch_str.startswith('rv32') and not arch_str.startswith('rv64'):
  177. return None
  178. features = {
  179. 'xlen': arch_str[:4],
  180. 'base': '',
  181. 'exts': set()
  182. }
  183. # Parse standard ISA string
  184. std_isa = arch_str[4:].split('_')[0]
  185. for c in std_isa:
  186. if c in 'iemafdcbpkv':
  187. # don't add b k p into base architecture
  188. if c == 'b':
  189. # for nuclei b extension contains zba/zbb/zbc/zbs
  190. features['exts'].add('zba')
  191. features['exts'].add('zbb')
  192. features['exts'].add('zbc')
  193. features['exts'].add('zbs')
  194. elif c == 'k':
  195. # for nuclei k extension contains zba/zbb/zbc/zbs
  196. features['exts'].add('zk') # zk -> zkn zkr zkt
  197. features['exts'].add('zks') # zks -> zbkb-sc zbkc-sc zbkx-sc zksed zksh
  198. features['exts'].add('zkn') # zkn -> zbkb-sc zbkc-sc zbkx-sc zkne zknd zknh
  199. features['exts'].add('zkr')
  200. features['exts'].add('zkt')
  201. features['exts'].add('zkne')
  202. features['exts'].add('zknd')
  203. features['exts'].add('zknh')
  204. features['exts'].add('zksed')
  205. features['exts'].add('zksh')
  206. features['exts'].add('zbkb-sc')
  207. features['exts'].add('zbkc-sc')
  208. features['exts'].add('zbkx-sc')
  209. elif c == 'v':
  210. features['exts'].add('zve64d')
  211. features['exts'].add('zvl128b')
  212. features['base'] += 'v'
  213. elif c == 'p':
  214. features['exts'].add('xxldsp')
  215. else:
  216. features['base'] += c
  217. # when base architecture has i extension, then e extension is implied
  218. if 'i' in features['base']:
  219. features['base'] += 'e'
  220. # Parse extensions
  221. if '_' in arch_str:
  222. exts = arch_str.split('_')[1:]
  223. for ext in exts:
  224. ext = ext.strip()
  225. if ext == "":
  226. continue
  227. if ext in ('zvl128', 'zvl256', 'zvl512', 'zvl1024'):
  228. ext = ext + 'b'
  229. elif ext in ('zvb', 'zvk', 'zc'):
  230. ext = ext + '*'
  231. elif ext in ('dsp'):
  232. ext = 'xxl' + ext
  233. elif ext in ('dspn1', 'dspn2', 'dspn3'):
  234. ext = 'xxl' + ext + 'x'
  235. features['exts'].add(ext)
  236. # For nuclei zc* can also configured as c extension via mmisc_ctl csr ZCMT_ZCMP_EN bit
  237. if 'zc*' in features['exts']:
  238. features['base'] += 'c'
  239. # For nuclei cpu, zifencei and zicsr are implied
  240. features['exts'].add('zicsr')
  241. features['exts'].add('zifencei')
  242. # zve64d imply zve64f, zve64f imply zve64x and zve32f
  243. # zve64x imply zve32x, zve32f imply zve32x
  244. if 'zve64d' in features['exts']:
  245. features['exts'].add('zve64f')
  246. if 'zve64f' in features['exts']:
  247. features['exts'].add('zve32f')
  248. features['exts'].add('zve64x')
  249. if 'zve64x' in features['exts']:
  250. features['exts'].add('zve32x')
  251. if 'zve32f' in features['exts']:
  252. features['exts'].add('zve32x')
  253. if 'xxldspn3x' in features['exts']:
  254. features['exts'].add('xxldspn2x')
  255. if 'xxldspn2x' in features['exts']:
  256. features['exts'].add('xxldspn1x')
  257. if 'xxldspn1x' in features['exts']:
  258. features['exts'].add('xxldsp')
  259. if 'zvl1024b' in features['exts']:
  260. features['exts'].add('zvl512b')
  261. if 'zvl512b' in features['exts']:
  262. features['exts'].add('zvl256b')
  263. if 'zvl256b' in features['exts']:
  264. features['exts'].add('zvl128b')
  265. if 'zve64d' in features['exts'] and 'zvl128b' in features['exts']:
  266. features['base'] += 'v'
  267. return features
  268. def get_nuclei_sdk_root():
  269. sdk_root = os.environ.get("NUCLEI_SDK_ROOT")
  270. if not sdk_root:
  271. sdk_root = os.path.abspath(os.path.join(SCRIPT_DIR, "..", "..", ".."))
  272. return sdk_root
  273. def parse_makefile_core():
  274. sdk_root = get_nuclei_sdk_root()
  275. makefile_core = os.path.join(sdk_root, "Build", "Makefile.core")
  276. core_archs = {}
  277. if not os.path.exists(makefile_core):
  278. return core_archs
  279. with open(makefile_core, 'r') as f:
  280. for line in f:
  281. line = line.strip()
  282. if not line or line.startswith('#'):
  283. continue
  284. if '_CORE_ARCH_ABI' in line:
  285. parts = line.split('=')
  286. if len(parts) == 2:
  287. core_name = parts[0].split('_CORE_ARCH_ABI')[0].lower()
  288. arch_parts = parts[1].strip().split()
  289. if len(arch_parts) >= 2:
  290. core_archs[core_name] = arch_parts[0]
  291. return core_archs
  292. def check_arch_compatibility(core_arch, arch_ext, supported_arch):
  293. """Check if core architecture with extensions is compatible with supported architecture"""
  294. if not supported_arch:
  295. return True
  296. supported = parse_riscv_arch(supported_arch)
  297. if not supported:
  298. return True
  299. # Combine core_arch with arch_ext
  300. full_arch = core_arch
  301. if arch_ext:
  302. full_arch += arch_ext
  303. current = parse_riscv_arch(full_arch)
  304. if not current:
  305. return False
  306. # Check XLEN compatibility
  307. if current['xlen'] != supported['xlen']:
  308. return False
  309. # Check base ISA compatibility
  310. for c in current['base']:
  311. if c not in supported['base']:
  312. return False
  313. # Check extensions compatibility
  314. # For current['exts'] containing extensions (no * suffix)
  315. # For supported['exts'] containing extensions (may have * suffix)
  316. # Extension matching should handle wildcards (*) in supported extensions
  317. for ext in current['exts']:
  318. found_match = False
  319. for supported_ext in supported['exts']:
  320. if supported_ext.endswith('*'):
  321. # Handle wildcard matching
  322. if ext.startswith(supported_ext[:-1]):
  323. found_match = True
  324. break
  325. elif ext == supported_ext:
  326. # Handle exact matching
  327. found_match = True
  328. break
  329. if not found_match:
  330. return False
  331. return True
  332. def filter_app_config(appconfig):
  333. """
  334. Filter application configurations based on architecture and extension compatibility.
  335. This function examines the build configuration of an application and determines if it should
  336. be filtered out based on architecture support and extension compatibility.
  337. Parameters:
  338. appconfig (dict): A dictionary containing application configuration.
  339. Expected to have a 'build_config' key with CORE, ARCH_EXT details.
  340. Returns:
  341. tuple: A pair of (bool, str) where:
  342. - bool: True if the configuration should be filtered out, False otherwise
  343. - str: A message explaining why the configuration was filtered (empty if not filtered)
  344. Environment Variables Used:
  345. - SDK_SUPPORT_ARCH: Supported architecture specifications
  346. - SDK_IGNORED_EXTS: Underscore-separated list of extensions to ignore
  347. Example:
  348. >>> config = {
  349. ... "build_config": {
  350. ... "CORE": "n307",
  351. ... "ARCH_EXT": "p_zfh"
  352. ... }
  353. ... }
  354. >>> filter_app_config(config)
  355. (False, "")
  356. Notes:
  357. - The function handles both single-letter and multi-letter extensions
  358. - Architecture extensions can be specified with or without leading underscore
  359. - Returns (False, "") if any required configuration is missing or in case of errors
  360. """
  361. if not isinstance(appconfig, dict):
  362. return False, ""
  363. try:
  364. build_config = appconfig.get("build_config", None)
  365. if build_config is None or len(build_config) == 0:
  366. return False, ""
  367. # Check SDK_SUPPORT_ARCH compatibility
  368. core = build_config.get("CORE", "").lower()
  369. arch_ext = build_config.get("ARCH_EXT", "")
  370. supported_arch = os.environ.get("SDK_SUPPORT_ARCH")
  371. if core and supported_arch:
  372. core_archs = parse_makefile_core()
  373. if core in core_archs:
  374. core_arch = core_archs[core]
  375. if not check_arch_compatibility(core_arch, arch_ext, supported_arch):
  376. return True, f"Core {core} with extensions {arch_ext} not supported by {supported_arch}"
  377. # Continue with existing extension filtering
  378. archext = build_config.get("ARCH_EXT", None)
  379. if archext is None or archext.strip() == "":
  380. return False, ""
  381. first_part = None
  382. rest_part = None
  383. if archext.startswith("_") == False:
  384. if "_" in archext:
  385. first_part, rest_part = archext.split("_", 1)
  386. else:
  387. if archext.startswith("z"):
  388. rest_part = archext
  389. else:
  390. first_part = archext
  391. else:
  392. rest_part = archext
  393. ignored_exts = os.environ.get("SDK_IGNORED_EXTS")
  394. if ignored_exts is None:
  395. return False, ""
  396. unique_exts = list(
  397. OrderedDict.fromkeys(part.strip() for part in ignored_exts.split('_'))
  398. )
  399. if len(unique_exts) == 1 and unique_exts[0] == "":
  400. return False, ""
  401. for ext in unique_exts:
  402. if len(ext) == 0:
  403. continue
  404. if len(ext) == 1:
  405. # handle single letter
  406. if first_part and ext in first_part:
  407. return True, "Filtered by %s extension" %(ext)
  408. else:
  409. # handle multi letter
  410. if rest_part and ext in rest_part:
  411. return True, "Filtered by %s extension" % (ext)
  412. except:
  413. pass
  414. return False, ""
  415. class NThread(Thread):
  416. def __init__(self, func, args):
  417. super(NThread, self).__init__()
  418. self.func = func
  419. self.args = args
  420. def run(self):
  421. self.result = self.func(*self.args)
  422. def get_result(self):
  423. try:
  424. return self.result
  425. except Exception:
  426. return None
  427. YAML_OK=0
  428. YAML_NOFILE=1
  429. YAML_INVAILD=2
  430. def load_yaml(file):
  431. if isinstance(file, str) == False or os.path.isfile(file) == False:
  432. return YAML_NOFILE, None
  433. try:
  434. data = yaml.load(open(file, 'r'), Loader=yaml.FullLoader)
  435. return YAML_OK, data
  436. except:
  437. print("Error: %s is an invalid yaml file!" % (file))
  438. return YAML_INVAILD, None
  439. def save_yaml(file, data):
  440. if isinstance(file, str) == False:
  441. return False
  442. try:
  443. with open(file, "w") as cf:
  444. yaml.dump(data, cf, indent=4)
  445. return True
  446. except:
  447. print("Error: Data can't be serialized to yaml file!")
  448. return False
  449. def get_specific_key_value(dictdata:dict, key):
  450. if not dictdata:
  451. print("Error: dictdata doesn't exist!")
  452. return None
  453. value = dictdata.get(key, None)
  454. if not value:
  455. print("Error, key %s has no value!" % (key))
  456. return None
  457. return value
  458. JSON_OK=0
  459. JSON_NOFILE=1
  460. JSON_INVAILD=2
  461. def load_json(file):
  462. if isinstance(file, str) == False or os.path.isfile(file) == False:
  463. return JSON_NOFILE, None
  464. try:
  465. data = json.load(open(file, 'r'))
  466. return JSON_OK, data
  467. except:
  468. print("Error: %s is an invalid json file!" % (file))
  469. return JSON_INVAILD, None
  470. def save_json(file, data):
  471. if isinstance(file, str) == False:
  472. return False
  473. try:
  474. with open(file, "w") as cf:
  475. json.dump(data, cf, indent=4)
  476. return True
  477. except:
  478. print("Error: Data can't be serialized to json file!")
  479. return False
  480. def save_csv(file, csvlines, display=True):
  481. if isinstance(csvlines, list) == False:
  482. return False
  483. # Flush stdout buffer
  484. sys.stdout.flush()
  485. try:
  486. with open(file, "w") as cf:
  487. for line in csvlines:
  488. csvline = line + "\n"
  489. cf.write(csvline)
  490. cf.flush()
  491. if display:
  492. try:
  493. # sometimes facing issue BlockingIOError: [Errno 11] write could not complete without blocking here
  494. # maybe related to https://bugs.python.org/issue40634 since we are using async in this tool
  495. sys.stdout.flush()
  496. print("CSV, %s" % line)
  497. except:
  498. pass
  499. return True
  500. except:
  501. print("Error: Data can't be saved to file!")
  502. return False
  503. # Return possible serports, return a list of possible serports
  504. def find_possible_serports():
  505. comports = serial.tools.list_ports.comports()
  506. serports = [ port.device for port in comports ]
  507. return serports
  508. def find_serport_by_no(serno):
  509. comports = serial.tools.list_ports.comports()
  510. serport = None
  511. for port in comports:
  512. cur_serno = port.serial_number
  513. cur_dev = port.device
  514. cur_loc = port.location
  515. if cur_serno is None:
  516. continue
  517. if sys.platform == "win32":
  518. if (serno + 'B') == cur_serno:
  519. serport = cur_dev
  520. break
  521. else:
  522. if serno != cur_serno:
  523. continue
  524. # serial is the second device of the composite device
  525. if cur_loc.endswith(".1"):
  526. serport = cur_dev
  527. break
  528. # serport founded
  529. return serport
  530. def find_most_possible_serport():
  531. serports = find_possible_serports()
  532. if len(serports) > 0:
  533. # sort the ports
  534. serports.sort()
  535. # get the biggest port
  536. # for /dev/ttyUSB0, /dev/ttyUSB1, get /dev/ttyUSB1
  537. # for COM16, COM17, get COM17
  538. return serports[-1]
  539. else:
  540. return None
  541. def update_list_items(list1, list2):
  542. """
  543. Merges unique elements from list2 into list1.
  544. This function appends items from list2 to list1 only if they are not already present in list1.
  545. The function modifies list1 in-place and returns the updated list1.
  546. Args:
  547. list1: The destination list to be updated with unique elements
  548. list2: The source list containing elements to be added
  549. Returns:
  550. The updated list1 with unique elements from list2 appended
  551. """
  552. for i in range(0, len(list2)):
  553. if list2[i] not in list1:
  554. list1.append(list2[i])
  555. return list1
  556. # get from https://gist.github.com/angstwad/bf22d1822c38a92ec0a9
  557. def dict_merge(dct, merge_dct):
  558. """ Recursive dict merge. Inspired by :meth:``dict.update()``, instead of
  559. updating only top-level keys, dict_merge recurses down into dicts nested
  560. to an arbitrary depth, updating keys. The ``merge_dct`` is merged into
  561. ``dct``.
  562. If both values for a key are lists, they are merged by combining unique elements.
  563. :param dct: dict onto which the merge is executed
  564. :param merge_dct: dct merged into dct
  565. :return: None
  566. """
  567. for k, v in merge_dct.items():
  568. if (k in dct and isinstance(dct[k], dict)
  569. and isinstance(merge_dct[k], Mapping)):
  570. dict_merge(dct[k], merge_dct[k])
  571. elif (k in dct and isinstance(dct[k], list)
  572. and isinstance(merge_dct[k], list)):
  573. # Merge lists by combining unique elements
  574. dct[k] = update_list_items(dct[k], merge_dct[k])
  575. else:
  576. dct[k] = merge_dct[k]
  577. def get_make_csv(app, config):
  578. make_options = " "
  579. SUPPORT_KEYS = ["SOC", "BOARD", "CORE", "DOWNLOAD", "VARIANT", \
  580. "BENCH_UNIT", "BENCH_FLAGS", "ARCH_EXT", "STDCLIB", "SILENT", "V"]
  581. csv_print = "CSV, APP=%s" % (app)
  582. if isinstance(config, dict):
  583. for key in config:
  584. if key not in SUPPORT_KEYS:
  585. continue
  586. option = "%s=%s"%(key, config[key])
  587. make_options = " %s %s " % (make_options, option)
  588. csv_print = "%s, %s" % (csv_print, option)
  589. return make_options, csv_print
  590. def try_decode_bytes(bytes):
  591. ENCODING_LIST = ['utf-8', 'gbk', 'gb18030']
  592. destr = ""
  593. for encoding in ENCODING_LIST:
  594. try:
  595. destr = bytes.decode(encoding)
  596. break
  597. except:
  598. continue
  599. return destr
def kill_async_subprocess(proc):
    """Forcefully terminate *proc* and its whole child-process tree.

    Best-effort: every kill step is wrapped so a vanished or protected
    process never raises out of this function. *proc* may be None (no-op
    besides the timing message). Uses SIGKILL on POSIX, SIGTERM elsewhere.
    """
    startticks = time.time()
    if proc is not None:
        try:
            kill_sig = signal.SIGTERM
            if sys.platform != "win32":
                # POSIX: use the non-catchable SIGKILL
                kill_sig = signal.SIGKILL
            print("Try to Kill process id %d now" %(proc.pid))
            parent_proc = psutil.Process(proc.pid)
            try:
                # This might cause PermissionError: [Errno 1] Operation not permitted: '/proc/1/stat' issue
                child_procs = parent_proc.children(recursive=True)
                # kill children first so the parent can't respawn work
                for child_proc in child_procs:
                    print("Kill child process %s, pid %d" %(child_proc.name(), child_proc.pid))
                    try:
                        os.kill(child_proc.pid, kill_sig) # kill child process
                    except:
                        continue
            except Exception as exc:
                print("Warning: kill child process failed with %s" %(exc))
            if parent_proc.is_running():
                print("Kill parent process %s, pid %d" %(parent_proc.name(), parent_proc.pid))
                if sys.platform != "win32":
                    try:
                        # prefer killing the whole process group
                        os.killpg(parent_proc.pid, kill_sig) # kill parent process
                    except:
                        os.kill(parent_proc.pid, kill_sig) # kill parent process
                else:
                    os.kill(parent_proc.pid, kill_sig) # kill parent process
                # kill using process.kill again as a last resort
                if parent_proc.is_running():
                    proc.kill()
        except psutil.NoSuchProcess:
            # already gone — nothing to do
            pass
        except Exception as exc:
            print("Warning: kill process failed with %s" %(exc))
    # show time cost for kill process
    print("kill process used %.2f seconds" %((time.time() - startticks)))
    sys.stdout.flush()
    pass
  640. def kill_subprocess(proc):
  641. try:
  642. if proc.poll() is None: # process is still running
  643. kill_async_subprocess(proc)
  644. except:
  645. pass
  646. pass
  647. def import_module(module_name, file_path):
  648. if file_path is None or os.path.isfile(file_path) == False:
  649. return None
  650. try:
  651. spec = importlib.util.spec_from_file_location(module_name, file_path)
  652. module = importlib.util.module_from_spec(spec)
  653. spec.loader.exec_module(module)
  654. except:
  655. module = None
  656. return module
  657. def import_function(func_name, file_path):
  658. module_name = "tempmodule_%s" % (random.randint(0, 10000))
  659. tmpmodule = import_module(module_name, file_path)
  660. if tmpmodule is None:
  661. return None
  662. if func_name not in dir(tmpmodule):
  663. return None
  664. return getattr(tmpmodule, func_name)
# Status codes returned by run_command() / run_cmd_and_check_async()
COMMAND_RUNOK = 0          # command completed with exit code 0
COMMAND_INVALID = 1        # command argument was not a string
COMMAND_FAIL = 2           # command completed with a non-zero exit code
COMMAND_INTERRUPTED = 3    # interrupted by CTRL-C
COMMAND_EXCEPTION = 4      # unexpected exception while running
COMMAND_NOTAPP = 5         # not an application — presumably set by callers; not used in this file
COMMAND_TIMEOUT = 6        # overall timeout reached
COMMAND_TIMEOUT_READ = 7   # read timeout reached
# Overall run-status codes
RUNSTATUS_OK = 0
RUNSTATUS_FAIL = 1
RUNSTATUS_NOTSTART = 2
  676. def run_command(command, show_output=True, logfile=None, append=False):
  677. logfh = None
  678. ret = COMMAND_RUNOK
  679. cmd_elapsed_ticks = 0
  680. if isinstance(command, str) == False:
  681. return COMMAND_INVALID, cmd_elapsed_ticks
  682. startticks = time.time()
  683. process = None
  684. try:
  685. if isinstance(logfile, str):
  686. if append:
  687. logfh = open(logfile, "ab")
  688. else:
  689. logfh = open(logfile, "wb")
  690. if logfh:
  691. # record command run in log file
  692. logfh.write(("Execute Command %s\n" % (command)).encode())
  693. process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, \
  694. stderr=subprocess.STDOUT)
  695. while True:
  696. line = process.stdout.readline()
  697. if (not line) and process.poll() is not None:
  698. break
  699. if show_output:
  700. print(try_decode_bytes(line), end="")
  701. if logfh:
  702. logfh.write(line)
  703. time.sleep(0.01)
  704. process.communicate(30)
  705. if process.returncode != 0:
  706. ret = COMMAND_FAIL
  707. except (KeyboardInterrupt):
  708. print("Key CTRL-C pressed, command executing stopped!")
  709. ret = COMMAND_INTERRUPTED
  710. except subprocess.TimeoutExpired:
  711. ret = COMMAND_TIMEOUT
  712. except Exception as exc:
  713. print("Unexpected exception happened: %s" %(str(exc)))
  714. ret = COMMAND_EXCEPTION
  715. finally:
  716. kill_subprocess(process)
  717. if process:
  718. del process
  719. if logfh:
  720. logfh.close()
  721. cmd_elapsed_ticks = time.time() - startticks
  722. return ret, cmd_elapsed_ticks
  723. async def run_cmd_and_check_async(command, timeout:int, checks:dict, checktime=time.time(), sdk_check=False, logfile=None, show_output=False, banner_timeout=3):
  724. logfh = None
  725. ret = COMMAND_FAIL
  726. cmd_elapsed_ticks = 0
  727. if isinstance(command, str) == False:
  728. return COMMAND_INVALID, cmd_elapsed_ticks
  729. startticks = time.time()
  730. process = None
  731. check_status = False
  732. pass_checks = checks.get("PASS", [])
  733. fail_checks = checks.get("FAIL", [])
  734. def test_in_check(string, checks):
  735. if type(checks) == list:
  736. for check in checks:
  737. if check in string:
  738. return True
  739. return False
  740. NSDK_CHECK_TAG = get_sdk_checktag()
  741. if get_sdk_verb_buildmsg():
  742. print("Checker used: ", checks)
  743. print("SDK Checker Tag \"%s\", checker enable %s" % (NSDK_CHECK_TAG, sdk_check))
  744. print("SDK run timeout %s, banner timeout %s" % (timeout, banner_timeout))
  745. check_finished = False
  746. start_time = time.time()
  747. serial_log = ""
  748. nsdk_check_timeout = banner_timeout
  749. sdk_checkstarttime = time.time()
  750. try:
  751. if isinstance(logfile, str):
  752. logfh = open(logfile, "wb")
  753. if sys.platform != "win32":
  754. # add exec to running command to avoid create a process called /bin/sh -c
  755. # and if you kill that process it will kill this sh process not the really
  756. # command process you want to kill
  757. process = await asyncio.create_subprocess_shell("exec " + command, \
  758. stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT)
  759. else:
  760. process = await asyncio.create_subprocess_shell(command, \
  761. stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT)
  762. while (time.time() - start_time) < timeout:
  763. try:
  764. linebytes = await asyncio.wait_for(process.stdout.readline(), 1)
  765. except asyncio.TimeoutError:
  766. if sdk_check == True:
  767. linebytes = None
  768. else:
  769. continue
  770. except KeyboardInterrupt:
  771. print("Key CTRL-C pressed, command executing stopped!")
  772. break
  773. except:
  774. break
  775. if linebytes:
  776. line = str(try_decode_bytes(linebytes)).replace('\r', '')
  777. else:
  778. line = ""
  779. if sdk_check == True:
  780. if (time.time() - sdk_checkstarttime) > nsdk_check_timeout:
  781. print("No SDK banner found in %s s, quit now!" % (nsdk_check_timeout))
  782. ret = COMMAND_TIMEOUT
  783. check_status = False
  784. break
  785. if line == "":
  786. continue
  787. if show_output:
  788. print("XXX Check " + line, end='')
  789. if NSDK_CHECK_TAG in line:
  790. timestr = line.split(NSDK_CHECK_TAG)[-1].strip()
  791. cur_time = time.mktime(time.strptime(timestr, "%b %d %Y, %H:%M:%S"))
  792. if int(cur_time) >= int(checktime):
  793. sdk_check = False
  794. line = NSDK_CHECK_TAG + " " + timestr + "\n"
  795. serial_log = serial_log + str(line)
  796. else:
  797. serial_log = serial_log + str(line)
  798. if show_output:
  799. print(line, end='')
  800. if check_finished == False:
  801. if test_in_check(line, fail_checks):
  802. check_status = False
  803. check_finished = True
  804. if test_in_check(line, pass_checks):
  805. check_status = True
  806. check_finished = True
  807. if check_finished:
  808. ret = COMMAND_RUNOK
  809. # record another 2 seconds by reset start_time and timeout to 2
  810. start_time = time.time()
  811. timeout = 1
  812. if logfh and linebytes:
  813. logfh.write(linebytes)
  814. time.sleep(0.01)
  815. except (KeyboardInterrupt):
  816. print("Key CTRL-C pressed, command executing stopped!")
  817. ret = COMMAND_INTERRUPTED
  818. except Exception as exc:
  819. print("Unexpected exception happened: %s" %(str(exc)))
  820. ret = COMMAND_EXCEPTION
  821. finally:
  822. # kill this process
  823. kill_async_subprocess(process)
  824. if logfh:
  825. logfh.close()
  826. cmd_elapsed_ticks = time.time() - startticks
  827. return check_status, cmd_elapsed_ticks
  828. def run_cmd_and_check(command, timeout:int, checks:dict, checktime=time.time(), sdk_check=False, logfile=None, show_output=False, banner_timeout=30):
  829. loop = asyncio.get_event_loop()
  830. try:
  831. ret, cmd_elapsed_ticks = loop.run_until_complete( \
  832. run_cmd_and_check_async(command, timeout, checks, checktime, sdk_check, logfile, show_output, banner_timeout))
  833. except KeyboardInterrupt:
  834. print("Key CTRL-C pressed, command executing stopped!")
  835. ret, cmd_elapsed_ticks = False, 0
  836. finally:
  837. if sys.platform != "win32":
  838. os.system("stty echo 2> /dev/null")
  839. return ret, cmd_elapsed_ticks
  840. def find_files(fndir, pattern, recursive=False):
  841. fndir = os.path.normpath(fndir)
  842. files = glob.glob(os.path.join(fndir, pattern), recursive=recursive)
  843. return files
  844. def get_logfile(appdir, startdir, logdir, logname):
  845. relpath = os.path.relpath(appdir, startdir)
  846. _, startdir_basename = os.path.splitdrive(startdir)
  847. applogdir = os.path.join(os.path.relpath(logdir + os.sep + startdir_basename), relpath)
  848. applog = os.path.relpath(os.path.join(applogdir, logname))
  849. applogdir = os.path.dirname(applog)
  850. if os.path.isdir(applogdir) == False:
  851. os.makedirs(applogdir)
  852. return applog
  853. def strtofloat(value):
  854. fval = 0.0
  855. try:
  856. match = re.search(r'[+-]?\d*\.?\d+([Ee][+-]?\d+)?', value.strip())
  857. if match:
  858. fval = float(match.group())
  859. except:
  860. pass
  861. return fval
  862. def check_tool_version(ver_cmd, ver_check):
  863. vercmd_log = tempfile.mktemp()
  864. ret, _ = run_command(ver_cmd, show_output=False, logfile=vercmd_log)
  865. check_sts = False
  866. verstr = None
  867. if ret == COMMAND_RUNOK:
  868. with open(vercmd_log, 'r', errors='ignore') as vlf:
  869. for line in vlf.readlines():
  870. if ver_check in line:
  871. verstr = line.strip()
  872. check_sts = True
  873. break
  874. os.remove(vercmd_log)
  875. return check_sts, verstr
  876. def get_elfsize(elf):
  877. sizeinfo = {"text": -1, "data": -1, "bss": -1, "total": -1}
  878. if os.path.isfile(elf) == False:
  879. return sizeinfo
  880. for sizetool in [ "riscv-nuclei-elf-size", "riscv64-unknown-elf-size", "size" ]:
  881. sizecmd = "%s %s" % (sizetool, elf)
  882. sizelog = tempfile.mktemp()
  883. ret, _ = run_command(sizecmd, show_output=False, logfile=sizelog)
  884. if ret == COMMAND_RUNOK:
  885. with open(sizelog, "r", errors='ignore') as sf:
  886. lines = sf.readlines()
  887. datas = lines[-1].strip().split()
  888. sizeinfo["text"] = int(datas[0])
  889. sizeinfo["data"] = int(datas[1])
  890. sizeinfo["bss"] = int(datas[2])
  891. sizeinfo["total"] = int(datas[3])
  892. os.remove(sizelog)
  893. break
  894. else:
  895. os.remove(sizelog)
  896. return sizeinfo
  897. def merge_config_with_makeopts(config, make_options):
  898. opt_splits=make_options.strip().split()
  899. passed_buildcfg = dict()
  900. for opt in opt_splits:
  901. if "=" in opt:
  902. values = opt.split("=")
  903. # Make new build config
  904. if (len(values) == 2):
  905. passed_buildcfg[values[0]] = values[1]
  906. build_cfg = config.get("build_config", None)
  907. if build_cfg is None:
  908. config["build_config"] = passed_buildcfg
  909. else:
  910. # update build_config using parsed config via values specified in make_options
  911. config["build_config"].update(passed_buildcfg)
  912. return config
  913. # merge config dict and args dict
  914. # args will overwrite config
  915. def merge_config_with_args(config, args_dict):
  916. if isinstance(config, dict) == False:
  917. return None
  918. if isinstance(args_dict, dict) == False:
  919. return config
  920. serport = args_dict.get("serport", None)
  921. baudrate = args_dict.get("baudrate", None)
  922. make_options = args_dict.get("make_options", None)
  923. parallel = args_dict.get("parallel", None)
  924. build_target = args_dict.get("build_target", None)
  925. run_target = args_dict.get("run_target", None)
  926. timeout = args_dict.get("timeout", None)
  927. ncycm = args_dict.get("ncycm", None)
  928. if isinstance(config, dict) == False:
  929. return None
  930. new_config = copy.deepcopy(config)
  931. if serport or baudrate or run_target:
  932. run_cfg = new_config.get("run_config", None)
  933. if run_cfg is None:
  934. new_config["run_config"] = {"hardware":{}}
  935. elif "hardware" not in run_cfg:
  936. new_config["run_config"]["hardware"] = {}
  937. if serport:
  938. new_config["run_config"]["hardware"]["serport"] = str(serport)
  939. if baudrate:
  940. new_config["run_config"]["hardware"]["serport"] = int(baudrate)
  941. if run_target:
  942. new_config["run_config"]["target"] = str(run_target)
  943. run_target = new_config["run_config"].get("target", "hardware")
  944. if run_target not in new_config["run_config"]:
  945. new_config["run_config"][run_target] = dict()
  946. if ncycm:
  947. if "ncycm" not in new_config["run_config"]:
  948. new_config["run_config"]["ncycm"] = dict()
  949. new_config["run_config"]["ncycm"]["ncycm"] = os.path.abspath(ncycm)
  950. if timeout: # set timeout
  951. try:
  952. timeout = int(timeout)
  953. except:
  954. timeout = 60
  955. new_config["run_config"][run_target]["timeout"] = timeout
  956. if build_target is not None:
  957. new_config["build_target"] = build_target
  958. if parallel is not None:
  959. new_config["parallel"] = parallel
  960. if make_options:
  961. new_config = merge_config_with_makeopts(new_config, make_options)
  962. return new_config
  963. # merge two config, now is appcfg, another is hwcfg
  964. # hwcfg will overwrite configuration in appcfg
  965. def merge_two_config(appcfg, hwcfg):
  966. if isinstance(appcfg, dict) == True and isinstance(hwcfg, dict) == False:
  967. return appcfg
  968. if isinstance(appcfg, dict) == False and isinstance(hwcfg, dict) == True:
  969. return hwcfg
  970. merged_appcfg = copy.deepcopy(appcfg)
  971. dict_merge(merged_appcfg, hwcfg)
  972. return merged_appcfg
  973. def set_global_variables(config):
  974. global SDK_GLOBAL_VARIABLES
  975. if isinstance(config, dict) == False:
  976. return False
  977. if "global_variables" in config:
  978. dict_merge(SDK_GLOBAL_VARIABLES, config["global_variables"])
  979. print("Using global variables: %s" % SDK_GLOBAL_VARIABLES)
  980. return True
  981. def get_app_runresult(apprst):
  982. if not isinstance(apprst, dict):
  983. return "unknown", "-"
  984. if "type" not in apprst:
  985. return "unknown", "-"
  986. rsttype = apprst["type"]
  987. rstvaluedict = apprst.get("value", dict())
  988. if rstvaluedict and len(rstvaluedict) < 3:
  989. rstval = ""
  990. for key in rstvaluedict:
  991. rstval += "%s : %s;" %(key, rstvaluedict[key])
  992. rstval = rstval.rstrip(';')
  993. else:
  994. rstval = "-"
  995. return rsttype, rstval
  996. def save_execute_csv(result, csvfile):
  997. if isinstance(result, dict) == False:
  998. return False
  999. csvlines = ["App, buildstatus, runstatus, buildtime, runtime, type, value, total, text, data, bss"]
  1000. for app in result:
  1001. size = result[app]["size"]
  1002. app_status = result[app]["status"]
  1003. app_time = result[app]["time"]
  1004. apprsttype, apprstval = get_app_runresult(result[app].get("result", dict()))
  1005. csvline ="%s, %s, %s, %s, %s, %s, %s, %d, %d, %d, %d" % (app, app_status["build"], \
  1006. app_status.get("run", False), app_time.get("build", "-"), app_time.get("run", "-"), \
  1007. apprsttype, apprstval, size["total"], size["text"], size["data"], size["bss"])
  1008. csvlines.append(csvline)
  1009. display = get_sdk_verb_buildmsg()
  1010. save_csv(csvfile, csvlines, display)
  1011. return True
  1012. def save_bench_csv(result, csvfile):
  1013. if isinstance(result, dict) == False:
  1014. return False
  1015. csvlines = ["App, case, buildstatus, runstatus, buildtime, runtime, type, value, total, text, data, bss"]
  1016. for app in result:
  1017. appresult = result[app]
  1018. for case in appresult:
  1019. size = appresult[case]["size"]
  1020. app_status = appresult[case]["status"]
  1021. app_time = appresult[case]["time"]
  1022. apprsttype, apprstval = get_app_runresult(appresult[case].get("result", dict()))
  1023. csvline = "%s, %s, %s, %s, %s, %s, %s, %s, %d, %d, %d, %d" % (app, case, app_status["build"], \
  1024. app_status.get("run", False), app_time.get("build", "-"), app_time.get("run", "-"), \
  1025. apprsttype, apprstval, size["total"], size["text"], size["data"], size["bss"])
  1026. csvlines.append(csvline)
  1027. # save csv file
  1028. display = get_sdk_verb_buildmsg()
  1029. save_csv(csvfile, csvlines, display)
  1030. return True
  1031. def find_local_appconfig(appdir, localcfgs):
  1032. if isinstance(appdir, str) and isinstance(localcfgs, dict):
  1033. if appdir in localcfgs:
  1034. return appdir
  1035. else:
  1036. foundcfg = None
  1037. for localcfg in localcfgs:
  1038. localcfgtp = localcfg.strip('/')
  1039. striped_dir = appdir.split(localcfgtp, 1)
  1040. if len(striped_dir) == 2:
  1041. striped_dir = striped_dir[1]
  1042. else:
  1043. striped_dir = appdir
  1044. if striped_dir != appdir:
  1045. if striped_dir.startswith('/'):
  1046. if foundcfg is None:
  1047. foundcfg = localcfg
  1048. else:
  1049. if len(foundcfg) < len(localcfg):
  1050. foundcfg = localcfg
  1051. return foundcfg
  1052. else:
  1053. return None
  1054. def fix_evalsoc_verilog_ncycm(verilog):
  1055. if os.path.isfile(verilog) == False:
  1056. return ""
  1057. vfct = ""
  1058. with open(verilog, "r", errors='ignore') as vf:
  1059. for line in vf.readlines():
  1060. line = line.replace("@80", "@00").replace("@90", "@08")
  1061. vfct += line
  1062. verilog_new = verilog + ".ncycm"
  1063. with open(verilog_new, "w") as vf:
  1064. vf.write(vfct)
  1065. return verilog_new
# Program classification tags returned by the parse_benchmark_* helpers below.
PROGRAM_UNKNOWN="unknown"
PROGRAM_BAREBENCH="barebench"
PROGRAM_COREMARK="coremark"
PROGRAM_DHRYSTONE="dhrystone"
PROGRAM_WHETSTONE="whetstone"
  1071. def parse_benchmark_compatiable(lines):
  1072. result = None
  1073. program_type = PROGRAM_UNKNOWN
  1074. subtype = PROGRAM_UNKNOWN
  1075. try:
  1076. for line in lines:
  1077. # Coremark
  1078. if "CoreMark" in line:
  1079. program_type = PROGRAM_BAREBENCH
  1080. subtype = PROGRAM_COREMARK
  1081. if "Iterations*1000000/total_ticks" in line:
  1082. value = line.split("=")[1].strip().split()[0]
  1083. result = dict()
  1084. result["CoreMark/MHz"] = strtofloat(value)
  1085. # Dhrystone
  1086. if "Dhrystone" in line:
  1087. program_type = PROGRAM_BAREBENCH
  1088. subtype = PROGRAM_DHRYSTONE
  1089. if "1000000/(User_Cycle/Number_Of_Runs)" in line:
  1090. value = line.split("=")[1].strip().split()[0]
  1091. result = dict()
  1092. result["DMIPS/MHz"] = strtofloat(value)
  1093. # Whetstone
  1094. if "Whetstone" in line:
  1095. program_type = PROGRAM_BAREBENCH
  1096. subtype = PROGRAM_WHETSTONE
  1097. if "MWIPS/MHz" in line:
  1098. value = line.split("MWIPS/MHz")[-1].strip().split()[0]
  1099. result = dict()
  1100. result["MWIPS/MHz"] = strtofloat(value)
  1101. except:
  1102. return program_type, subtype, result
  1103. return program_type, subtype, result
  1104. def parse_benchmark_baremetal(lines):
  1105. result = None
  1106. program_type = PROGRAM_UNKNOWN
  1107. subtype = PROGRAM_UNKNOWN
  1108. try:
  1109. unit = "unknown"
  1110. for line in lines:
  1111. stripline = line.strip()
  1112. if "csv," in stripline.lower():
  1113. csv_values = stripline.split(',')
  1114. if len(csv_values) >= 3:
  1115. key = csv_values[1].strip()
  1116. value = csv_values[-1].strip()
  1117. if key.lower() == "benchmark":
  1118. program_type = PROGRAM_BAREBENCH
  1119. unit = value
  1120. else:
  1121. subtype = key.lower()
  1122. result = dict()
  1123. result[unit] = strtofloat(value)
  1124. break
  1125. except:
  1126. return program_type, subtype, result
  1127. return program_type, subtype, result
  1128. def parse_benchmark_baremetal_csv(lines):
  1129. result = None
  1130. program_type = PROGRAM_UNKNOWN
  1131. try:
  1132. result = dict()
  1133. for line in lines:
  1134. stripline = line.strip()
  1135. if "csv," in stripline.lower():
  1136. csv_values = stripline.split(',')
  1137. if len(csv_values) >= 3:
  1138. key = csv_values[1].strip()
  1139. value = csv_values[-1].strip()
  1140. if "BENCH" not in key.upper():
  1141. result[key] = value
  1142. except:
  1143. return program_type, result
  1144. return program_type, result
  1145. def find_index(key, arr):
  1146. try:
  1147. index = arr.index(key)
  1148. except:
  1149. index = -1
  1150. return index
  1151. def parse_benchmark_runlog(lines, lgf=""):
  1152. if isinstance(lines, list) == False:
  1153. return PROGRAM_UNKNOWN, PROGRAM_UNKNOWN, None
  1154. if len(lines) == 0:
  1155. return PROGRAM_UNKNOWN, PROGRAM_UNKNOWN, None
  1156. subtype = ""
  1157. if lgf.strip() == "": # old style
  1158. program_type, subtype, result = parse_benchmark_compatiable(lines)
  1159. else:
  1160. lgf = lgf.replace("\\", "/")
  1161. appnormdirs = os.path.dirname(os.path.normpath(lgf)).replace('\\', '/').split('/')
  1162. if "baremetal/benchmark" in lgf:
  1163. # baremetal benchmark
  1164. program_type, subtype, result = parse_benchmark_baremetal(lines)
  1165. if program_type == PROGRAM_UNKNOWN:
  1166. # fallback to previous parser
  1167. program_type, subtype, result = parse_benchmark_compatiable(lines)
  1168. elif "baremetal/demo_dsp" in lgf:
  1169. program_type, result = parse_benchmark_baremetal_csv(lines)
  1170. program_type = "demo_dsp"
  1171. elif "DSP/Examples/RISCV" in lgf:
  1172. program_type, result = parse_benchmark_baremetal_csv(lines)
  1173. program_type = "nmsis_dsp_example"
  1174. index = find_index("RISCV", appnormdirs)
  1175. if index >= 0:
  1176. subtype = appnormdirs[index + 1]
  1177. elif "DSP/Test" in lgf:
  1178. program_type, result = parse_benchmark_baremetal_csv(lines)
  1179. program_type = "nmsis_dsp_tests"
  1180. index = find_index("Test", appnormdirs)
  1181. if index >= 0:
  1182. subtype = appnormdirs[index + 1]
  1183. elif "DSP/Benchmark" in lgf:
  1184. program_type, result = parse_benchmark_baremetal_csv(lines)
  1185. program_type = "nmsis_dsp_benchmark"
  1186. index = find_index("Benchmark", appnormdirs)
  1187. if index >= 0:
  1188. subtype = appnormdirs[index + 1]
  1189. elif "NN/Examples/RISCV" in lgf:
  1190. program_type, result = parse_benchmark_baremetal_csv(lines)
  1191. program_type = "nmsis_nn_example"
  1192. index = find_index("RISCV", appnormdirs)
  1193. if index >= 0:
  1194. subtype = appnormdirs[index + 1]
  1195. elif "NN/Tests" in lgf:
  1196. program_type, result = parse_benchmark_baremetal_csv(lines)
  1197. if "full" in appnormdirs:
  1198. program_type = "nmsis_nn_test_full"
  1199. subtype = "full"
  1200. else:
  1201. program_type = "nmsis_nn_test_percase"
  1202. index = find_index("percase", appnormdirs)
  1203. if index >= 0:
  1204. subtype = appnormdirs[index + 1]
  1205. elif "NN/Benchmark" in lgf:
  1206. program_type, result = parse_benchmark_baremetal_csv(lines)
  1207. program_type = "nmsis_nn_benchmark"
  1208. index = find_index("Benchmark", appnormdirs)
  1209. if index >= 0:
  1210. subtype = appnormdirs[index + 1]
  1211. else:
  1212. program_type, subtype, result = parse_benchmark_compatiable(lines)
  1213. return program_type, subtype, result
  1214. def parse_benchmark_use_pyscript(lines, lgf, pyscript):
  1215. if isinstance(lines, list) == False:
  1216. return PROGRAM_UNKNOWN, PROGRAM_UNKNOWN, None
  1217. if len(lines) == 0:
  1218. return PROGRAM_UNKNOWN, PROGRAM_UNKNOWN, None
  1219. # function should named parse_benchmark
  1220. # function argument and return like parse_benchmark_runlog
  1221. parsefunc = import_function("parse_benchmark", pyscript)
  1222. if parsefunc is None:
  1223. return PROGRAM_UNKNOWN, PROGRAM_UNKNOWN, None
  1224. try:
  1225. program_type, subtype, result = parsefunc(lines, lgf)
  1226. return program_type, subtype, result
  1227. except Exception as exc:
  1228. print("ERROR: Parse using %s script error: %s" %(pyscript, exc))
  1229. return PROGRAM_UNKNOWN, PROGRAM_UNKNOWN, None
  1230. def check_tool_exist(tool):
  1231. exist = False
  1232. if sys.platform == 'win32':
  1233. if os.system("where %s" % (tool)) == 0:
  1234. exist = True
  1235. else:
  1236. if os.system("which %s" % (tool)) == 0:
  1237. exist = True
  1238. return exist
  1239. def find_vivado_cmd():
  1240. for vivado_cmd in ("vivado", "vivado_lab"):
  1241. if sys.platform == 'win32':
  1242. if os.system("where %s" % (vivado_cmd)) == 0:
  1243. return vivado_cmd
  1244. else:
  1245. if os.system("which %s" % (vivado_cmd)) == 0:
  1246. return vivado_cmd
  1247. return None
  1248. def datetime_now():
  1249. return datetime.datetime.now().strftime(DATE_FORMATE)
def program_fpga(bit, target):
    """Program fpga bitstream *bit* onto the board matching *target* via vivado.

    Serializes fpga programming across processes with an exclusive lock on a
    shared lock file (posix only). Returns True on success, False when the
    bitstream or vivado is missing or programming fails.
    """
    if os.path.isfile(bit) == False:
        print("Can't find bitstream in %s" % (bit))
        return False
    print("Try to program fpga bitstream %s to target board %s" % (bit, target))
    sys.stdout.flush()
    # machine-wide lock file shared by all runs so programming is serialized
    FILE_LOCK = os.path.join(get_tmpdir(), FILE_LOCK_NAME)
    # TODO: use portable filelock for win32
    with open(FILE_LOCK, 'w+') as filelock:
        if sys.platform != "win32":
            print("%s, Wait another board's programing fpga to finished" %(datetime_now()))
            # blocks until the exclusive lock is granted (posix only)
            fcntl.flock(filelock, fcntl.LOCK_EX)
            # set to 666, in case that other user can't access this file causing exception
            if os.stat(FILE_LOCK).st_uid == os.getuid():
                os.chmod(FILE_LOCK, stat.S_IWGRP | stat.S_IRGRP | stat.S_IWUSR | stat.S_IRUSR | stat.S_IWOTH | stat.S_IROTH)
        print("%s, Has acquired the chance to do fpga programing!" %(datetime_now()))
        vivado_cmd = find_vivado_cmd()
        # check vivado is found or not
        if vivado_cmd == None:
            print("vivado is not found in PATH, please check!")
            return False
        tcl = os.path.join(os.path.dirname(os.path.realpath(__file__)), "program_bit.tcl")
        # vivado tcl matches the target by wildcard prefix
        target = "*%s" % (target)
        progcmd = "%s -mode batch -nolog -nojournal -source %s -tclargs %s %s" % (vivado_cmd, tcl, bit, target)
        tmout = get_sdk_fpga_prog_tmout()
        if sys.platform != 'win32' and tmout is not None and tmout.strip() != "":
            print("Timeout %s do fpga program" % (tmout))
            # hard-kill vivado via coreutils timeout if it exceeds the limit
            progcmd = "timeout --foreground -s SIGKILL %s %s" % (tmout, progcmd)
        print("Do fpga program using command: %s" % (progcmd))
        sys.stdout.flush()
        ret = os.system(progcmd)
        sys.stdout.flush()
        if ret != 0:
            print("Program fpga bit failed, error code %d" % ret)
            return False
        print("Program fpga bit successfully")
        return True
  1287. def find_fpgas():
  1288. vivado_cmd = find_vivado_cmd()
  1289. if vivado_cmd == None:
  1290. print("vivado is not found in PATH, please check!")
  1291. return dict()
  1292. tcl = os.path.join(os.path.dirname(os.path.realpath(__file__)), "find_devices.tcl")
  1293. sys.stdout.flush()
  1294. tmp_log = tempfile.mktemp()
  1295. os.system("%s -mode batch -nolog -nojournal -source %s -notrace > %s" % (vivado_cmd, tcl, tmp_log))
  1296. sys.stdout.flush()
  1297. fpgadevices = dict()
  1298. with open(tmp_log, "r", errors='ignore') as tf:
  1299. for line in tf.readlines():
  1300. line = line.strip()
  1301. if line.startswith("CSV,") == False:
  1302. continue
  1303. splits = line.split(",")
  1304. if len(splits) != 3:
  1305. continue
  1306. fpga_serial = "/".join(splits[1].split("/")[2:])
  1307. fpgadevices[fpga_serial] = splits[2].strip()
  1308. return fpgadevices
  1309. def check_serial_port(serport):
  1310. if serport in find_possible_serports():
  1311. return True
  1312. return False
  1313. def modify_openocd_cfg(cfg, ftdi_serial):
  1314. cfg_bk = cfg + ".backup"
  1315. if (os.path.isfile(cfg)) == False:
  1316. return False
  1317. if os.path.isfile(cfg_bk) == True:
  1318. print("Restore openocd cfg %s" %(cfg))
  1319. shutil.copyfile(cfg_bk, cfg)
  1320. else:
  1321. print("Backup openocd cfg %s" %(cfg))
  1322. shutil.copyfile(cfg, cfg_bk)
  1323. found = False
  1324. contents = []
  1325. index = 0
  1326. with open(cfg, 'r', errors='ignore') as cf:
  1327. contents = cf.readlines()
  1328. for line in contents:
  1329. if line.strip().startswith("transport select"):
  1330. found = True
  1331. break
  1332. index += 1
  1333. if found == False:
  1334. return False
  1335. if sys.platform == 'win32':
  1336. ftdi_serial = "%sA" % (ftdi_serial)
  1337. contents.insert(index, "ftdi_serial %s\ntcl_port disabled\ntelnet_port disabled\n" %(ftdi_serial))
  1338. with open(cfg, 'w') as cf:
  1339. contents = "".join(contents)
  1340. cf.write(contents)
  1341. return True
# Bundled per-cpu json configuration directory: <script dir>/configs/cpu
GL_CPUCFGs = os.path.join(SCRIPT_DIR, "configs", "cpu")
  1343. def gen_runcfg(cpucfg, runcfg, buildconfig=dict()):
  1344. _, cpucfgdict = load_json(cpucfg)
  1345. _, runcfgdict = load_json(runcfg)
  1346. if cpucfgdict is None:
  1347. return { "build_configs": { "default": {} } }
  1348. if runcfgdict is None:
  1349. return cpucfgdict
  1350. matrixcfgs = runcfgdict.get("matrix", None)
  1351. expectedcfg = runcfgdict.get("expected", dict())
  1352. expectedscfg = runcfgdict.get("expecteds", dict())
  1353. appdirs_ignore = runcfgdict.get("appdirs_ignore", [])
  1354. finalruncfg = copy.deepcopy(cpucfgdict)
  1355. # merge buildconfig
  1356. finalruncfg["build_config"] = merge_two_config(finalruncfg.get("build_config", dict()), buildconfig)
  1357. finalruncfg["expected"] = merge_two_config(finalruncfg.get("expected", dict()), expectedcfg)
  1358. finalruncfg["expecteds"] = merge_two_config(finalruncfg.get("expecteds", dict()), expectedscfg)
  1359. # allow pass core related ignore cases
  1360. finalruncfg["appdirs_ignore"] = update_list_items(finalruncfg.get("appdirs_ignore", []), appdirs_ignore)
  1361. # if appdirs_ignore is empty, remove the key
  1362. if not finalruncfg["appdirs_ignore"]:
  1363. del finalruncfg["appdirs_ignore"]
  1364. if matrixcfgs is None:
  1365. return finalruncfg
  1366. bcfgs = cpucfgdict.get("build_configs", dict())
  1367. newbcfgs = dict()
  1368. for bkey in bcfgs:
  1369. for key in matrixcfgs:
  1370. cfgkey = "%s-%s" % (bkey, key)
  1371. newbcfgs[cfgkey] = merge_two_config(bcfgs[bkey], matrixcfgs[key])
  1372. if len(newbcfgs) > 1:
  1373. finalruncfg["build_configs"] = newbcfgs
  1374. else:
  1375. finalruncfg["build_configs"] = bcfgs
  1376. return finalruncfg
  1377. def gen_coreruncfg(core, runcfg, choice="mini", buildconfig=dict(), casedir=None):
  1378. cpucfgsloc = os.path.join(GL_CPUCFGs, choice)
  1379. if casedir is not None:
  1380. tmp = os.path.join(casedir, choice)
  1381. if os.path.isdir(tmp) == True:
  1382. cpucfgsloc = os.path.realpath(tmp)
  1383. print("Use cpu configs in location %s directory" % (cpucfgsloc))
  1384. cpucfg = os.path.join(cpucfgsloc, "%s.json" % (core))
  1385. return gen_runcfg(cpucfg, runcfg, buildconfig)
  1386. def gen_coreruncfg_custom(core, runcfg, customcfgdir, buildconfig=dict()):
  1387. cpucfg = os.path.join(customcfgdir, "%s.json" % (core))
  1388. return gen_runcfg(cpucfg, runcfg, buildconfig)
  1389. def gen_runyaml(core, locs, fpga_serial, ftdi_serial, cycm, fpgabit, boardtype, ocdcfg, appcfg, hwcfg):
  1390. runyaml = { "runcfg": {"runner": "fpga"},
  1391. "fpga_runners": { core: {
  1392. "board_type": boardtype, "fpga_serial": fpga_serial,
  1393. "ftdi_serial": ftdi_serial, "serial_port": ""}
  1394. },
  1395. "ncycm_runners": { core: {
  1396. "model": cycm if cycm else "" }
  1397. },
  1398. "configs": { core: {
  1399. "fpga": boardtype, "bitstream": fpgabit,
  1400. "ncycm": core, "openocd_cfg": ocdcfg,
  1401. "appcfg": appcfg, "hwcfg": hwcfg }
  1402. },
  1403. "environment": {
  1404. "fpgaloc": locs.get("fpgaloc", ""),
  1405. "ncycmloc": locs.get("ncycmloc", ""),
  1406. "cfgloc": locs.get("cfgloc", "")
  1407. }
  1408. }
  1409. if cycm is not None:
  1410. runyaml["runcfg"]["runner"] = "ncycm"
  1411. return runyaml