# Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
#   * Redistributions of source code must retain the above copyright
#     notice, this list of conditions and the following disclaimer.
#   * Redistributions in binary form must reproduce the above copyright
#     notice, this list of conditions and the following disclaimer in
#     the documentation and/or other materials provided with the
#     distribution.
#   * Neither the name of Intel Corporation nor the names of its
#     contributors may be used to endorse or promote products derived
#     from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# The main logic behind running autotests in parallel

from __future__ import print_function

import StringIO
import csv
import multiprocessing
import pexpect
import re
import subprocess
import sys
import time


# wait for the test app prompt
def wait_prompt(child):
    # expect either the RTE>> prompt, a timeout or EOF; only the prompt
    # counts as a successfully booted target
    result = child.expect(["RTE>>", pexpect.TIMEOUT, pexpect.EOF],
                          timeout=120)
    return result == 0


# each result tuple in results list consists of:
#   result value (0 or -1)
#   result string
#   test name
#   total test run time (double)
#   raw test log
#   test report (if not available, should be None)
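#
# for illustration (this exact tuple is not produced verbatim by the code
# below), a passing test would typically yield something like:
#   (0, "Success", "Mempool autotest", 3.2, "<captured test log>", None)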
#
# this function needs to be outside AutotestRunner class
# because otherwise Pool won't work (or rather it will require
# quite a bit of effort to make it work).
def run_test_group(cmdline, target, test_group):
    results = []
    start_time = time.time()

    # run test app
    try:
        # prepare logging of init
        startuplog = StringIO.StringIO()

        print("\n%s %s\n" % ("=" * 20, test_group["Prefix"]), file=startuplog)
        print("\ncmdline=%s" % cmdline, file=startuplog)

        child = pexpect.spawn(cmdline, logfile=startuplog)

        # wait for target to boot
        if not wait_prompt(child):
            child.close()

            results.append((-1,
                            "Fail [No prompt]",
                            "Start %s" % test_group["Prefix"],
                            time.time() - start_time,
                            startuplog.getvalue(),
                            None))

            # mark all tests as failed
            for test in test_group["Tests"]:
                results.append((-1, "Fail [No prompt]", test["Name"],
                                time.time() - start_time, "", None))
            # exit test
            return results
109 "Start %s" % test_group["Prefix"],
110 time.time() - start_time,
111 startuplog.getvalue(),
114 # mark all tests as failed
115 for t in test_group["Tests"]:
116 results.append((-1, "Fail [Can't run]", t["Name"],
117 time.time() - start_time, "", None))

    # startup was successful
    results.append((0, "Success", "Start %s" % test_group["Prefix"],
                    time.time() - start_time, startuplog.getvalue(), None))

    # parse the binary for available test commands
    binary = cmdline.split()[0]
    stripped = 'not stripped' not in subprocess.check_output(['file', binary])
    if not stripped:
        symbols = subprocess.check_output(['nm', binary]).decode('utf-8')
        avail_cmds = re.findall(r'test_register_(\w+)', symbols)
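
    # e.g. for a non-stripped binary, an "nm" symbol such as
    # "test_register_mempool_autotest" makes "mempool_autotest" appear in
    # avail_cmds (illustrative name; the actual list depends on how the test
    # binary was built); stripped binaries have no symbol table, so every
    # test is attempted rather than skipped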

    # run all tests in test group
    for test in test_group["Tests"]:

        # create log buffer for each test
        # in multiprocessing environment, the logging would be
        # interleaved and will create a mess, hence the buffering
        logfile = StringIO.StringIO()
        child.logfile = logfile

        # make a note when the test started
        start_time = time.time()

        try:
            # print test name to log buffer
            print("\n%s %s\n" % ("-" * 20, test["Name"]), file=logfile)

            # run test function associated with the test
            if stripped or test["Command"] in avail_cmds:
                result = test["Func"](child, test["Command"])
            else:
                result = (0, "Skipped [Not Available]")

            # make a note when the test was finished
            end_time = time.time()

            # append test data to the result tuple
            result += (test["Name"], end_time - start_time,
                       logfile.getvalue())

            # call report function, if any defined, and supply it with
            # target and complete log for test run
            if test["Report"]:
                report = test["Report"](target, logfile.getvalue())

                # append report to results tuple
                result += (report,)
            else:
                # no report function defined for this test
                result += (None,)
        except:
            # make a note when the test crashed
            end_time = time.time()

            # mark test as failed
            result = (-1, "Fail [Crash]", test["Name"],
                      end_time - start_time, logfile.getvalue(), None)
        finally:
            # append the results to the results list
            results.append(result)

    # regardless of whether test has crashed, try quitting it
    try:
        child.sendline("quit")
        child.close()
    # if the test crashed, just do nothing instead
    except:
        pass

    # return test results
    return results


# class representing an instance of autotests run
class AutotestRunner:
    parallel_test_groups = []
    non_parallel_test_groups = []
    n_tests = 0
    fails = 0
    log_buffers = []

    def __init__(self, cmdline, target, blacklist, whitelist):
        self.cmdline = cmdline
        self.target = target
        self.blacklist = blacklist
        self.whitelist = whitelist

        # log and CSV files are named after the target
        logfile = "%s.log" % target
        csvfile = "%s.csv" % target

        self.logfile = open(logfile, "w")
        csvfile = open(csvfile, "w")
        self.csvwriter = csv.writer(csvfile)

        # prepare results table
        self.csvwriter.writerow(["test_name", "test_result", "result_str"])

    # set up cmdline string
    def __get_cmdline(self, test_group):
        cmdline = self.cmdline

        # append memory limitations for each test
        # otherwise tests won't run in parallel
        if "i686" not in self.target:
            cmdline += " --socket-mem=%s" % test_group["Memory"]
        else:
            # affinitize startup so that tests don't fail on i686
            cmdline = "taskset 1 " + cmdline
            cmdline += " -m " + str(sum(map(int, test_group["Memory"].split(","))))

        # set group prefix for autotest group
        # otherwise they won't run in parallel
        cmdline += " --file-prefix=%s" % test_group["Prefix"]

        return cmdline
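
    # for illustration, with a hypothetical cmdline "./test -c f -n 4", a
    # 64-bit target and a group with Memory "32,32" and Prefix "group1",
    # the above produces:
    #   ./test -c f -n 4 --socket-mem=32,32 --file-prefix=group1
    # while an i686 target instead gets:
    #   taskset 1 ./test -c f -n 4 -m 64 --file-prefix=group1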

    def add_parallel_test_group(self, test_group):
        self.parallel_test_groups.append(test_group)

    def add_non_parallel_test_group(self, test_group):
        self.non_parallel_test_groups.append(test_group)

    def __process_results(self, results):
        # this iterates over individual test results
        for i, result in enumerate(results):

            # increase total number of tests that were run
            # do not include "start" test
            if i > 0:
                self.n_tests += 1

            # unpack result tuple
            test_result, result_str, test_name, \
                test_time, log, report = result

            # get total run time
            cur_time = time.time()
            total_time = int(cur_time - self.start)

            # print results, test run time and total time since start
            result = ("%s:" % test_name).ljust(30)
            result += result_str.ljust(29)
            result += "[%02dm %02ds]" % (test_time / 60, test_time % 60)

            # don't print out total time every line, it's the same anyway
            if i == len(results) - 1:
                print(result,
                      "[%02dm %02ds]" % (total_time / 60, total_time % 60))
            else:
                print(result)

            # if test failed and it wasn't a "start" test
            if test_result < 0 and i != 0:
                self.fails += 1

            # collect logs
            self.log_buffers.append(log)

            # create report if it exists
            if report:
                try:
                    f = open("%s_%s_report.rst" %
                             (self.target, test_name), "w")
                except IOError:
                    print("Report for %s could not be created!" % test_name)
                else:
                    with f:
                        f.write(report)

            # write test result to CSV file
            self.csvwriter.writerow([test_name, test_result, result_str])

    # this function iterates over test groups and removes each
    # test that is not in whitelist/blacklist
    def __filter_groups(self, test_groups):
        groups_to_remove = []

        # filter out tests from parallel test groups
        for i, test_group in enumerate(test_groups):

            # iterate over a copy so that we can safely delete individual
            # tests
            for test in test_group["Tests"][:]:
                test_id = test["Command"]

                # dump tests are specified in full, e.g. "Dump_mempool"
                if "_autotest" in test_id:
                    test_id = test_id[:-len("_autotest")]
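
                # e.g. a test with Command "mempool_autotest" is matched
                # against the blacklist/whitelist as "mempool", while dump
                # tests keep their full name (illustrative example)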

                # filter out blacklisted/whitelisted tests
                if self.blacklist and test_id in self.blacklist:
                    test_group["Tests"].remove(test)
                    continue
                if self.whitelist and test_id not in self.whitelist:
                    test_group["Tests"].remove(test)
                    continue

            # modify or remove original group
            if len(test_group["Tests"]) > 0:
                test_groups[i] = test_group
            else:
                # remember which groups should be deleted
                # put the numbers backwards so that we start
                # deleting from the end, not from the beginning
                groups_to_remove.insert(0, i)

        # remove test groups that need to be removed
        for i in groups_to_remove:
            del test_groups[i]

        return test_groups

    # iterate over test groups and run tests associated with them
    def run_all_tests(self):
        # filter groups
        self.parallel_test_groups = \
            self.__filter_groups(self.parallel_test_groups)
        self.non_parallel_test_groups = \
            self.__filter_groups(self.non_parallel_test_groups)

        # create a pool of worker processes
        pool = multiprocessing.Pool(processes=1)

        results = []

        # whatever happens, try to save as much logs as possible
        try:

            # create table header
            print("Test name".ljust(30), "Test result".ljust(29),
                  "Test".center(9), "Total".center(9))

            # make a note of tests start time
            self.start = time.time()

            # assign worker processes to run test groups
            for test_group in self.parallel_test_groups:
                result = pool.apply_async(run_test_group,
                                          [self.__get_cmdline(test_group),
                                           self.target,
                                           test_group])
                results.append(result)

            # iterate while we have group execution results to get
            while len(results) > 0:

                # iterate over a copy to be able to safely delete results
                # this iterates over a list of group results
                for group_result in results[:]:

                    # if the worker hasn't finished yet, continue
                    if not group_result.ready():
                        continue

                    res = group_result.get()

                    self.__process_results(res)

                    # remove result from results list once we're done with it
                    results.remove(group_result)

            # run non_parallel tests. they are run one by one, synchronously
            for test_group in self.non_parallel_test_groups:
                group_result = run_test_group(
                    self.__get_cmdline(test_group), self.target, test_group)

                self.__process_results(group_result)

            # get total run time
            cur_time = time.time()
            total_time = int(cur_time - self.start)

            # print out summary
            print("Total run time: %02dm %02ds" % (total_time / 60,
                                                   total_time % 60))
            print("Number of failed tests: %s" % str(self.fails))

            # write summary to logfile
            self.logfile.write("Summary\n")
            self.logfile.write("Target: ".ljust(15) + "%s\n" % self.target)
            self.logfile.write("Tests: ".ljust(15) + "%i\n" % self.n_tests)
            self.logfile.write("Failed tests: ".ljust(
                15) + "%i\n" % self.fails)
        except:
            print("Exception occurred")
            print(sys.exc_info())

        # drop logs from all executions to a logfile
        for buf in self.log_buffers:
            self.logfile.write(buf.replace("\r", ""))
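

# Example usage (a sketch only; the group/test dictionary fields below mirror
# the keys this module reads - "Prefix", "Memory", "Tests", "Name", "Command",
# "Func" and "Report" - but real groups are built by the calling script, and
# the command line, target and test function names here are hypothetical):
#
#   runner = AutotestRunner("./app/test -c f -n 4",
#                           "x86_64-native-linuxapp-gcc",
#                           blacklist=[], whitelist=[])
#   runner.add_parallel_test_group({
#       "Prefix": "group1",
#       "Memory": "32",
#       "Tests": [{"Name": "Mempool autotest",
#                  "Command": "mempool_autotest",
#                  "Func": my_mempool_test,  # drives the pexpect child, returns (0/-1, result string)
#                  "Report": None}],
#   })
#   runner.run_all_tests()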