# Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
#   * Redistributions of source code must retain the above copyright
#     notice, this list of conditions and the following disclaimer.
#   * Redistributions in binary form must reproduce the above copyright
#     notice, this list of conditions and the following disclaimer in
#     the documentation and/or other materials provided with the
#     distribution.
#   * Neither the name of Intel Corporation nor the names of its
#     contributors may be used to endorse or promote products derived
#     from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# The main logic behind running autotests in parallel

import multiprocessing, sys, pexpect, time, os, StringIO, csv

# wait for the prompt, return True if it appeared
def wait_prompt(child):
    try:
        # poke the app so that it prints its prompt
        child.sendline()
        # expect() returns the index of the pattern that matched, so zero
        # means the "RTE>>" prompt was found
        result = child.expect(["RTE>>", pexpect.TIMEOUT, pexpect.EOF],
                              timeout=120)
    except:
        return False
    return result == 0

# run a test group
#
# each result tuple in the results list consists of:
#   result value (0 or -1)
#   result string
#   test name
#   total test run time (double)
#   raw test log
#   test report (if not available, should be None)
#
# this function needs to be outside the AutotestRunner class because
# otherwise Pool won't work (or rather, it would require quite a bit
# of effort to make it work); for the same reason, the target is passed
# in explicitly rather than read from the runner instance
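#
# for illustration, a successful run of a hypothetical mempool test could
# produce a tuple such as:
#   (0, "Success", "Mempool autotest", 12.3, "<captured log>", None)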
def run_test_group(cmdline, test_group, target):
    results = []
    child = None
    startuplog = None
    start_time = time.time()

    # run the test app
    try:
        # prepare logging of init
        startuplog = StringIO.StringIO()

        print >>startuplog, "\n%s %s\n" % ("=" * 20, test_group["Prefix"])

        child = pexpect.spawn(cmdline, logfile=startuplog)

        # wait for the target to boot
        if not wait_prompt(child):
            child.close()

            results.append((-1, "Fail [No prompt]",
                            "Start %s" % test_group["Prefix"],
                            time.time() - start_time,
                            startuplog.getvalue(), None))

            # mark all tests as failed
            for test in test_group["Tests"]:
                results.append((-1, "Fail [No prompt]", test["Name"],
                                time.time() - start_time, "", None))
            # exit test
            return results

    # the app failed to start at all
    except:
        results.append((-1, "Fail [Can't run]",
                        "Start %s" % test_group["Prefix"],
                        time.time() - start_time,
                        startuplog.getvalue(), None))

        # mark all tests as failed
        for t in test_group["Tests"]:
            results.append((-1, "Fail [Can't run]", t["Name"],
                            time.time() - start_time, "", None))
        # exit test
        return results

    # startup was successful
    results.append((0, "Success", "Start %s" % test_group["Prefix"],
                    time.time() - start_time, startuplog.getvalue(), None))

    # run all tests in the test group
    for test in test_group["Tests"]:

        # create a log buffer for each test: in a multiprocessing
        # environment the logging would otherwise be interleaved and
        # create a mess, hence the buffering
        logfile = StringIO.StringIO()
        child.logfile = logfile

        result = ()

        # make a note of when the test started
        start_time = time.time()

        try:
            # print the test name to the log buffer
            print >>logfile, "\n%s %s\n" % ("-" * 20, test["Name"])

            # run the test function associated with the test
            result = test["Func"](child, test["Command"])

            # make a note of when the test finished
            end_time = time.time()

            # append test data to the result tuple
            log = logfile.getvalue()
            result += (test["Name"], end_time - start_time, log)

            # call the report function, if one is defined, and supply it
            # with the target and the complete log for the test run
            if test["Report"]:
                report = test["Report"](target, log)

                # append the report to the result tuple
                result += (report,)
            else:
                # no report was generated
                result += (None,)
        except:
            # make a note of when the test crashed
            end_time = time.time()

            # mark the test as failed
            result = (-1, "Fail [Crash]", test["Name"],
                      end_time - start_time, logfile.getvalue(), None)

        # append the result to the results list
        results.append(result)

    # regardless of whether the test crashed, try quitting it
    try:
        child.sendline("quit")
        child.close()
    # if the test crashed, just do nothing instead
    except:
        pass

    # return test results
    return results
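
# for reference, the "Func" callback consumed above receives the pexpect
# child and the command string, and returns a (value, string) pair which
# run_test_group() then extends into a full result tuple. a minimal (purely
# hypothetical) callback might look like:
#
#     def example_autotest(child, command):
#         child.sendline(command)
#         index = child.expect(["Test OK", "Test Failed", pexpect.TIMEOUT],
#                              timeout=900)
#         if index == 0:
#             return 0, "Success"
#         return -1, "Fail"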

# class representing an instance of autotests run
class AutotestRunner:
    def __init__(self, cmdline, target, blacklist, whitelist):
        self.cmdline = cmdline
        self.target = target
        self.blacklist = blacklist
        self.whitelist = whitelist

        # test group lists are per-instance so that two runners never
        # share state
        self.parallel_test_groups = []
        self.non_parallel_test_groups = []

        # bookkeeping for the test run summary
        self.start = None
        self.n_tests = 0
        self.fails = 0
        self.log_buffers = []

        # prepare logging and results files
        logfile = "%s.log" % target
        csvfile = "%s.csv" % target

        self.logfile = open(logfile, "w")
        self.csvfile = open(csvfile, "w")
        self.csvwriter = csv.writer(self.csvfile)

        # prepare results table
        self.csvwriter.writerow(["test_name", "test_result", "result_str"])
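
        # one row per test is later appended by __process_results(), so a
        # row in the CSV might read (illustrative values):
        #   mempool_autotest,0,Success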

    # set up cmdline string
    def __get_cmdline(self, test):
        cmdline = self.cmdline

        # append memory limitations for each test,
        # otherwise tests won't run in parallel
        if "i686" not in self.target:
            cmdline += " --socket-mem=%s" % test["Memory"]
        else:
            # affinitize startup so that tests don't fail on i686
            cmdline = "taskset 1 " + cmdline
            cmdline += " -m " + str(sum(map(int, test["Memory"].split(","))))

        # set the group prefix for the autotest group,
        # otherwise the groups won't run in parallel
        cmdline += " --file-prefix=%s" % test["Prefix"]

        return cmdline
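
    # for illustration, given a cmdline of "./app/test", a non-i686 target,
    # a Memory of "32,32" and a Prefix of "group1" (all hypothetical
    # values), the resulting command line would be roughly:
    #   ./app/test --socket-mem=32,32 --file-prefix=group1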

    def add_parallel_test_group(self, test_group):
        self.parallel_test_groups.append(test_group)

    def add_non_parallel_test_group(self, test_group):
        self.non_parallel_test_groups.append(test_group)

    # process test results and print them out
    def __process_results(self, results):
        # this iterates over individual test results
        for i, result in enumerate(results):

            # increase total number of tests that were run
            # do not include the "start" test
            if i > 0:
                self.n_tests += 1

            # unpack result tuple
            test_result, result_str, test_name, \
                test_time, log, report = result

            # get total run time
            cur_time = time.time()
            total_time = int(cur_time - self.start)

            # print results, test run time and total time since start
            print ("%s:" % test_name).ljust(30),
            print result_str.ljust(29),
            print "[%02dm %02ds]" % (test_time / 60, test_time % 60),

            # don't print the total time on every line, it's the same anyway
            if i == len(results) - 1:
                print "[%02dm %02ds]" % (total_time / 60, total_time % 60)
            else:
                print ""

            # if the test failed and it wasn't a "start" test
            if test_result < 0 and not i == 0:
                self.fails += 1

            # collect logs
            self.log_buffers.append(log)

            # create a report if one exists
            if report:
                try:
                    f = open("%s_%s_report.rst" %
                             (self.target, test_name), "w")
                except IOError:
                    print "Report for %s could not be created!" % test_name
                else:
                    f.write(report)
                    f.close()

            # write the test result to the CSV file, skipping the
            # "start" pseudo-test
            if i != 0:
                self.csvwriter.writerow([test_name, test_result, result_str])

    # this function iterates over test groups and removes each test that
    # is in the blacklist or not in the whitelist
    def __filter_groups(self, test_groups):
        groups_to_remove = []

        # filter out tests from the test groups
        for i, test_group in enumerate(test_groups):

            # iterate over a copy so that we can safely delete
            # individual tests
            for test in test_group["Tests"][:]:
                test_id = test["Command"]

                # dump tests are specified in full, e.g. "Dump_mempool"
                if "_autotest" in test_id:
                    test_id = test_id[:-len("_autotest")]

                # filter out blacklisted/non-whitelisted tests
                if self.blacklist and test_id in self.blacklist:
                    test_group["Tests"].remove(test)
                if self.whitelist and test_id not in self.whitelist:
                    test_group["Tests"].remove(test)

            # modify or remove the original group
            if len(test_group["Tests"]) > 0:
                test_groups[i] = test_group
            else:
                # remember which groups should be deleted
                # put the numbers in backwards so that we start
                # deleting from the end, not from the beginning
                groups_to_remove.insert(0, i)

        # remove the test groups that ended up empty
        for i in groups_to_remove:
            del test_groups[i]

        return test_groups
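
    # for illustration, a blacklist of ["mempool"] (a hypothetical value)
    # would remove a test whose command is "mempool_autotest", since the
    # "_autotest" suffix is stripped before matching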

    # iterate over test groups and run tests associated with them
    def run_all_tests(self):
        # filter out blacklisted/non-whitelisted tests
        self.parallel_test_groups = \
            self.__filter_groups(self.parallel_test_groups)
        self.non_parallel_test_groups = \
            self.__filter_groups(self.non_parallel_test_groups)

        # create a pool of worker processes
        pool = multiprocessing.Pool(processes=1)

        results = []

        # whatever happens, try to save as many logs as possible
        try:

            # create table header
            print ""
            print "Test name".ljust(30),
            print "Test result".ljust(29),
            print "Test".center(9),
            print "Total".center(9)
            print ""

            # make a note of the tests' start time
            self.start = time.time()

            # assign worker processes to run test groups
            for test_group in self.parallel_test_groups:
                result = pool.apply_async(run_test_group,
                                          [self.__get_cmdline(test_group),
                                           test_group, self.target])
                results.append(result)

            # iterate while we have group execution results to get
            while len(results) > 0:

                # iterate over a copy to be able to safely delete results
                # this iterates over a list of group results
                for group_result in results[:]:

                    # if the worker hasn't finished yet, continue
                    if not group_result.ready():
                        continue

                    res = group_result.get()

                    self.__process_results(res)

                    # remove the result from the list once we're done with it
                    results.remove(group_result)

            # run non-parallel tests; they are run one by one, synchronously
            for test_group in self.non_parallel_test_groups:
                group_result = run_test_group(
                    self.__get_cmdline(test_group), test_group, self.target)

                self.__process_results(group_result)

            # get total run time
            cur_time = time.time()
            total_time = int(cur_time - self.start)

            # print out summary
            print ""
            print "Total run time: %02dm %02ds" % (total_time / 60,
                                                   total_time % 60)
            if self.fails != 0:
                print "Number of failed tests: %s" % str(self.fails)

            # write summary to logfile
            self.logfile.write("Summary\n")
            self.logfile.write("Target: ".ljust(15) + "%s\n" % self.target)
            self.logfile.write("Tests: ".ljust(15) + "%i\n" % self.n_tests)
            self.logfile.write("Failed tests: ".ljust(15) +
                               "%i\n" % self.fails)
        except:
            print "Exception occurred"
            print sys.exc_info()

        # drop the logs from all executions into the logfile
        for buf in self.log_buffers:
            self.logfile.write(buf.replace("\r", ""))
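
# a minimal usage sketch (illustrative only: the real driver script and test
# definitions live outside this module, so the binary path, target name and
# callback below are hypothetical):
#
#     def example_autotest(child, command):
#         child.sendline(command)
#         # a real callback would parse the binary's output here
#         return 0, "Success"
#
#     runner = AutotestRunner("./app/test -c f -n 4",
#                             "x86_64-native-linuxapp-gcc", [], [])
#     runner.add_parallel_test_group({
#         "Prefix": "group1",
#         "Memory": "32",
#         "Tests": [{"Name": "Mempool autotest",
#                    "Command": "mempool_autotest",
#                    "Func": example_autotest,
#                    "Report": None}],
#     })
#     runner.run_all_tests()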