2 # SPDX-License-Identifier: BSD-3-Clause
3 # Copyright(c) 2016 Intel Corporation
6 # This script maps the set of pipelines identified (MASTER pipelines are
7 # ignored) from the input configuration file to the set of cores
8 # provided as input argument and creates configuration files for each of
9 # the mapping combinations.
12 from __future__ import print_function
13 from collections import namedtuple
# Trace/file-output switches: 1 = enabled, 0 = disabled.
# The stage*_traceout flags control console logging; the *_fileout flags
# control generation of output .cfg files (toggled by CLI options below).
enable_stage0_traceout = 1
enable_stage1_traceout = 1
enable_stage2_traceout = 1

enable_stage1_fileout = 1
enable_stage2_fileout = 1

# Hard upper bounds on the number of cores and pipelines the mapper
# supports (array sizes below derive from these).
Constants = namedtuple('Constants', ['MAX_CORES', 'MAX_PIPELINES'])
constants = Constants(16, 64)
# pattern for physical core: 's<SOCKETID>c<COREID>' with COREID >= 1 and
# no leading zero; the 's'/'c' markers may be upper case.
# Raw string avoids the invalid-escape DeprecationWarning that a bare
# '\d' triggers on newer Python versions.
pattern_phycore = r'^(s|S)\d(c|C)[1-9][0-9]*$'
reg_phycore = re.compile(pattern_phycore)
    # NOTE(review): the enclosing 'def popcount(mask):' line is elided
    # from this view; counts the set bits in mask.
    return bin(mask).count("1")


    # NOTE(review): fragment of len2mask(length) — rejects widths > 64,
    # then builds an all-ones mask of 'length' bits.
    sys.exit('error: len2mask - length %i > 64. exiting' % length)

    return int('1' * length, 2)
def bitstring_write(n, n_bits):
    # Render n as an n_bits-wide bit string (body elided from this view).

        # Cores2 state: bit index of each pipeline assigned to the core,
        # one unsigned slot per possible pipeline.
        self.bitpos = array.array(
            "L", itertools.repeat(0, constants.MAX_PIPELINES))

        # Context0 state: one Cores0 slot per possible core.
        self.cores = [Cores0() for i in range(0, constants.MAX_CORES)]

        # pipelines still left to distribute during stage0 enumeration
        self.n_pipelines0 = 0

        self.file_comment = ""
    def stage0_print(self):
        # Debug dump of Context0 state: per-core pipeline counts and the
        # two sub-context references.
        print('printing Context0 obj')
        print('c0.cores(n_pipelines) = [ ', end='')
        for cores_count in range(0, constants.MAX_CORES):
            print(self.cores[cores_count].n_pipelines, end=' ')
        print('c0.n_cores = %d' % self.n_cores)
        print('c0.n_pipelines = %d' % self.n_pipelines)
        print('c0.n_pipelines0 = %d' % self.n_pipelines0)
        print('c0.pos = %d' % self.pos)
        print('c0.file_comment = %s' % self.file_comment)
        if (self.ctx1 is not None):
            print('c0.ctx1 = ', end='')
            print(repr(self.ctx1))
        # NOTE(review): an 'else:' line appears to be elided from this
        # view; the next print presumably belongs to that branch.
            print('c0.ctx1 = None')

        if (self.ctx2 is not None):
            print('c0.ctx2 = ', end='')
            print(repr(self.ctx2))
        # NOTE(review): 'else:' elided here as well.
            print('c0.ctx2 = None')
    def stage0_init(self, num_cores, num_pipelines, ctx1, ctx2):
        # Seed the stage0 enumeration with the requested core/pipeline
        # counts. NOTE(review): the ctx1/ctx2 assignments are elided from
        # this view.
        self.n_cores = num_cores
        self.n_pipelines = num_pipelines
    def stage0_process(self):
        # Enumerate every split of n_pipelines counts across cores (counts
        # non-increasing core to core) and hand each complete split to
        # stage1 via ctx1. NOTE(review): the outer loop headers and
        # backtracking branches are elided from this view.
        self.cores[0].n_pipelines = self.n_pipelines
        self.n_pipelines0 = 0

        # go forward: assign to the next core no more pipelines than the
        # previous core got
        if ((self.pos < self.n_cores) and (self.n_pipelines0 > 0)):
            self.cores[self.pos].n_pipelines = min(
                self.cores[self.pos - 1].n_pipelines,
            self.n_pipelines0 -= self.cores[self.pos].n_pipelines

        # a complete split: run stage1 on it
        if (self.n_pipelines0 == 0):
            self.ctx1.stage1_init(self, self.ctx2)  # self is object c0
            self.ctx1.stage1_process()

        # go backward: reclaim this core's pipelines for redistribution
        if ((self.cores[self.pos].n_pipelines > 1) and
                (self.pos != (self.n_cores - 1))):
            self.n_pipelines0 += self.cores[self.pos].n_pipelines
            self.cores[self.pos].n_pipelines = 0

        self.cores[self.pos].n_pipelines -= 1
        self.n_pipelines0 += 1
    def stage0_log(self):
        # Print the stage0 split (pipelines per core) and propagate the
        # same text to ctx1/ctx2 for inclusion in generated files.
        tmp_file_comment = ""
        if(enable_stage0_traceout != 1):
            # NOTE(review): early 'return' elided from this view
        print('STAGE0: ', end='')
        tmp_file_comment += 'STAGE0: '
        for cores_count in range(0, self.n_cores):
            # NOTE(review): the opening 'print(' line of this call is
            # elided from this view
                self.cores[cores_count].n_pipelines), end='')
            tmp_file_comment += "C{} = {}\t".format(
                cores_count, self.cores[cores_count].n_pipelines)
        self.ctx1.stage0_file_comment = tmp_file_comment
        self.ctx2.stage0_file_comment = tmp_file_comment
        # Context1 state: one Cores1 slot per possible core.
        self.cores = [Cores1() for i in range(constants.MAX_CORES)]

        self.stage0_file_comment = ""
        self.stage1_file_comment = ""

        # maps each pipeline index to the core it is assigned to
        self.arr_pipelines2cores = []

    def stage1_reset(self):
        # Clear per-core pipeline bitmasks/counts and the pipeline-to-core
        # mapping before this context is reused.
        for i in range(constants.MAX_CORES):
            self.cores[i].pipelines = 0
            self.cores[i].n_pipelines = 0
        # clear the mapping list in place (callers hold a reference)
        del self.arr_pipelines2cores[:]
    def stage1_print(self):
        # Debug dump of Context1 state: per-core pipeline bitmask and count.
        print('printing Context1 obj')
        print('ctx1.cores(pipelines,n_pipelines) = [ ', end='')
        for cores_count in range(0, constants.MAX_CORES):
            print('(%d,%d)' % (self.cores[cores_count].pipelines,
                               self.cores[cores_count].n_pipelines), end=' ')
        print('ctx1.n_cores = %d' % self.n_cores)
        print('ctx1.n_pipelines = %d' % self.n_pipelines)
        print('ctx1.pos = %d' % self.pos)
        print('ctx1.stage0_file_comment = %s' % self.stage0_file_comment)
        print('ctx1.stage1_file_comment = %s' % self.stage1_file_comment)
        if (self.ctx2 is not None):
            print('ctx1.ctx2 = ', end='')
        # NOTE(review): 'else:' line elided from this view
            print('ctx1.ctx2 = None')
    def stage1_init(self, c0, ctx2):
        # Copy the stage0 split (object c0) into this context and size the
        # pipeline-to-core array. NOTE(review): counter initialisation and
        # increment lines are elided from this view.
        while (c0.cores[self.n_cores].n_pipelines > 0):
        self.n_pipelines = c0.n_pipelines
        # one slot per pipeline, filled during stage1_process_file
        self.arr_pipelines2cores = [0] * self.n_pipelines
        while (i < self.n_cores):
            self.cores[i].n_pipelines = c0.cores[i].n_pipelines
    def stage1_process(self):
        # Enumerate assignments of pipeline bitmasks to cores matching the
        # stage0 counts; rejects overlapping masks and symmetric
        # duplicates, and hands each complete assignment to stage2.
        # NOTE(review): intricate backtracking — several loop headers and
        # 'continue'/'break' lines are elided from this view.
        pipelines_max = len2mask(self.n_pipelines)
        # wrapped around: reset this core and backtrack
        if (self.cores[self.pos].pipelines == pipelines_max):
            self.cores[self.pos].pipelines = 0
        self.cores[self.pos].pipelines += 1
        # candidate mask must contain exactly the stage0 pipeline count
        if (popcount(self.cores[self.pos].pipelines) !=
                self.cores[self.pos].n_pipelines):
        # reject masks sharing pipelines with an earlier core
        while (pos < self.pos):
            if ((self.cores[self.pos].pipelines) &
                    (self.cores[pos].pipelines)):
        # canonical ordering between cores with equal pipeline counts
        # avoids emitting symmetric duplicates
        if ((self.pos > 0) and
                ((self.cores[self.pos].n_pipelines) ==
                 (self.cores[self.pos - 1].n_pipelines)) and
                ((self.cores[self.pos].pipelines) <
                 (self.cores[self.pos - 1].pipelines))):
        # all cores assigned: run the hyper-thread stage
        if (self.pos == self.n_cores - 1):
            self.ctx2.stage2_init(self)
            self.ctx2.stage2_process()
        self.cores[self.pos].pipelines = 0
    def stage1_log(self):
        # Print each core's pipeline bitmap (MSB first) and optionally
        # write the combination out to a config file.
        tmp_file_comment = ""
        if(enable_stage1_traceout == 1):
            print('STAGE1: ', end='')
            tmp_file_comment += 'STAGE1: '
            while (i < self.n_cores):
                print('C%d = [' % i, end='')
                tmp_file_comment += "C{} = [".format(i)
                # walk bits from most-significant pipeline down
                j = self.n_pipelines - 1
                cond = ((self.cores[i].pipelines) & (1 << j))
                tmp_file_comment += '1'
                tmp_file_comment += '0'
                tmp_file_comment += ']\t'
            self.stage1_file_comment = tmp_file_comment
            self.ctx2.stage1_file_comment = tmp_file_comment

        # check if file tracing is enabled
        if(enable_stage1_fileout != 1):
            # NOTE(review): 'return' elided from this view

        # spit out the combination to file
        self.stage1_process_file()
344 def stage1_updateCoresInBuf(self, nPipeline, sCore):
345 rePipeline = self._fileTrace.arr_pipelines[nPipeline]
346 rePipeline = rePipeline.replace("[", "\[").replace("]", "\]")
347 reCore = 'core\s*=\s*((\d*)|(((s|S)\d)?(c|C)[1-9][0-9]*)).*\n'
348 sSubs = 'core = ' + sCore + '\n'
350 reg_pipeline = re.compile(rePipeline)
351 search_match = reg_pipeline.search(self._fileTrace.in_buf)
354 pos = search_match.start()
355 substr1 = self._fileTrace.in_buf[:pos]
356 substr2 = self._fileTrace.in_buf[pos:]
357 substr2 = re.sub(reCore, sSubs, substr2, 1)
358 self._fileTrace.in_buf = substr1 + substr2
    def stage1_process_file(self):
        # Write one output .cfg per stage1 combination: map each pipeline
        # to its physical core, patch the buffer, then emit it with a
        # commented mapping header. NOTE(review): loop headers, the
        # outputFile.write(...) opening lines and the file close are
        # elided from this view; confirm the handle is closed.
        outFileName = os.path.join(self._fileTrace.out_path,
                                   self._fileTrace.prefix_outfile)
        outFileName += "_{}CoReS".format(self.n_cores)
        i = 0  # represents core number
        while (i < self.n_cores):
            j = self.n_pipelines - 1
            cond = ((self.cores[i].pipelines) & (1 << j))
            # update the pipelines array to match the core
            # only in case of cond match
            self.arr_pipelines2cores[
                pipeline_idx] = fileTrace.in_physical_cores[i]

        # update the in_buf as per the arr_pipelines2cores
        for pipeline_idx in range(len(self.arr_pipelines2cores)):
            outFileName += "_{}".format(self.arr_pipelines2cores[pipeline_idx])
            self.stage1_updateCoresInBuf(
                pipeline_idx, self.arr_pipelines2cores[pipeline_idx])

        # by now the in_buf is all set to be written to file
        outFileName += self._fileTrace.suffix_outfile
        outputFile = open(outFileName, "w")

        # write out the comments
        strTruncated = ("", "(Truncated)")[self._fileTrace.ncores_truncated]
            "; =============== Pipeline-to-Core Mapping ================\n"
            "; Generated from file {}\n"
            "; Input pipelines = {}\n"
            "; Input cores = {}\n"
            "; N_PIPELINES = {} N_CORES = {} {} hyper_thread = {}\n"
            self._fileTrace.in_file_namepath,
            fileTrace.arr_pipelines,
            fileTrace.in_physical_cores,
            self._fileTrace.n_pipelines,
            self._fileTrace.n_cores,
            self._fileTrace.hyper_thread))

            "; ========================================================\n"
            stg0cmt=self.stage0_file_comment,
            stg1cmt=self.stage1_file_comment))

        # write buffer contents
        outputFile.write(self._fileTrace.in_buf)
        # Context2 state: one Cores2 slot per possible core.
        self.cores = [Cores2() for i in range(constants.MAX_CORES)]

        self.stage0_file_comment = ""
        self.stage1_file_comment = ""
        self.stage2_file_comment = ""

        # each array entry is a pipeline mapped to core stored as string
        # pipeline ranging from 1 to n, however stored in zero based array
        self.arr2_pipelines2cores = []
440 def stage2_print(self):
441 print('printing Context2 obj')
442 print('ctx2.cores(pipelines, n_pipelines, counter, counter_max) =')
443 for cores_count in range(0, constants.MAX_CORES):
444 print('core[%d] = (%d,%d,%d,%d)' % (
446 self.cores[cores_count].pipelines,
447 self.cores[cores_count].n_pipelines,
448 self.cores[cores_count].counter,
449 self.cores[cores_count].counter_max))
451 print('ctx2.n_cores = %d' % self.n_cores, end='')
452 print('ctx2.n_pipelines = %d' % self.n_pipelines, end='')
453 print('ctx2.pos = %d' % self.pos)
454 print('ctx2.stage0_file_comment = %s' %
455 self.self.stage0_file_comment)
456 print('ctx2.stage1_file_comment = %s' %
457 self.self.stage1_file_comment)
458 print('ctx2.stage2_file_comment = %s' %
459 self.self.stage2_file_comment)
    def stage2_reset(self):
        # Clear per-core state: pipeline bitmask, count, hyper-thread
        # counters and the pipeline bit-position table.
        for i in range(0, constants.MAX_CORES):
            self.cores[i].pipelines = 0
            self.cores[i].n_pipelines = 0
            self.cores[i].counter = 0
            self.cores[i].counter_max = 0

            for idx in range(0, constants.MAX_PIPELINES):
                self.cores[i].bitpos[idx] = 0

        # clear the mapping list in place (callers hold a reference)
        del self.arr2_pipelines2cores[:]
    def bitpos_load(self, coreidx):
        # Record in cores[coreidx].bitpos the bit index of every pipeline
        # assigned to the core, and the resulting count in n_pipelines.
        # NOTE(review): the i/j initialisation and increment lines are
        # elided from this view.
        while (i < self.n_pipelines):
            if ((self.cores[coreidx].pipelines) &
                self.cores[coreidx].bitpos[j] = i
        self.cores[coreidx].n_pipelines = j
487 def bitpos_apply(self, in_buf, pos, n_pos):
489 for i in range(0, n_pos):
490 out |= (in_buf & (1 << i)) << (pos[i] - i)
    def stage2_init(self, ctx1):
        # Copy the stage1 assignment into this context and precompute each
        # core's pipeline bit positions. NOTE(review): reset call and loop
        # counter lines are elided from this view.
        self.n_cores = ctx1.n_cores
        self.n_pipelines = ctx1.n_pipelines

        # one string slot per pipeline, filled during stage2 processing
        self.arr2_pipelines2cores = [''] * self.n_pipelines

        while (core_idx < self.n_cores):
            self.cores[core_idx].pipelines = ctx1.cores[core_idx].pipelines
            self.bitpos_load(core_idx)
    def stage2_log(self):
        # Print each core's pipelines split across the two hyper-threads
        # (HT0 = bits where the counter bit is 0, HT1 = bits where it is
        # 1) and optionally write the combination out to a file.
        tmp_file_comment = ""
        if(enable_stage2_traceout == 1):
            print('STAGE2: ', end='')
            tmp_file_comment += 'STAGE2: '
        for i in range(0, self.n_cores):
            mask = len2mask(self.cores[i].n_pipelines)
            pipelines_ht0 = self.bitpos_apply(
                (~self.cores[i].counter) & mask,
                self.cores[i].bitpos,
                self.cores[i].n_pipelines)

            pipelines_ht1 = self.bitpos_apply(
                self.cores[i].counter,
                self.cores[i].bitpos,
                self.cores[i].n_pipelines)

            print('C%dHT0 = [' % i, end='')
            tmp_file_comment += "C{}HT0 = [".format(i)
            tmp_file_comment += bitstring_write(
                pipelines_ht0, self.n_pipelines)

            print(']\tC%dHT1 = [' % i, end='')
            tmp_file_comment += "]\tC{}HT1 = [".format(i)
            tmp_file_comment += bitstring_write(
                pipelines_ht1, self.n_pipelines)
            tmp_file_comment += ']\t'

        self.stage2_file_comment = tmp_file_comment

        # check if file tracing is enabled
        if(enable_stage2_fileout != 1):
            # NOTE(review): 'return' elided from this view
        # spit out the combination to file
        self.stage2_process_file()
547 def stage2_updateCoresInBuf(self, nPipeline, sCore):
548 rePipeline = self._fileTrace.arr_pipelines[nPipeline]
549 rePipeline = rePipeline.replace("[", "\[").replace("]", "\]")
550 reCore = 'core\s*=\s*((\d*)|(((s|S)\d)?(c|C)[1-9][0-9]*)).*\n'
551 sSubs = 'core = ' + sCore + '\n'
553 reg_pipeline = re.compile(rePipeline)
554 search_match = reg_pipeline.search(self._fileTrace.in_buf)
557 pos = search_match.start()
558 substr1 = self._fileTrace.in_buf[:pos]
559 substr2 = self._fileTrace.in_buf[pos:]
560 substr2 = re.sub(reCore, sSubs, substr2, 1)
561 self._fileTrace.in_buf = substr1 + substr2
    def pipelines2cores(self, n, n_bits, nCore, bHT):
        # For every set bit of n, record that the corresponding pipeline
        # runs on core nCore; bHT marks the hyper-thread sibling, which
        # gets an 'h' suffix on the core name. NOTE(review): the loop
        # header, branch keywords and index bookkeeping are elided from
        # this view.
        cond = (n & (1 << i))
        # update the pipelines array to match the core
        # only in case of cond match
        # PIPELINE0 and core 0 are reserved
        tmpCore = fileTrace.in_physical_cores[nCore] + 'h'
        self.arr2_pipelines2cores[pipeline_idx] = tmpCore
        self.arr2_pipelines2cores[pipeline_idx] = \
            fileTrace.in_physical_cores[nCore]
    def stage2_process_file(self):
        # Write one output .cfg per stage2 (hyper-thread) combination:
        # split each core's pipelines into HT0/HT1, patch the buffer, then
        # emit it with a commented mapping header. NOTE(review): the
        # outputFile.write(...) opening lines and the file close are
        # elided from this view; confirm the handle is closed.
        outFileName = os.path.join(self._fileTrace.out_path,
                                   self._fileTrace.prefix_outfile)
        outFileName += "_{}CoReS".format(self.n_cores)

        for i in range(0, self.n_cores):
            mask = len2mask(self.cores[i].n_pipelines)
            # HT0 takes bits whose counter bit is 0, HT1 the complement
            pipelines_ht0 = self.bitpos_apply((~self.cores[i].counter) & mask,
                                              self.cores[i].bitpos,
                                              self.cores[i].n_pipelines)

            pipelines_ht1 = self.bitpos_apply(self.cores[i].counter,
                                              self.cores[i].bitpos,
                                              self.cores[i].n_pipelines)

            # update pipelines to core mapping
            self.pipelines2cores(pipelines_ht0, self.n_pipelines, i, False)
            self.pipelines2cores(pipelines_ht1, self.n_pipelines, i, True)

        # update the in_buf as per the arr_pipelines2cores
        for pipeline_idx in range(len(self.arr2_pipelines2cores)):
            outFileName += "_{}".format(
                self.arr2_pipelines2cores[pipeline_idx])
            self.stage2_updateCoresInBuf(
                pipeline_idx, self.arr2_pipelines2cores[pipeline_idx])

        # by now the in_buf is all set to be written to file
        outFileName += self._fileTrace.suffix_outfile
        outputFile = open(outFileName, "w")

        # write the file comments
        strTruncated = ("", "(Truncated)")[self._fileTrace.ncores_truncated]
            "; =============== Pipeline-to-Core Mapping ================\n"
            "; Generated from file {}\n"
            "; Input pipelines = {}\n"
            "; Input cores = {}\n"
            "; N_PIPELINES = {} N_CORES = {} {} hyper_thread = {} \n"
            self._fileTrace.in_file_namepath,
            fileTrace.arr_pipelines,
            fileTrace.in_physical_cores,
            self._fileTrace.n_pipelines,
            self._fileTrace.n_cores,
            self._fileTrace.hyper_thread))

            "; ========================================================\n"
            stg0cmt=self.stage0_file_comment,
            stg1cmt=self.stage1_file_comment,
            stg2cmt=self.stage2_file_comment))

        # write the buffer contents
        outputFile.write(self._fileTrace.in_buf)
    def stage2_process(self):
        # Enumerate all hyper-thread splits per core by cycling each
        # core's counter from 0 to counter_max, odometer style.
        # NOTE(review): loop headers and backtracking lines are elided
        # from this view.
        while(i < self.n_cores):
            self.cores[i].counter_max = len2mask(
                self.cores[i].n_pipelines - 1)
        self.pos = self.n_cores - 1
        if (self.pos == self.n_cores - 1):
        # this core's counter wrapped: reset it and carry to the left
        if (self.cores[self.pos].counter ==
                self.cores[self.pos].counter_max):
        self.cores[self.pos].counter = 0
        self.cores[self.pos].counter += 1
        if(self.pos < self.n_cores - 1):
    def __init__(self, filenamepath):
        # Capture input-file paths and derive output naming and limits.
        self.in_file_namepath = os.path.abspath(filenamepath)
        self.in_filename = os.path.basename(self.in_file_namepath)
        self.in_path = os.path.dirname(self.in_file_namepath)

        # output files reuse the input file's base name with a .cfg suffix
        filenamesplit = self.in_filename.split('.')
        self.prefix_outfile = filenamesplit[0]
        self.suffix_outfile = ".cfg"

        # output folder: in the same folder as input file
        # create new folder in the name of input file
        self.out_path = os.path.join(
            os.path.abspath(os.path.dirname(__file__)),
        # NOTE(review): the 'try:' line and part of the error handling
        # around makedirs are elided from this view
            os.makedirs(self.out_path)
        except OSError as excep:
            if excep.errno == errno.EEXIST and os.path.isdir(self.out_path):

        self.arr_pipelines = []  # holds the positions of search

        # hard limit on pipelines accepted from the input file
        self.max_pipelines = 15

        self.in_physical_cores = None
        self.hyper_thread = None

        # save the num of pipelines determined from input file
        # save the num of cores input (or the truncated value)
        self.ncores_truncated = False
714 def print_TraceFile(self):
715 print("self.in_file_namepath = ", self.in_file_namepath)
716 print("self.in_filename = ", self.in_filename)
717 print("self.in_path = ", self.in_path)
718 print("self.out_path = ", self.out_path)
719 print("self.prefix_outfile = ", self.prefix_outfile)
720 print("self.suffix_outfile = ", self.suffix_outfile)
721 print("self.in_buf = ", self.in_buf)
722 print("self.arr_pipelines =", self.arr_pipelines)
723 print("self.in_physical_cores", self.in_physical_cores)
724 print("self.hyper_thread", self.hyper_thread)
def process(n_cores, n_pipelines, fileTrace):
    '''process and map pipelines, cores.'''
    # Validate the counts, truncate cores to the pipeline count when
    # needed, then start the stage0 -> stage1 -> stage2 enumeration.
    # NOTE(review): the 'if (n_cores == 0):' guard line is elided from
    # this view.
    sys.exit('N_CORES is 0, exiting')

    if (n_pipelines == 0):
        sys.exit('N_PIPELINES is 0, exiting')

    # never map more cores than there are pipelines
    if (n_cores > n_pipelines):
        print('\nToo many cores, truncating N_CORES to N_PIPELINES')
        n_cores = n_pipelines
        fileTrace.ncores_truncated = True

    fileTrace.n_pipelines = n_pipelines
    fileTrace.n_cores = n_cores

    strTruncated = ("", "(Truncated)")[fileTrace.ncores_truncated]
    print("N_PIPELINES = {}, N_CORES = {} {}"
          .format(n_pipelines, n_cores, strTruncated))
    print("---------------------------------------------------------------")

    ctx0_inst = Context0()
    ctx1_inst = Context1()
    ctx2_inst = Context2()

    # initialize the class variables
    ctx1_inst._fileTrace = fileTrace
    ctx2_inst._fileTrace = fileTrace

    ctx0_inst.stage0_init(n_cores, n_pipelines, ctx1_inst, ctx2_inst)
    ctx0_inst.stage0_process()
def validate_core(core):
    # True/False check of a single core name against reg_phycore.
    # NOTE(review): the return statements are elided from this view.
    match = reg_phycore.match(core)
def validate_phycores(phy_cores):
    '''validate physical cores, check if unique.'''
    # argparse 'type' callback for --physical_cores: parses the
    # comma-separated list and rejects duplicates or malformed names.
    # NOTE(review): the return statements are elided from this view.
    phy_cores = phy_cores.strip().split(',')

    # check if the core list is unique
    if(len(phy_cores) != len(set(phy_cores))):
        print('list of physical cores has duplicates')

    for core in phy_cores:
        if not validate_core(core):
            print('invalid physical core specified.')
def scanconfigfile(fileTrace):
    '''scan input file for pipelines, validate then process.'''
    # NOTE(review): a few lines (file seek/rewind, match guards, the
    # max_cores value in the first error message, the file close) are
    # elided from this view.
    filetoscan = open(fileTrace.in_file_namepath, 'r')
    fileTrace.in_buf = filetoscan.read()

    # reset iterator on open file

    # scan input file for pipelines
    # master pipelines to be ignored
    pattern_pipeline = r'\[PIPELINE\d*\]'
    pattern_mastertype = r'type\s*=\s*MASTER'

    pending_pipeline = False
    for line in filetoscan:
        match_pipeline = re.search(pattern_pipeline, line)
        match_type = re.search('type\s*=', line)
        match_mastertype = re.search(pattern_mastertype, line)

        # remember the section header until we learn its type
        sPipeline = line[match_pipeline.start():match_pipeline.end()]
        pending_pipeline = True
        # found a type definition...
        if(match_mastertype is None):
            # and this is not a master pipeline...
            if(pending_pipeline):
                # add it to the list of pipelines to be mapped
                fileTrace.arr_pipelines.append(sPipeline)
                pending_pipeline = False
            # and this is a master pipeline...
            # ignore the current and move on to next
            pending_pipeline = False

    # validate if pipelines are unique
    if(len(fileTrace.arr_pipelines) != len(set(fileTrace.arr_pipelines))):
        sys.exit('Error: duplicate pipelines in input file')

    num_pipelines = len(fileTrace.arr_pipelines)
    num_cores = len(fileTrace.in_physical_cores)

    print("-------------------Pipeline-to-core mapping--------------------")
    print("Input pipelines = {}\nInput cores = {}"
          .format(fileTrace.arr_pipelines, fileTrace.in_physical_cores))

    # input configuration file validations goes here
    if (num_cores > fileTrace.max_cores):
        sys.exit('Error: number of cores specified > max_cores (%d)' %

    if (num_pipelines > fileTrace.max_pipelines):
        sys.exit('Error: number of pipelines in input \
cfg file > max_pipelines (%d)' % fileTrace.max_pipelines)

    # call process to generate pipeline-to-core mapping, trace and log
    process(num_cores, num_pipelines, fileTrace)
if __name__ == "__main__":
    # CLI entry: parse arguments, configure the stage output switches,
    # then scan the input config file and generate the mappings.
    # NOTE(review): the argument flag/name lines of each add_argument
    # call are elided from this view.
    parser = argparse.ArgumentParser(description='mappipelines')

    reqNamedGrp = parser.add_argument_group('required named args')
    reqNamedGrp.add_argument(
        type=argparse.FileType('r'),
        help='Input config file',

    reqNamedGrp.add_argument(
        type=validate_phycores,
        help='''Enter available CPU cores in
                format:\"<core>,<core>,...\"
                where each core format: \"s<SOCKETID>c<COREID>\"
                where SOCKETID={0..9}, COREID={1-99}''',

    # add optional arguments
        help='enable/disable hyper threading. default is ON',
        choices=['ON', 'OFF'])

        help='''disable output config file generation.
                Output file generation is enabled by default''',

    args = parser.parse_args()

    # validate_phycores returns None on bad input
    if(args.physical_cores is None):
        parser.error("invalid physical_cores specified")

    # create object of FileTrace and initialise
    fileTrace = FileTrace(args.input_file.name)
    fileTrace.in_physical_cores = args.physical_cores
    fileTrace.hyper_thread = args.hyper_thread

    # HT OFF: skip stage2 entirely; HT ON: only stage2 files are useful
    if(fileTrace.hyper_thread == 'OFF'):
        print("!!!!disabling stage2 HT!!!!")
        enable_stage2_traceout = 0
        enable_stage2_fileout = 0
    elif(fileTrace.hyper_thread == 'ON'):
        print("!!!!HT enabled. disabling stage1 file generation.!!!!")
        enable_stage1_fileout = 0

    if(args.no_output_file is True):
        print("!!!!disabling stage1 and stage2 fileout!!!!")
        enable_stage1_fileout = 0
        enable_stage2_fileout = 0

    scanconfigfile(fileTrace)