diff --git a/CHANGELOG.md b/CHANGELOG.md
index 254edbc..aba777d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,10 @@
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [0.17.0] - 2022-10-25
+- Improve data propagation reports to capture multiple signature updates per coverpoint
+- Add a CLI flag to explicitly log the redundant coverpoints while normalizing the CGF files
+
## [0.16.1] - 2022-10-20
- Fix length of commitval to 32 bits if flen is 32 for f registers in sail parser.
diff --git a/docs/source/cgf.rst b/docs/source/cgf.rst
index 25a4814..e49c538 100644
--- a/docs/source/cgf.rst
+++ b/docs/source/cgf.rst
@@ -334,7 +334,11 @@ A covergroup contains the following nodes:
* **csrcomb-str**
This string is interpreted as a valid python statement/expression which evaluates to a Boolean value. The variables available for use in the expression are as follows:
- * ``csr_name`` : The value (as of the end of previous instruction) in the CSR whose name is specified by csr_name.
+ * ``csr_name`` : The value (as of the end of current instruction) in the CSR whose name is specified by csr_name.
+
+ * ``old("csr_name")`` : The value (as of the end of previous instruction) in the CSR whose name is specified by csr_name.
+
+ * ``write("csr_name")`` : The value being written to the CSR in the current instruction whose name is specified by csr_name.
* ``xlen`` : The length of the regsiters in the machine.
@@ -367,6 +371,12 @@ A covergroup contains the following nodes:
mstatus && (0x8) == 0x8
+ 4. A coverpoint which checks whether the *M* bit of the value being written to *misa* register is unset and the final value that the register assumes has that bit still set.
+
+ .. code-block:: python
+
+ (write("misa") >> 12) & 1 == 0 and misa & 0x1000 == 0x1000
+
* **cross_comb**
*This node is optional.*
diff --git a/docs/source/dpr.rst b/docs/source/dpr.rst
new file mode 100644
index 0000000..522d0a4
--- /dev/null
+++ b/docs/source/dpr.rst
@@ -0,0 +1,12 @@
+***********************
+Data Propagation Report
+***********************
+
+The data propagation report details the quality analysis of the data propagation occurring within the test/application. It reports
+the following statistics about coverpoint hits and related signature updates:
+
+* **STAT1** : Number of instructions that hit unique coverpoints and update the signature
+* **STAT2** : Number of instructions that hit coverpoints which are not unique but still update the signature (completely or partially)
+* **STAT3** : Number of instructions that hit a unique coverpoint but do not update the signature completely
+* **STAT4** : Number of multiple signature updates for the same coverpoint
+* **STAT5** : Number of times the signature was overwritten
diff --git a/riscv_isac/InstructionObject.py b/riscv_isac/InstructionObject.py
index 2490257..7dd585c 100644
--- a/riscv_isac/InstructionObject.py
+++ b/riscv_isac/InstructionObject.py
@@ -1,6 +1,17 @@
import struct
-
+instrs_sig_mutable = ['auipc','jal','jalr']
+instrs_sig_update = ['sh','sb','sw','sd','c.sw','c.sd','c.swsp','c.sdsp','fsw','fsd',\
+ 'c.fsw','c.fsd','c.fswsp','c.fsdsp']
+instrs_no_reg_tracking = ['beq','bne','blt','bge','bltu','bgeu','fence','c.j','c.jal','c.jalr',\
+ 'c.jr','c.beqz','c.bnez', 'c.ebreak'] + instrs_sig_update
+instrs_fcsr_affected = ['fmadd.s','fmsub.s','fnmsub.s','fnmadd.s','fadd.s','fsub.s','fmul.s','fdiv.s',\
+ 'fsqrt.s','fmin.s','fmax.s','fcvt.w.s','fcvt.wu.s','feq.s','flt.s',\
+ 'fle.s','fcvt.s.w','fcvt.s.wu','fcvt.l.s','fcvt.lu.s','fcvt.s.l',\
+ 'fcvt.s.lu', 'fmadd.d','fmsub.d','fnmsub.d','fnmadd.d','fadd.d','fsub.d',\
+ 'fmul.d','fdiv.d','fsqrt.d','fmin.d','fmax.d','fcvt.s.d','fcvt.d.s',\
+ 'feq.d','flt.d','fle.d','fcvt.w.d','fcvt.wu.d','fcvt.l.d','fcvt.lu.d',\
+ 'fcvt.d.l','fcvt.d.lu']
unsgn_rs1 = ['sw','sd','sh','sb','ld','lw','lwu','lh','lhu','lb', 'lbu','flw','fld','fsw','fsd',\
'bgeu', 'bltu', 'sltiu', 'sltu','c.lw','c.ld','c.lwsp','c.ldsp',\
'c.sw','c.sd','c.swsp','c.sdsp','mulhu','divu','remu','divuw',\
@@ -112,6 +123,10 @@ def __init__(
self.rd_nregs = 1
+ def is_sig_update(self):
+ return self.instr_name in instrs_sig_update
+
+
def evaluate_instr_vars(self, xlen, flen, arch_state, csr_regfile, instr_vars):
'''
This function populates the provided instr_vars dictionary
@@ -179,6 +194,84 @@ def evaluate_instr_vars(self, xlen, flen, arch_state, csr_regfile, instr_vars):
instr_vars.update(ext_specific_vars)
+ def get_elements_to_track(self, xlen):
+ '''
+ This function returns the elements to track to aid in monitoring signature updates and related statistics.
+ The returned value is a tuple of three elements:
+
+ - The first element is a list of registers to track whose values cannot be modified before storing
+ - The second element is a list of registers to track whose value can be modified prior to storing
+ - The third element is a list of instructions to track for signature updates other than those of tracked registers (mostly used for branch instructions)
+ '''
+ regs_to_track_immutable = []
+ regs_to_track_mutable = []
+ instrs_to_track = []
+
+ if self.instr_name in instrs_no_reg_tracking:
+ store_instrs = []
+ if self.is_sig_update():
+ store_instrs = [self.instr_name]
+ else:
+ if self.instr_name.startswith("c."):
+ store_instrs = ['sd','c.sdsp'] if xlen == 64 else ['sw','c.swsp']
+ else:
+ store_instrs = ['sd'] if xlen == 64 else ['sw']
+ instrs_to_track.append(store_instrs)
+ elif self.instr_name in instrs_sig_mutable:
+ if self.rd is not None:
+ reg = self.rd[1] + str(self.rd[0])
+ regs_to_track_mutable.append(reg)
+ else:
+ if self.rd is not None:
+ reg = self.rd[1] + str(self.rd[0])
+ regs_to_track_immutable.append(reg)
+
+ if self.instr_name in instrs_fcsr_affected:
+ regs_to_track_immutable.append('fcsr')
+
+ if self.csr_commit is not None:
+ for commit in self.csr_commit:
+ if commit[0] == "CSR":
+ csr_reg = commit[1]
+ if csr_reg not in regs_to_track_immutable:
+ regs_to_track_immutable.append(csr_reg)
+
+ return (regs_to_track_immutable, regs_to_track_mutable, instrs_to_track)
+
+
+ def get_changed_regs(self, arch_state, csr_regfile):
+ '''
+ This function returns a list of registers whose value will be changed as
+ a result of executing this instruction.
+
+    :param arch_state: Architectural state of the x and f register files
+    :param csr_regfile: Architectural state of CSR register files
+ '''
+ changed_regs = []
+
+ if self.reg_commit is not None:
+ reg = self.reg_commit[0] + self.reg_commit[1]
+
+ prev_value = None
+ if self.reg_commit[0] == 'x':
+ prev_value = arch_state.x_rf[int(self.reg_commit[1])]
+ elif self.reg_commit[0] == 'f':
+ prev_value = arch_state.f_rf[int(self.reg_commit[1])]
+
+ if prev_value != str(self.reg_commit[2][2:]): # this is a string check, but should we do an exact number check?
+ changed_regs.append(reg)
+
+ if self.csr_commit is not None:
+ for commit in self.csr_commit:
+ if commit[0] == "CSR":
+ csr_reg = commit[1]
+
+ if csr_regfile[csr_reg] != str(commit[2][2:]):
+ changed_regs.append(csr_reg)
+
+ return changed_regs
+
+
def update_arch_state(self, arch_state, csr_regfile):
'''
This function updates the arch state and csr regfiles
diff --git a/riscv_isac/__init__.py b/riscv_isac/__init__.py
index 04e7412..7fca03b 100644
--- a/riscv_isac/__init__.py
+++ b/riscv_isac/__init__.py
@@ -4,5 +4,5 @@
__author__ = """InCore Semiconductors Pvt Ltd"""
__email__ = 'info@incoresemi.com'
-__version__ = '0.16.1'
+__version__ = '0.17.0'
diff --git a/riscv_isac/cgf_normalize.py b/riscv_isac/cgf_normalize.py
index 0c98621..7202adb 100644
--- a/riscv_isac/cgf_normalize.py
+++ b/riscv_isac/cgf_normalize.py
@@ -549,7 +549,7 @@ def alternate(var, size, signed=True, fltr_func=None,scale_func=None):
#return [(coverpoint,"Alternate") for coverpoint in coverpoints]
-def expand_cgf(cgf_files, xlen,flen):
+def expand_cgf(cgf_files, xlen,flen, log_redundant=False):
'''
This function will replace all the abstract functions with their unrolled
coverpoints. It replaces node
@@ -616,6 +616,8 @@ def expand_cgf(cgf_files, xlen,flen):
+" in "+labels+": "+str(e) )
else:
for cp,comment in exp_cp:
+ if log_redundant and cp in cgf[labels][label]:
+ logger.warn(f'Redundant coverpoint during normalization: {cp}')
cgf[labels][label].insert(l+i,cp,coverage,comment=comment)
i += 1
return dict(cgf)
diff --git a/riscv_isac/constants.py b/riscv_isac/constants.py
index 9066074..7d12439 100644
--- a/riscv_isac/constants.py
+++ b/riscv_isac/constants.py
@@ -9,9 +9,9 @@
dpr_template = '''
# Data Propagation Report
-- **STAT1** : Number of instructions that hit unique coverpoints and update the signature.
-- **STAT2** : Number of instructions that hit covepoints which are not unique but still update the signature
-- **STAT3** : Number of instructions that hit a unique coverpoint but do not update signature
+- **STAT1** : Number of instructions that hit unique coverpoints and update the signature
+- **STAT2** : Number of instructions that hit coverpoints which are not unique but still update the signature (completely or partially)
+- **STAT3** : Number of instructions that hit a unique coverpoint but do not update the signature completely
- **STAT4** : Number of multiple signature updates for the same coverpoint
- **STAT5** : Number of times the signature was overwritten
@@ -57,10 +57,15 @@
## Details of STAT1:
-- The first column indicates the signature address and the data at that location in hexadecimal in the following format:
+- The first column indicates the signature address(es) and the data at that location in hexadecimal in the following format:
```
- [Address]
- Data
+ [Address1]
+ Data1
+
+ [Address2]
+ Data2
+
+ ...
```
- The second column captures all the coverpoints which have been captured by that particular signature location
diff --git a/riscv_isac/coverage.py b/riscv_isac/coverage.py
index 4d7b3c6..30d0b3f 100644
--- a/riscv_isac/coverage.py
+++ b/riscv_isac/coverage.py
@@ -26,15 +26,84 @@
import multiprocessing as mp
from collections.abc import MutableMapping
+instrs_csr_mov = ['csrrw','csrrs','csrrc','csrrwi','csrrsi','csrrci']
+
+csr_reg_num_to_str = {
+ 3857: 'mvendorid',
+ 3858: 'marchid',
+ 3859: 'mimpid',
+ 3860: 'mhartid',
+ 768: 'mstatus',
+ 769: 'misa',
+ 770: 'medeleg',
+ 771: 'mideleg',
+ 772: 'mie',
+ 773: 'mtvec',
+ 774: 'mcounteren',
+ 832: 'mscratch',
+ 833: 'mepc',
+ 834: 'mcause',
+ 835: 'mtval',
+ 836: 'mip',
+ 928: 'pmpcfg0',
+ 929: 'pmpcfg1',
+ 930: 'pmpcfg2',
+ 931: 'pmpcfg3',
+ 2816: 'mcycle',
+ 2818: 'minstret',
+ 2944: 'mcycleh',
+ 2946: 'minstreth',
+ 800: 'mcountinhibit',
+ 1952: 'tselect',
+ 1953: 'tdata1',
+ 1954: 'tdata2',
+ 1955: 'tdata3',
+ 1968: 'dcsr',
+ 1969: 'dpc',
+ 1970: 'dscratch0',
+ 1971: 'dscratch1',
+ 256: 'sstatus',
+ 258:'sedeleg',
+ 259: 'sideleg',
+ 260: 'sie',
+ 261: 'stvec',
+ 262: 'scounteren',
+ 320: 'sscratch',
+ 321: 'sepc',
+ 322: 'scause',
+ 323: 'stval',
+ 324: 'sip',
+ 384: 'satp',
+ 9: 'vxsat',
+ 1: 'fflags',
+ 2: 'frm',
+ 3: 'fcsr',
+}
+
class cross():
BASE_REG_DICT = { 'x'+str(i) : 'x'+str(i) for i in range(32)}
- def __init__(self,label,coverpoint):
+ def __init__(self,label,coverpoint,xlen,flen,addr_pairs,sig_addrs,window_size):
self.label = label
self.coverpoint = coverpoint
+ self.xlen = xlen
+ self.flen = flen
+ self.addr_pairs = addr_pairs
+ self.sig_addrs = sig_addrs
+ self.window_size = window_size
+
+ self.arch_state = archState(xlen,flen)
+ self.csr_regfile = csr_registers(xlen)
+ self.stats = statistics(xlen, flen)
+
self.result = 0
+ self.queue = []
+
+ self.tracked_regs = set()
+ self.instr_addr_of_tracked_reg = {} # tracked_reg: instr_addr of instr which triggered its tracking
+ self.instr_stat_meta_at_addr = {} # start_instr_addr: [is_ucovpt, num_exp, num_obs, num_rem, covpts_hit, code_seq, store_addresses, store_vals]
## Extract relevant information from coverpt
self.data = self.coverpoint.split('::')
@@ -42,19 +111,68 @@ def __init__(self,label,coverpoint):
self.assign_lst = self.data[1].replace(' ', '')[1:-1].split(':')
self.cond_lst = self.data[2].lstrip().rstrip()[1:-1].split(':')
- def process(self, queue, window_size, addr_pairs):
+ if len(self.ops) > window_size:
+ logger.error(f'Size of opcode list greater than the window size in the cross_comb coverpoint: {coverpoint}')
+
+ def process(self, instr):
+ if len(self.ops) > self.window_size:
+ return
+ self.queue.append(instr)
+ if len(self.queue) >= len(self.ops):
+ self.compute_cross_cov()
+
+ def finish_up(self):
+ if len(self.ops) > self.window_size:
+ return
+ for i in range(len(self.queue)):
+ self.compute()
+
+ # update stats one last time for remaining elements
+ for key_instr_addr in list(self.instr_stat_meta_at_addr.keys()):
+ stat_meta = self.instr_stat_meta_at_addr[key_instr_addr]
+ if stat_meta[0]: # is_ucovpt
+ if stat_meta[2] == stat_meta[1]: # num_observed == num_expected
+ # update STAT1 with (store_addresses, store_vals, covpt, code_seq)
+ self.stats.stat1.append((stat_meta[6], stat_meta[7], stat_meta[4], stat_meta[5]))
+ elif stat_meta[2] < stat_meta[1]: # num_observed < num_expected
+ # update STAT3 with code sequence
+ self.stats.stat3.append('\n'.join(stat_meta[5]))
+ else: # not is_ucovpt
+ if stat_meta[2] > 0: # num_observed > 0
+ # update STAT2
+ _log = 'Op without unique coverpoint updates Signature\n'
+
+ _log += ' -- Code Sequence:\n'
+ for op in stat_meta[5]:
+ _log += ' ' + op + '\n'
+
+ _log += ' -- Signature Addresses:\n'
+ for store_address, store_val in zip(stat_meta[6], stat_meta[7]):
+ _log += ' Address: {0} Data: {1}\n'.format(
+ str(hex(store_address)), store_val)
+
+ _log += ' -- Redundant Coverpoints hit by the op\n'
+ for c in stat_meta[4]:
+ _log += ' - ' + str(c) + '\n'
+
+ logger.warn(_log)
+ self.stats.stat2.append(_log + '\n\n')
+
+ del self.instr_stat_meta_at_addr[key_instr_addr]
+
+ def compute_cross_cov(self):
'''
- Check whether the coverpoint is a hit or not and update the metric
+ Check whether the cross coverage coverpoint was hit or not and update the metric
+ Also perform tracking for generating the data propagation report
'''
- if(len(self.ops)>window_size or len(self.ops)>len(queue)):
- return
+ hit_covpt = False
+ regs_to_track = set()
for index in range(len(self.ops)):
-
- instr = queue[index]
+ instr = self.queue[index]
instr_name = instr.instr_name
- if addr_pairs:
- if not (any([instr.instr_addr >= saddr and instr.instr_addr < eaddr for saddr,eaddr in addr_pairs])):
+ if self.addr_pairs:
+ if not (any([instr.instr_addr >= saddr and instr.instr_addr < eaddr for saddr,eaddr in self.addr_pairs])):
break
rd = None
@@ -107,19 +225,157 @@ def process(self, queue, window_size, addr_pairs):
if (instr_name not in check_lst):
break
+ # get changes to track due to this instr
+ regs_to_track_immutable, regs_to_track_mutable, instrs_to_track = instr.get_elements_to_track(self.xlen)
+ regs_to_track.update(regs_to_track_immutable)
+
if self.cond_lst[index].find('?') == -1:
if(eval(self.cond_lst[index], locals(), cross.BASE_REG_DICT)):
if(index==len(self.ops)-1):
self.result = self.result + 1
+ hit_covpt = True
else:
break
if self.assign_lst[index].find('?') == -1:
exec(self.assign_lst[index], locals(), cross.BASE_REG_DICT)
+ if hit_covpt:
+ self.stats.cov_pt_sig += [self.coverpoint]
+
+ start_instr = self.queue[0]
+ hit_uniq_covpt = self.result == 1
+ num_exp = len(regs_to_track)
+
+ for reg in regs_to_track:
+ if reg in self.tracked_regs:
+ stat_meta = self.instr_stat_meta_at_addr[self.instr_addr_of_tracked_reg[reg]]
+ stat_meta[3] -= 1
+ self.tracked_regs.remove(reg)
+ del self.instr_addr_of_tracked_reg[reg]
+
+ self.tracked_regs.add(reg)
+ self.instr_addr_of_tracked_reg[reg] = start_instr.instr_addr
+
+ self.instr_stat_meta_at_addr[start_instr.instr_addr] = [hit_uniq_covpt, num_exp, 0, num_exp, [self.coverpoint], [], [], []]
+
+ for i in range(len(self.ops)):
+ self.compute(True)
+ else:
+ self.compute()
+
+ def compute(self, is_part_of_covpt=False):
+ instr = self.queue.pop(0)
+
+ mnemonic = instr.mnemonic
+
+ # check if instruction lies within the valid region of interest
+        # default to enabled so `enable` is always bound even when no
+        # region-of-interest address pairs are specified
+        enable = True
+        if self.addr_pairs:
+            enable = any([instr.instr_addr >= saddr and instr.instr_addr < eaddr for saddr,eaddr in self.addr_pairs])
+
+ instr_vars = {}
+ instr.evaluate_instr_vars(self.xlen, self.flen, self.arch_state, self.csr_regfile, instr_vars)
+
+ if enable:
+ # check for changes in tracked registers
+ if not is_part_of_covpt:
+ changed_regs = instr.get_changed_regs(self.arch_state, self.csr_regfile)
+ for reg in changed_regs:
+ if reg in self.tracked_regs:
+ stat_meta = self.instr_stat_meta_at_addr[self.instr_addr_of_tracked_reg[reg]]
+ stat_meta[3] -= 1
+ self.tracked_regs.remove(reg)
+ del self.instr_addr_of_tracked_reg[reg]
+
+ # update code_seq
+ if self.instr_stat_meta_at_addr:
+ if mnemonic is not None:
+ self.stats.code_seq.append('[' + str(hex(instr.instr_addr)) + ']:' + mnemonic)
+ else:
+ self.stats.code_seq.append('[' + str(hex(instr.instr_addr)) + ']:' + instr.instr_name)
+
+ for key_instr_addr, stat_meta in self.instr_stat_meta_at_addr.items():
+ if mnemonic is not None:
+ stat_meta[5].append('[' + str(hex(instr.instr_addr)) + ']:' + mnemonic)
+ else:
+ stat_meta[5].append('[' + str(hex(instr.instr_addr)) + ']:' + instr.instr_name)
+
+ # handle signature update
+ if instr.is_sig_update() and self.sig_addrs:
+ store_address = instr_vars['rs1_val'] + instr_vars['imm_val']
+            store_val = '0x'+self.arch_state.x_rf[instr.rs2[0]]
+ for start, end in self.sig_addrs:
+ if store_address >= start and store_address <= end:
+ logger.debug('Signature update : ' + str(hex(store_address)))
+
+ rs2 = instr_vars['rs2']
+ if rs2 in self.tracked_regs:
+ stat_meta = self.instr_stat_meta_at_addr[self.instr_addr_of_tracked_reg[rs2]]
+ stat_meta[2] += 1 # increase num observed
+ stat_meta[3] -= 1 # decrease num remaining
+ stat_meta[6].append(store_address) # add to store_addresses
+ stat_meta[7].append(store_val) # add to store_vals
+ self.stats.last_meta = [store_address, store_val, stat_meta[4], stat_meta[5]]
+ self.tracked_regs.remove(rs2)
+ del self.instr_addr_of_tracked_reg[rs2]
+ else:
+ if len(self.stats.last_meta):
+ _log = 'Last Coverpoint : ' + str(self.stats.last_meta[2]) + '\n'
+ _log += 'Last Code Sequence : \n\t-' + '\n\t-'.join(self.stats.last_meta[3]) + '\n'
+ _log += 'Current Store : [{0}] : {1} -- Store: [{2}]:{3}\n'.format(\
+ str(hex(instr.instr_addr)), mnemonic,
+ str(hex(store_address)),
+ store_val)
+ logger.error(_log)
+ self.stats.stat4.append(_log + '\n\n')
+
+ self.stats.code_seq = []
+
+ # update stats
+ for key_instr_addr in list(self.instr_stat_meta_at_addr.keys()):
+ stat_meta = self.instr_stat_meta_at_addr[key_instr_addr]
+ if stat_meta[3] == 0: # num_remaining == 0
+ if stat_meta[0]: # is_ucovpt
+ if stat_meta[2] == stat_meta[1]: # num_observed == num_expected
+ # update STAT1 with (store_addresses, store_vals, covpt, code_seq)
+ self.stats.stat1.append((stat_meta[6], stat_meta[7], stat_meta[4], stat_meta[5]))
+ elif stat_meta[2] < stat_meta[1]: # num_observed < num_expected
+ # update STAT3 with code sequence
+ self.stats.stat3.append('\n'.join(stat_meta[5]))
+ else: # not is_ucovpt
+ if stat_meta[2] > 0: # num_observed > 0
+ # update STAT2
+ _log = 'Op without unique coverpoint updates Signature\n'
+
+ _log += ' -- Code Sequence:\n'
+ for op in stat_meta[5]:
+ _log += ' ' + op + '\n'
+
+ _log += ' -- Signature Addresses:\n'
+ for store_address, store_val in zip(stat_meta[6], stat_meta[7]):
+ _log += ' Address: {0} Data: {1}\n'.format(
+ str(hex(store_address)), store_val)
+
+ _log += ' -- Redundant Coverpoints hit by the op\n'
+ for c in stat_meta[4]:
+ _log += ' - ' + str(c) + '\n'
+
+ logger.warn(_log)
+ self.stats.stat2.append(_log + '\n\n')
+
+ del self.instr_stat_meta_at_addr[key_instr_addr]
+
+ instr.update_arch_state(self.arch_state, self.csr_regfile)
+
def get_metric(self):
return self.result
+ def get_stats(self):
+ return self.stats
+
class csr_registers(MutableMapping):
'''
@@ -311,9 +567,6 @@ def __init__(self, xlen, flen):
self.stat4 = []
self.stat5 = []
self.code_seq = []
- self.ucode_seq = []
- self.covpt = []
- self.ucovpt = []
self.cov_pt_sig = []
self.last_meta = []
@@ -326,9 +579,6 @@ def __add__(self, o):
temp.stat5 = self.stat5 + o.stat5
temp.code_seq = self.code_seq + o.code_seq
- temp.ucode_seq = self.ucode_seq + o.ucode_seq
- temp.covpt = self.covpt + o.covpt
- temp.ucovpt = self.ucovpt + o.ucovpt
temp.cov_pt_sig = self.cov_pt_sig + o.cov_pt_sig
temp.last_meta = self.last_meta + o.last_meta
@@ -520,7 +770,7 @@ def simd_val_unpack(val_comb, op_width, op_name, val, local_dict):
if simd_size == op_width:
local_dict[f"{op_name}_val"]=elm_val
-def compute_per_line(queue, event, cgf_queue, stats_queue, cgf, xlen, flen, addr_pairs, sig_addrs, stats, arch_state, csr_regfile, result_count, no_count):
+def compute_per_line(queue, event, cgf_queue, stats_queue, cgf, xlen, flen, addr_pairs, sig_addrs, stats, arch_state, csr_regfile, no_count):
'''
This function checks if the current instruction under scrutiny matches a
particular coverpoint of interest. If so, it updates the coverpoints and
@@ -538,7 +788,6 @@ def compute_per_line(queue, event, cgf_queue, stats_queue, cgf, xlen, flen, addr
:param sig_addrs: pairs of start and end addresses for which signature update needs to be checked
:param stats: `stats` object
:param csr_regfile: Architectural state of CSR register file
- :param result_count:
:type queue: class`multiprocessing.Queue`
:type event: class`multiprocessing.Event`
@@ -552,13 +801,20 @@ def compute_per_line(queue, event, cgf_queue, stats_queue, cgf, xlen, flen, addr
:type sig_addrs: (int, int)
:type stats: class `statistics`
:type csr_regfile: class `csr_registers`
- :type result_count: int
'''
# List to hold hit coverpoints
hit_covpts = []
rcgf = copy.deepcopy(cgf)
+ # Set of elements to monitor for tracking signature updates
+ tracked_regs_immutable = set()
+ tracked_regs_mutable = set()
+ tracked_instrs = [] # list of tuples of the type (list_instr_names, triggering_instr_addr)
+
+ instr_stat_meta_at_addr = {} # Maps an address to the stat metadata of the instruction present at that address [is_ucovpt, num_exp, num_obs, num_rem, covpts_hit, code_seq, store_addresses, store_vals]
+ instr_addr_of_tracked_reg = {} # Maps a tracked register to the address of instruction which triggered its tracking
+
# Enter the loop only when Event is not set or when the
# instruction object queue is not empty
while (event.is_set() == False) or (queue.empty() == False):
@@ -584,9 +840,16 @@ def compute_per_line(queue, event, cgf_queue, stats_queue, cgf, xlen, flen, addr
enable=True
instr_vars = {}
-
instr.evaluate_instr_vars(xlen, flen, arch_state, csr_regfile, instr_vars)
+ old_csr_regfile = {}
+ for i in csr_regfile.csr_regs:
+ old_csr_regfile[i] = int(csr_regfile[i],16)
+ def old_fn_csr_comb_covpt(csr_reg):
+ return old_csr_regfile[csr_reg]
+
+ instr.update_arch_state(arch_state, csr_regfile)
+
if 'rs1' in instr_vars:
rs1 = instr_vars['rs1']
if 'rs2' in instr_vars:
@@ -599,23 +862,26 @@ def compute_per_line(queue, event, cgf_queue, stats_queue, cgf, xlen, flen, addr
else:
is_rd_valid = False
- sig_update = False
- if instr.instr_name in ['sh','sb','sw','sd','c.sw','c.sd','c.swsp','c.sdsp'] and sig_addrs:
- store_address = instr_vars['rs1_val'] + instr_vars['imm_val']
- for start, end in sig_addrs:
- if store_address >= start and store_address <= end:
- sig_update = True
- break
-
- if sig_update: # writing result operands of last non-store instruction to the signature region
- result_count = result_count - 1
- else:
- result_count = instr.rd_nregs
-
for i in csr_regfile.csr_regs:
instr_vars[i] = int(csr_regfile[i],16)
+ csr_write_vals = {}
+ if instr.csr_commit is not None:
+ for commit in instr.csr_commit:
+ if commit[0] == "CSR" and commit[3]:
+ csr_write_vals[commit[1]] = int(commit[3],16)
+ def write_fn_csr_comb_covpt(csr_reg):
+ if csr_reg in csr_write_vals:
+ return csr_write_vals[csr_reg]
+ else:
+ return int(csr_regfile[csr_reg],16)
+
+
if enable :
+ ucovpt = []
+ covpt = []
+ csr_covpt = []
+
for cov_labels,value in cgf.items():
if cov_labels != 'datasets':
if 'mnemonics' in value:
@@ -641,68 +907,60 @@ def compute_per_line(queue, event, cgf_queue, stats_queue, cgf, xlen, flen, addr
# Update hit statistics of the mnemonic
if is_found:
if value[req_node][mnemonic] == 0:
- stats.ucovpt.append('mnemonic : ' + mnemonic)
- stats.covpt.append('mnemonic : ' + mnemonic)
+ ucovpt.append('mnemonic : ' + mnemonic)
+ covpt.append('mnemonic : ' + mnemonic)
value[req_node][mnemonic] += 1
rcgf[cov_labels][req_node][mnemonic] += 1
if instr.instr_name in value[req_node] or is_found:
- if stats.code_seq:
- #logger.error('Found a coverpoint without sign Upd ' + str(stats.code_seq))
- stats.stat3.append('\n'.join(stats.code_seq))
- stats.code_seq = []
- stats.covpt = []
- stats.ucovpt = []
- stats.ucode_seq = []
-
# If mnemonic not detected via base-op
if not is_found:
if value[req_node][instr.instr_name] == 0:
- stats.ucovpt.append('mnemonic : ' + instr.instr_name)
- stats.covpt.append('mnemonic : ' + instr.instr_name)
+ ucovpt.append('mnemonic : ' + instr.instr_name)
+ covpt.append('mnemonic : ' + instr.instr_name)
value[req_node][instr.instr_name] += 1
rcgf[cov_labels][req_node][instr.instr_name] += 1
if 'rs1' in value and rs1 in value['rs1']:
if value['rs1'][rs1] == 0:
- stats.ucovpt.append('rs1 : ' + rs1)
+ ucovpt.append('rs1 : ' + rs1)
if no_count:
hit_covpts.append((cov_labels, 'rs1', rs1))
- stats.covpt.append('rs1 : ' + rs1)
+ covpt.append('rs1 : ' + rs1)
value['rs1'][rs1] += 1
if 'rs2' in value and rs2 in value['rs2']:
if value['rs2'][rs2] == 0:
- stats.ucovpt.append('rs2 : ' + rs2)
+ ucovpt.append('rs2 : ' + rs2)
if no_count:
hit_covpts.append((cov_labels, 'rs2', rs2))
- stats.covpt.append('rs2 : ' + rs2)
+ covpt.append('rs2 : ' + rs2)
value['rs2'][rs2] += 1
if 'rd' in value and is_rd_valid and rd in value['rd']:
if value['rd'][rd] == 0:
- stats.ucovpt.append('rd : ' + rd)
+ ucovpt.append('rd : ' + rd)
if no_count:
hit_covpts.append((cov_labels, 'rd', rd))
- stats.covpt.append('rd : ' + rd)
+ covpt.append('rd : ' + rd)
value['rd'][rd] += 1
if 'rs3' in value and rs3 in value['rs3']:
if value['rs3'][rs3] == 0:
- stats.ucovpt.append('rs3 : ' + rs3)
+ ucovpt.append('rs3 : ' + rs3)
if no_count:
hit_covpts.append((cov_labels, 'rs3', rs3))
- stats.covpt.append('rs3 : ' + rs3)
+ covpt.append('rs3 : ' + rs3)
value['rs3'][rs3] += 1
if 'op_comb' in value and len(value['op_comb']) != 0 :
for coverpoints in value['op_comb']:
if eval(coverpoints, globals(), instr_vars):
if cgf[cov_labels]['op_comb'][coverpoints] == 0:
- stats.ucovpt.append(str(coverpoints))
+ ucovpt.append(str(coverpoints))
if no_count:
hit_covpts.append((cov_labels, 'op_comb', coverpoints))
- stats.covpt.append(str(coverpoints))
+ covpt.append(str(coverpoints))
cgf[cov_labels]['op_comb'][coverpoints] += 1
if 'val_comb' in value and len(value['val_comb']) != 0:
@@ -717,94 +975,263 @@ def compute_per_line(queue, event, cgf_queue, stats_queue, cgf, xlen, flen, addr
for coverpoints in value['val_comb']:
if eval(coverpoints, globals(), instr_vars):
if cgf[cov_labels]['val_comb'][coverpoints] == 0:
- stats.ucovpt.append(str(coverpoints))
+ ucovpt.append(str(coverpoints))
if no_count:
hit_covpts.append((cov_labels, 'val_comb', coverpoints))
- stats.covpt.append(str(coverpoints))
+ covpt.append(str(coverpoints))
cgf[cov_labels]['val_comb'][coverpoints] += 1
if 'abstract_comb' in value \
and len(value['abstract_comb']) != 0 :
for coverpoints in value['abstract_comb']:
if eval(coverpoints, globals(), instr_vars):
if cgf[cov_labels]['abstract_comb'][coverpoints] == 0:
- stats.ucovpt.append(str(coverpoints))
+ ucovpt.append(str(coverpoints))
if no_count:
- hit_covpts.append((cov_labels, 'abstract_comb', coverpoints))
- stats.covpt.append(str(coverpoints))
+ hit_covpts.append((cov_labels, 'abstract_comb', coverpoints))
+ covpt.append(str(coverpoints))
cgf[cov_labels]['abstract_comb'][coverpoints] += 1
if 'csr_comb' in value and len(value['csr_comb']) != 0:
- for coverpoints in value['csr_comb']:
- if eval(coverpoints, {"__builtins__":None}, instr_vars):
- if cgf[cov_labels]['csr_comb'][coverpoints] == 0:
- stats.ucovpt.append(str(coverpoints))
- if no_count:
- hit_covpts.append((cov_labels, 'csr_comb', coverpoints))
- stats.covpt.append(str(coverpoints))
- cgf[cov_labels]['csr_comb'][coverpoints] += 1
+ if instr.csr_commit:
+ is_csr_commit = False
+ for commit in instr.csr_commit:
+ if commit[0] == "CSR":
+ is_csr_commit = True
+ break
+ if is_csr_commit:
+ for coverpoints in value['csr_comb']:
+ if eval(
+ coverpoints,
+ {
+ "__builtins__":None,
+ "old": old_fn_csr_comb_covpt,
+ "write": write_fn_csr_comb_covpt
+ },
+ instr_vars
+ ):
+ if cgf[cov_labels]['csr_comb'][coverpoints] == 0:
+ ucovpt.append(str(coverpoints))
+ if no_count:
+ hit_covpts.append((cov_labels, 'csr_comb', coverpoints))
+ covpt.append(str(coverpoints))
+ csr_covpt.append(str(coverpoints))
+ cgf[cov_labels]['csr_comb'][coverpoints] += 1
elif 'opcode' not in value:
if 'csr_comb' in value and len(value['csr_comb']) != 0:
- for coverpoints in value['csr_comb']:
- if eval(coverpoints, {"__builtins__":None}, instr_vars):
- if cgf[cov_labels]['csr_comb'][coverpoints] == 0:
- stats.ucovpt.append(str(coverpoints))
- if no_count:
+ if instr.csr_commit:
+ is_csr_commit = False
+ for commit in instr.csr_commit:
+ if commit[0] == "CSR":
+ is_csr_commit = True
+ break
+ if is_csr_commit:
+ for coverpoints in value['csr_comb']:
+ if eval(
+ coverpoints,
+ {
+ "__builtins__":None,
+ "old": old_fn_csr_comb_covpt,
+ "write": write_fn_csr_comb_covpt
+ },
+ instr_vars
+ ):
+ if cgf[cov_labels]['csr_comb'][coverpoints] == 0:
+ ucovpt.append(str(coverpoints))
+ if no_count:
hit_covpts.append((cov_labels, 'csr_comb', coverpoints))
- stats.covpt.append(str(coverpoints))
- cgf[cov_labels]['csr_comb'][coverpoints] += 1
- if stats.covpt:
- if mnemonic is not None :
+ covpt.append(str(coverpoints))
+ csr_covpt.append(str(coverpoints))
+ cgf[cov_labels]['csr_comb'][coverpoints] += 1
+
+ hit_any_covpt = len(covpt) > 0
+ hit_uniq_covpt = len(ucovpt) > 0
+ hit_csr_covpt = len(csr_covpt) > 0
+
+ if hit_csr_covpt:
+ stats.cov_pt_sig += covpt
+
+ csr_regs_involved_in_covpt = set()
+ for covpt in csr_covpt:
+ for reg in csr_reg_num_to_str.values():
+ if reg in covpt:
+ csr_regs_involved_in_covpt.add(reg)
+
+ num_exp = 0
+ for reg in csr_regs_involved_in_covpt:
+ if reg in tracked_regs_immutable or reg in tracked_regs_mutable:
+ stat_meta = instr_stat_meta_at_addr[instr_addr_of_tracked_reg[reg]]
+ stat_meta[3] -= 1 # decrease num remaining
+ tracked_regs_immutable.discard(reg)
+ tracked_regs_mutable.discard(reg)
+ del instr_addr_of_tracked_reg[reg]
+
+ num_exp += 1
+ instr_addr_of_tracked_reg[reg] = instr.instr_addr
+ tracked_regs_immutable.add(reg)
+
+ instr_stat_meta_at_addr[instr.instr_addr] = [hit_uniq_covpt, num_exp, 0, num_exp, csr_covpt, [], [], []]
+ elif hit_any_covpt:
+ stats.cov_pt_sig += covpt
+
+ if len(tracked_instrs) > 0:
+ for list_instrs, triggering_instr_addr in tracked_instrs:
+ stat_meta = instr_stat_meta_at_addr[triggering_instr_addr]
+ stat_meta[3] -= 1 # decrease num remaining
+ tracked_instrs = []
+
+ num_exp = 0 # expected number of signature updates for this instruction
+
+ regs_to_track_immutable, regs_to_track_mutable, instrs_to_track = instr.get_elements_to_track(xlen)
+
+ # update tracked elements
+ for reg in regs_to_track_immutable:
+ if reg in tracked_regs_immutable or reg in tracked_regs_mutable:
+ stat_meta = instr_stat_meta_at_addr[instr_addr_of_tracked_reg[reg]]
+ stat_meta[3] -= 1 # decrease num remaining
+ tracked_regs_immutable.discard(reg)
+ tracked_regs_mutable.discard(reg)
+ del instr_addr_of_tracked_reg[reg]
+
+ num_exp += 1
+ instr_addr_of_tracked_reg[reg] = instr.instr_addr
+ tracked_regs_immutable.add(reg)
+
+ for reg in regs_to_track_mutable:
+ if reg in tracked_regs_immutable or reg in tracked_regs_mutable:
+ stat_meta = instr_stat_meta_at_addr[instr_addr_of_tracked_reg[reg]]
+ stat_meta[3] -= 1
+ tracked_regs_immutable.discard(reg)
+ tracked_regs_mutable.discard(reg)
+ del instr_addr_of_tracked_reg[reg]
+
+ num_exp += 1
+ instr_addr_of_tracked_reg[reg] = instr.instr_addr
+ tracked_regs_mutable.add(reg)
+
+ for instrs in instrs_to_track:
+ num_exp += 1
+ tracked_instrs.append((instrs, instr.instr_addr))
+
+ instr_stat_meta_at_addr[instr.instr_addr] = [hit_uniq_covpt, num_exp, 0, num_exp, ucovpt if hit_uniq_covpt else covpt, [], [], []]
+ else:
+ changed_regs = instr.get_changed_regs(arch_state, csr_regfile)
+
+ if instr.instr_name in instrs_csr_mov and csr_reg_num_to_str[instr.csr] in tracked_regs_immutable: # handle csr movs separately
+ csr_reg = csr_reg_num_to_str[instr.csr]
+
+ if not is_rd_valid:
+ if csr_reg in changed_regs: # csr register overwritten without propagating into signature
+ stat_meta = instr_stat_meta_at_addr[instr_addr_of_tracked_reg[csr_reg]]
+ stat_meta[3] -= 1
+ tracked_regs_immutable.remove(csr_reg)
+ del instr_addr_of_tracked_reg[csr_reg]
+ else:
+ if rd in tracked_regs_immutable or rd in tracked_regs_mutable:
+ stat_meta = instr_stat_meta_at_addr[instr_addr_of_tracked_reg[rd]]
+ stat_meta[3] -= 1
+ tracked_regs_immutable.discard(rd)
+ tracked_regs_mutable.discard(rd)
+ del instr_addr_of_tracked_reg[rd]
+
+ tracked_regs_immutable.remove(csr_reg)
+ tracked_regs_immutable.add(rd)
+ instr_addr_of_tracked_reg[rd] = instr_addr_of_tracked_reg[csr_reg]
+ del instr_addr_of_tracked_reg[csr_reg]
+ else: # check for changes in tracked registers
+ for reg in changed_regs:
+ if reg in tracked_regs_immutable:
+ stat_meta = instr_stat_meta_at_addr[instr_addr_of_tracked_reg[reg]]
+ stat_meta[3] -= 1
+ tracked_regs_immutable.remove(reg)
+ del instr_addr_of_tracked_reg[reg]
+
+ # update code_seq
+ if instr_stat_meta_at_addr:
+ if mnemonic is not None:
stats.code_seq.append('[' + str(hex(instr.instr_addr)) + ']:' + mnemonic)
else:
stats.code_seq.append('[' + str(hex(instr.instr_addr)) + ']:' + instr.instr_name)
- if stats.ucovpt:
- if mnemonic is not None :
- stats.ucode_seq.append('[' + str(hex(instr.instr_addr)) + ']:' + mnemonic)
+
+ for key_instr_addr, stat_meta in instr_stat_meta_at_addr.items():
+ if mnemonic is not None:
+ stat_meta[5].append('[' + str(hex(instr.instr_addr)) + ']:' + mnemonic)
else:
- stats.ucode_seq.append('[' + str(hex(instr.instr_addr)) + ']:' + instr.instr_name)
+ stat_meta[5].append('[' + str(hex(instr.instr_addr)) + ']:' + instr.instr_name)
- if instr.instr_name in ['sh','sb','sw','sd','c.sw','c.sd','c.swsp','c.sdsp'] and sig_addrs:
+ # handle signature update
+ if instr.is_sig_update() and sig_addrs:
store_address = instr_vars['rs1_val'] + instr_vars['imm_val']
store_val = '0x'+arch_state.x_rf[instr.rs2[0]]
for start, end in sig_addrs:
if store_address >= start and store_address <= end:
logger.debug('Signature update : ' + str(hex(store_address)))
- stats.stat5.append((store_address, store_val, stats.ucovpt, stats.code_seq))
- stats.cov_pt_sig += stats.covpt
- if result_count <= 0:
- if stats.ucovpt:
- stats.stat1.append((store_address, store_val, stats.ucovpt, stats.ucode_seq))
- stats.last_meta = [store_address, store_val, stats.ucovpt, stats.ucode_seq]
- stats.ucovpt = []
- elif stats.covpt:
- _log = 'Op without unique coverpoint updates Signature\n'
- _log += ' -- Code Sequence:\n'
- for op in stats.code_seq:
- _log += ' ' + op + '\n'
- _log += ' -- Signature Address: {0} Data: {1}\n'.format(
+ stats.stat5.append((store_address, store_val, ucovpt, stats.code_seq))
+
+ if rs2 in tracked_regs_immutable or rs2 in tracked_regs_mutable:
+ stat_meta = instr_stat_meta_at_addr[instr_addr_of_tracked_reg[rs2]]
+ stat_meta[2] += 1 # increase num observed
+ stat_meta[3] -= 1 # decrease num remaining
+ stat_meta[6].append(store_address) # add to store_addresses
+ stat_meta[7].append(store_val) # add to store_vals
+ stats.last_meta = [store_address, store_val, stat_meta[4], stat_meta[5]]
+ tracked_regs_immutable.discard(rs2)
+ tracked_regs_mutable.discard(rs2)
+ del instr_addr_of_tracked_reg[rs2]
+ elif tracked_instrs and instr.instr_name in tracked_instrs[0][0]:
+ stat_meta = instr_stat_meta_at_addr[tracked_instrs[0][1]]
+ stat_meta[2] += 1
+ stat_meta[3] -= 1
+ stat_meta[6].append(store_address)
+ stat_meta[7].append(store_val)
+ stats.last_meta = [store_address, store_val, stat_meta[4], stat_meta[5]]
+ del tracked_instrs[0]
+ else:
+ if len(stats.last_meta):
+ _log = 'Last Coverpoint : ' + str(stats.last_meta[2]) + '\n'
+ _log += 'Last Code Sequence : \n\t-' + '\n\t-'.join(stats.last_meta[3]) + '\n'
+ _log += 'Current Store : [{0}] : {1} -- Store: [{2}]:{3}\n'.format(\
+ str(hex(instr.instr_addr)), mnemonic,
+ str(hex(store_address)),
+ store_val)
+ logger.error(_log)
+ stats.stat4.append(_log + '\n\n')
+
+ stats.code_seq = []
+
+ # update stats
+ for key_instr_addr in list(instr_stat_meta_at_addr.keys()):
+ stat_meta = instr_stat_meta_at_addr[key_instr_addr]
+ if stat_meta[3] == 0: # num_remaining == 0
+ if stat_meta[0]: # is_ucovpt
+ if stat_meta[2] == stat_meta[1]: # num_observed == num_expected
+ # update STAT1 with (store_addresses, store_vals, covpt, code_seq)
+ stats.stat1.append((stat_meta[6], stat_meta[7], stat_meta[4], stat_meta[5]))
+ elif stat_meta[2] < stat_meta[1]: # num_observed < num_expected
+ # update STAT3 with code sequence
+ stats.stat3.append('\n'.join(stat_meta[5]))
+ else: # not is_ucovpt
+ if stat_meta[2] > 0: # num_observed > 0
+ # update STAT2
+ _log = 'Op without unique coverpoint updates Signature\n'
+
+ _log += ' -- Code Sequence:\n'
+ for op in stat_meta[5]:
+ _log += ' ' + op + '\n'
+
+ _log += ' -- Signature Addresses:\n'
+ for store_address, store_val in zip(stat_meta[6], stat_meta[7]):
+ _log += ' Address: {0} Data: {1}\n'.format(
str(hex(store_address)), store_val)
- _log += ' -- Redundant Coverpoints hit by the op\n'
- for c in stats.covpt:
- _log += ' - ' + str(c) + '\n'
- logger.warn(_log)
- stats.stat2.append(_log + '\n\n')
- stats.last_meta = [store_address, store_val, stats.covpt, stats.code_seq]
- else:
- if len(stats.last_meta):
- _log = 'Last Coverpoint : ' + str(stats.last_meta[2]) + '\n'
- _log += 'Last Code Sequence : \n\t-' + '\n\t-'.join(stats.last_meta[3]) + '\n'
- _log +='Current Store : [{0}] : {1} -- Store: [{2}]:{3}\n'.format(\
- str(hex(instr.instr_addr)), mnemonic,
- str(hex(store_address)),
- store_val)
- logger.error(_log)
- stats.stat4.append(_log + '\n\n')
- stats.covpt = []
- stats.code_seq = []
- stats.ucode_seq = []
- instr.update_arch_state(arch_state, csr_regfile)
+ _log += ' -- Redundant Coverpoints hit by the op\n'
+ for c in stat_meta[4]:
+ _log += ' - ' + str(c) + '\n'
+
+ logger.warn(_log)
+ stats.stat2.append(_log + '\n\n')
+
+ del instr_stat_meta_at_addr[key_instr_addr]
# Remove hit coverpoints if no_count is set
if no_count:
@@ -814,6 +1241,39 @@ def compute_per_line(queue, event, cgf_queue, stats_queue, cgf, xlen, flen, addr
else:
hit_covpts = []
else:
+ # update stats one last time for the remaining elements
+ for key_instr_addr in list(instr_stat_meta_at_addr.keys()):
+ stat_meta = instr_stat_meta_at_addr[key_instr_addr]
+ if stat_meta[0]: # is_ucovpt
+ if stat_meta[2] == stat_meta[1]: # num_observed == num_expected
+ # update STAT1 with (store_addresses, store_vals, covpt, code_seq)
+ stats.stat1.append((stat_meta[6], stat_meta[7], stat_meta[4], stat_meta[5]))
+ elif stat_meta[2] < stat_meta[1]: # num_observed < num_expected
+ # update STAT3 with code sequence
+ stats.stat3.append('\n'.join(stat_meta[5]))
+ else: # not is_ucovpt
+ if stat_meta[2] > 0: # num_observed > 0
+ # update STAT2
+ _log = 'Op without unique coverpoint updates Signature\n'
+
+ _log += ' -- Code Sequence:\n'
+ for op in stat_meta[5]:
+ _log += ' ' + op + '\n'
+
+ _log += ' -- Signature Addresses:\n'
+ for store_address, store_val in zip(stat_meta[6], stat_meta[7]):
+ _log += ' Address: {0} Data: {1}\n'.format(
+ str(hex(store_address)), store_val)
+
+ _log += ' -- Redundant Coverpoints hit by the op\n'
+ for c in stat_meta[4]:
+ _log += ' - ' + str(c) + '\n'
+
+ logger.warn(_log)
+ stats.stat2.append(_log + '\n\n')
+
+ del instr_stat_meta_at_addr[key_instr_addr]
+
# if no_count option is set, return rcgf
# else return cgf
if not no_count:
@@ -834,7 +1294,6 @@ def compute(trace_file, test_name, cgf, parser_name, decoder_name, detailed, xle
global csr_regfile
global stats
global cross_cover_queue
- global result_count
temp = cgf.copy()
if cov_labels:
@@ -858,8 +1317,6 @@ def compute(trace_file, test_name, cgf, parser_name, decoder_name, detailed, xle
arch_state = archState(xlen,flen)
csr_regfile = csr_registers(xlen)
stats = statistics(xlen, flen)
- cross_cover_queue = []
- result_count = 0
## Get coverpoints from cgf
obj_dict = {} ## (label,coverpoint): object
@@ -868,7 +1325,7 @@ def compute(trace_file, test_name, cgf, parser_name, decoder_name, detailed, xle
if 'cross_comb' in value and len(value['cross_comb'])!=0:
for coverpt in value['cross_comb'].keys():
if(isinstance(coverpt,str)):
- new_obj = cross(cov_labels,coverpt)
+ new_obj = cross(cov_labels,coverpt,xlen,flen,addr_pairs,sig_addrs,window_size)
obj_dict[(cov_labels,coverpt)] = new_obj
@@ -931,7 +1388,6 @@ def compute(trace_file, test_name, cgf, parser_name, decoder_name, detailed, xle
stats,
arch_state,
csr_regfile,
- result_count,
no_count
)
)
@@ -953,11 +1409,8 @@ def compute(trace_file, test_name, cgf, parser_name, decoder_name, detailed, xle
each.put_nowait(instrObj)
logger.debug(instrObj)
- cross_cover_queue.append(instrObj)
- if(len(cross_cover_queue)>=window_size):
- for (label,coverpt) in obj_dict.keys():
- obj_dict[(label,coverpt)].process(cross_cover_queue, window_size,addr_pairs)
- cross_cover_queue.pop(0)
+ for (label,coverpt) in obj_dict.keys():
+ obj_dict[(label,coverpt)].process(instrObj)
@@ -1000,12 +1453,11 @@ def compute(trace_file, test_name, cgf, parser_name, decoder_name, detailed, xle
## Check for cross coverage for end instructions
## All metric is stored in objects of obj_dict
- while(len(cross_cover_queue)>1):
- for label,coverpt in obj_dict.keys():
- obj_dict[(label,coverpt)].process(cross_cover_queue, window_size,addr_pairs)
- cross_cover_queue.pop(0)
+ for label,coverpt in obj_dict.keys():
+ obj_dict[(label,coverpt)].finish_up()
for label,coverpt in obj_dict.keys():
+ stats += obj_dict[(label,coverpt)].get_stats()
metric = obj_dict[(label,coverpt)].get_metric()
if(metric!=0):
rcgf[label]['cross_comb'][coverpt] = metric
@@ -1046,9 +1498,10 @@ def compute(trace_file, test_name, cgf, parser_name, decoder_name, detailed, xle
cov_set = set()
count = 1
- stat5_log = []
- for addr,val,cover,code in stats.stat1:
- sig = ('[{0}]<br>{1}'.format(str(hex(addr)), str(val)))
+ for addrs,vals,cover,code in stats.stat1:
+ sig = ''
+ for addr, val in zip(addrs, vals):
+ sig += '[{0}]<br>{1}'.format(str(hex(addr)), str(val)) + '<br>\n'
cov = ''
for c in cover:
cov += '- ' + str(c) + '<br>\n'
diff --git a/riscv_isac/main.py b/riscv_isac/main.py
index 648f661..2d66319 100644
--- a/riscv_isac/main.py
+++ b/riscv_isac/main.py
@@ -126,10 +126,14 @@ def cli(verbose):
default = 1,
help = 'Set number of processes to calculate coverage'
)
+@click.option('--log-redundant',
+ is_flag = True,
+ help = "Log redundant coverpoints during normalization"
+)
def coverage(elf,trace_file, window_size, cgf_file, detailed,parser_name, decoder_name, parser_path, decoder_path,output_file, test_label,
- sig_label, dump,cov_label, xlen, flen, no_count, procs):
- isac(output_file,elf,trace_file, window_size, expand_cgf(cgf_file,int(xlen),int(flen)), parser_name, decoder_name, parser_path, decoder_path, detailed, test_label,
+ sig_label, dump,cov_label, xlen, flen, no_count, procs, log_redundant):
+ isac(output_file,elf,trace_file, window_size, expand_cgf(cgf_file,int(xlen),int(flen),log_redundant), parser_name, decoder_name, parser_path, decoder_path, detailed, test_label,
sig_label, dump, cov_label, int(xlen), int(flen), no_count, procs)
@cli.command(help = "Merge given coverage files.")
@@ -164,9 +168,13 @@ def coverage(elf,trace_file, window_size, cgf_file, detailed,parser_name, decode
help="FLEN value for the ISA."
)
@click.option('--xlen','-x',type=click.Choice(['32','64']),default='32',help="XLEN value for the ISA.")
-def merge(files,detailed,p,cgf_file,output_file,flen,xlen):
+@click.option('--log-redundant',
+ is_flag = True,
+ help = "Log redundant coverpoints during normalization"
+)
+def merge(files,detailed,p,cgf_file,output_file,flen,xlen,log_redundant):
rpt = cov.merge_coverage(
- files,expand_cgf(cgf_file,int(xlen),int(flen)),detailed,p)
+ files,expand_cgf(cgf_file,int(xlen),int(flen),log_redundant),detailed,p)
if output_file is None:
logger.info('Coverage Report:')
logger.info('\n\n' + rpt)
@@ -192,10 +200,14 @@ def merge(files,detailed,p,cgf_file,output_file,flen,xlen):
)
@click.option('--xlen','-x',type=click.Choice(['32','64']),default='32',help="XLEN value for the ISA.")
@click.option('--flen','-f',type=click.Choice(['32','64']),default='32',help="FLEN value for the ISA.")
-def normalize(cgf_file,output_file,xlen,flen):
+@click.option('--log-redundant',
+ is_flag = True,
+ help = "Log redundant coverpoints during normalization"
+)
+def normalize(cgf_file,output_file,xlen,flen,log_redundant):
logger.info("Writing normalized CGF to "+str(output_file))
with open(output_file,"w") as outfile:
- utils.dump_yaml(expand_cgf(cgf_file,int(xlen),int(flen)),outfile)
+ utils.dump_yaml(expand_cgf(cgf_file,int(xlen),int(flen),log_redundant),outfile)
@cli.command(help = 'Setup the plugin which uses the information from RISCV Opcodes repository to decode.')
@click.option('--url',
diff --git a/riscv_isac/plugins/c_sail.py b/riscv_isac/plugins/c_sail.py
index a53a579..6a4748c 100644
--- a/riscv_isac/plugins/c_sail.py
+++ b/riscv_isac/plugins/c_sail.py
@@ -17,7 +17,7 @@ def setup(self, trace, arch):
instr_pattern_c_sail= re.compile(
'\[\d*\]\s\[(.*?)\]:\s(?P<addr>[0-9xABCDEF]+)\s\((?P<instr>[0-9xABCDEF]+)\)\s*(?P<mnemonic>.*)')
instr_pattern_c_sail_regt_reg_val = re.compile('(?P<regt>[xf])(?P<reg>[\d]+)\s<-\s(?P<val>[0-9xABCDEF]+)')
- instr_pattern_c_sail_csr_reg_val = re.compile('(?P<regt>CSR|clint::tick)\s(?P<reg>[a-z0-9]+)\s<-\s(?P<val>[0-9xABCDEF]+)')
+ instr_pattern_c_sail_csr_reg_val = re.compile('(?P<regt>CSR|clint::tick)\s(?P<reg>[a-z0-9]+)\s<-\s(?P<val>[0-9xABCDEF]+)(?:\s\(input:\s(?P<input_val>[0-9xABCDEF]+)\))?')
def extractInstruction(self, line):
instr_pattern = self.instr_pattern_c_sail
re_search = instr_pattern.search(line)
diff --git a/setup.cfg b/setup.cfg
index 1cadfd2..74ef4df 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 0.16.1
+current_version = 0.17.0
commit = True
tag = True
diff --git a/setup.py b/setup.py
index b2702af..84d764d 100644
--- a/setup.py
+++ b/setup.py
@@ -26,7 +26,7 @@ def read_requires():
setup(
name='riscv_isac',
- version='0.16.1',
+ version='0.17.0',
description="RISC-V ISAC",
long_description=readme + '\n\n',
classifiers=[