def armg_calculate_flag_n(state, cc_op, cc_dep1, cc_dep2, cc_dep3):
    concrete_op = op_concretize(cc_op)
    flag = None

    if concrete_op == ARMG_CC_OP_COPY:
        flag = claripy.LShR(cc_dep1, ARMG_CC_SHIFT_N) & 1
    elif concrete_op == ARMG_CC_OP_ADD:
        res = cc_dep1 + cc_dep2
        flag = claripy.LShR(res, 31)
    elif concrete_op == ARMG_CC_OP_SUB:
        res = cc_dep1 - cc_dep2
        flag = claripy.LShR(res, 31)
    elif concrete_op == ARMG_CC_OP_ADC:
        res = cc_dep1 + cc_dep2 + cc_dep3
        flag = claripy.LShR(res, 31)
    elif concrete_op == ARMG_CC_OP_SBB:
        res = cc_dep1 - cc_dep2 - (cc_dep3 ^ 1)
        flag = claripy.LShR(res, 31)
    elif concrete_op == ARMG_CC_OP_LOGIC:
        flag = claripy.LShR(cc_dep1, 31)
    elif concrete_op == ARMG_CC_OP_MUL:
        flag = claripy.LShR(cc_dep1, 31)
    elif concrete_op == ARMG_CC_OP_MULL:
        flag = claripy.LShR(cc_dep2, 31)

    if flag is not None: return flag

    l.error("Unknown cc_op %s (armg_calculate_flag_n)", cc_op)
    raise SimCCallError("Unknown cc_op %s" % cc_op)
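# Illustrative sketch (not part of the original module): the ARM N flag is just
# bit 31 of the 32-bit result, so for a concrete subtraction that wraps below
# zero the helper above reduces to a single logical shift right. Assuming the
# standard claripy API:
#
#     import claripy
#     res = claripy.BVV(5, 32) - claripy.BVV(7, 32)   # 0xFFFFFFFE, i.e. -2
#     n = claripy.LShR(res, 31)                       # N flag -> 1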
def arm64g_calculate_flag_v(state, cc_op, cc_dep1, cc_dep2, cc_dep3):
    concrete_op = op_concretize(cc_op)
    flag = None

    cc_dep1, cc_dep2, cc_dep3 = arm64g_32bit_truncate_operands(concrete_op, cc_dep1, cc_dep2, cc_dep3)

    if concrete_op == ARM64G_CC_OP_COPY:
        flag = claripy.LShR(cc_dep1, ARM64G_CC_SHIFT_V) & 1
    elif concrete_op == ARM64G_CC_OP_ADD32:
        cc_dep1 = cc_dep1[31:0]
        cc_dep2 = cc_dep2[31:0]
        res = cc_dep1 + cc_dep2
        v = ((res ^ cc_dep1) & (res ^ cc_dep2))
        flag = claripy.LShR(v, 31).zero_extend(32)
    elif concrete_op == ARM64G_CC_OP_ADD64:
        res = cc_dep1 + cc_dep2
        v = ((res ^ cc_dep1) & (res ^ cc_dep2))
        flag = claripy.LShR(v, 63)
    elif concrete_op == ARM64G_CC_OP_SUB32:
        cc_dep1 = cc_dep1[31:0]
        cc_dep2 = cc_dep2[31:0]
        res = cc_dep1 - cc_dep2
        v = ((cc_dep1 ^ cc_dep2) & (cc_dep1 ^ res))
        flag = claripy.LShR(v, 31).zero_extend(32)
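# Illustrative sketch (not part of the original module): for the 32-bit ADD case
# of arm64g_calculate_flag_v, the signed-overflow (V) bit is bit 31 of
# (res ^ a) & (res ^ b), computed on the truncated operands and then widened back
# to the 64-bit flag width. Assuming the standard claripy API:
#
#     import claripy
#     a, b = claripy.BVV(0x7FFFFFFF, 32), claripy.BVV(1, 32)
#     res = a + b                                      # 0x80000000, sign flips
#     v = claripy.LShR((res ^ a) & (res ^ b), 31)      # -> 1
#     v64 = v.zero_extend(32)                          # widened to 64 bits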
def arm64g_calculate_flag_n(state, cc_op, cc_dep1, cc_dep2, cc_dep3):
    concrete_op = op_concretize(cc_op)
    flag = None

    cc_dep1, cc_dep2, cc_dep3 = arm64g_32bit_truncate_operands(concrete_op, cc_dep1, cc_dep2, cc_dep3)

    if concrete_op == ARM64G_CC_OP_COPY:
        flag = claripy.LShR(cc_dep1, ARM64G_CC_SHIFT_N) & 1
    elif concrete_op == ARM64G_CC_OP_ADD32:
        res = cc_dep1 + cc_dep2
        flag = claripy.LShR(res, 31)
    elif concrete_op == ARM64G_CC_OP_ADD64:
        res = cc_dep1 + cc_dep2
        flag = claripy.LShR(res, 63)
    elif concrete_op == ARM64G_CC_OP_SUB32:
        res = cc_dep1 - cc_dep2
        flag = claripy.LShR(res, 31)
    elif concrete_op == ARM64G_CC_OP_SUB64:
        res = cc_dep1 - cc_dep2
        flag = claripy.LShR(res, 63)
    elif concrete_op == ARM64G_CC_OP_ADC32:
        res = cc_dep1 + cc_dep2 + cc_dep3
        flag = claripy.LShR(res, 31)
    elif concrete_op == ARM64G_CC_OP_ADC64:
        res = cc_dep1 + cc_dep2 + cc_dep3
        flag = claripy.LShR(res, 63)
    elif concrete_op == ARM64G_CC_OP_SBC32:
        res = cc_dep1 - cc_dep2 - (cc_dep3 ^ 1)
        flag = claripy.LShR(res, 31)
    elif concrete_op == ARM64G_CC_OP_SBC64:
        res = cc_dep1 - cc_dep2 - (cc_dep3 ^ 1)
        flag = claripy.LShR(res, 63)
    elif concrete_op == ARM64G_CC_OP_LOGIC32:
        flag = claripy.LShR(cc_dep1, 31)
    elif concrete_op == ARM64G_CC_OP_LOGIC64:
        flag = claripy.LShR(cc_dep1, 63)

    if flag is not None: return flag

    l.error("Unknown cc_op %s (arm64g_calculate_flag_n)", cc_op)
    raise SimCCallError("Unknown cc_op %s" % cc_op)
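# Illustrative sketch (not part of the original module): the SBC cases above fold
# the inverted carry into the subtraction, so with the carry set (cc_dep3 = 1)
# the result is a - b and with it clear (cc_dep3 = 0) it is a - b - 1; the N flag
# is then just the top bit of that result. Assuming the standard claripy API:
#
#     import claripy
#     a, b, c = claripy.BVV(3, 64), claripy.BVV(5, 64), claripy.BVV(1, 64)
#     res = a - b - (c ^ 1)                            # carry set: 3 - 5 = -2
#     n = claripy.LShR(res, 63)                        # -> 1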
def arm64g_calculate_flag_c(state, cc_op, cc_dep1, cc_dep2, cc_dep3):
    concrete_op = op_concretize(cc_op)
    flag = None

    cc_dep1, cc_dep2, cc_dep3 = arm64g_32bit_truncate_operands(concrete_op, cc_dep1, cc_dep2, cc_dep3)

    if concrete_op == ARM64G_CC_OP_COPY:
        flag = claripy.LShR(cc_dep1, ARM64G_CC_SHIFT_C) & 1
    elif concrete_op in (ARM64G_CC_OP_ADD32, ARM64G_CC_OP_ADD64):
        res = cc_dep1 + cc_dep2
        flag = boolean_extend(claripy.ULT, res, cc_dep1, 64)
    elif concrete_op in (ARM64G_CC_OP_SUB32, ARM64G_CC_OP_SUB64):
        flag = boolean_extend(claripy.UGE, cc_dep1, cc_dep2, 64)
    elif concrete_op in (ARM64G_CC_OP_ADC32, ARM64G_CC_OP_ADC64):
        res = cc_dep1 + cc_dep2 + cc_dep3
        flag = claripy.If(cc_dep3 != 0, boolean_extend(claripy.ULE, res, cc_dep1, 64),
                          boolean_extend(claripy.ULT, res, cc_dep1, 64))
    elif concrete_op in (ARM64G_CC_OP_SBC32, ARM64G_CC_OP_SBC64):
        flag = claripy.If(cc_dep3 != 0, boolean_extend(claripy.UGE, cc_dep1, cc_dep2, 64),
                          boolean_extend(claripy.UGT, cc_dep1, cc_dep2, 64))
    elif concrete_op in (ARM64G_CC_OP_LOGIC32, ARM64G_CC_OP_LOGIC64):
        flag = claripy.BVV(0, 64)  # C after logic is zero on arm64

    if flag is not None: return flag

    l.error("Unknown cc_op %s (arm64g_calculate_flag_c)", cc_op)
    raise SimCCallError("Unknown cc_op %s" % cc_op)
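# Illustrative sketch (not part of the original module): for plain additions the
# carry-out is detected by comparing the wrapped result against one operand, so
# C is set exactly when the unsigned addition overflows. Assuming the standard
# claripy API:
#
#     import claripy
#     a = claripy.BVV(0xFFFFFFFFFFFFFFFF, 64)
#     res = a + claripy.BVV(1, 64)                     # wraps around to 0
#     carried = claripy.ULT(res, a)                    # True -> C flag = 1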
def armg_calculate_flag_v(state, cc_op, cc_dep1, cc_dep2, cc_dep3):
    concrete_op = op_concretize(cc_op)
    flag = None

    if concrete_op == ARMG_CC_OP_COPY:
        flag = claripy.LShR(cc_dep1, ARMG_CC_SHIFT_V) & 1
    elif concrete_op == ARMG_CC_OP_ADD:
        res = cc_dep1 + cc_dep2
        v = ((res ^ cc_dep1) & (res ^ cc_dep2))
        flag = claripy.LShR(v, 31)
    elif concrete_op == ARMG_CC_OP_SUB:
        res = cc_dep1 - cc_dep2
        v = ((cc_dep1 ^ cc_dep2) & (cc_dep1 ^ res))
        flag = claripy.LShR(v, 31)
    elif concrete_op == ARMG_CC_OP_ADC:
        res = cc_dep1 + cc_dep2 + cc_dep3
        v = ((res ^ cc_dep1) & (res ^ cc_dep2))
        flag = claripy.LShR(v, 31)
    elif concrete_op == ARMG_CC_OP_SBB:
        res = cc_dep1 - cc_dep2 - (cc_dep3 ^ 1)
        v = ((cc_dep1 ^ cc_dep2) & (cc_dep1 ^ res))
        flag = claripy.LShR(v, 31)
    elif concrete_op == ARMG_CC_OP_LOGIC:
        flag = cc_dep3
    elif concrete_op == ARMG_CC_OP_MUL:
        flag = cc_dep3 & 1
    elif concrete_op == ARMG_CC_OP_MULL:
        flag = cc_dep3 & 1

    if flag is not None: return flag

    l.error("Unknown cc_op %s (armg_calculate_flag_v)", cc_op)
    raise SimCCallError("Unknown cc_op %s" % cc_op)
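# Illustrative sketch (not part of the original module): the SUB case above flags
# signed overflow when the operands have different signs and the result's sign
# differs from the first operand, e.g. INT32_MIN - 1. Assuming the standard
# claripy API:
#
#     import claripy
#     a, b = claripy.BVV(0x80000000, 32), claripy.BVV(1, 32)
#     res = a - b                                      # 0x7FFFFFFF, sign flips
#     v = claripy.LShR((a ^ b) & (a ^ res), 31)        # -> 1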
    if v in [data[platform]['CondTypes']['CondO'], data[platform]['CondTypes']['CondNO']]:
        l.debug("CondO")
        of = claripy.LShR(rdata, data[platform]['CondBitOffsets']['G_CC_SHIFT_O'])
        return 1 & (inv ^ of)
    if v in [data[platform]['CondTypes']['CondZ'], data[platform]['CondTypes']['CondNZ']]:
        l.debug("CondZ")
        zf = claripy.LShR(rdata, data[platform]['CondBitOffsets']['G_CC_SHIFT_Z'])
        return 1 & (inv ^ zf)
    if v in [data[platform]['CondTypes']['CondB'], data[platform]['CondTypes']['CondNB']]:
        l.debug("CondB")
        cf = claripy.LShR(rdata, data[platform]['CondBitOffsets']['G_CC_SHIFT_C'])
        return 1 & (inv ^ cf)
    if v in [data[platform]['CondTypes']['CondBE'], data[platform]['CondTypes']['CondNBE']]:
        l.debug("CondBE")
        cf = claripy.LShR(rdata, data[platform]['CondBitOffsets']['G_CC_SHIFT_C'])
        zf = claripy.LShR(rdata, data[platform]['CondBitOffsets']['G_CC_SHIFT_Z'])
        return 1 & (inv ^ (cf | zf))
    if v in [data[platform]['CondTypes']['CondS'], data[platform]['CondTypes']['CondNS']]:
        l.debug("CondS")
        sf = claripy.LShR(rdata, data[platform]['CondBitOffsets']['G_CC_SHIFT_S'])
        return 1 & (inv ^ sf)
    if v in [data[platform]['CondTypes']['CondP'], data[platform]['CondTypes']['CondNP']]:
        l.debug("CondP")
        pf = claripy.LShR(rdata, data[platform]['CondBitOffsets']['G_CC_SHIFT_P'])
        return 1 & (inv ^ pf)
    if v in [data[platform]['CondTypes']['CondL'], data[platform]['CondTypes']['CondNL']]:
        l.debug("CondL")
        sf = claripy.LShR(rdata, data[platform]['CondBitOffsets']['G_CC_SHIFT_S'])
        of = claripy.LShR(rdata, data[platform]['CondBitOffsets']['G_CC_SHIFT_O'])
        return 1 & (inv ^ (sf ^ of))
    if v in [data[platform]['CondTypes']['CondLE'], data[platform]['CondTypes']['CondNLE']]:
        l.debug("CondLE")
        sf = claripy.LShR(rdata, data[platform]['CondBitOffsets']['G_CC_SHIFT_S'])
        of = claripy.LShR(rdata, data[platform]['CondBitOffsets']['G_CC_SHIFT_O'])
        zf = claripy.LShR(rdata, data[platform]['CondBitOffsets']['G_CC_SHIFT_Z'])
        return 1 & (inv ^ ((sf ^ of) | zf))
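# Illustrative sketch (not part of the original module): each signed condition is
# a combination of the flag bits extracted from rdata, and inv (the low bit of
# the condition code) flips the result for the negated form. With hypothetical
# concrete flag bits sf = 1, of = 0, zf = 0:
#
#     sf ^ of                  # -> 1, "less than" holds
#     1 & (0 ^ (sf ^ of))      # -> 1, CondL  (inv = 0)
#     1 & (1 ^ (sf ^ of))      # -> 0, CondNL (inv = 1)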
def armg_calculate_flag_c(state, cc_op, cc_dep1, cc_dep2, cc_dep3):
    concrete_op = op_concretize(cc_op)
    flag = None

    if concrete_op == ARMG_CC_OP_COPY:
        flag = claripy.LShR(cc_dep1, ARMG_CC_SHIFT_C) & 1
    elif concrete_op == ARMG_CC_OP_ADD:
        res = cc_dep1 + cc_dep2
        flag = boolean_extend(claripy.ULT, res, cc_dep1, 32)
    elif concrete_op == ARMG_CC_OP_SUB:
        flag = boolean_extend(claripy.UGE, cc_dep1, cc_dep2, 32)
    elif concrete_op == ARMG_CC_OP_ADC:
        res = cc_dep1 + cc_dep2 + cc_dep3
        flag = claripy.If(cc_dep3 != 0, boolean_extend(claripy.ULE, res, cc_dep1, 32),
                          boolean_extend(claripy.ULT, res, cc_dep1, 32))
    elif concrete_op == ARMG_CC_OP_SBB:
        flag = claripy.If(cc_dep3 != 0, boolean_extend(claripy.UGE, cc_dep1, cc_dep2, 32),
                          boolean_extend(claripy.UGT, cc_dep1, cc_dep2, 32))
    elif concrete_op == ARMG_CC_OP_LOGIC:
        flag = cc_dep2
    elif concrete_op == ARMG_CC_OP_MUL:
        flag = (claripy.LShR(cc_dep3, 1)) & 1
    elif concrete_op == ARMG_CC_OP_MULL:
        flag = (claripy.LShR(cc_dep3, 1)) & 1

    if flag is not None: return flag

    l.error("Unknown cc_op %s (armg_calculate_flag_c)", cc_op)
    raise SimCCallError("Unknown cc_op %s" % cc_op)
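# Illustrative sketch (not part of the original module): ARM sets C on a
# subtraction when no borrow occurs, i.e. when the first operand is
# unsigned-greater-or-equal to the second, which is exactly the UGE test used
# above. Assuming the standard claripy API:
#
#     import claripy
#     claripy.UGE(claripy.BVV(5, 32), claripy.BVV(7, 32))   # False -> C = 0
#     claripy.UGE(claripy.BVV(7, 32), claripy.BVV(5, 32))   # True  -> C = 1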