Remove the definitions of Ptr_to_ULong and ULong_to_Ptr.

A cast to Addr replaces the former; the latter was not used.


git-svn-id: svn://svn.valgrind.org/vex/trunk@3061
Florian Krohm 2015-01-07 20:14:48 +00:00
parent 37bd462daa
commit 1d2c7d730a
19 changed files with 107 additions and 137 deletions
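Note: the transformation applied throughout this commit is mechanical — every Ptr_to_ULong(p) becomes a plain (Addr)p cast, with an explicit widening to ULong where a 64-bit value is still needed. Below is a minimal standalone sketch of the before/after pattern. The Addr typedef matches the libvex_basictypes.h hunk near the end of this diff; the ULong typedef and main() are assumptions added purely for illustration, and the helper is folded into a single definition rather than the two word-size variants actually being removed.

    #include <stdio.h>

    /* Addr as in libvex_basictypes.h (see the hunk below); ULong assumed
       here to be a 64-bit unsigned type. */
    typedef unsigned long long ULong;
    typedef unsigned long      Addr;   /* an address, host word sized */

    /* Condensed form of the helper being removed: on 64-bit hosts Addr is
       already 64 bits wide; on 32-bit hosts the unsigned widening to ULong
       zero-extends, matching the old UInt-based variant. */
    static inline ULong Ptr_to_ULong ( const void* p ) {
       return (ULong)(Addr)p;
    }

    int main ( void )
    {
       int x = 42;
       const void* p = &x;

       ULong old_way = Ptr_to_ULong(p);    /* old style, via the helper  */
       ULong new_way = (ULong)(Addr)p;     /* new style, as in this diff */

       printf("equal: %d\n", old_way == new_way);   /* prints "equal: 1" */
       return 0;
    }
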


@ -2765,7 +2765,7 @@ Int emit_AMD64Instr ( /*MB_MOD*/Bool* is_profInc,
const void* disp_cp_chain_me
= i->Ain.XDirect.toFastEP ? disp_cp_chain_me_to_fastEP
: disp_cp_chain_me_to_slowEP;
p = emit64(p, Ptr_to_ULong(disp_cp_chain_me));
p = emit64(p, (Addr)disp_cp_chain_me);
/* call *%r11 */
*p++ = 0x41;
*p++ = 0xFF;
@ -2808,18 +2808,18 @@ Int emit_AMD64Instr ( /*MB_MOD*/Bool* is_profInc,
p = doAMode_M(p, i->Ain.XIndir.dstGA, i->Ain.XIndir.amRIP);
/* get $disp_cp_xindir into %r11 */
if (fitsIn32Bits(Ptr_to_ULong(disp_cp_xindir))) {
if (fitsIn32Bits((Addr)disp_cp_xindir)) {
/* use a shorter encoding */
/* movl sign-extend(disp_cp_xindir), %r11 */
*p++ = 0x49;
*p++ = 0xC7;
*p++ = 0xC3;
p = emit32(p, (UInt)Ptr_to_ULong(disp_cp_xindir));
p = emit32(p, (UInt)(Addr)disp_cp_xindir);
} else {
/* movabsq $disp_cp_xindir, %r11 */
*p++ = 0x49;
*p++ = 0xBB;
p = emit64(p, Ptr_to_ULong(disp_cp_xindir));
p = emit64(p, (Addr)disp_cp_xindir);
}
/* jmp *%r11 */
@ -2884,7 +2884,7 @@ Int emit_AMD64Instr ( /*MB_MOD*/Bool* is_profInc,
/* movabsq $disp_assisted, %r11 */
*p++ = 0x49;
*p++ = 0xBB;
p = emit64(p, Ptr_to_ULong(disp_cp_xassisted));
p = emit64(p, (Addr)disp_cp_xassisted);
/* jmp *%r11 */
*p++ = 0x41;
*p++ = 0xFF;
@ -3567,7 +3567,7 @@ VexInvalRange chainXDirect_AMD64 ( VexEndness endness_host,
UChar* p = (UChar*)place_to_chain;
vassert(p[0] == 0x49);
vassert(p[1] == 0xBB);
vassert(*(ULong*)(&p[2]) == Ptr_to_ULong(disp_cp_chain_me_EXPECTED));
vassert(*(Addr*)(&p[2]) == (Addr)disp_cp_chain_me_EXPECTED);
vassert(p[10] == 0x41);
vassert(p[11] == 0xFF);
vassert(p[12] == 0xD3);
@ -3629,7 +3629,7 @@ VexInvalRange chainXDirect_AMD64 ( VexEndness endness_host,
vassert(delta == 0LL || delta == -1LL);
} else {
/* Minimal modifications from the starting sequence. */
*(ULong*)(&p[2]) = Ptr_to_ULong(place_to_jump_to);
*(Addr*)(&p[2]) = (Addr)place_to_jump_to;
p[12] = 0xE3;
}
VexInvalRange vir = { (HWord)place_to_chain, 13 };
@ -3664,7 +3664,7 @@ VexInvalRange unchainXDirect_AMD64 ( VexEndness endness_host,
UChar* p = (UChar*)place_to_unchain;
Bool valid = False;
if (p[0] == 0x49 && p[1] == 0xBB
&& *(ULong*)(&p[2]) == Ptr_to_ULong(place_to_jump_to_EXPECTED)
&& *(Addr*)(&p[2]) == (Addr)place_to_jump_to_EXPECTED
&& p[10] == 0x41 && p[11] == 0xFF && p[12] == 0xE3) {
/* it's the long form */
valid = True;
@ -3695,7 +3695,7 @@ VexInvalRange unchainXDirect_AMD64 ( VexEndness endness_host,
*/
p[0] = 0x49;
p[1] = 0xBB;
*(ULong*)(&p[2]) = Ptr_to_ULong(disp_cp_chain_me);
*(Addr*)(&p[2]) = (Addr)disp_cp_chain_me;
p[10] = 0x41;
p[11] = 0xFF;
p[12] = 0xD3;
@ -3726,7 +3726,7 @@ VexInvalRange patchProfInc_AMD64 ( VexEndness endness_host,
vassert(p[10] == 0x49);
vassert(p[11] == 0xFF);
vassert(p[12] == 0x03);
ULong imm64 = (ULong)Ptr_to_ULong(location_of_counter);
ULong imm64 = (ULong)(Addr)location_of_counter;
p[2] = imm64 & 0xFF; imm64 >>= 8;
p[3] = imm64 & 0xFF; imm64 >>= 8;
p[4] = imm64 & 0xFF; imm64 >>= 8;


@ -691,7 +691,7 @@ void doHelperCall ( /*OUT*/UInt* stackAdjustAfterCall,
/* Finally, generate the call itself. This needs the *retloc value
set in the switch above, which is why it's at the end. */
addInstr(env,
AMD64Instr_Call(cc, Ptr_to_ULong(cee->addr), n_args, *retloc));
AMD64Instr_Call(cc, (Addr)cee->addr, n_args, *retloc));
}


@ -957,7 +957,7 @@ ARM64Instr* ARM64Instr_CSel ( HReg dst, HReg argL, HReg argR,
i->ARM64in.CSel.cond = cond;
return i;
}
ARM64Instr* ARM64Instr_Call ( ARM64CondCode cond, HWord target, Int nArgRegs,
ARM64Instr* ARM64Instr_Call ( ARM64CondCode cond, Addr64 target, Int nArgRegs,
RetLoc rloc ) {
ARM64Instr* i = LibVEX_Alloc(sizeof(ARM64Instr));
i->tag = ARM64in_Call;
@ -1490,7 +1490,7 @@ void ppARM64Instr ( const ARM64Instr* i ) {
vex_printf("call%s ",
i->ARM64in.Call.cond==ARM64cc_AL
? " " : showARM64CondCode(i->ARM64in.Call.cond));
vex_printf("0x%lx [nArgRegs=%d, ",
vex_printf("0x%llx [nArgRegs=%d, ",
i->ARM64in.Call.target, i->ARM64in.Call.nArgRegs);
ppRetLoc(i->ARM64in.Call.rloc);
vex_printf("]");
@ -3436,7 +3436,7 @@ Int emit_ARM64Instr ( /*MB_MOD*/Bool* is_profInc,
= i->ARM64in.XDirect.toFastEP ? disp_cp_chain_me_to_fastEP
: disp_cp_chain_me_to_slowEP;
p = imm64_to_iregNo_EXACTLY4(p, /*x*/9,
Ptr_to_ULong(disp_cp_chain_me));
(Addr)disp_cp_chain_me);
*p++ = 0xD63F0120;
/* --- END of PATCHABLE BYTES --- */
@ -3479,7 +3479,7 @@ Int emit_ARM64Instr ( /*MB_MOD*/Bool* is_profInc,
/* imm64 x9, VG_(disp_cp_xindir) */
/* br x9 */
p = imm64_to_iregNo(p, /*x*/9, Ptr_to_ULong(disp_cp_xindir));
p = imm64_to_iregNo(p, /*x*/9, (Addr)disp_cp_xindir);
*p++ = 0xD61F0120; /* br x9 */
/* Fix up the conditional jump, if there was one. */
@ -3546,7 +3546,7 @@ Int emit_ARM64Instr ( /*MB_MOD*/Bool* is_profInc,
/* imm64 x9, VG_(disp_cp_xassisted) */
/* br x9 */
p = imm64_to_iregNo(p, /*x*/9, Ptr_to_ULong(disp_cp_xassisted));
p = imm64_to_iregNo(p, /*x*/9, (Addr)disp_cp_xassisted);
*p++ = 0xD61F0120; /* br x9 */
/* Fix up the conditional jump, if there was one. */
@ -5230,7 +5230,7 @@ VexInvalRange chainXDirect_ARM64 ( VexEndness endness_host,
UInt* p = (UInt*)place_to_chain;
vassert(0 == (3 & (HWord)p));
vassert(is_imm64_to_iregNo_EXACTLY4(
p, /*x*/9, Ptr_to_ULong(disp_cp_chain_me_EXPECTED)));
p, /*x*/9, (Addr)disp_cp_chain_me_EXPECTED));
vassert(p[4] == 0xD63F0120);
/* And what we want to change it to is:
@ -5246,7 +5246,7 @@ VexInvalRange chainXDirect_ARM64 ( VexEndness endness_host,
The replacement has the same length as the original.
*/
(void)imm64_to_iregNo_EXACTLY4(
p, /*x*/9, Ptr_to_ULong(place_to_jump_to));
p, /*x*/9, (Addr)place_to_jump_to);
p[4] = 0xD61F0120;
VexInvalRange vir = {(HWord)p, 20};
@ -5276,7 +5276,7 @@ VexInvalRange unchainXDirect_ARM64 ( VexEndness endness_host,
UInt* p = (UInt*)place_to_unchain;
vassert(0 == (3 & (HWord)p));
vassert(is_imm64_to_iregNo_EXACTLY4(
p, /*x*/9, Ptr_to_ULong(place_to_jump_to_EXPECTED)));
p, /*x*/9, (Addr)place_to_jump_to_EXPECTED));
vassert(p[4] == 0xD61F0120);
/* And what we want to change it to is:
@ -5290,7 +5290,7 @@ VexInvalRange unchainXDirect_ARM64 ( VexEndness endness_host,
D6 3F 01 20
*/
(void)imm64_to_iregNo_EXACTLY4(
p, /*x*/9, Ptr_to_ULong(disp_cp_chain_me));
p, /*x*/9, (Addr)disp_cp_chain_me);
p[4] = 0xD63F0120;
VexInvalRange vir = {(HWord)p, 20};
@ -5313,7 +5313,7 @@ VexInvalRange patchProfInc_ARM64 ( VexEndness endness_host,
vassert(p[5] == 0x91000508);
vassert(p[6] == 0xF9000128);
imm64_to_iregNo_EXACTLY4(p, /*x*/9,
Ptr_to_ULong(location_of_counter));
(Addr)location_of_counter);
VexInvalRange vir = {(HWord)p, 4*4};
return vir;
}


@ -630,7 +630,7 @@ typedef
condition (which could be ARM64cc_AL). */
struct {
RetLoc rloc; /* where the return value will be */
HWord target;
Addr64 target;
ARM64CondCode cond;
Int nArgRegs; /* # regs carrying args: 0 .. 8 */
} Call;
@ -877,7 +877,7 @@ extern ARM64Instr* ARM64Instr_XAssisted ( HReg dstGA, ARM64AMode* amPC,
ARM64CondCode cond, IRJumpKind jk );
extern ARM64Instr* ARM64Instr_CSel ( HReg dst, HReg argL, HReg argR,
ARM64CondCode cond );
extern ARM64Instr* ARM64Instr_Call ( ARM64CondCode, HWord, Int nArgRegs,
extern ARM64Instr* ARM64Instr_Call ( ARM64CondCode, Addr64, Int nArgRegs,
RetLoc rloc );
extern ARM64Instr* ARM64Instr_AddToSP ( Int simm );
extern ARM64Instr* ARM64Instr_FromSP ( HReg dst );


@ -498,7 +498,7 @@ Bool doHelperCall ( /*OUT*/UInt* stackAdjustAfterCall,
HReg tmpregs[ARM64_N_ARGREGS];
Bool go_fast;
Int n_args, i, nextArgReg;
ULong target;
Addr64 target;
vassert(ARM64_N_ARGREGS == 8);
@ -784,7 +784,7 @@ Bool doHelperCall ( /*OUT*/UInt* stackAdjustAfterCall,
number into the call (we'll need to know it when doing register
allocation, to know what regs the call reads.) */
target = (HWord)Ptr_to_ULong(cee->addr);
target = (Addr)cee->addr;
addInstr(env, ARM64Instr_Call( cc, target, nextArgReg, *retloc ));
return True; /* success */
@ -1708,7 +1708,7 @@ static HReg iselIntExpr_R_wrk ( ISelEnv* env, IRExpr* e )
HReg res = newVRegI(env);
addInstr(env, ARM64Instr_MovI(hregARM64_X0(), regL));
addInstr(env, ARM64Instr_MovI(hregARM64_X1(), regR));
addInstr(env, ARM64Instr_Call( ARM64cc_AL, (HWord)Ptr_to_ULong(fn),
addInstr(env, ARM64Instr_Call( ARM64cc_AL, (Addr)fn,
2, mk_RetLoc_simple(RLPri_Int) ));
addInstr(env, ARM64Instr_MovI(res, hregARM64_X0()));
return res;


@ -1229,7 +1229,7 @@ ARMInstr* ARMInstr_CMov ( ARMCondCode cond, HReg dst, ARMRI84* src ) {
vassert(cond != ARMcc_AL);
return i;
}
ARMInstr* ARMInstr_Call ( ARMCondCode cond, HWord target, Int nArgRegs,
ARMInstr* ARMInstr_Call ( ARMCondCode cond, Addr32 target, Int nArgRegs,
RetLoc rloc ) {
ARMInstr* i = LibVEX_Alloc(sizeof(ARMInstr));
i->tag = ARMin_Call;
@ -1700,7 +1700,7 @@ void ppARMInstr ( const ARMInstr* i ) {
vex_printf("call%s ",
i->ARMin.Call.cond==ARMcc_AL
? "" : showARMCondCode(i->ARMin.Call.cond));
vex_printf("0x%lx [nArgRegs=%d, ",
vex_printf("0x%x [nArgRegs=%d, ",
i->ARMin.Call.target, i->ARMin.Call.nArgRegs);
ppRetLoc(i->ARMin.Call.rloc);
vex_printf("]");
@ -3242,7 +3242,7 @@ Int emit_ARMInstr ( /*MB_MOD*/Bool* is_profInc,
= i->ARMin.XDirect.toFastEP ? disp_cp_chain_me_to_fastEP
: disp_cp_chain_me_to_slowEP;
p = imm32_to_iregNo_EXACTLY2(p, /*r*/12,
(UInt)Ptr_to_ULong(disp_cp_chain_me));
(UInt)(Addr)disp_cp_chain_me);
*p++ = 0xE12FFF3C;
/* --- END of PATCHABLE BYTES --- */
@ -3290,7 +3290,7 @@ Int emit_ARMInstr ( /*MB_MOD*/Bool* is_profInc,
/* movt r12, hi16(VG_(disp_cp_xindir)) */
/* bx r12 (A1) */
p = imm32_to_iregNo(p, /*r*/12,
(UInt)Ptr_to_ULong(disp_cp_xindir));
(UInt)(Addr)disp_cp_xindir);
*p++ = 0xE12FFF1C;
/* Fix up the conditional jump, if there was one. */
@ -3355,7 +3355,7 @@ Int emit_ARMInstr ( /*MB_MOD*/Bool* is_profInc,
/* movt r12, hi16(VG_(disp_cp_xassisted)) */
/* bx r12 (A1) */
p = imm32_to_iregNo(p, /*r*/12,
(UInt)Ptr_to_ULong(disp_cp_xassisted));
(UInt)(Addr)disp_cp_xassisted);
*p++ = 0xE12FFF1C;
/* Fix up the conditional jump, if there was one. */
@ -4721,7 +4721,7 @@ VexInvalRange chainXDirect_ARM ( VexEndness endness_host,
UInt* p = (UInt*)place_to_chain;
vassert(0 == (3 & (HWord)p));
vassert(is_imm32_to_iregNo_EXACTLY2(
p, /*r*/12, (UInt)Ptr_to_ULong(disp_cp_chain_me_EXPECTED)));
p, /*r*/12, (UInt)(Addr)disp_cp_chain_me_EXPECTED));
vassert(p[2] == 0xE12FFF3C);
/* And what we want to change it to is either:
(general case)
@ -4775,7 +4775,7 @@ VexInvalRange chainXDirect_ARM ( VexEndness endness_host,
p[2] = 0xFF000000;
} else {
(void)imm32_to_iregNo_EXACTLY2(
p, /*r*/12, (UInt)Ptr_to_ULong(place_to_jump_to));
p, /*r*/12, (UInt)(Addr)place_to_jump_to);
p[2] = 0xE12FFF1C;
}
@ -4814,7 +4814,7 @@ VexInvalRange unchainXDirect_ARM ( VexEndness endness_host,
Bool valid = False;
if (is_imm32_to_iregNo_EXACTLY2(
p, /*r*/12, (UInt)Ptr_to_ULong(place_to_jump_to_EXPECTED))
p, /*r*/12, (UInt)(Addr)place_to_jump_to_EXPECTED)
&& p[2] == 0xE12FFF1C) {
valid = True; /* it's the long form */
if (0)
@ -4841,7 +4841,7 @@ VexInvalRange unchainXDirect_ARM ( VexEndness endness_host,
E1 2F FF 3C
*/
(void)imm32_to_iregNo_EXACTLY2(
p, /*r*/12, (UInt)Ptr_to_ULong(disp_cp_chain_me));
p, /*r*/12, (UInt)(Addr)disp_cp_chain_me);
p[2] = 0xE12FFF3C;
VexInvalRange vir = {(HWord)p, 12};
return vir;
@ -4866,7 +4866,7 @@ VexInvalRange patchProfInc_ARM ( VexEndness endness_host,
vassert(p[6] == 0xE2ABB000);
vassert(p[7] == 0xE58CB004);
imm32_to_iregNo_EXACTLY2(p, /*r*/12,
(UInt)Ptr_to_ULong(location_of_counter));
(UInt)(Addr)location_of_counter);
VexInvalRange vir = {(HWord)p, 8};
return vir;
}


@ -724,7 +724,7 @@ typedef
condition (which could be ARMcc_AL). */
struct {
ARMCondCode cond;
HWord target;
Addr32 target;
Int nArgRegs; /* # regs carrying args: 0 .. 4 */
RetLoc rloc; /* where the return value will be */
} Call;
@ -976,7 +976,7 @@ extern ARMInstr* ARMInstr_XIndir ( HReg dstGA, ARMAMode1* amR15T,
extern ARMInstr* ARMInstr_XAssisted ( HReg dstGA, ARMAMode1* amR15T,
ARMCondCode cond, IRJumpKind jk );
extern ARMInstr* ARMInstr_CMov ( ARMCondCode, HReg dst, ARMRI84* src );
extern ARMInstr* ARMInstr_Call ( ARMCondCode, HWord, Int nArgRegs,
extern ARMInstr* ARMInstr_Call ( ARMCondCode, Addr32, Int nArgRegs,
RetLoc rloc );
extern ARMInstr* ARMInstr_Mul ( ARMMulOp op );
extern ARMInstr* ARMInstr_LdrEX ( Int szB );


@ -396,7 +396,7 @@ Bool doHelperCall ( /*OUT*/UInt* stackAdjustAfterCall,
HReg tmpregs[ARM_N_ARGREGS];
Bool go_fast;
Int n_args, i, nextArgReg;
ULong target;
Addr32 target;
vassert(ARM_N_ARGREGS == 4);
@ -708,7 +708,7 @@ Bool doHelperCall ( /*OUT*/UInt* stackAdjustAfterCall,
instruction, a bitmask indicating which of r0/1/2/3 carry live
values. But that's too much hassle. */
target = (HWord)Ptr_to_ULong(cee->addr);
target = (Addr)cee->addr;
addInstr(env, ARMInstr_Call( cc, target, nextArgReg, *retloc ));
return True; /* success */
@ -1483,7 +1483,7 @@ static HReg iselIntExpr_R_wrk ( ISelEnv* env, IRExpr* e )
HReg res = newVRegI(env);
addInstr(env, mk_iMOVds_RR(hregARM_R0(), regL));
addInstr(env, mk_iMOVds_RR(hregARM_R1(), regR));
addInstr(env, ARMInstr_Call( ARMcc_AL, (HWord)Ptr_to_ULong(fn),
addInstr(env, ARMInstr_Call( ARMcc_AL, (Addr)fn,
2, mk_RetLoc_simple(RLPri_Int) ));
addInstr(env, mk_iMOVds_RR(res, hregARM_R0()));
return res;
@ -1772,7 +1772,7 @@ static HReg iselIntExpr_R_wrk ( ISelEnv* env, IRExpr* e )
HReg arg = iselIntExpr_R(env, e->Iex.Unop.arg);
HReg res = newVRegI(env);
addInstr(env, mk_iMOVds_RR(hregARM_R0(), arg));
addInstr(env, ARMInstr_Call( ARMcc_AL, (HWord)Ptr_to_ULong(fn),
addInstr(env, ARMInstr_Call( ARMcc_AL, (Addr)fn,
1, mk_RetLoc_simple(RLPri_Int) ));
addInstr(env, mk_iMOVds_RR(res, hregARM_R0()));
return res;


@ -3443,7 +3443,7 @@ Int emit_MIPSInstr ( /*MB_MOD*/Bool* is_profInc,
= i->Min.XDirect.toFastEP ? disp_cp_chain_me_to_fastEP
: disp_cp_chain_me_to_slowEP;
p = mkLoadImm_EXACTLY2or6(p, /*r*/ 9,
Ptr_to_ULong(disp_cp_chain_me), mode64);
(Addr)disp_cp_chain_me, mode64);
/* jalr $9 */
/* nop */
p = mkFormR(p, 0, 9, 0, 31, 0, 9); /* p += 4 */
@ -3496,7 +3496,7 @@ Int emit_MIPSInstr ( /*MB_MOD*/Bool* is_profInc,
/* jalr r9 */
/* nop */
p = mkLoadImm_EXACTLY2or6(p, /*r*/ 9,
Ptr_to_ULong(disp_cp_xindir), mode64);
(Addr)disp_cp_xindir, mode64);
p = mkFormR(p, 0, 9, 0, 31, 0, 9); /* p += 4 */
p = mkFormR(p, 0, 0, 0, 0, 0, 0); /* p += 4 */
@ -3566,7 +3566,7 @@ Int emit_MIPSInstr ( /*MB_MOD*/Bool* is_profInc,
/* move r9, VG_(disp_cp_xassisted) */
p = mkLoadImm_EXACTLY2or6(p, /*r*/ 9,
(ULong)Ptr_to_ULong(disp_cp_xassisted), mode64);
(ULong)(Addr)disp_cp_xassisted, mode64);
/* jalr $9
nop */
p = mkFormR(p, 0, 9, 0, 31, 0, 9); /* p += 4 */
@ -4343,7 +4343,7 @@ VexInvalRange chainXDirect_MIPS ( VexEndness endness_host,
UChar* p = (UChar*)place_to_chain;
vassert(0 == (3 & (HWord)p));
vassert(isLoadImm_EXACTLY2or6(p, /*r*/9,
(UInt)Ptr_to_ULong(disp_cp_chain_me_EXPECTED),
(UInt)(Addr)disp_cp_chain_me_EXPECTED,
mode64));
vassert(fetch32(p + (mode64 ? 24 : 8) + 0) == 0x120F809);
vassert(fetch32(p + (mode64 ? 24 : 8) + 4) == 0x00000000);
@ -4360,7 +4360,7 @@ VexInvalRange chainXDirect_MIPS ( VexEndness endness_host,
*/
p = mkLoadImm_EXACTLY2or6(p, /*r*/9,
Ptr_to_ULong(place_to_jump_to), mode64);
(Addr)place_to_jump_to, mode64);
p = emit32(p, 0x120F809);
p = emit32(p, 0x00000000);
@ -4391,7 +4391,7 @@ VexInvalRange unchainXDirect_MIPS ( VexEndness endness_host,
UChar* p = (UChar*)place_to_unchain;
vassert(0 == (3 & (HWord)p));
vassert(isLoadImm_EXACTLY2or6(p, /*r*/ 9,
Ptr_to_ULong(place_to_jump_to_EXPECTED),
(Addr)place_to_jump_to_EXPECTED,
mode64));
vassert(fetch32(p + (mode64 ? 24 : 8) + 0) == 0x120F809);
vassert(fetch32(p + (mode64 ? 24 : 8) + 4) == 0x00000000);
@ -4406,7 +4406,7 @@ VexInvalRange unchainXDirect_MIPS ( VexEndness endness_host,
The replacement has the same length as the original.
*/
p = mkLoadImm_EXACTLY2or6(p, /*r*/ 9,
Ptr_to_ULong(disp_cp_chain_me), mode64);
(Addr)disp_cp_chain_me, mode64);
p = emit32(p, 0x120F809);
p = emit32(p, 0x00000000);
@ -4450,7 +4450,7 @@ VexInvalRange patchProfInc_MIPS ( VexEndness endness_host,
}
p = mkLoadImm_EXACTLY2or6(p, /*r*/9,
Ptr_to_ULong(location_of_counter), mode64);
(Addr)location_of_counter, mode64);
VexInvalRange vir = {(HWord)p, 8};
return vir;


@ -640,16 +640,16 @@ static void doHelperCall(/*OUT*/UInt* stackAdjustAfterCall,
vassert(0);
}
ULong target = mode64 ? Ptr_to_ULong(cee->addr) :
toUInt(Ptr_to_ULong(cee->addr));
Addr64 target = mode64 ? (Addr)cee->addr :
toUInt((Addr)cee->addr);
/* Finally, generate the call itself. This needs the *retloc value
set in the switch above, which is why it's at the end. */
if (cc == MIPScc_AL)
addInstr(env, MIPSInstr_CallAlways(cc, (Addr64)target, argiregs,
addInstr(env, MIPSInstr_CallAlways(cc, target, argiregs,
*retloc));
else
addInstr(env, MIPSInstr_Call(cc, (Addr64)target, argiregs, src, *retloc));
addInstr(env, MIPSInstr_Call(cc, target, argiregs, src, *retloc));
}
/*---------------------------------------------------------*/
@ -1355,7 +1355,7 @@ static HReg iselWordExpr_R_wrk(ISelEnv * env, IRExpr * e)
argiregs |= (1 << 4);
argiregs |= (1 << 5);
addInstr(env, MIPSInstr_CallAlways( MIPScc_AL,
(HWord)Ptr_to_ULong(fn),
(Addr)fn,
argiregs, rloc));
addInstr(env, mk_iMOVds_RR(res, hregMIPS_GPR2(env->mode64)));
return res;
@ -1763,7 +1763,7 @@ static HReg iselWordExpr_R_wrk(ISelEnv * env, IRExpr * e)
addInstr(env, mk_iMOVds_RR(hregMIPS_GPR4(env->mode64), regL));
argiregs |= (1 << 4);
addInstr(env, MIPSInstr_CallAlways( MIPScc_AL,
(HWord)Ptr_to_ULong(fn),
(Addr)fn,
argiregs, rloc));
addInstr(env, mk_iMOVds_RR(res, hregMIPS_GPR2(env->mode64)));
return res;


@ -4267,7 +4267,7 @@ Int emit_PPCInstr ( /*MB_MOD*/Bool* is_profInc,
= i->Pin.XDirect.toFastEP ? disp_cp_chain_me_to_fastEP
: disp_cp_chain_me_to_slowEP;
p = mkLoadImm_EXACTLY2or5(
p, /*r*/30, Ptr_to_ULong(disp_cp_chain_me), mode64, endness_host);
p, /*r*/30, (Addr)disp_cp_chain_me, mode64, endness_host);
/* mtctr r30 */
p = mkFormXFX(p, /*r*/30, 9, 467, endness_host);
/* bctrl */
@ -4315,7 +4315,7 @@ Int emit_PPCInstr ( /*MB_MOD*/Bool* is_profInc,
);
/* imm32/64 r30, VG_(disp_cp_xindir) */
p = mkLoadImm(p, /*r*/30, (ULong)Ptr_to_ULong(disp_cp_xindir), mode64,
p = mkLoadImm(p, /*r*/30, (ULong)(Addr)disp_cp_xindir, mode64,
endness_host);
/* mtctr r30 */
p = mkFormXFX(p, /*r*/30, 9, 467, endness_host);
@ -4384,7 +4384,7 @@ Int emit_PPCInstr ( /*MB_MOD*/Bool* is_profInc,
/* imm32/64 r30, VG_(disp_cp_xassisted) */
p = mkLoadImm(p, /*r*/30,
(ULong)Ptr_to_ULong(disp_cp_xassisted), mode64,
(ULong)(Addr)disp_cp_xassisted, mode64,
endness_host);
/* mtctr r30 */
p = mkFormXFX(p, /*r*/30, 9, 467, endness_host);
@ -5936,7 +5936,7 @@ VexInvalRange chainXDirect_PPC ( VexEndness endness_host,
UChar* p = (UChar*)place_to_chain;
vassert(0 == (3 & (HWord)p));
vassert(isLoadImm_EXACTLY2or5(p, /*r*/30,
Ptr_to_ULong(disp_cp_chain_me_EXPECTED),
(Addr)disp_cp_chain_me_EXPECTED,
mode64, endness_host));
vassert(fetch32(p + (mode64 ? 20 : 8) + 0, endness_host) == 0x7FC903A6);
vassert(fetch32(p + (mode64 ? 20 : 8) + 4, endness_host) == 0x4E800421);
@ -5951,7 +5951,7 @@ VexInvalRange chainXDirect_PPC ( VexEndness endness_host,
The replacement has the same length as the original.
*/
p = mkLoadImm_EXACTLY2or5(p, /*r*/30,
Ptr_to_ULong(place_to_jump_to), mode64,
(Addr)place_to_jump_to, mode64,
endness_host);
p = emit32(p, 0x7FC903A6, endness_host);
p = emit32(p, 0x4E800420, endness_host);
@ -5990,7 +5990,7 @@ VexInvalRange unchainXDirect_PPC ( VexEndness endness_host,
UChar* p = (UChar*)place_to_unchain;
vassert(0 == (3 & (HWord)p));
vassert(isLoadImm_EXACTLY2or5(p, /*r*/30,
Ptr_to_ULong(place_to_jump_to_EXPECTED),
(Addr)place_to_jump_to_EXPECTED,
mode64, endness_host));
vassert(fetch32(p + (mode64 ? 20 : 8) + 0, endness_host) == 0x7FC903A6);
vassert(fetch32(p + (mode64 ? 20 : 8) + 4, endness_host) == 0x4E800420);
@ -6005,7 +6005,7 @@ VexInvalRange unchainXDirect_PPC ( VexEndness endness_host,
The replacement has the same length as the original.
*/
p = mkLoadImm_EXACTLY2or5(p, /*r*/30,
Ptr_to_ULong(disp_cp_chain_me), mode64,
(Addr)disp_cp_chain_me, mode64,
endness_host);
p = emit32(p, 0x7FC903A6, endness_host);
p = emit32(p, 0x4E800421, endness_host);
@ -6043,7 +6043,7 @@ VexInvalRange patchProfInc_PPC ( VexEndness endness_host,
vassert(fetch32(p + 24, endness_host) == 0x3BBD0001);
vassert(fetch32(p + 28, endness_host) == 0xFBBE0000);
p = mkLoadImm_EXACTLY2or5(p, /*r*/30,
Ptr_to_ULong(location_of_counter),
(Addr)location_of_counter,
True/*mode64*/, endness_host);
len = p - (UChar*)place_to_patch;
vassert(len == 20);
@ -6058,7 +6058,7 @@ VexInvalRange patchProfInc_PPC ( VexEndness endness_host,
vassert(fetch32(p + 24, endness_host) == 0x7FBD0194);
vassert(fetch32(p + 28, endness_host) == 0x93BE0000);
p = mkLoadImm_EXACTLY2or5(p, /*r*/30,
Ptr_to_ULong(location_of_counter),
(Addr)location_of_counter,
False/*!mode64*/, endness_host);
len = p - (UChar*)place_to_patch;
vassert(len == 8);


@ -1059,9 +1059,9 @@ void doHelperCall ( /*OUT*/UInt* stackAdjustAfterCall,
/* Finally, generate the call itself. This needs the *retloc value
set in the switch above, which is why it's at the end. */
ULong target = mode64 ? Ptr_to_ULong(cee->addr)
: toUInt(Ptr_to_ULong(cee->addr));
addInstr(env, PPCInstr_Call( cc, (Addr64)target, argiregs, *retloc ));
Addr64 target = mode64 ? (Addr)cee->addr
: toUInt((Addr)(cee->addr));
addInstr(env, PPCInstr_Call( cc, target, argiregs, *retloc ));
}
@ -2260,7 +2260,7 @@ static HReg iselWordExpr_R_wrk ( ISelEnv* env, IRExpr* e,
cc = mk_PPCCondCode( Pct_ALWAYS, Pcf_NONE );
if (IEndianess == Iend_LE) {
addInstr(env, PPCInstr_Call( cc, Ptr_to_ULong(h_calc_BCDtoDPB),
addInstr(env, PPCInstr_Call( cc, (Addr)h_calc_BCDtoDPB,
argiregs,
mk_RetLoc_simple(RLPri_Int)) );
} else {
@ -2297,7 +2297,7 @@ static HReg iselWordExpr_R_wrk ( ISelEnv* env, IRExpr* e,
cc = mk_PPCCondCode( Pct_ALWAYS, Pcf_NONE );
if (IEndianess == Iend_LE) {
addInstr(env, PPCInstr_Call( cc, Ptr_to_ULong(h_calc_DPBtoBCD),
addInstr(env, PPCInstr_Call( cc, (Addr)h_calc_DPBtoBCD,
argiregs,
mk_RetLoc_simple(RLPri_Int) ) );
} else {
@ -3673,14 +3673,14 @@ static void iselInt64Expr_wrk ( HReg* rHi, HReg* rLo,
cc = mk_PPCCondCode( Pct_ALWAYS, Pcf_NONE );
if (IEndianess == Iend_LE) {
addInstr( env, PPCInstr_Call( cc, Ptr_to_ULong(h_calc_BCDtoDPB),
addInstr( env, PPCInstr_Call( cc, (Addr)h_calc_BCDtoDPB,
argiregs,
mk_RetLoc_simple(RLPri_2Int) ) );
} else {
ULong target;
target = mode64 ? Ptr_to_ULong(h_calc_BCDtoDPB) :
toUInt( Ptr_to_ULong(h_calc_BCDtoDPB ) );
addInstr( env, PPCInstr_Call( cc, (Addr64)target,
Addr64 target;
target = mode64 ? (Addr)h_calc_BCDtoDPB :
toUInt( (Addr)h_calc_BCDtoDPB );
addInstr( env, PPCInstr_Call( cc, target,
argiregs,
mk_RetLoc_simple(RLPri_2Int) ) );
}
@ -3721,14 +3721,14 @@ static void iselInt64Expr_wrk ( HReg* rHi, HReg* rLo,
cc = mk_PPCCondCode( Pct_ALWAYS, Pcf_NONE );
if (IEndianess == Iend_LE) {
addInstr(env, PPCInstr_Call( cc, Ptr_to_ULong(h_calc_DPBtoBCD),
addInstr(env, PPCInstr_Call( cc, (Addr)h_calc_DPBtoBCD,
argiregs,
mk_RetLoc_simple(RLPri_2Int) ) );
} else {
ULong target;
target = mode64 ? Ptr_to_ULong(h_calc_DPBtoBCD) :
toUInt( Ptr_to_ULong( h_calc_DPBtoBCD ) );
addInstr(env, PPCInstr_Call( cc, (Addr64)target, argiregs,
Addr64 target;
target = mode64 ? (Addr)h_calc_DPBtoBCD :
toUInt( (Addr)h_calc_DPBtoBCD );
addInstr(env, PPCInstr_Call( cc, target, argiregs,
mk_RetLoc_simple(RLPri_2Int) ) );
}


@ -9517,7 +9517,7 @@ s390_insn_xdirect_emit(UChar *buf, const s390_insn *insn,
buf = s390_emit_BASR(buf, 1, R0);
/* --- FIRST PATCHABLE BYTE follows (must not modify %r1) --- */
ULong addr = Ptr_to_ULong(disp_cp_chain_me);
Addr64 addr = (Addr)disp_cp_chain_me;
buf = s390_tchain_load64(buf, S390_REGNO_TCHAIN_SCRATCH, addr);
/* goto *tchain_scratch */
@ -9589,7 +9589,7 @@ s390_insn_xindir_emit(UChar *buf, const s390_insn *insn,
/* load tchain_scratch, #disp_indir */
buf = s390_tchain_load64(buf, S390_REGNO_TCHAIN_SCRATCH,
Ptr_to_ULong(disp_cp_xindir));
(Addr)disp_cp_xindir);
/* goto *tchain_direct */
buf = s390_emit_BCR(buf, S390_CC_ALWAYS, S390_REGNO_TCHAIN_SCRATCH);
@ -9670,7 +9670,7 @@ s390_insn_xassisted_emit(UChar *buf, const s390_insn *insn,
/* load tchain_scratch, #disp_assisted */
buf = s390_tchain_load64(buf, S390_REGNO_TCHAIN_SCRATCH,
Ptr_to_ULong(disp_cp_xassisted));
(Addr)disp_cp_xassisted);
/* goto *tchain_direct */
buf = s390_emit_BCR(buf, S390_CC_ALWAYS, S390_REGNO_TCHAIN_SCRATCH);
@ -9973,7 +9973,7 @@ patchProfInc_S390(VexEndness endness_host,
s390_tchain_verify_load64(code_to_patch, S390_REGNO_TCHAIN_SCRATCH, 0);
UChar *p = s390_tchain_patch_load64(code_to_patch,
Ptr_to_ULong(location_of_counter));
(Addr)location_of_counter);
UInt len = p - (UChar *)code_to_patch;
VexInvalRange vir = { (HWord)code_to_patch, len };
@ -9998,7 +9998,7 @@ chainXDirect_S390(VexEndness endness_host,
*/
const UChar *next;
next = s390_tchain_verify_load64(place_to_chain, S390_REGNO_TCHAIN_SCRATCH,
Ptr_to_ULong(disp_cp_chain_me_EXPECTED));
(Addr)disp_cp_chain_me_EXPECTED);
vassert(s390_insn_is_BR(next, S390_REGNO_TCHAIN_SCRATCH));
/* And what we want to change it to is either:
@ -10059,7 +10059,7 @@ chainXDirect_S390(VexEndness endness_host,
load tchain_scratch, #place_to_jump_to
goto *tchain_scratch
*/
ULong addr = Ptr_to_ULong(place_to_jump_to);
Addr64 addr = (Addr)place_to_jump_to;
p = s390_tchain_load64(p, S390_REGNO_TCHAIN_SCRATCH, addr);
/* There is not need to emit a BCR here, as it is already there. */
}
@ -10111,7 +10111,7 @@ unchainXDirect_S390(VexEndness endness_host,
const UChar *next;
next = s390_tchain_verify_load64(p, S390_REGNO_TCHAIN_SCRATCH,
Ptr_to_ULong(place_to_jump_to_EXPECTED));
(Addr)place_to_jump_to_EXPECTED);
/* Check for BR *tchain_scratch */
vassert(s390_insn_is_BR(next, S390_REGNO_TCHAIN_SCRATCH));
}
@ -10130,7 +10130,7 @@ unchainXDirect_S390(VexEndness endness_host,
address (see s390_insn_xdirect_emit). */
p = s390_emit_BASR(p - S390_BASR_LEN, 1, R0);
ULong addr = Ptr_to_ULong(disp_cp_chain_me);
Addr64 addr = (Addr)disp_cp_chain_me;
p = s390_tchain_load64(p, S390_REGNO_TCHAIN_SCRATCH, addr);
/* Emit the BCR in case the short form was used. In case of the long


@ -509,7 +509,7 @@ doHelperCall(/*OUT*/UInt *stackAdjustAfterCall,
IRCallee *callee, IRType retTy, IRExpr **args)
{
UInt n_args, i, argreg, size;
ULong target;
Addr64 target;
HReg tmpregs[S390_NUM_GPRPARMS];
s390_cc_t cc;
@ -606,7 +606,7 @@ doHelperCall(/*OUT*/UInt *stackAdjustAfterCall,
addInstr(env, s390_insn_move(size, finalreg, tmpregs[i]));
}
target = Ptr_to_ULong(callee->addr);
target = (Addr)callee->addr;
/* Do final checks, set the return values, and generate the call
instruction proper. */
@ -630,7 +630,7 @@ doHelperCall(/*OUT*/UInt *stackAdjustAfterCall,
}
/* Finally, the call itself. */
addInstr(env, s390_insn_helper_call(cc, (Addr64)target, n_args,
addInstr(env, s390_insn_helper_call(cc, target, n_args,
callee->name, *retloc));
}


@ -2468,7 +2468,7 @@ Int emit_X86Instr ( /*MB_MOD*/Bool* is_profInc,
const void* disp_cp_chain_me
= i->Xin.XDirect.toFastEP ? disp_cp_chain_me_to_fastEP
: disp_cp_chain_me_to_slowEP;
p = emit32(p, (UInt)Ptr_to_ULong(disp_cp_chain_me));
p = emit32(p, (UInt)(Addr)disp_cp_chain_me);
/* call *%edx */
*p++ = 0xFF;
*p++ = 0xD2;
@ -2510,7 +2510,7 @@ Int emit_X86Instr ( /*MB_MOD*/Bool* is_profInc,
/* movl $disp_indir, %edx */
*p++ = 0xBA;
p = emit32(p, (UInt)Ptr_to_ULong(disp_cp_xindir));
p = emit32(p, (UInt)(Addr)disp_cp_xindir);
/* jmp *%edx */
*p++ = 0xFF;
*p++ = 0xE2;
@ -2572,7 +2572,7 @@ Int emit_X86Instr ( /*MB_MOD*/Bool* is_profInc,
/* movl $disp_indir, %edx */
*p++ = 0xBA;
p = emit32(p, (UInt)Ptr_to_ULong(disp_cp_xassisted));
p = emit32(p, (UInt)(Addr)disp_cp_xassisted);
/* jmp *%edx */
*p++ = 0xFF;
*p++ = 0xE2;
@ -3360,7 +3360,7 @@ VexInvalRange chainXDirect_X86 ( VexEndness endness_host,
*/
UChar* p = (UChar*)place_to_chain;
vassert(p[0] == 0xBA);
vassert(*(UInt*)(&p[1]) == (UInt)Ptr_to_ULong(disp_cp_chain_me_EXPECTED));
vassert(*(UInt*)(&p[1]) == (UInt)(Addr)disp_cp_chain_me_EXPECTED);
vassert(p[5] == 0xFF);
vassert(p[6] == 0xD2);
/* And what we want to change it to is:
@ -3428,7 +3428,7 @@ VexInvalRange unchainXDirect_X86 ( VexEndness endness_host,
So it's the same length (convenient, huh).
*/
p[0] = 0xBA;
*(UInt*)(&p[1]) = (UInt)Ptr_to_ULong(disp_cp_chain_me);
*(UInt*)(&p[1]) = (UInt)(Addr)disp_cp_chain_me;
p[5] = 0xFF;
p[6] = 0xD2;
VexInvalRange vir = { (HWord)place_to_unchain, 7 };
@ -3459,12 +3459,12 @@ VexInvalRange patchProfInc_X86 ( VexEndness endness_host,
vassert(p[11] == 0x00);
vassert(p[12] == 0x00);
vassert(p[13] == 0x00);
UInt imm32 = (UInt)Ptr_to_ULong(location_of_counter);
UInt imm32 = (UInt)(Addr)location_of_counter;
p[2] = imm32 & 0xFF; imm32 >>= 8;
p[3] = imm32 & 0xFF; imm32 >>= 8;
p[4] = imm32 & 0xFF; imm32 >>= 8;
p[5] = imm32 & 0xFF; imm32 >>= 8;
imm32 = 4 + (UInt)Ptr_to_ULong(location_of_counter);
imm32 = 4 + (UInt)(Addr)location_of_counter;
p[9] = imm32 & 0xFF; imm32 >>= 8;
p[10] = imm32 & 0xFF; imm32 >>= 8;
p[11] = imm32 & 0xFF; imm32 >>= 8;


@ -387,7 +387,7 @@ void callHelperAndClearArgs ( ISelEnv* env, X86CondCode cc,
parameters. */
vassert(sizeof(void*) == 4);
addInstr(env, X86Instr_Call( cc, toUInt(Ptr_to_ULong(cee->addr)),
addInstr(env, X86Instr_Call( cc, (Addr)cee->addr,
cee->regparms, rloc));
if (n_arg_ws > 0)
add_to_esp(env, 4*n_arg_ws);
@ -1400,11 +1400,11 @@ static HReg iselIntExpr_R_wrk ( ISelEnv* env, IRExpr* e )
*/
HReg xLo, xHi;
HReg dst = newVRegI(env);
HWord fn = (HWord)h_generic_calc_GetMSBs8x8;
Addr fn = (Addr)h_generic_calc_GetMSBs8x8;
iselInt64Expr(&xHi, &xLo, env, e->Iex.Unop.arg);
addInstr(env, X86Instr_Push(X86RMI_Reg(xHi)));
addInstr(env, X86Instr_Push(X86RMI_Reg(xLo)));
addInstr(env, X86Instr_Call( Xcc_ALWAYS, (UInt)fn,
addInstr(env, X86Instr_Call( Xcc_ALWAYS, (Addr32)fn,
0, mk_RetLoc_simple(RLPri_Int) ));
add_to_esp(env, 2*4);
addInstr(env, mk_iMOVsd_RR(hregX86_EAX(), dst));
@ -2541,7 +2541,7 @@ static void iselInt64Expr_wrk ( HReg* rHi, HReg* rLo, ISelEnv* env, IRExpr* e )
iselInt64Expr(&xHi, &xLo, env, e->Iex.Binop.arg1);
addInstr(env, X86Instr_Push(X86RMI_Reg(xHi)));
addInstr(env, X86Instr_Push(X86RMI_Reg(xLo)));
addInstr(env, X86Instr_Call( Xcc_ALWAYS, (UInt)fn,
addInstr(env, X86Instr_Call( Xcc_ALWAYS, (Addr32)fn,
0, mk_RetLoc_simple(RLPri_2Int) ));
add_to_esp(env, 4*4);
addInstr(env, mk_iMOVsd_RR(hregX86_EDX(), tHi));
@ -2581,7 +2581,7 @@ static void iselInt64Expr_wrk ( HReg* rHi, HReg* rLo, ISelEnv* env, IRExpr* e )
iselInt64Expr(&xHi, &xLo, env, e->Iex.Binop.arg1);
addInstr(env, X86Instr_Push(X86RMI_Reg(xHi)));
addInstr(env, X86Instr_Push(X86RMI_Reg(xLo)));
addInstr(env, X86Instr_Call( Xcc_ALWAYS, (UInt)fn,
addInstr(env, X86Instr_Call( Xcc_ALWAYS, (Addr32)fn,
0, mk_RetLoc_simple(RLPri_2Int) ));
add_to_esp(env, 3*4);
addInstr(env, mk_iMOVsd_RR(hregX86_EDX(), tHi));
@ -2820,7 +2820,7 @@ static void iselInt64Expr_wrk ( HReg* rHi, HReg* rLo, ISelEnv* env, IRExpr* e )
iselInt64Expr(&xHi, &xLo, env, e->Iex.Unop.arg);
addInstr(env, X86Instr_Push(X86RMI_Reg(xHi)));
addInstr(env, X86Instr_Push(X86RMI_Reg(xLo)));
addInstr(env, X86Instr_Call( Xcc_ALWAYS, (UInt)fn,
addInstr(env, X86Instr_Call( Xcc_ALWAYS, (Addr32)fn,
0, mk_RetLoc_simple(RLPri_2Int) ));
add_to_esp(env, 2*4);
addInstr(env, mk_iMOVsd_RR(hregX86_EDX(), tHi));


@ -498,7 +498,7 @@ UInt vprintf_wrk ( void(*sink)(HChar),
case 'p':
case 'P': {
Bool hexcaps = toBool(*format == 'P');
ULong l = Ptr_to_ULong( va_arg(ap, void*) );
ULong l = (Addr)va_arg(ap, void*);
convert_int(intbuf, l, 16/*base*/, False/*unsigned*/, hexcaps);
len1 = len3 = 0;
len2 = vex_strlen(intbuf)+2;


@ -142,16 +142,7 @@ typedef unsigned long Addr;
machine. */
typedef unsigned long HWord;
/* We need to know the host word size in order to write Ptr_to_ULong
and ULong_to_Ptr in a way that doesn't cause compilers to complain.
These functions allow us to cast pointers to and from 64-bit
integers without complaints from compilers, regardless of the host
word size.
Also set up VEX_REGPARM.
*/
/* Set up VEX_HOST_WORDSIZE and VEX_REGPARM. */
#undef VEX_HOST_WORDSIZE
#undef VEX_REGPARM
@ -197,27 +188,6 @@ typedef unsigned long HWord;
#endif
#if VEX_HOST_WORDSIZE == 8
static inline ULong Ptr_to_ULong ( const void* p ) {
return (ULong)p;
}
static inline void* ULong_to_Ptr ( ULong n ) {
return (void*)n;
}
#elif VEX_HOST_WORDSIZE == 4
static inline ULong Ptr_to_ULong ( const void* p ) {
UInt w = (UInt)p;
return (ULong)w;
}
static inline void* ULong_to_Ptr ( ULong n ) {
UInt w = (UInt)n;
return (void*)w;
}
#else
# error "Vex: Fatal: Can't define Ptr_to_ULong / ULong_to_Ptr"
#endif
#endif /* ndef __LIBVEX_BASICTYPES_H */
/*---------------------------------------------------------------*/


@ -1223,7 +1223,7 @@ ocGetNames_ELF ( ObjectCode* oc )
# else
ad = calloc(1, stab[j].st_size);
# endif
// assert( Ptr_to_ULong(ad) < 0xF0000000ULL );
// assert( (Addr)ad < 0xF0000000ULL );
if (0)
fprintf(stderr, "COMMON symbol, size %lld name %s allocd %p\n",