  1. //===-- stack_deconstructor.cpp - Deconstruct the global stack into local stack
  2. // frame ---===//
  3. //
  4. // The LLVM Compiler Infrastructure
  5. //
  6. // This file is distributed under the University of Illinois Open Source
  7. // License. See LICENSE.TXT for details.
  8. //
  9. //===----------------------------------------------------------------------===//
  10. //
  11. // This file implements a pass that deconstructs the global stack shared by all
  12. // the procedures into per-procedure local stack frames.
  13. //===----------------------------------------------------------------------===//
  14.  
  15. #define DEBUG_TYPE "stack_deconstructor"
  16. #include "stack_deconstructor.h"
  17. #include "llvm/ADT/PostOrderIterator.h"
  18. #include "llvm/ADT/Statistic.h"
  19. #include "llvm/IR/CFG.h"
  20. #include "llvm/IR/Constants.h"
  21. #include "llvm/IR/Function.h"
  22. #include "llvm/IR/IRBuilder.h"
  23. #include "llvm/IR/InstIterator.h"
  24. #include "llvm/IR/IntrinsicInst.h"
  25. #include "llvm/IR/TypeBuilder.h"
  26. #include "llvm/Support/Debug.h"
  27. #include "llvm/Support/FileSystem.h"
  28. #include "llvm/Support/raw_ostream.h"
  29. #include "llvm/Transforms/Utils/BasicBlockUtils.h"
  30. #include "llvm/Transforms/Utils/Cloning.h"
  31. #include "llvm/Transforms/Utils/ValueMapper.h"
  32. #include "llvm/Support/CommandLine.h"
  33.  
  34. using namespace llvm;
  35. STATISTIC(StaticParentAccessChecks, "Number of static parent stack accesses");
  36.  
  37. char stack_deconstructor::ID = 0;
  38. static RegisterPass<stack_deconstructor>
  39. X("stack-decons",
  40. "To partition global monolithic stack to per function stack frame");
  41.  
  42. cl::opt<std::string> EntryFuncion("mcsema_main", cl::desc("Specify the entry function to be called from the inline asm driver"), cl::value_desc("Function Name"), cl::Required);
  43.  
  44. /// Function : runOnModule
  45. /// Purpose : Entry point for stack_deconstructor pass
  46. bool stack_deconstructor::runOnModule(Module &M) {
  47. Mod = &M;
  48. ctx = &M.getContext();
  49.  
  50. // Initialize
  51. int64_type = Type::getInt64Ty(*ctx);
  52. int8_type = Type::getInt8Ty(*ctx);
  53. ptr_to_int8_type = Type::getInt8PtrTy(*ctx);
  54. ptr_to_int64_type = Type::getInt64PtrTy(*ctx);
  55. mcsema_main = nullptr;
  56.  
  57. deconstructStack();
  58.  
  59. return true;
  60. }
  61.  
  62. /// Function : deconstructStack
  63. /// Purpose : Introduce local stack frame based on the stack size
  64. /// approximated by max_stack_height pass. This includes
  65. /// 1. Augment the formal arguments of all internal functions with parent stack
  66. /// information (like the stack pointer and base pointer)
  67. /// For each cloned function:
  68. /// 2. Create local stack pointers and replace all existing uses of
  69. /// RSP_val/RBP_val with them
  70. /// 3. Augment the calls to internal functions with proper actual arguments
  71. /// 4. Handle parent stack access
  72. void stack_deconstructor::deconstructStack() {
  73. Value *local_stack_start, *local_stack_end;
  74. Value *rsp_ptr_alloca, *rbp_ptr_alloca;
  75.  
  76. // Extracting function approximate heights
  77. for (Module::iterator FuncI = Mod->begin(), FuncE = Mod->end();
  78. FuncI != FuncE; ++FuncI) {
  79. Function *Func = &*FuncI;
  80. if (Func->isIntrinsic() || Func->isDeclaration()) {
  81. continue;
  82. }
  83.  
  84. max_stack_height &max_stack_height_pass =
  85. getAnalysis<max_stack_height>(*Func);
  86. height_ty stack_height = max_stack_height_pass.get_stack_height();
  87. assert(stack_height <= 0 && "stack height cannot be positive\n");
  88. FunctionStackHeightMap[Func] = stack_height;
  89. }
  90.  
  91. // Task 1
  92. augmentFuntionSignature();
  93.  
  94. for (auto P : FunctionCloneMap) {
  95. auto *oldFunc = P.first;
  96. auto *newFunc = P.second;
  97. local_stack_start = local_stack_end = rbp_ptr_alloca = nullptr;
  98.  
  99. if(oldFunc == mcsema_main) {
  100. DEBUG(errs() << "========================\n Processing Function:"
  101. << oldFunc->getName()
  102. << "\n=================================\n");
  103.  
  104. createLocalStackFrame(*oldFunc, FunctionStackHeightMap[oldFunc],
  105. &local_stack_start, &local_stack_end,
  106. &rsp_ptr_alloca, &rbp_ptr_alloca);
  107. augmentCall(*oldFunc, local_stack_start, local_stack_end, rsp_ptr_alloca, rbp_ptr_alloca);
  108. modifyLoadsToAccessParentStack(*oldFunc, local_stack_start, local_stack_end);
  109. }
  110.  
  111. DEBUG(errs() << "========================\n Processing Function:"
  112. << newFunc->getName()
  113. << "\n=================================\n");
  114.  
  115. createLocalStackFrame(*newFunc, FunctionStackHeightMap[oldFunc],
  116. &local_stack_start, &local_stack_end,
  117. &rsp_ptr_alloca, &rbp_ptr_alloca);
  118. augmentCall(*newFunc, local_stack_start, local_stack_end, rsp_ptr_alloca, rbp_ptr_alloca);
  119. modifyLoadsToAccessParentStack(*newFunc, local_stack_start,
  120. local_stack_end);
  121. }
  122.  
  123. return;
  124. }
  125.  
  126. /// Function : augmentFuntionSignature
  127. /// Purpose : 1. Augment the formal arguments of internal functions with parent
  128. /// stack information (like the stack pointer and base pointer)
  129. /// 2. Uses of the original function other than calls are replaced
  130. /// with (a bitcast of) its clone
  131. /// 3. Do not clone the entry function mcsema_main
  132. /// Example :
  133. /// Convert define i32 @bar(i32) to
  134. /// define i32 @bar.2(i32, i8* %_parent_stack_start_ptr_, i8*
  135. /// %_parent_stack_end_ptr_, i8* %_parent_stack_rbp_ptr_)
  136. void stack_deconstructor::augmentFuntionSignature() {
  137. // Collect the functions to clone
  138. std::vector<Function *> worklist;
  139.  
  140. for (Module::iterator FuncI = Mod->begin(), FuncE = Mod->end();
  141. FuncI != FuncE; ++FuncI) {
  142. Function *Func = &*FuncI;
  143. if (Func->getName() == EntryFuncion) {
  144. mcsema_main = Func;
  145. }
  146. worklist.push_back(Func);
  147. }
  148.  
  149. // Clone them
  150. for (auto *oldFunc : worklist) {
  151. // Do not clone intrinsics or functions that are only declared
  152. if (oldFunc->isIntrinsic() || oldFunc->isDeclaration()) {
  153. continue;
  154. }
  155.  
  156. Function *newFunc = cloneFunctionWithExtraArgument(oldFunc);
  157. FunctionCloneMap[oldFunc] = newFunc;
  158.  
  159. // Task 2
  160. for (User *user : oldFunc->users()) {
  161. assert((true == isa<CallInst>(user) || true == isa<Constant>(user)) &&
  162. "Unhandled usage of a function");
  163. if (isa<CallInst>(user)) {
  164. continue;
  165. }
  166.  
  167. DEBUG(llvm::errs() << *user << "\n");
  168. Constant *const_ptr = ConstantExpr::getCast(Instruction::BitCast, newFunc,
  169. oldFunc->getType());
  170. oldFunc->replaceAllUsesWith(const_ptr);
  171. }
  172. }
  173. }
  174.  
  175. /// Function : createLocalStackFrame
  176. /// Purpose : Convert the following instructions
  177. ///
  178. /// Task 1:
  179. /// %RSP_val = alloca i64
  180. /// %RSP = getelementptr inbounds %struct.regs* %0, i64 0, i32 6
  181. /// %7 = load i64* %RSP, !mcsema_real_eip !2
  182. /// store i64 %7, i64* %RSP_val
  183. ///
  184. /// TO
  185. /// %RSP_ptr = alloca i8*
  186. /// %RBP_ptr = alloca i8*
  187. /// %_local_stack_start_ptr_ = alloca i8, i64 n
  188. /// %_local_stack_end_ptr_ = gep inbounds i8, i8* %_local_stack_start_ptr_, i64
  189. /// n
  190. /// store i8* %_local_stack_end_ptr_ , i8** %RSP_ptr
  191. ///
  192. /// Task 2:
  193. /// Replace all uses of RSP_val with RSP_ptr
  194. void stack_deconstructor::createLocalStackFrame(Function &F,
  195. height_ty stackheight,
  196. Value **stack_start,
  197. Value **stack_end,
  198. Value **rsp_ptr_alloca,
  199. Value **rbp_ptr_alloca) {
  200.  
  201. // Check whether F's arguments are augmented with the parent stack rbp pointer.
  202. Value *parent_stack_rbp_ptr = NULL;
  203. for (Function::arg_iterator I = F.arg_begin(), E = F.arg_end(); I != E; ++I) {
  204. if (I->getName() == "_parent_stack_rbp_ptr_") {
  205. parent_stack_rbp_ptr = &*I;
  206. }
  207. }
  208.  
  209. // Performing Task 1
  210. ConstantInt *stack_height = ConstantInt::get(int64_type, -1 * stackheight);
  211.  
  212. Instruction *I = &*(F.getEntryBlock().begin());
  213.  
  214. IRBuilder<> IRB(I);
  215. *rsp_ptr_alloca =
  216. IRB.CreateAlloca(ptr_to_int8_type, nullptr, "_RSP_ptr_");
  217. *rbp_ptr_alloca = IRB.CreateAlloca(ptr_to_int8_type, nullptr, "_RBP_ptr_");
  218.  
  219. *stack_start =
  220. IRB.CreateAlloca(int8_type, stack_height, "_local_stack_start_ptr_");
  221. std::vector<Value *> indices;
  222. indices.push_back(stack_height);
  223. *stack_end =
  224. IRB.CreateInBoundsGEP(*stack_start, indices, "_local_stack_end_ptr_");
  225. IRB.CreateStore(*stack_end, *rsp_ptr_alloca);
  226. if (parent_stack_rbp_ptr)
  227. IRB.CreateStore(parent_stack_rbp_ptr, *rbp_ptr_alloca);
  228.  
  229. // Performing Task 2
  230. for (auto FI = F.begin(), FE = F.end(); FI != FE; ++FI) {
  231. for (auto BBI = FI->begin(), BBE = FI->end(); BBI != BBE;) {
  232. Instruction *I = &*BBI++;
  233. if (shouldConvert(I)) {
  234. DEBUG(errs() << "\n" << *I << "\n");
  235. convert(I, *rsp_ptr_alloca, *rbp_ptr_alloca);
  236. }
  237. }
  238. }
  239. eraseReplacedInstructions();
  240.  
  241. return;
  242. }
  243.  
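/// Function : shouldConvert
/// Purpose  : Decide whether instruction I must be rewritten by convert().
///            Loads and stores through XSP/XBP qualify directly; stores,
///            add/sub, xor/and, trunc/icmp/shl/lshr, inttoptr, calls to
///            llvm.uadd.with.overflow, extractvalue and phi also qualify when
///            one of their operands has already been converted, i.e. is
///            present in convertMap.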
  244. bool stack_deconstructor::shouldConvert(Instruction *I) {
  245.  
  246. // handle load
  247. if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
  248. Value *ptr_operand = LI->getPointerOperand();
  249. if (ptr_operand->getName().equals("XSP") ||
  250. ptr_operand->getName().equals("XBP")) {
  251. return true;
  252. }
  253. }
  254.  
  255.  
  256. // handle add sub
  257. if (I->getOpcode() == Instruction::Add || I->getOpcode() == Instruction::Sub) {
  258. /*
  259. Value *pointer_operand = I->getOperand(0);
  260. Instruction *ptr_operand = dyn_cast<Instruction>(pointer_operand);
  261. if(!ptr_operand) {
  262. llvm::errs() << *I << "\tPtr Op: " << *ptr_operand <<"\n";
  263. assert(ptr_operand && "stack_deconstructor::handle_add - Check out");
  264. }
  265. */
  266. if ( (0 != convertMap.count(I->getOperand(0))) ||
  267. (0 != convertMap.count(I->getOperand(1))) ) {
  268. return true;
  269. }
  270. return false;
  271. }
  272.  
  273. //handle Xor And Trunc ICmp Shl LShr
  274. if (I->getOpcode() == Instruction::Xor || I->getOpcode() == Instruction::And) {
  275. auto *op1 = I->getOperand(0);
  276. auto *op2 = I->getOperand(1);
  277. if (0 != convertMap.count(op1) || 0 != convertMap.count(op2)) {
  278. return true;
  279. }
  280. return false;
  281. }
  282.  
  283. if (I->getOpcode() == Instruction::Trunc ||
  284. I->getOpcode() == Instruction::ICmp ||
  285. I->getOpcode() == Instruction::Shl ||
  286. I->getOpcode() == Instruction::LShr) {
  287. auto *op1 = I->getOperand(0);
  288. if (0 != convertMap.count(op1)) {
  289. return true;
  290. }
  291. return false;
  292. }
  293.  
  294.  
  295. // handle int2ptr
  296. if (I->getOpcode() == Instruction::IntToPtr) {
  297. Value *int_operand = I->getOperand(0);
  298. return (0 != convertMap.count(int_operand));
  299. }
  300.  
  301. // handle store
  302. if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
  303. Value *ptr_operand = SI->getPointerOperand();
  304. Value *val_operand = SI->getValueOperand();
  305.  
  306. // Check 1: Consider stores whose pointer operand is XSP or XBP, OR
  307. // Check 2: ... but ignore them if the value being stored comes from a load
  308. // of RSP or RBP (the register-struct fields), OR
  309. // Check 3: Consider stores whose value operand has already been replaced
  310. // (i.e. is present in convertMap)
  311.  
  312. // Check 1
  313. if (ptr_operand->getName().equals("XSP") ||
  314. ptr_operand->getName().equals("XBP")) {
  315. // Check 2
  316. if (isLoadOfImp(val_operand, "RSP") || isLoadOfImp(val_operand, "RBP")) {
  317. return false;
  318. }
  319. return true;
  320. }
  321. // Check 3
  322. if (0 != convertMap.count(val_operand)) {
  323. return true;
  324. }
  325. return false;
  326. }
  327.  
  328. // handle call
  329. if (CallInst *CI = dyn_cast<CallInst>(I)) {
  330. Function *F = CI->getCalledFunction();
  331. if (!F || F->getIntrinsicID() != Intrinsic::uadd_with_overflow) {
  332. return false;
  333. }
  334. Value *op1 = CI->getArgOperand(0);
  335. if (false == isLoadOfImp(op1, "XSP") &&
  336. false == isLoadOfImp(op1, "XBP")) {
  337. //assert(0 == convertMap.count(op1) && "CHECK");
  338. // %117 = load i64, i64* %RSP_val
  339. // %.lcssa = phi i64 [ %117, %block_0x1f ]
  340. //%uadd211 = tail call { i64, i1 } @llvm.uadd.with.overflow.i64(i64 %.lcssa, i64 16)
  341. return false;
  342. }
  343.  
  344. assert(0 != convertMap.count(op1) &&
  345. "Call Inst: The pointer operand should already get converted.");
  346.  
  347. return true;
  348. }
  349.  
  350. // handle extract
  351. if (ExtractValueInst *EI = dyn_cast<ExtractValueInst>(I)) {
  352. Value *op1 = EI->getOperand(0);
  353. return (0 != convertMap.count(op1));
  354. }
  355.  
  356. //PhiNode
  357. if (PHINode *PI = dyn_cast<PHINode>(I)) {
  358. unsigned incomingValues = PI->getNumIncomingValues();
  359. for(unsigned i = 0 ; i < incomingValues; i++) {
  360. Value *val = PI->getIncomingValue (i);
  361. if (0 != convertMap.count(val)) {
  362. return true;
  363. }
  364. }
  365.  
  366. return false;
  367. }
  368.  
  369. // Handle unsupported: Debug Purpose only
  370. auto numOperand = I->getNumOperands();
  371. for(unsigned i = 0 ; i < numOperand; i++) {
  372. Value *op = I->getOperand(i);
  373. if(0 != convertMap.count(op)) {
  374. llvm::errs() << "Unsupported: " << *I << "\n";
  375. assert(0 == convertMap.count(op) && "Unsupported Instruction\n");
  376. }
  377. }
  378. return false;
  379. }
  380.  
  381. void stack_deconstructor::convert(Instruction *I, Value *rsp_ptr_alloca,
  382. Value *rbp_ptr_alloca) {
  383. // lib/IR/Instruction.cpp
  384. switch (I->getOpcode()) {
  385. case Instruction::Load:
  386. handle_load(I, rsp_ptr_alloca, rbp_ptr_alloca);
  387. break;
  388. case Instruction::Store:
  389. handle_store(I, rsp_ptr_alloca, rbp_ptr_alloca);
  390. break;
  391. case Instruction::Add:
  392. handle_add(I);
  393. break;
  394. case Instruction::Sub:
  395. handle_sub(I);
  396. break;
  397. case Instruction::ExtractValue:
  398. handle_extractval(I);
  399. break;
  400. case Instruction::Call:
  401. handle_call(I);
  402. break;
  403. case Instruction::IntToPtr:
  404. handle_int2ptr(I);
  405. break;
  406. case Instruction::PHI:
  407. handle_phi(I);
  408. break;
  409. //case Instruction::And:
  410. case Instruction::Trunc:
  411. case Instruction::ICmp:
  412. case Instruction::Xor:
  413. case Instruction::And:
  414. case Instruction::Shl:
  415. case Instruction::LShr:
  416. handle_int_operators(I);
  417. break;
  418. default:
  419. llvm::errs() << *I << "\n";
  420. assert(0 && "Unexpected Instruction to be converted");
  421. break;
  422. }
  423.  
  424. return;
  425. }
  426.  
  427. /// Purpose: Helper function to convert (load i64, i64* RSP_val) to
  428. /// (load i8*, i8** RSP_ptr)
  429. /// This transformation is useful to make alias analysis effective.
  430. void stack_deconstructor::handle_load(Instruction *I, Value *rsp_ptr_alloca,
  431. Value *rbp_ptr_alloca) {
  432. LoadInst *LI = dyn_cast<LoadInst>(I);
  433.  
  434. IRBuilder<> IRB(LI);
  435. Instruction *new_load = nullptr;
  436. Value *ptr_operand = LI->getPointerOperand();
  437.  
  438. if (ptr_operand->getName().equals("XSP")) {
  439. new_load = IRB.CreateLoad(rsp_ptr_alloca, "_load_rsp_ptr_");
  440. } else {
  441. new_load = IRB.CreateLoad(rbp_ptr_alloca, "_load_rbp_ptr_");
  442. }
  443. recordConverted(LI, new_load, false, false);
  444.  
  445. return;
  446. }
  447.  
  448. // Case I:
  449. // %1 = load i64, i64* %RSP_val
  450. // %2 = add i64 %1, 16
  451. // store i64 %2, i64* %RSP_val
  452. //
  453. // While processing the store, the add inst has already been converted to
  454. // a gep; so convertMap.contains(store::value_operand) holds and maps to that gep
  455. //
  456. // Case II: pop rbp
  457. // %1 = inttoptr i64 %0 to i64* ; %O is an offset from %RSP_val
  458. // %2 = load i64, i64* %1
  459. // store i64 %2, i64* %RBP_val
  460. //
  461. // At the int2ptr inst we create a bitcast and replace all uses of the
  462. // int2ptr with it.
  463. // So convertMap.contains(store::value_operand) is false and we will convert
  464. // the result of load to
  465. // i8* ptr and store it in i8** RBP_ptr
  466. //
  467. // Case III:
  468. // %1 = load i64, i64* RSP_val
  469. // %2 = add i64 %1, C
  470. // %3 = inttoptr i64 %2 to i64*
  471. // store i64 %val , i64* %3
  472. // This store will be processed by handle_store only if
  473. // convertMap.contains(%val) == true
  474. // Nevertheless, inttoptr will be converted to i8* bitcast and its uses will be
  475. // replaced.
  476. //
  477. // Case IV:
  478. // %1 = load i64, i64* %RSP_val
  479. // store i64 %1, i64* %RDI_val
  480. // This store will be processed by handle_store as convertMap.contains(%1) ==
  481. // true
  482. // The instruction convertMap[%1] is followed by a 'ptrtoint convertMap[%1] to
  483. // elementtype(%RDI_val)' whose result is stored instead
  484. //
  485. // Example:
  486. // ; push rbp ; mov rsp -> rbp
  487. // %1 = load i64, i64* %RBP_val
  488. // %2 = load i64, i64* %RSP_val
  489. // %3 = add i64 %1, -8
  490. // %4 = inttoptr i64 %3 to i64*
  491. // store i64 %1, i64* %4, !mcsema_real_eip !2
  492. // store i64 %3, i64* %RBP_val, !mcsema_real_eip !3
  493. //
  494. // After transform:
  495. // %1 = load i64, i64* %RBP_val, !mcsema_real_eip !2
  496. // %_load_rbp_ptr_ = load i8*, i8** %_RBP_ptr_
  497. // %_load_rsp_ptr_ = load i8*, i8** %_RSP_ptr_
  498. // %_new_gep_ = getelementptr i8, i8* %_load_rsp_ptr_, i64 -8
  499. // %_allin_new_bt_ = bitcast i8* %_new_gep_ to i64*
  500. // %_new_ptr2int_ = inttoptr i8* %_load_rbp_ptr_ to i64
  501. // store i64 %_new_ptr2int_, i64* %_allin_new_bt_
  502. // store volatile i8* %_new_gep_, i8** %_RBP_ptr_
  503. void stack_deconstructor::handle_store(Instruction *I, Value *rsp_ptr_alloca,
  504. Value *rbp_ptr_alloca) {
  505. StoreInst *SI = dyn_cast<StoreInst>(I);
  506. Value *ptr_operand = SI->getPointerOperand();
  507. Value *val_operand = SI->getValueOperand();
  508.  
  509. IRBuilder<> IRB(SI);
  510.  
  511. Value *inst_before_store = nullptr;
  512. Instruction *new_store = nullptr;
  513. bool erase_old_store;
  514.  
  515. if (ptr_operand->getName().equals("XSP") ||
  516. ptr_operand->getName().equals("XBP")) {
  517.  
  518. if (0 != convertMap.count(val_operand)) {
  519. Value *new_val_operand = convertMap[val_operand];
  520. bool acceptable_val_operand = isa<GetElementPtrInst>(new_val_operand) || isa<LoadInst> (new_val_operand);
  521. if(!acceptable_val_operand) {
  522. llvm::errs() << *SI << "\tValue: " << *val_operand << "\t Ptr: " << *ptr_operand << "\n";
  523. assert(0 && "stack_deconstructor::handle_store -> check");
  524. }
  525. inst_before_store = new_val_operand;
  526. } else {
  527. inst_before_store =
  528. IRB.CreateIntToPtr(val_operand, ptr_to_int8_type, "_new_int2ptr_");
  529. }
  530.  
  531. if (ptr_operand->getName().equals("XSP")) {
  532. new_store =
  533. IRB.CreateStore(inst_before_store, rsp_ptr_alloca, "_new_store_");
  534. } else if (ptr_operand->getName().equals("XBP")) {
  535. new_store =
  536. IRB.CreateStore(inst_before_store, rbp_ptr_alloca, "_new_store_");
  537. }
  538. erase_old_store = false;
  539.  
  540. } else {
  541. // Means convertMap.contains(val_operand) = true
  542. Value *converted_value_operand = convertMap[val_operand];
  543. Type *value_operand_type = val_operand->getType();
  544. Type *converted_value_operand_type = converted_value_operand->getType();
  545. assert(true == converted_value_operand_type->isPointerTy() &&
  546. "All the transformed types must be pointer types");
  547. assert(true == value_operand_type->isIntegerTy() &&
  548. "Incremental check failed");
  549. inst_before_store = IRB.CreatePtrToInt(converted_value_operand,
  550. value_operand_type, "_new_ptr2int_");
  551. new_store = IRB.CreateStore(inst_before_store, ptr_operand, "_new_store_");
  552. erase_old_store = true;
  553. }
  554. recordConverted(SI, new_store, false, erase_old_store);
  555.  
  556. return;
  557. }
  558.  
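/// Function : handle_add
/// Purpose  : Turn pointer arithmetic expressed as an integer add into a GEP
///            over the already-converted i8* operand. Minimal sketch (value
///            names are illustrative, not taken from a particular module):
///              %2 = add i64 %1, -8        ; convertMap[%1] == %_load_rsp_ptr_
///            becomes
///              %_new_gep_ = getelementptr i8, i8* %_load_rsp_ptr_, i64 -8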
  559. void stack_deconstructor::handle_add(Instruction *I) {
  560. IRBuilder<> IRB(I);
  561. Value *new_ptr_operand = nullptr;
  562. Value *C = nullptr;
  563. if(0 != convertMap.count(I->getOperand(0))) {
  564. new_ptr_operand = convertMap[I->getOperand(0)];
  565. C = I->getOperand(1);
  566. } else {
  567. new_ptr_operand = convertMap[I->getOperand(1)];
  568. C = I->getOperand(0);
  569. }
  570.  
  571. if(false == isa<Constant>(C)) {
  572. //llvm::errs() << *I << "\n";
  573. //assert(isa<Constant>(C) && "handle_add: Not a constant\n");
  574. }
  575.  
  576. auto *new_gep = IRB.CreateGEP(new_ptr_operand, C, "_new_gep_");
  577. recordConverted(I, new_gep, false, false);
  578.  
  579. return;
  580. }
  581.  
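/// Function : handle_sub
/// Purpose  : Same idea as handle_add, except the second operand must be a
///            constant and is negated before being used as the GEP offset
///            (sub i64 %p, C becomes a gep with index -C over convertMap[%p]).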
  582. void stack_deconstructor::handle_sub(Instruction *I) {
  583. IRBuilder<> IRB(I);
  584. //BinaryOperator *BI = dyn_cast<BinaryOperator>(I);
  585. Type *Ty = I->getType();
  586.  
  587. Value *ptr_operand = I->getOperand(0);
  588. Value *new_ptr_operand = convertMap[ptr_operand];
  589. Value *op = I->getOperand(1);
  590. ConstantInt *C = dyn_cast<ConstantInt> (op);
  591. assert(C && "Sub operand not a constant int!!\n");
  592.  
  593. int64_t sub_int_op = C->getSExtValue();
  594. Constant *gep_int_op = ConstantInt::get(Ty, -1*sub_int_op);
  595. auto *new_gep = IRB.CreateGEP(new_ptr_operand, gep_int_op, "_new_gep_");
  596. recordConverted(I, new_gep, false, false);
  597.  
  598. return;
  599. }
  600.  
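/// Function : handle_int2ptr
/// Purpose  : Replace an inttoptr of a converted value with a bitcast of the
///            mapped i8* to the requested pointer type ("_allin_new_bt_") and
///            redirect all uses of the inttoptr to it. The "_allin_" name is
///            one of the patterns shouldConvertForParentStackAccess() later
///            keys on.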
  601. void stack_deconstructor::handle_int2ptr(Instruction *I) {
  602. IRBuilder<> IRB(I);
  603. Type *type = I->getType();
  604. Value *ptr_operand = I->getOperand(0);
  605. Value *new_ptr_operand = convertMap[ptr_operand];
  606. auto *new_bt = IRB.CreateBitCast(new_ptr_operand, type, "_allin_new_bt_");
  607. recordConverted(I, new_bt, true, false);
  608.  
  609. return;
  610. }
  611.  
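/// Function : handle_call
/// Purpose  : For calls to llvm.uadd.with.overflow whose first argument is a
///            converted stack value, cast the mapped i8* back to the
///            argument's integer type and redirect the argument's uses to
///            that ptrtoint; the call itself is left untouched.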
  612. void stack_deconstructor::handle_call(Instruction *I) {
  613. IRBuilder<> IRB(I);
  614. CallInst *CI = dyn_cast<CallInst>(I);
  615. Value *op1 = CI->getArgOperand(0);
  616. // Value *op2 = CI->getArgOperand(1);
  617. Type *op1_type = op1->getType();
  618. auto *new_ptr_int =
  619. IRB.CreatePtrToInt(convertMap[op1], op1_type, "_new_ptr2int_");
  620. Instruction *op1_I = dyn_cast<Instruction>(op1);
  621. recordConverted(op1_I, new_ptr_int, true, false);
  622. return;
  623. }
  624.  
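/// Function : handle_extractval
/// Purpose  : Propagate the mapping through extractvalue: the result simply
///            inherits convertMap[] of its aggregate operand.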
  625. void stack_deconstructor::handle_extractval(Instruction *I) {
  626. IRBuilder<> IRB(I);
  627. Value *op1 = I->getOperand(0);
  628. recordConverted(I, convertMap[op1], false, false);
  629. return;
  630. }
  631.  
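/// Function : handle_phi
/// Purpose  : Rebuild PHI nodes fed by converted values. If every incoming
///            value has a mapping, the new PHI carries the converted (pointer)
///            type; otherwise the PHI keeps its original integer type and the
///            converted (pointer) incoming values are cast back with ptrtoint.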
  632. void stack_deconstructor::handle_phi(Instruction *I) {
  633. IRBuilder<> IRB(I);
  634. PHINode *PI = dyn_cast<PHINode>(I);
  635.  
  636. unsigned incomingValues = PI->getNumIncomingValues();
  637.  
  638. // Obtain the new Type of PHINode
  639. Type *Ty = nullptr;
  640. bool all_incoming_vals_converted = true;
  641. for(unsigned i = 0 ; i < incomingValues; i++) {
  642. Value *orig_val = PI->getIncomingValue (i);
  643. if (0 != convertMap.count(orig_val)) {
  644. auto *converted_val = convertMap[orig_val];
  645. Ty = converted_val->getType();
  646. } else {
  647. all_incoming_vals_converted = false;
  648. }
  649. }
  650.  
  651. if(true == all_incoming_vals_converted) {
  652. auto *new_phi = IRB.CreatePHI(Ty, incomingValues);
  653.  
  654. for(unsigned i = 0 ; i < incomingValues; i++) {
  655. auto *orig_val = PI->getIncomingValue (i);
  656. auto *converted_val = convertMap[orig_val];
  657. new_phi->addIncoming(converted_val, PI->getIncomingBlock(i));
  658. }
  659. recordConverted(PI, new_phi, false, false);
  660. } else {
  661. auto *new_phi = IRB.CreatePHI(PI->getType(), incomingValues);
  662.  
  663. for(unsigned i = 0 ; i < incomingValues; i++) {
  664. auto *orig_val = PI->getIncomingValue (i);
  665. if(0 != convertMap.count(orig_val)) {
  666. auto *converted_val = convertMap[orig_val];
  667. Value *new_val =
  668. IRB.CreatePtrToInt(converted_val, PI->getType(), "_trans_p2i_");
  669. new_phi->addIncoming(new_val, PI->getIncomingBlock(i));
  670. } else {
  671. new_phi->addIncoming(orig_val, PI->getIncomingBlock(i));
  672. }
  673. }
  674. recordConverted(PI, new_phi, false, false);
  675. }
  676. return;
  677. }
  678.  
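/// Function : handle_int_operators
/// Purpose  : Rebuild xor/and/trunc/icmp/shl whose operands were converted to
///            pointers: each mapped i8* is cast back to the original integer
///            type with ptrtoint and the operation is re-created, replacing
///            all uses of the original instruction. (lshr is routed here by
///            convert() but currently falls through unchanged.)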
  679. void stack_deconstructor::handle_int_operators(Instruction *I) {
  680. IRBuilder<> IRB(I);
  681.  
  682. if(Instruction::Xor == I->getOpcode() || Instruction::And == I->getOpcode()) {
  683. auto *op1 = I->getOperand(0);
  684. auto *op2 = I->getOperand(1);
  685.  
  686. auto *new_op1 = op1;
  687. auto *new_op2 = op2;
  688.  
  689. if(0 != convertMap.count(op1)) {
  690. auto *converted_op1 = convertMap[op1];
  691. if(converted_op1->getType()->isPointerTy()) {
  692. new_op1 =
  693. IRB.CreatePtrToInt(converted_op1, op1->getType(), "_trans_p2i_");
  694. }
  695. }
  696.  
  697. if(0 != convertMap.count(op2)) {
  698. auto *converted_op2 = convertMap[op2];
  699. if(converted_op2->getType()->isPointerTy()) {
  700. new_op2 =
  701. IRB.CreatePtrToInt(converted_op2, op2->getType(), "_trans_p2i_");
  702. }
  703. }
  704.  
  705. Value *new_val = nullptr;
  706. if(Instruction::Xor == I->getOpcode()) {
  707. new_val = IRB.CreateBinOp(Instruction::Xor, new_op1,
  708. new_op2, "_trans_xor_");
  709. } else {
  710. new_val = IRB.CreateBinOp(Instruction::And, new_op1,
  711. new_op2, "_trans_and_");
  712. }
  713. Instruction *new_inst = dyn_cast<Instruction>(new_val);
  714. recordConverted(I, new_inst, true, false);
  715.  
  716. } else if (Instruction::Trunc == I->getOpcode()) {
  717.  
  718. auto *op1 = I->getOperand(0);
  719. auto *new_op1 = op1;
  720.  
  721. if(0 != convertMap.count(op1)) {
  722. auto *converted_op1 = convertMap[op1];
  723. if(converted_op1->getType()->isPointerTy()) {
  724. new_op1 =
  725. IRB.CreatePtrToInt(converted_op1, op1->getType(), "_trans_p2i_");
  726. }
  727. }
  728.  
  729. auto *new_val = IRB.CreateTrunc(new_op1, I->getType(), "_trans_trunc_");
  730. Instruction *new_inst = dyn_cast<Instruction>(new_val);
  731. recordConverted(I, new_inst, true, false);
  732. } else if (Instruction::ICmp == I->getOpcode()) {
  733.  
  734. ICmpInst *IC = dyn_cast<ICmpInst>(I);
  735. auto *op1 = IC->getOperand(0);
  736. auto *new_op1 = op1;
  737.  
  738. if(0 != convertMap.count(op1)) {
  739. auto *converted_op1 = convertMap[op1];
  740. if(converted_op1->getType()->isPointerTy()) {
  741. new_op1 =
  742. IRB.CreatePtrToInt(converted_op1, op1->getType(), "_trans_p2i_");
  743. }
  744. }
  745.  
  746. Value *new_val = nullptr;
  747. if(IC->isEquality()) {
  748. new_val = IRB.CreateICmpEQ(new_op1, IC->getOperand(1), "_trans_icmp_eq_");
  749. } else {
  750. new_val = IRB.CreateICmpNE(new_op1, IC->getOperand(1), "_trans_icmp_ne_");
  751. }
  752. Instruction *new_inst = dyn_cast<Instruction>(new_val);
  753. recordConverted(IC, new_inst, true, false);
  754. } else if (Instruction::Shl == I->getOpcode()) {
  755.  
  756. BinaryOperator *BI = dyn_cast<BinaryOperator>(I);
  757. auto *op1 = BI->getOperand(0);
  758. auto *new_op1 = op1;
  759.  
  760. if(0 != convertMap.count(op1)) {
  761. auto *converted_op1 = convertMap[op1];
  762. if(converted_op1->getType()->isPointerTy()) {
  763. new_op1 =
  764. IRB.CreatePtrToInt(converted_op1, op1->getType(), "_trans_p2i_");
  765. }
  766. }
  767.  
  768. Value *new_val = IRB.CreateShl(new_op1, BI->getOperand(1), "_trans_shl_eq_");
  769. Instruction *new_inst = dyn_cast<Instruction>(new_val);
  770. recordConverted(BI, new_inst, true, false);
  771. }
  772.  
  773. return;
  774. }
  775.  
  776. /// Function : augmentCall
  777. /// Purpose : For each CallInst (to mcsema-generated functions), add extra
  778. /// actual arguments:
  779. /// %_local_stack_start_ : points to the start of the parent frame
  780. /// %_local_stack_end_ : points to the end of the parent stack frame
  781. /// %_rbp_ptr_ : points to the rbp pointer of the parent frame
  782. /// Also, the corresponding called functions are augmented with extra
  783. /// formal arguments.
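///
/// Illustrative sketch of the rewrite (the callee and value names below are
/// representative of what the pass emits, not taken from a specific module):
///   %r = call i64 @foo(i64 %a)
/// becomes
///   %_load_rbp_ptr_ = load i8*, i8** %_RBP_ptr_
///   %_load_rsp_ptr_ = load i8*, i8** %_RSP_ptr_
///   %r = call i64 @foo.2(i64 %a, i8* %_load_rsp_ptr_,
///                        i8* %_local_stack_end_ptr_, i8* %_load_rbp_ptr_)
///   %_rsp_fix_ = load i8*, i8** %_RSP_ptr_
///   %_gep_fix_ = getelementptr i8, i8* %_rsp_fix_, i64 8
///   store i8* %_gep_fix_, i8** %_RSP_ptr_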
  784. void stack_deconstructor::augmentCall(Function &F, Value *local_stack_start,
  785. Value *local_stack_end,
  786. Value *rsp_ptr_alloca,
  787. Value *rbp_ptr_alloca) {
  788. for (auto FI = F.begin(), FE = F.end(); FI != FE; ++FI) {
  789. for (auto BBI = FI->begin(), BBE = FI->end(); BBI != BBE;) {
  790. Instruction *I = &*BBI++;
  791.  
  792. if (CallInst *ci = dyn_cast<CallInst>(I)) {
  793.  
  794. IRBuilder<> IRB(ci);
  795. Value *calledValue = ci->getCalledValue();
  796. Function *oldFunc = dyn_cast<Function>(calledValue);
  797. Value *newCallee = NULL;
  798.  
  799. if (oldFunc) {
  800. // We will be augmenting only those calls whose definitions
  801. // are cloned. Ex. we should not augment printf
  802. if (0 == FunctionCloneMap.count(oldFunc)) {
  803. //This could be functions like strcmp or llvm.ctpop
  804. // For functions like strcmp, fix the rsp pointer by 8
  805. auto FuncName = oldFunc->getName();
  806. if(false == FuncName.startswith("llvm.")) {
  807. std::vector<Value *> arguments;
  808. for (auto &args : ci->arg_operands()) {
  809. arguments.push_back(args);
  810. }
  811. CallInst *new_ci = IRB.CreateCall(oldFunc, arguments);
  812. new_ci->setCallingConv(ci->getCallingConv());
  813. auto *rsp_fix = IRB.CreateLoad(rsp_ptr_alloca, "_rsp_fix_");
  814. auto *gep_fix =
  815. IRB.CreateGEP(rsp_fix, ConstantInt::get(int64_type, 8) , "_gep_fix_");
  816. IRB.CreateStore(gep_fix, rsp_ptr_alloca);
  817. recordConverted(ci, new_ci);
  818. }
  819. continue;
  820. }
  821. newCallee = FunctionCloneMap[oldFunc];
  822. } else {
  823. // calledValue is a function pointer
  824. Type *type = calledValue->getType();
  825. FunctionType *funcTy =
  826. dyn_cast<FunctionType>(type->getPointerElementType());
  827. assert(funcTy != nullptr && "Must be a function type");
  828.  
  829. // Augment the old function pointer type to promoted type
  830. std::vector<Type *> ArgTypes;
  831. FunctionType::param_iterator PI = funcTy->param_begin();
  832. FunctionType::param_iterator PE = funcTy->param_end();
  833. for (; PI != PE; PI++) {
  834. ArgTypes.push_back(*PI);
  835. }
  836. ArgTypes.push_back(ptr_to_int8_type);
  837. ArgTypes.push_back(ptr_to_int8_type);
  838. ArgTypes.push_back(ptr_to_int8_type);
  839.  
  840. FunctionType *newTy =
  841. FunctionType::get(ci->getType(), ArgTypes, false);
  842.  
  843. // Create a bitcast of the old function pointer to promoted type
  844. newCallee = IRB.CreateBitCast(calledValue, newTy->getPointerTo());
  845. }
  846.  
  847. std::vector<Value *> arguments;
  848. for (auto &args : ci->arg_operands()) {
  849. arguments.push_back(args);
  850. }
  851. auto *new_load_rbp = IRB.CreateLoad(rbp_ptr_alloca, "_load_rbp_ptr_");
  852. auto *new_load_rsp = IRB.CreateLoad(rsp_ptr_alloca, "_load_rsp_ptr_");
  853. arguments.push_back(new_load_rsp);
  854. arguments.push_back(local_stack_end);
  855. arguments.push_back(new_load_rbp);
  856.  
  857. CallInst *new_ci = IRB.CreateCall(newCallee, arguments);
  858. new_ci->setCallingConv(ci->getCallingConv());
  859. auto *rsp_fix = IRB.CreateLoad(rsp_ptr_alloca, "_rsp_fix_");
  860. auto *gep_fix =
  861. IRB.CreateGEP(rsp_fix, ConstantInt::get(int64_type, 8) , "_gep_fix_");
  862. IRB.CreateStore(gep_fix, rsp_ptr_alloca);
  863. recordConverted(ci, new_ci);
  864. } // end if(CallInst ...)
  865. } // end for
  866. } // end for
  867. // At this point erase all the replaced instructions
  868. eraseReplacedInstructions();
  869.  
  870. return;
  871. }
  872.  
  873. /// Function : modifyLoadsToAccessParentStack
  874. /// Purpose : Add a runtime check to each candidate load to test whether
  875. /// the pointer being dereferenced points into the
  876. /// parent stack
  877. // Algorithm:
  878. /// Let PTR : Pointer to be dereferenced
  879. /// LocalEnd : Top address of the local stack
  880. /// ParentEnd : Top address of the parent stack
  881. /// ParentStart : Bottom address of the parent stack
  882. ///
  883. /// condition1 : (PTR points to higher address than LocalEnd) : PTR > LocalEnd
  884. /// condition2 (not within parent stack limits ): (PTR > ParentEnd || PTR < ParentStart)
  885. /// condition3 : ParentStart + (PTR - LocalEnd) < ParentEnd
  886. ///
  887. /// Case I: PTR is a local stack pointer
  888. /// Satisfies:
  889. /// !condition1
  890. /// Conclusion:
  891. /// Dereference PTR
  892. ///
  893. /// Case II: PTR is a parent stack pointer computed w.r.t local RSP/RBP as PTR =
  894. /// RSP/RBP + C
  895. /// Satisfies:
  896. /// condition1
  897. /// condition2
  898. /// // NOTE: PTR is in a different address range because a separate stack array is used for
  899. /// // each function, so PTR will not fall within the parent stack limits
  900. //
  901. /// condition3 : ParentStart + (PTR - LocalEnd) < ParentEnd
  902. // // LHS of the condition is the effective address in the parent stack frame, and that should
  903. // // lie within the parent stack bounds.
  904. //
  905. /// Conclusion:
  906. /// Dereference (ParentStart + (PTR - LocalEnd))
  907. ///
  908. /// Case III: PTR is a direct parent stack pointer
  909. /// Satisfies:
  910. /// condition1
  911. // // as layout of parent stack address is above that of local stack address
  912. /// !condition2
  913. /// Conclusion:
  914. /// Dereference PTR
  915. ///
  916. /// Case IV: PTR is a direct pointer to a global or the heap
  917. /// Satisfies:
  918. /// condition1 or !condition1 // as layout of heap could be anywhere
  919. /// condition2
  920. /// !condition3
  921. /// Conclusion:
  922. /// Dereference PTR
  923. ///
  924. /// So IF condition1 && condition2 && condition3 ==> Dereference
  925. /// (ParentStart + (PTR - LocalEnd))
  926. /// Else Dereference PTR
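///
/// Illustrative sketch of the emitted guard (value names are representative of
/// what the pass emits; the exact IR depends on the surrounding code):
///   %_offset_above_rbp_ = sub i64 %_ptr_to_int_, %_local_end_to_int_
///   %_pot_address_in_parent_stack_ =
///       getelementptr i8, i8* %_parent_start_bt_, i64 %_offset_above_rbp_
///   %_cond1_ = icmp ugt i8* %_ptr_bt_, %_local_end_bt_
///   %_cond2_ = (icmp ugt %_ptr_bt_, %_parent_end_bt_) or
///              (icmp ult %_ptr_bt_, %_parent_start_bt_)
///   %_cond3_ = icmp ule i8* %_pot_address_in_parent_stack_, %_parent_end_bt_
/// If (%_cond1_ && %_cond2_ && %_cond3_), the load uses the remapped parent
/// address ("_address_in_parent_stack_bt_"); otherwise it uses the original
/// pointer. The final address is selected by a PHI feeding "_new_load_".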
  927. void stack_deconstructor::modifyLoadsToAccessParentStack(
  928. Function &F, Value *local_stack_start, Value *local_stack_end) {
  929.  
  930. Value *parent_stack_start = NULL;
  931. Value *parent_stack_end = NULL;
  932. for (Function::arg_iterator I = F.arg_begin(), E = F.arg_end(); I != E; ++I) {
  933. if (I->getName() == "_parent_stack_end_ptr_") {
  934. parent_stack_end = &*I;
  935. }
  936. if (I->getName() == "_parent_stack_start_ptr_") {
  937. parent_stack_start = &*I;
  938. }
  939. }
  940.  
  941. // If not a generated function, return
  942. if (NULL == parent_stack_end)
  943. return;
  944.  
  945. std::vector<Instruction *> intr_to_be_transfomed;
  946.  
  947. // Collect all the loads to be transformed
  948. for (auto FI = F.begin(), FE = F.end(); FI != FE; ++FI) {
  949. for (auto BBI = FI->begin(), BBE = FI->end(); BBI != BBE;) {
  950. Instruction *I = &*BBI++;
  951. if (shouldConvertForParentStackAccess(I)) {
  952. intr_to_be_transfomed.push_back(I);
  953. }
  954. }
  955. }
  956.  
  957. for (Instruction *I : intr_to_be_transfomed) {
  958. DEBUG(errs() << "\nEvaluate for Parent Access: " << *I << "\n");
  959. LoadInst *li = dyn_cast<LoadInst>(I);
  960. Value *ptr_operand = li->getPointerOperand();
  961. auto *head_bb = I->getParent();
  962. Type *ptr_operand_type = ptr_operand->getType();
  963.  
  964. IRBuilder<> IRB(I);
  965. auto *ptr_to_int =
  966. IRB.CreatePtrToInt(ptr_operand, int64_type, "_ptr_to_int_");
  967. auto *local_end_to_int =
  968. IRB.CreatePtrToInt(local_stack_end, int64_type, "_local_end_to_int_");
  969.  
  970. auto *ptr_operand_bt =
  971. IRB.CreateBitCast(ptr_operand, ptr_to_int8_type, "_ptr_bt_");
  972. auto *local_end_bt =
  973. IRB.CreateBitCast(local_stack_end, ptr_to_int8_type, "_local_end_bt_");
  974. auto *parent_end_bt = IRB.CreateBitCast(parent_stack_end, ptr_to_int8_type,
  975. "_parent_end_bt_");
  976. auto *parent_start_bt = IRB.CreateBitCast(
  977. parent_stack_start, ptr_to_int8_type, "_parent_start_bt_");
  978.  
  979. auto *offset = IRB.CreateBinOp(Instruction::Sub, ptr_to_int,
  980. local_end_to_int, "_offset_above_rbp_");
  981. auto *potential_parent_address =
  982. IRB.CreateGEP(parent_start_bt, offset, "_pot_address_in_parent_stack_");
  983.  
  984. auto *cond1 = IRB.CreateICmp(ICmpInst::ICMP_UGT, ptr_operand_bt,
  985. local_end_bt, "_cond1_");
  986. auto *cond2_1 = IRB.CreateICmp(ICmpInst::ICMP_UGT, ptr_operand_bt,
  987. parent_end_bt, "_cond2_1_");
  988. auto *cond2_2 = IRB.CreateICmp(ICmpInst::ICMP_ULT, ptr_operand_bt,
  989. parent_start_bt, "_cond2_2_");
  990. auto *cond2 = IRB.CreateBinOp(Instruction::Or, cond2_1, cond2_2, "_cond2_");
  991.  
  992. auto *cond3 = IRB.CreateICmp(ICmpInst::ICMP_ULE, potential_parent_address,
  993. parent_end_bt, "_cond3_");
  994. auto *cond1_n_cond2 =
  995. IRB.CreateBinOp(Instruction::And, cond1, cond2, "_cond1_n_cond2_");
  996. auto *cond1_n_cond2_n_cond3 = IRB.CreateBinOp(
  997. Instruction::And, cond1_n_cond2, cond3, "_cond1_n_cond2_cond3_");
  998. TerminatorInst *ti =
  999. SplitBlockAndInsertIfThen(cond1_n_cond2_n_cond3, I, false);
  1000.  
  1001. auto *then_bb = ti->getParent();
  1002.  
  1003. // Populate the Then Basic Block
  1004. IRB.SetInsertPoint(then_bb->getTerminator());
  1005.  
  1006. DEBUG(Constant *printf_func = printf_prototype(*ctx, Mod); IRB.CreateCall(
  1007. printf_func,
  1008. geti8StrVal(*Mod,
  1009. "Accessing Parent Stack [" +
  1010. std::to_string(StaticParentAccessChecks++) + "]\n",
  1011. "_debug_parent_stack_")));
  1012. // Constant *printf_func = printf_prototype(*ctx, Mod); IRB.CreateCall(
  1013. // printf_func,
  1014. // geti8StrVal(*Mod,
  1015. // "Accessing Parent Stack [" +
  1016. // std::to_string(StaticParentAccessChecks++) + "]\n",
  1017. // "_debug_parent_stack_"));
  1018. auto *parent_address =
  1019. IRB.CreateGEP(parent_start_bt, offset, "_address_in_parent_stack_");
  1020. auto *parent_address_bt = IRB.CreateBitCast(
  1021. parent_address, ptr_operand_type, "_address_in_parent_stack_bt_");
  1022.  
  1023. // Populate the Tail Basic Block
  1024. IRB.SetInsertPoint(li);
  1025.  
  1026. auto *new_phi = IRB.CreatePHI(ptr_operand_type, 2);
  1027. new_phi->addIncoming(ptr_operand, head_bb);
  1028. new_phi->addIncoming(parent_address_bt, then_bb);
  1029.  
  1030. Instruction *new_load = NULL;
  1031. new_load = IRB.CreateLoad(new_phi, "_new_load_");
  1032. recordConverted(li, new_load);
  1033. }
  1034.  
  1035. // At this point erase all the replaced instructions
  1036. eraseReplacedInstructions();
  1037.  
  1038. return;
  1039. }
  1040.  
  1041. bool stack_deconstructor::shouldConvertForParentStackAccess(Instruction *I) {
  1042. if (LoadInst *li = dyn_cast<LoadInst>(I)) {
  1043. auto *ptr_operand = li->getPointerOperand();
  1044. StringRef str = ptr_operand->getName();
  1045. if (str.empty() || StringRef::npos != str.find("_allin_")) {
  1046. return true;
  1047. }
  1048. }
  1049.  
  1050. return false;
  1051. }
  1052.  
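/// Function : cloneFunctionWithExtraArgument
/// Purpose  : Clone F into a new function whose signature carries three extra
///            i8* parameters (_parent_stack_start_ptr_, _parent_stack_end_ptr_
///            and _parent_stack_rbp_ptr_), copy the body over with
///            CloneFunctionInto, and append the clone to the module.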
  1053. Function *stack_deconstructor::cloneFunctionWithExtraArgument(Function *F) {
  1054. std::vector<Type *> ArgTypes;
  1055. ValueToValueMapTy VMap;
  1056.  
  1057. for (Function::const_arg_iterator I = F->arg_begin(), E = F->arg_end();
  1058. I != E; ++I) {
  1059. ArgTypes.push_back(I->getType());
  1060. }
  1061.  
  1062. // extra arguments: parent stack start, parent stack end, parent rbp
  1063. ArgTypes.push_back(ptr_to_int8_type);
  1064. ArgTypes.push_back(ptr_to_int8_type);
  1065. ArgTypes.push_back(ptr_to_int8_type);
  1066.  
  1067. // Create a new function type considering the extra argument
  1068. FunctionType *FTy =
  1069. FunctionType::get(F->getFunctionType()->getReturnType(), ArgTypes,
  1070. F->getFunctionType()->isVarArg());
  1071.  
  1072. // Create the new function...
  1073. Function *NewF = Function::Create(FTy, F->getLinkage(), F->getName());
  1074.  
  1075. // Loop over the arguments, copying the names of the mapped arguments over...
  1076. Function::arg_iterator DestI = NewF->arg_begin();
  1077. for (Function::const_arg_iterator I = F->arg_begin(), E = F->arg_end();
  1078. I != E; ++I) {
  1079. DestI->setName(I->getName());
  1080. VMap[&*I] = &*DestI;
  1081. DestI++;
  1082. }
  1083. DestI->setName("_parent_stack_start_ptr_");
  1084. DestI++;
  1085. DestI->setName("_parent_stack_end_ptr_");
  1086. DestI++;
  1087. DestI->setName("_parent_stack_rbp_ptr_");
  1088. DestI++;
  1089.  
  1090. SmallVector<ReturnInst *, 8> Returns; // Ignore returns cloned.
  1091. CloneFunctionInto(NewF, F, VMap, false, Returns, "");
  1092.  
  1093. Mod->getFunctionList().push_back(NewF);
  1094.  
  1095. return NewF;
  1096. }
  1097.  
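/// Function : recordConverted
/// Purpose  : Remember that 'From' has been rewritten as 'To' (convertMap),
///            optionally redirect all uses of 'From' to 'To', and optionally
///            queue 'From' for deletion by eraseReplacedInstructions().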
  1098. void stack_deconstructor::recordConverted(Instruction *From, Value *To,
  1099. bool replaceUses, bool erase) {
  1100. convertMap[From] = To;
  1101. if (erase)
  1102. ToErase.push_back(From);
  1103. if (replaceUses) {
  1104. From->replaceAllUsesWith(To);
  1105. }
  1106. DEBUG(llvm::errs() << "\tConvert :" << *From << " --> " << *To << "\n");
  1107. }
  1108.  
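/// Function : eraseReplacedInstructions
/// Purpose  : Drop the references of, then erase, every instruction queued in
///            ToErase, and reset the per-function conversion state.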
  1109. void stack_deconstructor::eraseReplacedInstructions() {
  1110. for (Instruction *E : ToErase)
  1111. E->dropAllReferences();
  1112. for (Instruction *E : ToErase)
  1113. E->eraseFromParent();
  1114.  
  1115. convertMap.shrink_and_clear();
  1116. ToErase.clear();
  1117. }
  1118.  
  1119. Constant *stack_deconstructor::printf_prototype(LLVMContext &ctx, Module *mod) {
  1120.  
  1121. FunctionType *printf_type = TypeBuilder<int(char *, ...), false>::get(ctx);
  1122.  
  1123. Constant *func = mod->getOrInsertFunction(
  1124. "printf", printf_type,
  1125. AttributeSet().addAttribute(mod->getContext(), 1U, Attribute::NoAlias));
  1126.  
  1127. if (!func) {
  1128. assert(0 && "getOrInsertFunction returned non function");
  1129. }
  1130. return func;
  1131. }
  1132.  
  1133. Constant *stack_deconstructor::geti8StrVal(Module &M, std::string str,
  1134. Twine const &name) {
  1135. Constant *strConstant = ConstantDataArray::getString(*ctx, str.c_str());
  1136. GlobalVariable *GVStr =
  1137. new GlobalVariable(M, strConstant->getType(), true,
  1138. GlobalValue::InternalLinkage, strConstant, name);
  1139. Constant *zero = Constant::getNullValue(IntegerType::getInt32Ty(*ctx));
  1140. Constant *indices[] = {zero, zero};
  1141. Constant *strVal = ConstantExpr::getGetElementPtr(strConstant->getType(),
  1142. GVStr, indices, true);
  1143. return strVal;
  1144. }
  1145.  
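/// Function : isLoadOfImp
/// Purpose  : Return true iff I is a load whose pointer operand is named
///            ptr_name (e.g. "XSP", "XBP", "RSP" or "RBP").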
  1146. bool stack_deconstructor::isLoadOfImp(Value *I, StringRef ptr_name) {
  1147. LoadInst *LI = dyn_cast<LoadInst>(I);
  1148. if (!LI) {
  1149. return false;
  1150. }
  1151.  
  1152. Value *ptr_operand = LI->getPointerOperand();
  1153. if (ptr_operand->getName() != ptr_name) {
  1154. return false;
  1155. }
  1156.  
  1157. return true;
  1158. }