// DbCheckPostLower.cpp
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #if DBG
  7. void
  8. DbCheckPostLower::Check()
  9. {
  10. bool doOpHelperCheck = Js::Configuration::Global.flags.CheckOpHelpers && !this->func->isPostLayout;
  11. bool isInHelperBlock = false;
  12. FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, this->func)
  13. {
  14. Assert(Lowerer::ValidOpcodeAfterLower(instr, this->func));
  15. LowererMD::Legalize</*verify*/true>(instr);
  16. switch(instr->GetKind())
  17. {
  18. case IR::InstrKindLabel:
  19. case IR::InstrKindProfiledLabel:
  20. isInHelperBlock = instr->AsLabelInstr()->isOpHelper;
  21. if (doOpHelperCheck && !isInHelperBlock && !instr->AsLabelInstr()->m_noHelperAssert)
  22. {
  23. bool foundNonHelperPath = false;
  24. bool isDeadLabel = true;
  25. IR::LabelInstr* labelInstr = instr->AsLabelInstr();
  26. while (1)
  27. {
  28. FOREACH_SLIST_ENTRY(IR::BranchInstr *, branchInstr, &labelInstr->labelRefs)
  29. {
  30. isDeadLabel = false;
  31. IR::Instr *instrPrev = branchInstr->m_prev;
  32. while (instrPrev && !instrPrev->IsLabelInstr())
  33. {
  34. instrPrev = instrPrev->m_prev;
  35. }
  36. if (!instrPrev || !instrPrev->AsLabelInstr()->isOpHelper || branchInstr->m_isHelperToNonHelperBranch)
  37. {
  38. foundNonHelperPath = true;
  39. break;
  40. }
  41. } NEXT_SLIST_ENTRY;
  42. if (!labelInstr->m_next->IsLabelInstr())
  43. {
  44. break;
  45. }
  46. IR::LabelInstr *const nextLabel = labelInstr->m_next->AsLabelInstr();
  47. // It is generally not expected for a non-helper label to be immediately followed by a helper label. Some
  48. // special cases may flag the helper label with m_noHelperAssert = true. Peeps can cause non-helper blocks
  49. // to fall through into helper blocks, so skip this check after peeps.
  50. Assert(func->isPostPeeps || nextLabel->m_noHelperAssert || !nextLabel->isOpHelper);
  51. if(nextLabel->isOpHelper)
  52. {
  53. break;
  54. }
  55. labelInstr = nextLabel;
  56. }
  57. instrNext = labelInstr->m_next;
  58. // This label is unreachable or at least one path to it is not from a helper block.
  59. if (!foundNonHelperPath && !instr->GetNextRealInstrOrLabel()->IsExitInstr() && !isDeadLabel)
  60. {
  61. IR::Instr *prevInstr = labelInstr->GetPrevRealInstrOrLabel();
  62. if (prevInstr->HasFallThrough() && !(prevInstr->IsBranchInstr() && prevInstr->AsBranchInstr()->m_isHelperToNonHelperBranch))
  63. {
  64. while (prevInstr && !prevInstr->IsLabelInstr())
  65. {
  66. prevInstr = prevInstr->m_prev;
  67. }
  68. AssertMsg(prevInstr && prevInstr->IsLabelInstr() && !prevInstr->AsLabelInstr()->isOpHelper, "Inconsistency in Helper label annotations");
  69. }
  70. }
  71. }
  72. break;
  73. case IR::InstrKindBranch:
  74. if (doOpHelperCheck && !isInHelperBlock)
  75. {
  76. IR::LabelInstr *targetLabel = instr->AsBranchInstr()->GetTarget();
  77. // This branch needs a path to a non-helper block.
  78. if (instr->AsBranchInstr()->IsConditional())
  79. {
  80. if (targetLabel->isOpHelper && !targetLabel->m_noHelperAssert)
  81. {
  82. IR::Instr *instrNextDebug = instr->GetNextRealInstrOrLabel();
  83. Assert(!(instrNextDebug->IsLabelInstr() && instrNextDebug->AsLabelInstr()->isOpHelper));
  84. }
  85. }
  86. else
  87. {
  88. Assert(instr->AsBranchInstr()->IsUnconditional());
  89. if (targetLabel)
  90. {
  91. if (!targetLabel->isOpHelper || targetLabel->m_noHelperAssert)
  92. {
  93. break;
  94. }
  95. // Target is opHelper
  96. IR::Instr *instrPrev = instr->m_prev;
  97. if (this->func->isPostRegAlloc)
  98. {
  99. while (LowererMD::IsAssign(instrPrev))
  100. {
  101. // Skip potential register allocation compensation code
  102. instrPrev = instrPrev->m_prev;
  103. }
  104. }
  105. if (instrPrev->m_opcode == Js::OpCode::DeletedNonHelperBranch)
  106. {
  107. break;
  108. }
  109. Assert((instrPrev->IsBranchInstr() && instrPrev->AsBranchInstr()->IsConditional()
  110. && (!instrPrev->AsBranchInstr()->GetTarget()->isOpHelper || instrPrev->AsBranchInstr()->GetTarget()->m_noHelperAssert)));
  111. }
  112. else
  113. {
  114. Assert(instr->GetSrc1());
  115. }
  116. }
  117. }
  118. break;
  119. default:
  120. this->Check(instr->GetDst());
  121. this->Check(instr->GetSrc1());
  122. this->Check(instr->GetSrc2());
  123. #if defined(_M_IX86) || defined(_M_X64)
  124. // for op-eq's and assignment operators, make sure the types match
  125. // for shift operators make sure the types match and the third is an 8-bit immediate
  126. // for cmp operators similarly check types are same
  127. if (EncoderMD::IsOPEQ(instr))
  128. {
  129. Assert(instr->GetDst()->IsEqual(instr->GetSrc1()));
  130. #if defined(_M_X64)
  131. Assert(!instr->GetSrc2() || instr->GetDst()->GetSize() == instr->GetSrc2()->GetSize() ||
  132. ((EncoderMD::IsSHIFT(instr) || instr->m_opcode == Js::OpCode::BTR ||
  133. instr->m_opcode == Js::OpCode::BTS ||
  134. instr->m_opcode == Js::OpCode::BT) && instr->GetSrc2()->GetSize() == 1) ||
  135. // Is src2 is TyVar and src1 is TyInt32/TyUint32, make sure the address fits in 32 bits
  136. (instr->GetSrc2()->GetType() == TyVar && instr->GetDst()->GetSize() == 4 &&
  137. instr->GetSrc2()->IsAddrOpnd() && Math::FitsInDWord(reinterpret_cast<int64>(instr->GetSrc2()->AsAddrOpnd()->m_address))));
  138. #else
  139. Assert(!instr->GetSrc2() || instr->GetDst()->GetSize() == instr->GetSrc2()->GetSize() ||
  140. ((EncoderMD::IsSHIFT(instr) || instr->m_opcode == Js::OpCode::BTR ||
  141. instr->m_opcode == Js::OpCode::BT) && instr->GetSrc2()->GetSize() == 1));
  142. #endif
  143. }
  144. Assert(!LowererMD::IsAssign(instr) || instr->GetDst()->GetSize() == instr->GetSrc1()->GetSize());
  145. Assert(instr->m_opcode != Js::OpCode::CMP || instr->GetSrc1()->GetType() == instr->GetSrc1()->GetType());
  146. switch (instr->m_opcode)
  147. {
  148. case Js::OpCode::CMOVA:
  149. case Js::OpCode::CMOVAE:
  150. case Js::OpCode::CMOVB:
  151. case Js::OpCode::CMOVBE:
  152. case Js::OpCode::CMOVE:
  153. case Js::OpCode::CMOVG:
  154. case Js::OpCode::CMOVGE:
  155. case Js::OpCode::CMOVL:
  156. case Js::OpCode::CMOVLE:
  157. case Js::OpCode::CMOVNE:
  158. case Js::OpCode::CMOVNO:
  159. case Js::OpCode::CMOVNP:
  160. case Js::OpCode::CMOVNS:
  161. case Js::OpCode::CMOVO:
  162. case Js::OpCode::CMOVP:
  163. case Js::OpCode::CMOVS:
  164. if (instr->GetSrc2())
  165. {
  166. // CMOV inserted before regAlloc need a fake use of the dst register to make up for the
  167. // fact that the CMOV may not set the dst. Regalloc needs to assign the same physical register for dst and src1.
  168. Assert(instr->GetDst()->IsEqual(instr->GetSrc1()));
  169. }
  170. else
  171. {
  172. // These must have been inserted post-regalloc.
  173. Assert(instr->GetDst()->AsRegOpnd()->GetReg() != RegNOREG);
  174. }
  175. break;
  176. case Js::OpCode::CALL:
  177. Assert(!instr->m_func->IsTrueLeaf());
  178. break;
  179. }
  180. #endif
  181. }
  182. } NEXT_INSTR_IN_FUNC_EDITING;
  183. }
  184. void DbCheckPostLower::Check(IR::Opnd *opnd)
  185. {
  186. if (opnd == NULL)
  187. {
  188. return;
  189. }
  190. if (opnd->IsRegOpnd())
  191. {
  192. this->Check(opnd->AsRegOpnd());
  193. }
  194. else if (opnd->IsIndirOpnd())
  195. {
  196. this->Check(opnd->AsIndirOpnd()->GetBaseOpnd());
  197. this->Check(opnd->AsIndirOpnd()->GetIndexOpnd());
  198. }
  199. else if (opnd->IsSymOpnd() && opnd->AsSymOpnd()->m_sym->IsStackSym())
  200. {
  201. if (this->func->isPostRegAlloc)
  202. {
  203. AssertMsg(opnd->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated(), "No Stack space allocated for StackSym?");
  204. }
  205. IRType symType = opnd->AsSymOpnd()->m_sym->AsStackSym()->GetType();
  206. if (symType != TyMisc)
  207. {
  208. uint symSize = static_cast<uint>(max(TySize[symType], MachRegInt));
  209. AssertMsg(static_cast<uint>(TySize[opnd->AsSymOpnd()->GetType()]) + opnd->AsSymOpnd()->m_offset <= symSize, "SymOpnd cannot refer to a size greater than Sym's reference");
  210. }
  211. }
  212. }
  213. void DbCheckPostLower::Check(IR::RegOpnd *regOpnd)
  214. {
  215. if (regOpnd == NULL)
  216. {
  217. return;
  218. }
  219. RegNum reg = regOpnd->GetReg();
  220. if (reg != RegNOREG)
  221. {
  222. if (IRType_IsFloat(LinearScan::GetRegType(reg)))
  223. {
  224. // both simd128 and float64 map to float64 regs
  225. Assert(IRType_IsFloat(regOpnd->GetType()) || IRType_IsSimd128(regOpnd->GetType()));
  226. }
  227. else
  228. {
  229. Assert(IRType_IsNativeInt(regOpnd->GetType()) || regOpnd->GetType() == TyVar);
  230. #if defined(_M_IX86) || defined(_M_X64)
  231. if (regOpnd->GetSize() == 1)
  232. {
  233. Assert(LinearScan::GetRegAttribs(reg) & RA_BYTEABLE);
  234. }
  235. #endif
  236. }
  237. }
  238. }
  239. #endif // DBG