LowererMDArch.cpp 98 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "BackEnd.h"
  6. #include "LowererMDArch.h"
  7. #include "Library\JavascriptGeneratorFunction.h"
// Machine opcode used to sign-extend a 32-bit value to 64 bits on amd64.
const Js::OpCode LowererMD::MDExtend32Opcode = Js::OpCode::MOVSXD;

// Per-register IR types; defined in the machine-specific register tables.
extern const IRType RegTypes[RegNumCount];
// Returns the default scale factor for indirect (indexed) addressing:
// 8-byte elements, matching the pointer size on amd64.
BYTE
LowererMDArch::GetDefaultIndirScale()
{
    return IndirScale8;
}
// Returns the register that variable-count shift instructions read their
// shift amount from (x64 SHL/SHR/SAR take the count in CL, i.e. RCX).
RegNum
LowererMDArch::GetRegShiftCount()
{
    return RegRCX;
}
  20. RegNum
  21. LowererMDArch::GetRegReturn(IRType type)
  22. {
  23. return ( IRType_IsFloat(type) || IRType_IsSimd128(type) ) ? RegXMM0 : RegRAX;
  24. }
  25. RegNum
  26. LowererMDArch::GetRegReturnAsmJs(IRType type)
  27. {
  28. if (IRType_IsFloat(type))
  29. {
  30. return RegXMM0;
  31. }
  32. else if (IRType_IsSimd128(type))
  33. {
  34. return RegXMM0;
  35. }
  36. else
  37. {
  38. return RegRAX;
  39. }
  40. }
// Returns the architectural stack-pointer register (RSP).
RegNum
LowererMDArch::GetRegStackPointer()
{
    return RegRSP;
}
// Returns the block-pointer register; on this target it is the same
// register as the frame pointer (RBP).
RegNum
LowererMDArch::GetRegBlockPointer()
{
    return RegRBP;
}
// Returns the frame-pointer register (RBP).
RegNum
LowererMDArch::GetRegFramePointer()
{
    return RegRBP;
}
// Returns the register used to pass the size parameter to the stack-probe
// helper (__chkstk takes the allocation size in RAX in the MSVC x64 ABI).
RegNum
LowererMDArch::GetRegChkStkParam()
{
    return RegRAX;
}
// Returns the register that receives the low half of a widening multiply
// (the one-operand x64 MUL/IMUL forms write the low 64 bits to RAX).
RegNum
LowererMDArch::GetRegIMulDestLower()
{
    return RegRAX;
}
// Returns the register that receives the high half of a widening multiply
// (the one-operand x64 MUL/IMUL forms write the high 64 bits to RDX).
RegNum
LowererMDArch::GetRegIMulHighDestLower()
{
    return RegRDX;
}
// Register for the argNum-th integer argument. Not implemented on this
// target: always returns RegNOREG (no dedicated int arg registers chosen).
RegNum
LowererMDArch::GetRegArgI4(int32 argNum)
{
    // TODO: decide on registers to use for int
    return RegNOREG;
}
// Register for the argNum-th double argument. Not implemented on this
// target: always returns RegNOREG (no dedicated double arg registers chosen).
RegNum
LowererMDArch::GetRegArgR8(int32 argNum)
{
    // TODO: decide on registers to use for double
    return RegNOREG;
}
  83. Js::OpCode
  84. LowererMDArch::GetAssignOp(IRType type)
  85. {
  86. switch (type)
  87. {
  88. case TyFloat64:
  89. return Js::OpCode::MOVSD;
  90. case TyFloat32:
  91. return Js::OpCode::MOVSS;
  92. case TySimd128F4:
  93. case TySimd128I4:
  94. case TySimd128D2:
  95. return Js::OpCode::MOVUPS;
  96. default:
  97. return Js::OpCode::MOV;
  98. }
  99. }
// One-time setup: ties this arch-specific lowerer to its owning LowererMD
// and resets the count of pending helper-call arguments.
void
LowererMDArch::Init(LowererMD *lowererMD)
{
    this->lowererMD = lowererMD;
    this->helperCallArgsCount = 0;
}
///----------------------------------------------------------------------------
///
/// LowererMD::LoadInputParamPtr
///
/// Load the address of the start of the passed-in parameters not including
/// the this parameter.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LoadInputParamPtr(IR::Instr *instrInsert, IR::RegOpnd *optionalDstOpnd /* = nullptr */)
{
    if (this->m_func->GetJnFunction()->IsGenerator())
    {
        // Generator frames keep their args in a separate buffer: load that
        // pointer and step one MachPtr slot past the first entry (the "this"
        // slot) to get the address of the first real argument.
        IR::RegOpnd * argPtrRegOpnd = Lowerer::LoadGeneratorArgsPtr(instrInsert);
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(argPtrRegOpnd, 1 * MachPtr, TyMachPtr, this->m_func);
        IR::RegOpnd * dstOpnd = optionalDstOpnd != nullptr ? optionalDstOpnd : IR::RegOpnd::New(TyMachPtr, this->m_func);
        return Lowerer::InsertLea(dstOpnd, indirOpnd, instrInsert);
    }
    else
    {
        // Stack layout in MachPtr-sized slots: (frame-pointer chain)+0,
        // (return addr)+1, (function object)+2, (arg count)+3, (this)+4,
        // then the actual args -- hence the 5 * MachPtr offset below.
        // (NOTE(review): the old comment listed x86 byte offsets +4/+8/+12/+16;
        // on amd64 the slots are 8 bytes, the code was already correct.)
        StackSym *paramSym = StackSym::New(TyMachReg, this->m_func);
        this->m_func->SetArgOffset(paramSym, 5 * MachPtr);
        IR::Instr *instr = this->lowererMD->LoadStackAddress(paramSym, optionalDstOpnd);
        instrInsert->InsertBefore(instr);
        return instr;
    }
}
// Lowers a LoadStackArgPtr-style instruction: computes the address of the
// first actual argument (after "this") into the instruction's dst, then
// removes the original instruction. Returns the instruction preceding the
// inserted LEA so the caller can continue lowering from there.
IR::Instr *
LowererMDArch::LoadStackArgPtr(IR::Instr * instrArgPtr)
{
    // Get the args pointer relative to the frame pointer.
    // NOTE: This code is sufficient for the apply-args optimization, but not for StackArguments,
    // if and when that is enabled.
    // dst = LEA &[rbp + "this" offset + sizeof(var)]
    IR::Instr * instr = LoadInputParamPtr(instrArgPtr, instrArgPtr->UnlinkDst()->AsRegOpnd());
    instrArgPtr->Remove();
    return instr->m_prev;
}
// Lowers LdHeapArgsCached / LdLetHeapArgsCached into a call to the
// HelperOp_LoadHeapArgsCached runtime helper: pushes the seven helper
// arguments (s7..s1, pushed in that order), then stores the resulting
// arguments object into its dedicated stack slot. Returns the instruction
// that preceded instrArgs so the caller can resume lowering from there.
IR::Instr *
LowererMDArch::LoadHeapArgsCached(IR::Instr *instrArgs)
{
    // s7 = formals are let decls
    // s6 = memory context
    // s5 = local frame instance
    // s4 = address of first actual argument (after "this")
    // s3 = formal argument count
    // s2 = actual argument count
    // s1 = current function
    // dst = JavascriptOperators::LoadArguments(s1, s2, s3, s4, s5, s6, s7)
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;
    IR::Instr *instrPrev = instrArgs->m_prev;

    // s7 = formals are let decls (true only for the Let variant of the opcode)
    IR::Opnd * formalsAreLetDecls = IR::IntConstOpnd::New((IntConstType)(instrArgs->m_opcode == Js::OpCode::LdLetHeapArgsCached), TyUint8, func);
    this->LoadHelperArgument(instrArgs, formalsAreLetDecls);

    // s6 = memory context
    this->lowererMD->m_lowerer->LoadScriptContext(instrArgs);

    // s5 = local frame instance
    IR::Opnd *frameObj = instrArgs->UnlinkSrc1();
    this->LoadHelperArgument(instrArgs, frameObj);

    if (func->IsInlinee())
    {
        // s4 = address of first actual argument (after "this").
        // Bump the inlinee argv slot by one MachPtr to skip the "this" slot.
        StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
        this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
        IR::Instr *instr = this->lowererMD->LoadStackAddress(firstRealArgSlotSym);
        instrArgs->InsertBefore(instr);
        this->LoadHelperArgument(instrArgs, instr->GetDst());
        // s3 = formal argument count (without counting "this").
        uint32 formalsCount = func->GetJnFunction()->GetInParamsCount() - 1;
        this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyUint32, func));
        // s2 = actual argument count (without counting "this").
        // For an inlinee the actual count is known at JIT time, so it is
        // materialized as a constant.
        instr = IR::Instr::New(Js::OpCode::MOV,
            IR::RegOpnd::New(TyMachReg, func),
            IR::IntConstOpnd::New(func->actualCount - 1, TyUint32, func),
            func);
        instrArgs->InsertBefore(instr);
        this->LoadHelperArgument(instrArgs, instr->GetDst());
        // s1 = current function.
        this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());
        // Save the newly-created args object to its dedicated stack slot.
        IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
        instr = IR::Instr::New(Js::OpCode::MOV,
            argObjSlotOpnd,
            instrArgs->GetDst(),
            func);
        instrArgs->InsertAfter(instr);
    }
    else
    {
        // s4 = address of first actual argument (after "this");
        // see LoadInputParamPtr for the frame layout.
        IR::Instr *instr = this->LoadInputParamPtr(instrArgs);
        this->LoadHelperArgument(instrArgs, instr->GetDst());
        // s3 = formal argument count (without counting "this")
        uint32 formalsCount = func->GetInParamsCount() - 1;
        this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyInt32, func));
        // s2 = actual argument count (without counting "this"):
        // load the runtime count, then DEC in place to drop "this".
        instr = this->lowererMD->LoadInputParamCount(instrArgs);
        instr = IR::Instr::New(Js::OpCode::DEC, instr->GetDst(), instr->GetDst(), func);
        instrArgs->InsertBefore(instr);
        this->LoadHelperArgument(instrArgs, instr->GetDst());
        // s1 = current function, read from its frame slot (2 * MachPtr).
        StackSym *paramSym = StackSym::New(TyMachReg, func);
        this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
        IR::Opnd * srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
        this->LoadHelperArgument(instrArgs, srcOpnd);
        // Save the newly-created args object to its dedicated stack slot.
        IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
        instr = IR::Instr::New(Js::OpCode::MOV, opnd, instrArgs->GetDst(), func);
        instrArgs->InsertAfter(instr);
    }
    this->lowererMD->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArgsCached);
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LoadHeapArguments
///
/// Load the arguments object
/// NOTE: The same caveat regarding arguments passed on the stack applies here
/// as in LoadInputParamCount above.
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LoadHeapArguments(IR::Instr *instrArgs, bool force /* = false */, IR::Opnd *opndInputParamCount /* = nullptr */)
{
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;
    IR::Instr *instrPrev = instrArgs->m_prev;
    if (!force && func->GetHasStackArgs() && this->m_func->GetHasStackArgs())
    {
        // Stack-args optimization path: no heap arguments object is built;
        // the instruction degenerates to storing null.
        // The initial args slot value is zero. (TODO: it should be possible to dead-store the LdHeapArgs in this case.)
        instrArgs->m_opcode = Js::OpCode::MOV;
        instrArgs->ReplaceSrc1(IR::AddrOpnd::NewNull(func));
        instrArgs->FreeSrc2();
    }
    else
    {
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = array of property ID's
        // s4 = local frame instance
        // s3 = address of first actual argument (after "this")
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadHeapArguments(s1, s2, s3, s4, s5, s6, s7)

        // s7 = formals are let decls
        this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(instrArgs->m_opcode == Js::OpCode::LdLetHeapArguments ? TRUE : FALSE, TyUint8, func));
        // s6 = memory context
        instrPrev = this->lowererMD->m_lowerer->LoadScriptContext(instrArgs);
        // s5 = array of property ID's
        IR::Opnd *argArray = instrArgs->UnlinkSrc2();
        this->LoadHelperArgument(instrArgs, argArray);
        // s4 = local frame instance
        IR::Opnd *frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);
        if (func->IsInlinee())
        {
            // s3 = address of first actual argument (after "this"):
            // bump the inlinee argv slot by one MachPtr to skip "this".
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->lowererMD->LoadStackAddress(firstRealArgSlotSym);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s2 = actual argument count (without counting "this") --
            // known at JIT time for an inlinee, so loaded as a constant.
            instr = IR::Instr::New(Js::OpCode::MOV,
                IR::RegOpnd::New(TyUint32, func),
                IR::IntConstOpnd::New(func->actualCount - 1, TyUint32, func),
                func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());
            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV,
                argObjSlotOpnd,
                instrArgs->GetDst(),
                func);
            instrArgs->InsertAfter(instr);
        }
        else
        {
            // s3 = address of first actual argument (after "this");
            // see LoadInputParamPtr for the frame layout.
            IR::Instr *instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s2 = actual argument count (without counting "this");
            // the caller may supply it, otherwise load it here.
            if (opndInputParamCount == nullptr)
            {
                instr = this->lowererMD->LoadInputParamCount(instrArgs, -1);
                opndInputParamCount = instr->GetDst();
            }
            this->LoadHelperArgument(instrArgs, opndInputParamCount);
            // s1 = current function, read from its frame slot (2 * MachPtr).
            StackSym * paramSym = StackSym::New(TyMachReg, func);
            this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
            IR::Opnd * srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
            if (this->m_func->GetJnFunction()->IsGenerator())
            {
                // the function object for generator calls is a GeneratorVirtualScriptFunction object
                // and we need to pass the real JavascriptGeneratorFunction object so grab it instead
                IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, func);
                LowererMD::CreateAssign(tmpOpnd, srcOpnd, instrArgs);
                srcOpnd = IR::IndirOpnd::New(tmpOpnd, Js::GeneratorVirtualScriptFunction::GetRealFunctionOffset(), TyMachPtr, func);
            }
            this->LoadHelperArgument(instrArgs, srcOpnd);
            // Save the newly-created args object to its dedicated stack slot.
            IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV, opnd, instrArgs->GetDst(), func);
            instrArgs->InsertAfter(instr);
        }
        this->lowererMD->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArguments);
    }
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LoadFuncExpression
///
/// Load the function expression to src1 from [ebp + 8]
/// (the function-object slot of the current frame; 2 * MachPtr on amd64).
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LoadFuncExpression(IR::Instr *instrFuncExpr)
{
    ASSERT_INLINEE_FUNC(instrFuncExpr);
    Func *func = instrFuncExpr->m_func;

    IR::Opnd *paramOpnd = nullptr;
    if (func->IsInlinee())
    {
        // Inlinee: function object lives in a dedicated slot, not at a
        // fixed frame offset.
        paramOpnd = func->GetInlineeFunctionObjectSlotOpnd();
    }
    else
    {
        // Function object is the third MachPtr-sized slot on the frame
        // (after the frame-pointer chain and the return address).
        StackSym *paramSym = StackSym::New(TyMachReg, this->m_func);
        this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
        paramOpnd = IR::SymOpnd::New(paramSym, TyMachReg, this->m_func);
    }
    if (this->m_func->GetJnFunction()->IsGenerator())
    {
        // the function object for generator calls is a GeneratorVirtualScriptFunction object
        // and we need to return the real JavascriptGeneratorFunction object so grab it before
        // assigning to the dst
        IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, func);
        LowererMD::CreateAssign(tmpOpnd, paramOpnd, instrFuncExpr);
        paramOpnd = IR::IndirOpnd::New(tmpOpnd, Js::GeneratorVirtualScriptFunction::GetRealFunctionOffset(), TyMachPtr, func);
    }
    // mov dst, param
    instrFuncExpr->SetSrc1(paramOpnd);
    LowererMD::ChangeToAssign(instrFuncExpr);
    return instrFuncExpr;
}
//
// Load the parameter in the first argument slot
//
IR::Instr *
LowererMDArch::LoadNewScObjFirstArg(IR::Instr * instr, IR::Opnd * dst, ushort extraArgs)
{
    // Spread moves down the argument slot by one.
    // Store dst (the newly created object) into argument slot 3 + extraArgs
    // -- presumably slots 1 and 2 hold callInfo and the function object;
    // NOTE(review): confirm against GetArgSlotOpnd's slot layout.
    IR::Opnd * argOpnd = this->GetArgSlotOpnd(3 + extraArgs);
    IR::Instr * argInstr = LowererMD::CreateAssign(argOpnd, dst, instr);
    return argInstr;
}
// Lowers the ArgOut chain feeding callInstr: walks the src2 links from the
// call back to the StartCall, turning each ArgOut into a MOV to its argument
// slot, then (except for asm.js calls) stores the callInfo constant and
// lowers the StartCall itself. Returns the total number of argument slots
// consumed (args + 1 for callInfo + extraParams), which the caller uses for
// stack accounting.
int32
LowererMDArch::LowerCallArgs(IR::Instr *callInstr, ushort callFlags, Js::ArgSlot extraParams, IR::IntConstOpnd **callInfoOpndRef /* = nullptr */)
{
    AssertMsg(this->helperCallArgsCount == 0, "We don't support nested helper calls yet");

    // Shift every arg position up by one -- presumably to make room for the
    // callInfo written to slot 1 + extraParams below; TODO confirm.
    const Js::ArgSlot argOffset = 1;

    uint32 argCount = 0;

    // Lower args and look for StartCall
    IR::Instr * argInstr = callInstr;
    IR::Instr * cfgInsertLoc = callInstr->GetPrevRealInstr();
    IR::Opnd *src2 = argInstr->UnlinkSrc2();
    while (src2->IsSymOpnd())
    {
        IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
        StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
        AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
        argLinkOpnd->Free(this->m_func);

        argInstr = argLinkSym->m_instrDef;
        src2 = argInstr->UnlinkSrc2();
        this->lowererMD->ChangeToAssign(argInstr);

        // Mov each arg to its argSlot. Each addition is checked for
        // Js::ArgSlot wrap-around; overflow raises OOM.
        Js::ArgSlot argPosition = argInstr->GetDst()->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
        Js::ArgSlot index = argOffset + argPosition;
        if(index < argPosition)
        {
            Js::Throw::OutOfMemory();
        }
        index += extraParams;
        if(index < extraParams)
        {
            Js::Throw::OutOfMemory();
        }
        IR::Opnd * dstOpnd = this->GetArgSlotOpnd(index, argLinkSym);
        argInstr->ReplaceDst(dstOpnd);
        cfgInsertLoc = argInstr->GetPrevRealInstr();
        // The arg sym isn't assigned a constant directly anymore
        // TODO: We can just move the instruction down next to the call if it is just an constant assignment
        // but AMD64 doesn't have the MOV mem,imm64 encoding, and we have no code to detect if the value can fit
        // into imm32 and hoist the src if it is not.
        argLinkSym->m_isConst = false;
        argLinkSym->m_isIntConst = false;
        argLinkSym->m_isTaggableIntConst = false;
        argInstr->Unlink();
        callInstr->InsertBefore(argInstr);
        argCount++;
    }

    // End of the chain: a RegOpnd whose single def is the StartCall.
    IR::RegOpnd * argLinkOpnd = src2->AsRegOpnd();
    StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(!argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    IR::Instr *startCallInstr = argLinkSym->m_instrDef;

    if (callInstr->m_opcode == Js::OpCode::NewScObject ||
        callInstr->m_opcode == Js::OpCode::NewScObjectSpread ||
        callInstr->m_opcode == Js::OpCode::NewScObjectLiteral ||
        callInstr->m_opcode == Js::OpCode::NewScObjArray ||
        callInstr->m_opcode == Js::OpCode::NewScObjArraySpread)
    {
        // These push an extra arg.
        argCount++;
    }

    AssertMsg(startCallInstr->m_opcode == Js::OpCode::StartCall ||
        startCallInstr->m_opcode == Js::OpCode::LoweredStartCall ||
        startCallInstr->m_opcode == Js::OpCode::StartCallAsmJsE ||
        startCallInstr->m_opcode == Js::OpCode::StartCallAsmJsI,
        "Problem with arg chain.");
    AssertMsg(startCallInstr->GetArgOutCount(/*getInterpreterArgOutCount*/ false) == argCount ||
        m_func->GetJnFunction()->GetIsAsmjsMode(),
        "ArgCount doesn't match StartCall count");
    //
    // Machine dependent lowering
    //
    if (callInstr->m_opcode != Js::OpCode::AsmJsCallI)
    {
        // Push argCount (packed with callFlags into the callInfo constant).
        IR::IntConstOpnd *argCountOpnd = Lowerer::MakeCallInfoConst(callFlags, argCount, m_func);
        if (callInfoOpndRef)
        {
            argCountOpnd->Use(m_func);
            *callInfoOpndRef = argCountOpnd;
        }
        Lowerer::InsertMove(this->GetArgSlotOpnd(1 + extraParams), argCountOpnd, callInstr);
    }
    startCallInstr = this->LowerStartCall(startCallInstr);

    const uint32 argSlots = argCount + 1 + extraParams; // + 1 for call flags
    this->m_func->m_argSlotsForFunctionsCalled = max(this->m_func->m_argSlotsForFunctionsCalled, argSlots);

    if (m_func->GetJnFunction()->GetIsAsmjsMode())
    {
        IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();
        GeneratePreCall(callInstr, functionObjOpnd, cfgInsertLoc->GetNextRealInstr());
    }
    return argSlots;
}
  461. void
  462. LowererMDArch::SetMaxArgSlots(Js::ArgSlot actualCount /*including this*/)
  463. {
  464. Js::ArgSlot offset = 3;//For function object & callInfo & this
  465. if (this->m_func->m_argSlotsForFunctionsCalled < (uint32) (actualCount + offset))
  466. {
  467. this->m_func->m_argSlotsForFunctionsCalled = (uint32)(actualCount + offset);
  468. }
  469. return;
  470. }
// Lowers a call whose argument count is a runtime operand (argsLength)
// rather than a JIT-time constant: positions the "this" ArgOut, computes
// and stores the callInfo, then emits the pre-call sequence and the call.
IR::Instr *
LowererMDArch::LowerCallIDynamic(IR::Instr *callInstr, IR::Instr*saveThisArgOutInstr, IR::Opnd *argsLength, ushort callFlags, IR::Instr * insertBeforeInstrForCFG)
{
    callInstr->InsertBefore(saveThisArgOutInstr); // Move this Argout next to call
    this->LoadDynamicArgument(saveThisArgOutInstr, 3); // "this" pointer is the 3rd argument

    // callInfo (goes in arg slot 2)
    if (callInstr->m_func->IsInlinee())
    {
        // Inlinee: count is a JIT-time constant and must match actualCount.
        Assert(argsLength->AsIntConstOpnd()->GetValue() == callInstr->m_func->actualCount);
        this->SetMaxArgSlots((Js::ArgSlot)callInstr->m_func->actualCount);
    }
    else
    {
        // Runtime count: add 1 -- presumably to include "this"; TODO confirm.
        callInstr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, argsLength, argsLength, IR::IntConstOpnd::New(1, TyInt8, this->m_func), this->m_func));
        this->SetMaxArgSlots(Js::InlineeCallInfo::MaxInlineeArgoutCount);
    }
    callInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, this->GetArgSlotOpnd(2), argsLength, this->m_func));

    IR::Opnd *funcObjOpnd = callInstr->UnlinkSrc1();
    GeneratePreCall(callInstr, funcObjOpnd, insertBeforeInstrForCFG);
    LowerCall(callInstr, 0);
    return callInstr;
}
// Emits a guard before callInstr ensuring the callee operand is a real
// object: if the operand may be a tagged value, an object test branches to
// a helper path that raises JSERR_NeedFunction. Under the debugger (where
// the runtime error can be ignored), also branches to continueAfterExLabel
// so execution resumes at a safe point.
void
LowererMDArch::GenerateFunctionObjectTest(IR::Instr * callInstr, IR::RegOpnd *functionObjOpnd, bool isHelper, IR::LabelInstr* continueAfterExLabel /* = nullptr */)
{
    AssertMsg(!m_func->IsJitInDebugMode() || continueAfterExLabel, "When jit is in debug mode, continueAfterExLabel must be provided otherwise continue after exception may cause AV.");
    IR::RegOpnd *functionObjRegOpnd = functionObjOpnd->AsRegOpnd();
    IR::Instr * insertBeforeInstr = callInstr;

    // Need check and error if we are calling a tagged int.
    if (!functionObjRegOpnd->IsNotTaggedValue())
    {
        IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        if (this->lowererMD->GenerateObjectTest(functionObjRegOpnd, callInstr, helperLabel))
        {
            // Non-tagged path jumps over the error sequence to callLabel;
            // the error is emitted between helperLabel and callLabel.
            IR::LabelInstr * callLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            IR::Instr* instr = IR::BranchInstr::New(Js::OpCode::JMP, callLabel, this->m_func);
            callInstr->InsertBefore(instr);
            callInstr->InsertBefore(helperLabel);
            callInstr->InsertBefore(callLabel);
            insertBeforeInstr = callLabel;

            lowererMD->m_lowerer->GenerateRuntimeError(insertBeforeInstr, JSERR_NeedFunction);

            if (continueAfterExLabel)
            {
                // Under debugger the RuntimeError (exception) can be ignored, generate branch to jmp to safe place
                // (which would normally be debugger bailout check).
                IR::BranchInstr* continueAfterEx = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueAfterExLabel, this->m_func);
                insertBeforeInstr->InsertBefore(continueAfterEx);
            }
        }
    }
}
void
LowererMDArch::GeneratePreCall(IR::Instr * callInstr, IR::Opnd *functionObjOpnd, IR::Instr * insertBeforeInstrForCFGCheck)
{
    // Emits everything that must precede the CALL: loads the callee entry point
    // out of the function object's type, optionally verifies it against CFG,
    // and stores the function object into the first argument slot.
    if (insertBeforeInstrForCFGCheck == nullptr)
    {
        insertBeforeInstrForCFGCheck = callInstr;
    }

    IR::RegOpnd * functionTypeRegOpnd = nullptr;
    IR::IndirOpnd * entryPointIndirOpnd = nullptr;
    if (callInstr->m_opcode == Js::OpCode::AsmJsCallI)
    {
        // Asm.js internal call: entry point is reached via
        // function->type->entryPointInfo->address.
        functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
        IR::IndirOpnd* functionInfoIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::RecyclableObject::GetTypeOffset(), TyMachReg, m_func);
        IR::Instr* instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionInfoIndirOpnd, m_func);
        insertBeforeInstrForCFGCheck->InsertBefore(instr);
        functionInfoIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, Js::ScriptFunctionType::GetEntryPointInfoOffset(), TyMachReg, m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionInfoIndirOpnd, m_func);
        insertBeforeInstrForCFGCheck->InsertBefore(instr);
        uint32 entryPointOffset = Js::ProxyEntryPointInfo::GetAddressOffset();
        entryPointIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, entryPointOffset, TyMachReg, m_func);
    }
    else
    {
        // For calls to fixed functions we load the function's type directly from the known (hard-coded) function object address.
        // For other calls, we need to load it from the function object stored in a register operand.
        if (functionObjOpnd->IsAddrOpnd() && functionObjOpnd->AsAddrOpnd()->m_isFunction)
        {
            functionTypeRegOpnd = this->lowererMD->m_lowerer->GenerateFunctionTypeFromFixedFunctionObject(insertBeforeInstrForCFGCheck, functionObjOpnd);
        }
        else if (functionObjOpnd->IsRegOpnd())
        {
            AssertMsg(functionObjOpnd->AsRegOpnd()->m_sym->IsStackSym(), "Expected call target to be a stack symbol.");
            functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
            // functionTypeRegOpnd(RAX) = MOV function->type
            {
                IR::IndirOpnd * functionTypeIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
                    Js::DynamicObject::GetOffsetOfType(), TyMachReg, m_func);
                IR::Instr * mov = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionTypeIndirOpnd, m_func);
                insertBeforeInstrForCFGCheck->InsertBefore(mov);
            }
        }
        else
        {
            AnalysisAssertMsg(false, "Unexpected call target operand type.");
        }
        // entryPointRegOpnd(RAX) = MOV type->entryPoint
        entryPointIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, Js::Type::GetOffsetOfEntryPoint(), TyMachPtr, m_func);
    }

    // Reuse the type register to hold the entry point, marking it a call arg.
    IR::RegOpnd *entryPointRegOpnd = functionTypeRegOpnd;
    entryPointRegOpnd->m_isCallArg = true;
    IR::Instr *mov = IR::Instr::New(Js::OpCode::MOV, entryPointRegOpnd, entryPointIndirOpnd, m_func);
    insertBeforeInstrForCFGCheck->InsertBefore(mov);

    // entryPointRegOpnd(RAX) = CALL entryPointRegOpnd(RAX)
    callInstr->SetSrc1(entryPointRegOpnd);

#if defined(_CONTROL_FLOW_GUARD)
    // verify that the call target is valid (CFG Check)
    if (!PHASE_OFF(Js::CFGInJitPhase, this->m_func))
    {
        this->lowererMD->GenerateCFGCheck(entryPointRegOpnd, insertBeforeInstrForCFGCheck);
    }
#endif

    // Setup the first call argument - pointer to the function being called.
    {
        IR::Instr *mov = IR::Instr::New(Js::OpCode::MOV, GetArgSlotOpnd(1), functionObjOpnd, m_func);
        callInstr->InsertBefore(mov);
    }
}
IR::Instr *
LowererMDArch::LowerCallI(IR::Instr * callInstr, ushort callFlags, bool isHelper, IR::Instr * insertBeforeInstrForCFG)
{
    // Lowers a JavaScript indirect call: verifies the target is a function
    // object (unless already verified), emits the pre-call sequence, lowers the
    // out-args, then the CALL itself. Under SimpleJit profiling, additionally
    // wraps the call with the profiling helper.
    AssertMsg(this->helperCallArgsCount == 0, "We don't support nested helper calls yet");

    IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();
    IR::Instr * insertBeforeInstrForCFGCheck = callInstr;

    // If this is a call for new, we already pass the function operand through NewScObject,
    // which checks if the function operand is a real function or not, don't need to add a check again
    // If this is a call to a fixed function, we've already verified that the target is, indeed, a function.
    if (callInstr->m_opcode != Js::OpCode::CallIFixed && !(callFlags & Js::CallFlags_New))
    {
        Assert(functionObjOpnd->IsRegOpnd());
        IR::LabelInstr* continueAfterExLabel = Lowerer::InsertContinueAfterExceptionLabelForDebugger(m_func, callInstr, isHelper);
        GenerateFunctionObjectTest(callInstr, functionObjOpnd->AsRegOpnd(), isHelper, continueAfterExLabel);
    }
    else if (insertBeforeInstrForCFG != nullptr)
    {
        RegNum dstReg = insertBeforeInstrForCFG->GetDst()->AsRegOpnd()->GetReg();
        AssertMsg(dstReg == RegR8 || dstReg == RegR9, "NewScObject should insert the first Argument in R8/R9 only based on Spread call or not.");
        insertBeforeInstrForCFGCheck = insertBeforeInstrForCFG;
    }

    GeneratePreCall(callInstr, functionObjOpnd, insertBeforeInstrForCFGCheck);

    // We need to get the calculated CallInfo in SimpleJit because that doesn't include any changes for stack alignment
    IR::IntConstOpnd *callInfo;
    int32 argCount = LowerCallArgs(callInstr, callFlags, 1, &callInfo);

    // Capture the destination before LowerCall sinks it into the return register.
    IR::Opnd *const finalDst = callInstr->GetDst();

    // x64 keeps track of argCount for us, so pass just an arbitrary value there
    IR::Instr* ret = this->LowerCall(callInstr, argCount);

    IR::AutoReuseOpnd autoReuseSavedFunctionObjOpnd;
    if (callInstr->IsJitProfilingInstr())
    {
        Assert(callInstr->m_func->IsSimpleJit());
        Assert(!Js::FunctionBody::IsNewSimpleJit());

        if(finalDst &&
            finalDst->IsRegOpnd() &&
            functionObjOpnd->IsRegOpnd() &&
            finalDst->AsRegOpnd()->m_sym == functionObjOpnd->AsRegOpnd()->m_sym)
        {
            // The function object sym is going to be overwritten, so save it in a temp for profiling
            IR::RegOpnd *const savedFunctionObjOpnd = IR::RegOpnd::New(functionObjOpnd->GetType(), callInstr->m_func);
            autoReuseSavedFunctionObjOpnd.Initialize(savedFunctionObjOpnd, callInstr->m_func);
            Lowerer::InsertMove(savedFunctionObjOpnd, functionObjOpnd, callInstr->m_next);
            functionObjOpnd = savedFunctionObjOpnd;
        }

        auto instr = callInstr->AsJitProfilingInstr();
        ret = this->lowererMD->m_lowerer->GenerateCallProfiling(
            instr->profileId,
            instr->inlineCacheIndex,
            instr->GetDst(),
            functionObjOpnd,
            callInfo,
            instr->isProfiledReturnCall,
            callInstr,
            ret);
    }
    return ret;
}
  646. IR::Instr *
  647. LowererMDArch::LowerCallPut(IR::Instr *callInstr)
  648. {
  649. // Note: what we have to do here is call a helper with the Jscript calling convention,
  650. // so we need to factor the lowering of arguments out of the CallI expansion.
  651. AssertMsg(FALSE, "TODO: LowerCallPut not implemented");
  652. return nullptr;
  653. }
IR::Instr *
LowererMDArch::LowerCall(IR::Instr * callInstr, uint32 argCount)
{
    // Final lowering of a call instruction: sinks the destination into the
    // ABI return register, places any queued helper-call arguments into their
    // slots, and materializes a 64-bit helper address into RAX when needed.
    // Note: argCount is unused on amd64 (see callers — arbitrary values are
    // passed); out-arg bookkeeping is tracked separately.
    IR::Instr *retInstr = callInstr;
    callInstr->m_opcode = Js::OpCode::CALL;

    if (callInstr->GetDst())
    {
        IR::Opnd * dstOpnd;
        this->lowererMD->ForceDstToReg(callInstr);
        dstOpnd = callInstr->GetDst();
        IRType dstType = dstOpnd->GetType();
        Js::OpCode assignOp = GetAssignOp(dstType);

        if (callInstr->GetSrc1()->IsHelperCallOpnd())
        {
            // Truncate the result of a conversion to 32-bit int, because the C++ code doesn't.
            IR::HelperCallOpnd *helperOpnd = callInstr->GetSrc1()->AsHelperCallOpnd();
            if (helperOpnd->m_fnHelper == IR::HelperConv_ToInt32 ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToInt32_Full ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToInt32Core ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToUInt32 ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToUInt32_Full ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToUInt32Core)
            {
                assignOp = Js::OpCode::MOV_TRUNC;
            }
        }

        // Split "dst = CALL ..." into "reg = CALL ...; dst = MOV reg", pinning
        // reg to the return register for the destination's type.
        IR::Instr * movInstr = callInstr->SinkDst(assignOp);
        RegNum reg = GetRegReturn(dstType);
        callInstr->GetDst()->AsRegOpnd()->SetReg(reg);
        movInstr->GetSrc1()->AsRegOpnd()->SetReg(reg);
        retInstr = movInstr;
    }

    //
    // assign the arguments to appropriate positions
    //
    AssertMsg(this->helperCallArgsCount >= 0, "Fatal. helper call arguments ought to be positive");
    AssertMsg(this->helperCallArgsCount < 255, "Too many helper call arguments");

    uint16 argsLeft = static_cast<uint16>(this->helperCallArgsCount);
    while (argsLeft > 0)
    {
        // The i-th queued argument (index helperCallArgsCount - argsLeft) is
        // placed into 1-based arg slot 'argsLeft'.
        IR::Opnd * helperSrc = this->helperCallArgs[this->helperCallArgsCount - argsLeft];
        StackSym * helperSym = m_func->m_symTable->GetArgSlotSym(static_cast<uint16>(argsLeft));
        helperSym->m_type = helperSrc->GetType();
        Lowerer::InsertMove(
            this->GetArgSlotOpnd(argsLeft, helperSym),
            helperSrc,
            callInstr);
        --argsLeft;
    }

    //
    // load the address into a register because we cannot directly access 64 bit constants
    // in CALL instruction. Non helper call methods will already be accessed indirectly.
    //
    // Skip this for bailout calls. The register allocator will lower that as appropriate, without affecting spill choices.
    //
    // Also skip this for relocatable helper calls. These will be turned into indirect
    // calls in lower.
    if (callInstr->GetSrc1()->IsHelperCallOpnd() && !callInstr->HasBailOutInfo())
    {
        IR::RegOpnd *targetOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr,m_func), RegRAX, TyMachPtr, this->m_func);
        IR::Instr *movInstr = IR::Instr::New(Js::OpCode::MOV, targetOpnd, callInstr->GetSrc1(), this->m_func);
        targetOpnd->m_isCallArg = true;
        callInstr->UnlinkSrc1();
        callInstr->SetSrc1(targetOpnd);
        callInstr->InsertBefore(movInstr);
    }

    //
    // Reset the call
    //
    this->m_func->m_argSlotsForFunctionsCalled = max(this->m_func->m_argSlotsForFunctionsCalled , (uint32)this->helperCallArgsCount);
    this->helperCallArgsCount = 0;

    return retInstr;
}
  727. //
  728. // Returns the opnd where the corresponding argument would have been stored. On amd64,
  729. // the first 4 arguments go in registers and the rest are on stack.
  730. //
  731. IR::Opnd *
  732. LowererMDArch::GetArgSlotOpnd(uint16 index, StackSym * argSym)
  733. {
  734. Assert(index != 0);
  735. uint16 argPosition = index;
  736. // Without SIMD the index is the Var offset and is also the argument index. Since each arg = 1 Var.
  737. // With SIMD, args are of variable length and we need to the argument position in the args list.
  738. if (m_func->GetScriptContext()->GetConfig()->IsSimdjsEnabled() &&
  739. m_func->GetJnFunction()->GetIsAsmJsFunction() &&
  740. argSym != nullptr &&
  741. argSym->m_argPosition != 0)
  742. {
  743. argPosition = (uint16)argSym->m_argPosition;
  744. }
  745. IR::Opnd *argSlotOpnd = nullptr;
  746. if (argSym != nullptr)
  747. {
  748. argSym->m_offset = (index - 1) * MachPtr;
  749. argSym->m_allocated = true;
  750. }
  751. IRType type = argSym ? argSym->GetType() : TyMachReg;
  752. if (argPosition <= 4)
  753. {
  754. RegNum reg = RegNOREG;
  755. if (IRType_IsFloat(type) || IRType_IsSimd128(type))
  756. {
  757. switch (argPosition)
  758. {
  759. case 4:
  760. reg = RegXMM3;
  761. break;
  762. case 3:
  763. reg = RegXMM2;
  764. break;
  765. case 2:
  766. reg = RegXMM1;
  767. break;
  768. case 1:
  769. reg = RegXMM0;
  770. break;
  771. default:
  772. Assume(UNREACHED);
  773. }
  774. }
  775. else
  776. {
  777. switch (argPosition)
  778. {
  779. case 4:
  780. reg = RegR9;
  781. break;
  782. case 3:
  783. reg = RegR8;
  784. break;
  785. case 2:
  786. reg = RegRDX;
  787. break;
  788. case 1:
  789. reg = RegRCX;
  790. break;
  791. default:
  792. Assume(UNREACHED);
  793. }
  794. }
  795. IR::RegOpnd *regOpnd = IR::RegOpnd::New(argSym, reg, type, m_func);
  796. regOpnd->m_isCallArg = true;
  797. argSlotOpnd = regOpnd;
  798. }
  799. else
  800. {
  801. if (argSym == nullptr)
  802. {
  803. argSym = this->m_func->m_symTable->GetArgSlotSym(static_cast<uint16>(index));
  804. }
  805. //
  806. // More than 4 arguments. Assign them to appropriate slots
  807. //
  808. argSlotOpnd = IR::SymOpnd::New(argSym, type, this->m_func);
  809. }
  810. return argSlotOpnd;
  811. }
  812. IR::Instr *
  813. LowererMDArch::LowerAsmJsCallE(IR::Instr *callInstr)
  814. {
  815. IR::IntConstOpnd *callInfo;
  816. int32 argCount = this->LowerCallArgs(callInstr, Js::CallFlags_Value, 1, &callInfo);
  817. IR::Instr* ret = this->LowerCall(callInstr, argCount);
  818. return ret;
  819. }
  820. IR::Instr *
  821. LowererMDArch::LowerAsmJsCallI(IR::Instr * callInstr)
  822. {
  823. int32 argCount = this->LowerCallArgs(callInstr, Js::CallFlags_Value, 0);
  824. IR::Instr* ret = this->LowerCall(callInstr, argCount);
  825. return ret;
  826. }
IR::Instr*
LowererMDArch::LowerAsmJsLdElemHelper(IR::Instr * instr, bool isSimdLoad /*= false*/, bool checkEndOffset /*= false*/)
{
    // Emits bounds-check scaffolding for an asm.js typed-array load. Returns
    // the instruction after which lowering should continue: the done label when
    // a bounds check was emitted, otherwise the load instruction itself.
    IR::Instr* done;
    IR::Opnd * src1 = instr->UnlinkSrc1();
    IR::RegOpnd * indexOpnd = src1->AsIndirOpnd()->GetIndexOpnd();
    const uint8 dataWidth = instr->dataWidth;

    Assert(isSimdLoad == false || dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);

    // For x64, bound checks are required only for SIMD loads.
    if (isSimdLoad)
    {
        IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
        IR::LabelInstr * loadLabel = Lowerer::InsertLabel(false, instr);
        IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);
        IR::Opnd *cmpOpnd;
        // Compare either the index register or, for a constant-address access,
        // the indir's constant offset.
        if (indexOpnd)
        {
            cmpOpnd = indexOpnd;
        }
        else
        {
            cmpOpnd = IR::IntConstOpnd::New(src1->AsIndirOpnd()->GetOffset(), TyUint32, m_func);
        }

        // if dataWidth != byte per element, we need to check end offset
        if (checkEndOffset)
        {
            IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
            // MOV tmp, cmpOpnd
            Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
            // ADD tmp, dataWidth — the access must END within bounds.
            Lowerer::InsertAdd(false, tmp, tmp, IR::IntConstOpnd::New((uint32)dataWidth, TyInt8, m_func, true), helperLabel);
            // CMP tmp, size
            // JG $helper
            lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
        }
        else
        {
            // CMP index, size ; JGE $helper
            lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
        }

        // In-bounds path skips the helper block; the helper block raises the
        // range error.
        Lowerer::InsertBranch(Js::OpCode::Br, loadLabel, helperLabel);
        lowererMD->m_lowerer->GenerateRuntimeError(loadLabel, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
        Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, loadLabel);
        done = doneLabel;
    }
    else
    {
        Assert(!instr->GetSrc2());
        done = instr;
    }
    return done;
}
IR::Instr*
LowererMDArch::LowerAsmJsStElemHelper(IR::Instr * instr, bool isSimdStore /*= false*/, bool checkEndOffset /*= false*/)
{
    // Emits bounds-check scaffolding for an asm.js typed-array store. Mirror of
    // LowerAsmJsLdElemHelper. Returns the instruction after which lowering
    // should continue: the done label when a bounds check was emitted,
    // otherwise the store instruction itself.
    IR::Instr* done;
    IR::Opnd * dst = instr->UnlinkDst();
    IR::RegOpnd * indexOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
    const uint8 dataWidth = instr->dataWidth;

    Assert(isSimdStore == false || dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);

    // For x64, bound checks are required only for SIMD stores.
    if (isSimdStore)
    {
        IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
        IR::LabelInstr * storeLabel = Lowerer::InsertLabel(false, instr);
        IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);
        IR::Opnd * cmpOpnd;
        // Compare either the index register or, for a constant-address access,
        // the indir's constant offset.
        if (indexOpnd)
        {
            cmpOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
        }
        else
        {
            cmpOpnd = IR::IntConstOpnd::New(dst->AsIndirOpnd()->GetOffset(), TyUint32, m_func);
        }

        // if dataWidth != byte per element, we need to check end offset
        if (checkEndOffset)
        {
            IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
            // MOV tmp, cmpOpnd
            Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
            // ADD tmp, dataWidth — the access must END within bounds.
            Lowerer::InsertAdd(false, tmp, tmp, IR::IntConstOpnd::New((uint32)dataWidth, TyInt8, m_func, true), helperLabel);
            // CMP tmp, size
            // JG $helper
            lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
        }
        else
        {
            // CMP index, size ; JGE $helper
            lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
        }

        // In-bounds path skips the helper block; the helper block raises the
        // range error.
        Lowerer::InsertBranch(Js::OpCode::Br, storeLabel, helperLabel);
        lowererMD->m_lowerer->GenerateRuntimeError(storeLabel, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
        Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, storeLabel);
        done = doneLabel;
    }
    else
    {
        Assert(!instr->GetSrc2());
        done = instr;
    }
    return done;
}
  929. ///----------------------------------------------------------------------------
  930. ///
  931. /// LowererMDArch::LowerStartCall
  932. ///
  933. ///
  934. ///----------------------------------------------------------------------------
  935. IR::Instr *
  936. LowererMDArch::LowerStartCall(IR::Instr * startCallInstr)
  937. {
  938. startCallInstr->m_opcode = Js::OpCode::LoweredStartCall;
  939. return startCallInstr;
  940. }
  941. ///----------------------------------------------------------------------------
  942. ///
  943. /// LowererMDArch::LoadHelperArgument
  944. ///
  945. /// Assign register or push on stack as per AMD64 calling convention
  946. ///
  947. ///----------------------------------------------------------------------------
  948. IR::Instr *
  949. LowererMDArch::LoadHelperArgument(IR::Instr *instr, IR::Opnd *opndArg)
  950. {
  951. IR::Opnd *destOpnd;
  952. IR::Instr *instrToReturn;
  953. if(opndArg->IsImmediateOpnd())
  954. {
  955. destOpnd = opndArg;
  956. instrToReturn = instr;
  957. }
  958. else
  959. {
  960. destOpnd = IR::RegOpnd::New(opndArg->GetType(), this->m_func);
  961. instrToReturn = instr->m_prev;
  962. Lowerer::InsertMove(destOpnd, opndArg, instr);
  963. instrToReturn = instrToReturn->m_next;
  964. }
  965. helperCallArgs[helperCallArgsCount++] = destOpnd;
  966. AssertMsg(helperCallArgsCount < LowererMDArch::MaxArgumentsToHelper,
  967. "We do not yet support any no. of arguments to the helper");
  968. return instrToReturn;
  969. }
  970. IR::Instr *
  971. LowererMDArch::LoadDynamicArgument(IR::Instr *instr, uint argNumber)
  972. {
  973. Assert(instr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
  974. Assert(instr->GetSrc2() == nullptr);
  975. instr->m_opcode = Js::OpCode::MOV;
  976. IR::Opnd* dst = GetArgSlotOpnd((Js::ArgSlot) argNumber);
  977. instr->SetDst(dst);
  978. if (!dst->IsRegOpnd())
  979. {
  980. //TODO: Move it to legalizer.
  981. IR::RegOpnd *tempOpnd = IR::RegOpnd::New(TyMachReg, instr->m_func);
  982. instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, tempOpnd, instr->GetSrc1(), instr->m_func));
  983. instr->ReplaceSrc1(tempOpnd);
  984. }
  985. return instr;
  986. }
IR::Instr *
LowererMDArch::LoadDynamicArgumentUsingLength(IR::Instr *instr)
{
    // Stores an actual at a runtime-computed argument position: the slot index
    // comes from src2 (a length value) rather than a compile-time constant.
    Assert(instr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
    IR::RegOpnd* src2 = instr->UnlinkSrc2()->AsRegOpnd();

    // Copy the length into a temp we can adjust.
    IR::Instr*mov = IR::Instr::New(Js::OpCode::MOV, IR::RegOpnd::New(TyInt32, this->m_func), src2, this->m_func);
    instr->InsertBefore(mov);
    // We need to store the nth actual; its stack location is past the function
    // object, callInfo & this pointer — hence the +3.
    instr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, mov->GetDst(), mov->GetDst(), IR::IntConstOpnd::New(3, TyInt8, this->m_func), this->m_func));
    IR::RegOpnd *stackPointer = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);
    // Target slot: [rsp + computedIndex * defaultScale].
    IR::IndirOpnd *actualsLocation = IR::IndirOpnd::New(stackPointer, mov->GetDst()->AsRegOpnd(), GetDefaultIndirScale(), TyMachReg, this->m_func);
    instr->SetDst(actualsLocation);
    instr->m_opcode = Js::OpCode::MOV;
    return instr;
}
  1002. IR::Instr *
  1003. LowererMDArch::LoadDoubleHelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
  1004. {
  1005. Assert(opndArg->IsFloat64());
  1006. return LoadHelperArgument(instrInsert, opndArg);
  1007. }
//
// Emits the code to allocate 'size' bytes of space on the stack. For sizes up to
// PAGE_SIZE this just emits "sub rsp, size"; otherwise it calls _chkstk.
//
void
LowererMDArch::GenerateStackAllocation(IR::Instr *instr, uint32 size)
{
    // Emits stack-allocation code AFTER 'instr': a plain "SUB RSP, size" when
    // the size is within one page, otherwise a _chkstk call sequence.
    Assert(size > 0);

    IR::RegOpnd * rspOpnd = IR::RegOpnd::New(nullptr, RegRSP, TyMachReg, this->m_func);
    //review: size should fit in 32bits
    IR::IntConstOpnd * stackSizeOpnd = IR::IntConstOpnd::New(size, TyInt32, this->m_func);

    if (size <= PAGESIZE)
    {
        // Generate SUB RSP, stackSize
        IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB,
            rspOpnd, rspOpnd, stackSizeOpnd, this->m_func);
        instr->InsertAfter(subInstr);
    }
    else
    {
        // Generate _chkstk call
        //
        // REVIEW: Call to helper functions assume the address of the variable to be present in
        // RAX. But _chkstk method accepts argument in RAX. Hence handling this one manually.
        // Fix this later when CALLHELPER loses its dependency on RAX.
        //
        // Every piece below is inserted with InsertAfter(instr), so the emitted
        // order is the REVERSE of construction order:
        //   MOV RAX, size ; MOV RCX, _chkstk ; CALL ; SUB RSP, size
        IR::RegOpnd *raxOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, this->m_func);
        IR::RegOpnd *rcxOpnd = IR::RegOpnd::New(nullptr, RegRCX, TyMachReg, this->m_func);
        IR::RegOpnd *rspOpnd = IR::RegOpnd::New(nullptr, RegRSP, TyMachReg, this->m_func);
        IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB, rspOpnd, rspOpnd, stackSizeOpnd, this->m_func);
        instr->InsertAfter(subInstr);
        // Leave off the src until we've calculated it below.
        IR::Instr * callInstr = IR::Instr::New(Js::OpCode::Call, raxOpnd, rcxOpnd, this->m_func);
        instr->InsertAfter(callInstr);
        this->LowerCall(callInstr, 0);
        {
            IR::Instr *movHelperAddrInstr = IR::Instr::New(
                Js::OpCode::MOV,
                rcxOpnd,
                IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, this->m_func),
                this->m_func);
            instr->InsertAfter(movHelperAddrInstr);
        }
        // _chkstk takes the allocation size in RAX.
        LowererMD::CreateAssign(raxOpnd, stackSizeOpnd, instr->m_next);
    }
}
  1054. void
  1055. LowererMDArch::MovArgFromReg2Stack(IR::Instr * instr, RegNum reg, uint16 slotNumber, IRType type)
  1056. {
  1057. StackSym * slotSym = this->m_func->m_symTable->GetArgSlotSym(slotNumber + 1);
  1058. slotSym->m_type = type;
  1059. IR::SymOpnd * dst = IR::SymOpnd::New(slotSym, type, this->m_func);
  1060. IR::RegOpnd * src = IR::RegOpnd::New(nullptr, reg, type, this->m_func);
  1061. IR::Instr * movInstr = IR::Instr::New(GetAssignOp(type), dst, src, this->m_func);
  1062. instr->InsertAfter(movInstr);
  1063. }
  1064. ///----------------------------------------------------------------------------
  1065. ///
  1066. /// LowererMDArch::LowerEntryInstr
  1067. ///
  1068. /// Emit prolog.
  1069. ///
  1070. ///----------------------------------------------------------------------------
  1071. IR::Instr *
  1072. LowererMDArch::LowerEntryInstr(IR::EntryInstr * entryInstr)
  1073. {
  1074. /*
  1075. * push rbp
  1076. * mov rbp, rsp
  1077. * sub rsp, localVariablesHeight + floatCalleeSavedRegsSize
  1078. * movsdx qword ptr [rsp + 0], xmm6 ------\
  1079. * movsdx qword ptr [rsp + 8], xmm7 |
  1080. * ... |
  1081. * movsdx qword ptr [rsp + (N * 8)], xmmN |- Callee saved registers.
  1082. * push rsi |
  1083. * ... |
  1084. * push rbx ------/
  1085. * sub rsp, ArgumentsBacking
  1086. */
  1087. uint savedRegSize = 0;
  1088. IR::Instr *firstPrologInstr = nullptr;
  1089. IR::Instr *lastPrologInstr = nullptr;
  1090. // PUSH used callee-saved registers.
  1091. IR::Instr *secondInstr = entryInstr->m_next;
  1092. AssertMsg(secondInstr, "Instruction chain broken.");
  1093. IR::RegOpnd *stackPointer = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);
  1094. unsigned xmmOffset = 0;
  1095. // PDATA doesn't seem to like two consecutive "SUB RSP, size" instructions. Temporarily save and
  1096. // restore RBX always so that the pattern doesn't occur in the prolog.
  1097. for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg + 1))
  1098. {
  1099. if (LinearScan::IsCalleeSaved(reg) && (this->m_func->HasTry() || this->m_func->m_regsUsed.Test(reg)))
  1100. {
  1101. IRType type = RegTypes[reg];
  1102. IR::RegOpnd *regOpnd = IR::RegOpnd::New(nullptr, reg, type, this->m_func);
  1103. if (type == TyFloat64)
  1104. {
  1105. IR::Instr *saveInstr = IR::Instr::New(Js::OpCode::MOVAPS,
  1106. IR::IndirOpnd::New(stackPointer,
  1107. xmmOffset,
  1108. type,
  1109. this->m_func),
  1110. regOpnd,
  1111. this->m_func);
  1112. xmmOffset += (MachDouble * 2);
  1113. entryInstr->InsertAfter(saveInstr);
  1114. m_func->m_prologEncoder.RecordXmmRegSave();
  1115. }
  1116. else
  1117. {
  1118. Assert(type == TyInt64);
  1119. IR::Instr *pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
  1120. pushInstr->SetSrc1(regOpnd);
  1121. entryInstr->InsertAfter(pushInstr);
  1122. m_func->m_prologEncoder.RecordNonVolRegSave();
  1123. savedRegSize += MachPtr;
  1124. }
  1125. }
  1126. }
  1127. //
  1128. // Now that we know the exact stack size, lets fix it for alignment
  1129. // The stack on entry would be aligned. VC++ recommends that the stack
  1130. // should always be 16 byte aligned.
  1131. //
  1132. uint32 argSlotsForFunctionsCalled = this->m_func->m_argSlotsForFunctionsCalled;
  1133. // Stack is always reserved for at least 4 parameters.
  1134. if (argSlotsForFunctionsCalled < 4)
  1135. argSlotsForFunctionsCalled = 4;
  1136. uint32 stackArgsSize = MachPtr * (argSlotsForFunctionsCalled + 1);
  1137. this->m_func->m_localStackHeight = Math::Align<int32>(this->m_func->m_localStackHeight, 8);
  1138. // Allocate the inlined arg out stack in the locals. Allocate an additional slot so that
  1139. // we can unconditionally clear the first slot past the current frame.
  1140. this->m_func->m_localStackHeight += ((this->m_func->GetMaxInlineeArgOutCount() + 1) * MachPtr);
  1141. uint32 stackLocalsSize = this->m_func->m_localStackHeight;
  1142. if(xmmOffset != 0)
  1143. {
  1144. // Xmm registers need to be saved to 16-byte-aligned addresses. The stack locals size is aligned here and the total
  1145. // size will be aligned below, which guarantees that the offset from rsp will be 16-byte-aligned.
  1146. stackLocalsSize = ::Math::Align(stackLocalsSize + xmmOffset, static_cast<uint32>(MachDouble * 2));
  1147. }
  1148. uint32 totalStackSize = stackLocalsSize +
  1149. stackArgsSize +
  1150. savedRegSize;
  1151. AssertMsg(0 == (totalStackSize % 8), "Stack should always be 8 byte aligned");
  1152. uint32 alignmentPadding = (totalStackSize % 16) ? MachPtr : 0;
  1153. stackArgsSize += alignmentPadding;
  1154. Assert(
  1155. xmmOffset == 0 ||
  1156. ::Math::Align(stackArgsSize + savedRegSize, static_cast<uint32>(MachDouble * 2)) == stackArgsSize + savedRegSize);
  1157. totalStackSize += alignmentPadding;
  1158. if(totalStackSize > (1u << 20)) // 1 MB
  1159. {
  1160. // Total stack size is > 1 MB, let's just bail. There are things that need to be changed to allow using large stack
  1161. // sizes, for instance in the unwind info, the offset to saved xmm registers can be (1 MB - 16) at most for the op-code
  1162. // we're currently using (UWOP_SAVE_XMM128). To support larger offsets, we need to use a FAR version of the op-code.
  1163. throw Js::OperationAbortedException();
  1164. }
  1165. if (this->m_func->GetMaxInlineeArgOutCount())
  1166. {
  1167. this->m_func->m_workItem->GetFunctionBody()->SetFrameHeight(this->m_func->m_workItem->GetEntryPoint(), this->m_func->m_localStackHeight);
  1168. }
  1169. //
  1170. // This is the last instruction so should have been emitted before, register saves.
  1171. // But we did not have 'savedRegSize' by then. So we saved secondInstr. We now insert w.r.t that
  1172. // instruction.
  1173. //
  1174. this->m_func->SetArgsSize(stackArgsSize);
  1175. this->m_func->SetSavedRegSize(savedRegSize);
  1176. this->m_func->SetSpillSize(stackLocalsSize);
  1177. if (secondInstr == entryInstr->m_next)
  1178. {
  1179. // There is no register save at all, just combine the stack allocation
  1180. uint combineStackAllocationSize = stackArgsSize + stackLocalsSize;
  1181. this->GenerateStackAllocation(secondInstr->m_prev, combineStackAllocationSize);
  1182. m_func->m_prologEncoder.RecordAlloca(combineStackAllocationSize);
  1183. }
  1184. else
  1185. {
  1186. this->GenerateStackAllocation(secondInstr->m_prev, stackArgsSize);
  1187. m_func->m_prologEncoder.RecordAlloca(stackArgsSize);
  1188. // Allocate frame.
  1189. if (stackLocalsSize)
  1190. {
  1191. this->GenerateStackAllocation(entryInstr, stackLocalsSize);
  1192. m_func->m_prologEncoder.RecordAlloca(stackLocalsSize);
  1193. }
  1194. }
  1195. lastPrologInstr = secondInstr->m_prev;
  1196. Assert(lastPrologInstr != entryInstr);
  1197. // Zero-initialize dedicated arguments slot.
  1198. IR::Instr *movRax0 = nullptr;
  1199. IR::Opnd *raxOpnd = nullptr;
  1200. if (this->m_func->HasArgumentSlot())
  1201. {
  1202. // TODO: Support mov [rbp - n], IMM64
  1203. raxOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyUint32, this->m_func);
  1204. movRax0 = IR::Instr::New(Js::OpCode::XOR, raxOpnd, raxOpnd, raxOpnd, this->m_func);
  1205. secondInstr->m_prev->InsertAfter(movRax0);
  1206. IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
  1207. IR::Instr *movNullInstr = IR::Instr::New(Js::OpCode::MOV, opnd, raxOpnd->UseWithNewType(TyMachReg, this->m_func), this->m_func);
  1208. secondInstr->m_prev->InsertAfter(movNullInstr);
  1209. }
  1210. // Zero initialize the first inlinee frames argc.
  1211. if (this->m_func->GetMaxInlineeArgOutCount())
  1212. {
  1213. if(!movRax0)
  1214. {
  1215. raxOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyUint32, this->m_func);
  1216. movRax0 = IR::Instr::New(Js::OpCode::XOR, raxOpnd, raxOpnd, raxOpnd, this->m_func);
  1217. secondInstr->m_prev->InsertAfter(movRax0);
  1218. }
  1219. StackSym *sym = this->m_func->m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
  1220. sym->m_isInlinedArgSlot = true;
  1221. sym->m_offset = 0;
  1222. IR::Opnd *dst = IR::SymOpnd::New(sym, 0, TyMachReg, this->m_func);
  1223. secondInstr->m_prev->InsertAfter(IR::Instr::New(Js::OpCode::MOV,
  1224. dst,
  1225. raxOpnd->UseWithNewType(TyMachReg, this->m_func),
  1226. this->m_func));
  1227. }
  1228. // Generate MOV RBP, RSP
  1229. IR::RegOpnd * rbpOpnd = IR::RegOpnd::New(nullptr, RegRBP, TyMachReg, this->m_func);
  1230. IR::RegOpnd * rspOpnd = IR::RegOpnd::New(nullptr, RegRSP, TyMachReg, this->m_func);
  1231. IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, rbpOpnd, rspOpnd, this->m_func);
  1232. entryInstr->InsertAfter(movInstr);
  1233. // Generate PUSH RBP
  1234. IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
  1235. pushInstr->SetSrc1(rbpOpnd);
  1236. entryInstr->InsertAfter(pushInstr);
  1237. m_func->m_prologEncoder.RecordNonVolRegSave();
  1238. firstPrologInstr = pushInstr;
  1239. //
  1240. // Insert pragmas that tell the prolog encoder the extent of the prolog.
  1241. //
  1242. firstPrologInstr->InsertBefore(IR::PragmaInstr::New(Js::OpCode::PrologStart, 0, m_func));
  1243. lastPrologInstr->InsertAfter(IR::PragmaInstr::New(Js::OpCode::PrologEnd, 0, m_func));
  1244. //
  1245. // Now store all the arguments in the register in the stack slots
  1246. //
  1247. this->MovArgFromReg2Stack(entryInstr, RegRCX, 1);
  1248. if (m_func->GetJnFunction()->GetIsAsmjsMode() && !m_func->IsLoopBody())
  1249. {
  1250. uint16 offset = 2;
  1251. for (uint16 i = 0; i < m_func->GetJnFunction()->GetAsmJsFunctionInfo()->GetArgCount() && i < 3; i++)
  1252. {
  1253. switch (m_func->GetJnFunction()->GetAsmJsFunctionInfo()->GetArgType(i).which())
  1254. {
  1255. case Js::AsmJsVarType::Int:
  1256. this->MovArgFromReg2Stack(entryInstr, i == 0 ? RegRDX : i == 1 ? RegR8 : RegR9, offset, TyInt32);
  1257. offset++;
  1258. break;
  1259. case Js::AsmJsVarType::Float:
  1260. // registers we need are contiguous, so calculate it from XMM1
  1261. this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TyFloat32);
  1262. offset++;
  1263. break;
  1264. case Js::AsmJsVarType::Double:
  1265. this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TyFloat64);
  1266. offset++;
  1267. break;
  1268. case Js::AsmJsVarType::Float32x4:
  1269. this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128F4);
  1270. offset += 2;
  1271. break;
  1272. case Js::AsmJsVarType::Int32x4:
  1273. this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128I4);
  1274. offset += 2;
  1275. break;
  1276. case Js::AsmJsVarType::Float64x2:
  1277. this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128D2);
  1278. offset += 2;
  1279. break;
  1280. default:
  1281. Assume(UNREACHED);
  1282. }
  1283. }
  1284. }
  1285. else
  1286. {
  1287. this->MovArgFromReg2Stack(entryInstr, RegRDX, 2);
  1288. this->MovArgFromReg2Stack(entryInstr, RegR8, 3);
  1289. this->MovArgFromReg2Stack(entryInstr, RegR9, 4);
  1290. }
  1291. IntConstType frameSize = Js::Constants::MinStackJIT + stackArgsSize + stackLocalsSize + savedRegSize;
  1292. this->GeneratePrologueStackProbe(entryInstr, frameSize);
  1293. return entryInstr;
  1294. }
void
LowererMDArch::GeneratePrologueStackProbe(IR::Instr *entryInstr, IntConstType frameSize)
{
    //
    // Generate a stack overflow check. Since ProbeCurrentStack throws an exception it needs
    // an unwindable stack. Should we need to call ProbeCurrentStack, instead of creating a new frame here,
    // we make it appear like our caller directly called ProbeCurrentStack.
    //
    // For thread-bound thread context
    //     MOV  rax, ThreadContext::scriptStackLimit + frameSize
    //     CMP  rsp, rax
    //     JG   $done
    //     MOV  rax, ThreadContext::ProbeCurrentStack
    //     MOV  rcx, frameSize
    //     MOV  rdx, scriptContext
    //     JMP  rax
    // $done:
    //
    // For thread-agile thread context
    //     MOV  rax, [ThreadContext::scriptStackLimit]
    //     ADD  rax, frameSize
    //     CMP  rsp, rax
    //     JG   $done
    //     MOV  rax, ThreadContext::ProbeCurrentStack
    //     MOV  rcx, frameSize
    //     MOV  rdx, scriptContext
    //     JMP  rax
    // $done:
    //
    // For thread context with script interrupt enabled
    //     MOV  rax, [ThreadContext::scriptStackLimit]
    //     ADD  rax, frameSize
    //     JO   $helper
    //     CMP  rsp, rax
    //     JG   $done
    // $helper:
    //     MOV  rax, ThreadContext::ProbeCurrentStack
    //     MOV  rcx, frameSize
    //     MOV  rdx, scriptContext
    //     JMP  rax
    // $done:
    //

    IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    // All probe instructions are inserted before the instruction that follows the entry,
    // i.e. at the very top of the (already emitted) prologue body.
    IR::Instr *insertInstr = entryInstr->m_next;
    IR::Instr *instr;
    IR::Opnd *stackLimitOpnd;
    ThreadContext *threadContext = this->m_func->GetScriptContext()->GetThreadContext();
    bool doInterruptProbe = threadContext->DoInterruptProbe(this->m_func->GetJnFunction());

    // MOV rax, ThreadContext::scriptStackLimit + frameSize
    stackLimitOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, this->m_func);
    if (doInterruptProbe || !threadContext->GetIsThreadBound())
    {
        // The limit can change at runtime (interrupt probe or thread-agile context),
        // so load the current stack limit from the ThreadContext and add the current frame size.
        {
            void *pLimit = threadContext->GetAddressOfStackLimitForCurrentThread();
            IR::RegOpnd *baseOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, this->m_func);
            this->lowererMD->CreateAssign(baseOpnd, IR::AddrOpnd::New(pLimit, IR::AddrOpndKindDynamicMisc, this->m_func), insertInstr);
            IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(baseOpnd, 0, TyMachReg, this->m_func);
            this->lowererMD->CreateAssign(stackLimitOpnd, indirOpnd, insertInstr);
        }

        instr = IR::Instr::New(Js::OpCode::ADD, stackLimitOpnd, stackLimitOpnd,
                               IR::AddrOpnd::New((void*)frameSize, IR::AddrOpndKindConstant, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);

        if (doInterruptProbe)
        {
            // If the add overflows, call the probe helper.
            // (The interrupt mechanism can set the limit to a sentinel that makes the ADD overflow.)
            instr = IR::BranchInstr::New(Js::OpCode::JO, helperLabel, this->m_func);
            insertInstr->InsertBefore(instr);
        }
    }
    else
    {
        // Thread-bound, no interrupt probe: the limit is a compile-time constant,
        // so fold (limit + frameSize) into a single immediate.
        size_t scriptStackLimit = (size_t)this->m_func->GetScriptContext()->GetThreadContext()->GetScriptStackLimit();
        this->lowererMD->CreateAssign(stackLimitOpnd, IR::AddrOpnd::New((void *)(frameSize + scriptStackLimit), IR::AddrOpndKindConstant, this->m_func), insertInstr);
    }

    // CMP rsp, rax
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, m_func));
    instr->SetSrc2(stackLimitOpnd);
    insertInstr->InsertBefore(instr);

    IR::LabelInstr * doneLabel = nullptr;
    if (!PHASE_OFF(Js::LayoutPhase, this->m_func))
    {
        // JLE $helper
        instr = IR::BranchInstr::New(Js::OpCode::JLE, helperLabel, m_func);
        insertInstr->InsertBefore(instr);
        Security::InsertRandomFunctionPad(insertInstr);

        // This is generated after layout. Generate the block at the end of the function manually,
        // by re-anchoring insertInstr to a new tail pragma so the helper code lands out of line.
        insertInstr = IR::PragmaInstr::New(Js::OpCode::StatementBoundary, Js::Constants::NoStatementIndex, m_func);
        this->m_func->m_tailInstr->InsertAfter(insertInstr);
        this->m_func->m_tailInstr = insertInstr;
    }
    else
    {
        // Layout is disabled: keep the helper inline and jump around it on success.
        doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        // JGT $done
        instr = IR::BranchInstr::New(Js::OpCode::JGT, doneLabel, m_func);
        insertInstr->InsertBefore(instr);
    }

    insertInstr->InsertBefore(helperLabel);

    IR::RegOpnd *target;
    {
        // Set up ProbeCurrentStack's arguments per the x64 convention (rcx, rdx).

        // MOV rdx, scriptContext
        this->lowererMD->CreateAssign(
            IR::RegOpnd::New(nullptr, RegRDX, TyMachReg, m_func),
            this->lowererMD->m_lowerer->LoadScriptContextOpnd(insertInstr), insertInstr);

        // MOV rcx, frameSize
        this->lowererMD->CreateAssign(
            IR::RegOpnd::New(nullptr, RegRCX, TyMachReg, this->m_func),
            IR::AddrOpnd::New((void*)frameSize, IR::AddrOpndKindConstant, this->m_func), insertInstr);

        // MOV rax, ThreadContext::ProbeCurrentStack
        target = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, m_func);
        this->lowererMD->CreateAssign(target, IR::HelperCallOpnd::New(IR::HelperProbeCurrentStack, m_func), insertInstr);
    }

    // JMP rax -- tail-jump (not CALL) so the probe appears to be invoked by our caller.
    instr = IR::MultiBranchInstr::New(Js::OpCode::JMP, target, m_func);
    insertInstr->InsertBefore(instr);

    if (doneLabel)
    {
        // $done:
        insertInstr->InsertBefore(doneLabel);
        Security::InsertRandomFunctionPad(doneLabel);
    }
}
  1419. ///----------------------------------------------------------------------------
  1420. ///
  1421. /// LowererMDArch::LowerExitInstr
  1422. ///
  1423. /// Emit epilog.
  1424. ///
  1425. ///----------------------------------------------------------------------------
  1426. IR::Instr *
  1427. LowererMDArch::LowerExitInstr(IR::ExitInstr * exitInstr)
  1428. {
  1429. uint32 savedRegSize = 0;
  1430. // POP used callee-saved registers
  1431. IR::Instr * exitPrevInstr = exitInstr->m_prev;
  1432. AssertMsg(exitPrevInstr, "Can a function have only 1 instr ? Or is the instr chain broken");
  1433. IR::RegOpnd *stackPointer = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);
  1434. unsigned xmmOffset = 0;
  1435. for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg+1))
  1436. {
  1437. if (LinearScan::IsCalleeSaved(reg) && (this->m_func->HasTry() || this->m_func->m_regsUsed.Test(reg)))
  1438. {
  1439. IRType type = RegTypes[reg];
  1440. IR::RegOpnd *regOpnd = IR::RegOpnd::New(nullptr, reg, type, this->m_func);
  1441. if (type == TyFloat64)
  1442. {
  1443. IR::Instr *restoreInstr = IR::Instr::New(Js::OpCode::MOVAPS,
  1444. regOpnd,
  1445. IR::IndirOpnd::New(stackPointer,
  1446. xmmOffset,
  1447. type,
  1448. this->m_func),
  1449. this->m_func);
  1450. xmmOffset += (MachDouble * 2);
  1451. exitInstr->InsertBefore(restoreInstr);
  1452. }
  1453. else
  1454. {
  1455. Assert(type == TyInt64);
  1456. IR::Instr *popInstr = IR::Instr::New(Js::OpCode::POP, regOpnd, this->m_func);
  1457. exitInstr->InsertBefore(popInstr);
  1458. savedRegSize += MachPtr;
  1459. }
  1460. }
  1461. }
  1462. Assert(savedRegSize == (uint)this->m_func->GetSavedRegSize());
  1463. // Generate ADD RSP, argsStackSize before the register restore (if there are any)
  1464. uint32 stackArgsSize = this->m_func->GetArgsSize();
  1465. Assert(stackArgsSize);
  1466. if (savedRegSize || xmmOffset)
  1467. {
  1468. IR::IntConstOpnd *stackSizeOpnd = IR::IntConstOpnd::New(stackArgsSize, TyInt32, this->m_func);
  1469. IR::Instr *addInstr = IR::Instr::New(Js::OpCode::ADD, stackPointer, stackPointer, stackSizeOpnd, this->m_func);
  1470. exitPrevInstr->InsertAfter(addInstr);
  1471. }
  1472. //
  1473. // useful register operands
  1474. //
  1475. IR::RegOpnd * rspOpnd = IR::RegOpnd::New(nullptr, RegRSP, TyMachReg, this->m_func);
  1476. IR::RegOpnd * rbpOpnd = IR::RegOpnd::New(nullptr, RegRBP, TyMachReg, this->m_func);
  1477. // Restore frame
  1478. // Generate MOV RSP, RBP
  1479. IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, rspOpnd, rbpOpnd, this->m_func);
  1480. exitInstr->InsertBefore(movInstr);
  1481. // Generate POP RBP
  1482. IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::POP, rbpOpnd, this->m_func);
  1483. exitInstr->InsertBefore(pushInstr);
  1484. // Insert RET
  1485. IR::IntConstOpnd * intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  1486. IR::RegOpnd *retReg = nullptr;
  1487. if (m_func->GetJnFunction()->GetIsAsmjsMode() && !m_func->IsLoopBody())
  1488. {
  1489. switch (m_func->GetJnFunction()->GetAsmJsFunctionInfo()->GetReturnType().which())
  1490. {
  1491. case Js::AsmJsRetType::Double:
  1492. case Js::AsmJsRetType::Float:
  1493. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TyMachDouble), TyMachDouble, this->m_func);
  1494. break;
  1495. case Js::AsmJsRetType::Int32x4:
  1496. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128I4), TySimd128I4, this->m_func);
  1497. break;
  1498. case Js::AsmJsRetType::Float32x4:
  1499. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128F4), TySimd128F4, this->m_func);
  1500. break;
  1501. case Js::AsmJsRetType::Float64x2:
  1502. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128D2), TySimd128D2, this->m_func);
  1503. break;
  1504. case Js::AsmJsRetType::Signed:
  1505. case Js::AsmJsRetType::Void:
  1506. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  1507. break;
  1508. default:
  1509. Assume(UNREACHED);
  1510. }
  1511. }
  1512. else
  1513. {
  1514. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  1515. }
  1516. IR::Instr *retInstr = IR::Instr::New(Js::OpCode::RET, this->m_func);
  1517. retInstr->SetSrc1(intSrc);
  1518. retInstr->SetSrc2(retReg);
  1519. exitInstr->InsertBefore(retInstr);
  1520. retInstr->m_opcode = Js::OpCode::RET;
  1521. return exitInstr;
  1522. }
  1523. IR::Instr *
  1524. LowererMDArch::LowerEntryInstrAsmJs(IR::EntryInstr * entryInstr)
  1525. {
  1526. // prologue is almost identical on x64, except for loading args
  1527. return LowerEntryInstr(entryInstr);
  1528. }
  1529. IR::Instr *
  1530. LowererMDArch::LowerExitInstrAsmJs(IR::ExitInstr * exitInstr)
  1531. {
  1532. // epilogue is almost identical on x64, except for return register
  1533. return LowerExitInstr(exitInstr);
  1534. }
  1535. void
  1536. LowererMDArch::EmitPtrInstr(IR::Instr *instr)
  1537. {
  1538. bool legalize = false;
  1539. switch (instr->m_opcode)
  1540. {
  1541. case Js::OpCode::Add_Ptr:
  1542. LowererMD::ChangeToAdd(instr, false /* needFlags */);
  1543. legalize = true;
  1544. break;
  1545. default:
  1546. AssertMsg(UNREACHED, "Un-implemented ptr opcode");
  1547. }
  1548. // OpEq's
  1549. if (legalize)
  1550. {
  1551. LowererMD::Legalize(instr);
  1552. }
  1553. else
  1554. {
  1555. LowererMD::MakeDstEquSrc1(instr);
  1556. }
  1557. }
void
LowererMDArch::EmitInt4Instr(IR::Instr *instr, bool signExtend /* = false */)
{
    // Lower an Int32 IR opcode (Neg_I4, Add_I4, Div_I4, branches, ...) to the
    // corresponding x64 machine opcode, inserting any required helper
    // instructions (register fixing for DIV/IDIV, CMP/TEST for branches).
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();
    IR::Instr *newInstr = nullptr;
    IR::RegOpnd *regEDX;

    // Normalize operand types to TyInt32, but leave explicitly-unsigned
    // operands alone (they drive the DIV-vs-IDIV choice below).
    if (dst && !dst->IsUInt32())
    {
        dst->SetType(TyInt32);
    }
    if (!src1->IsUInt32())
    {
        src1->SetType(TyInt32);
    }
    if (src2 && !src2->IsUInt32())
    {
        src2->SetType(TyInt32);
    }

    bool legalize = false;
    switch (instr->m_opcode)
    {
    case Js::OpCode::Neg_I4:
        instr->m_opcode = Js::OpCode::NEG;
        break;

    case Js::OpCode::Not_I4:
        instr->m_opcode = Js::OpCode::NOT;
        break;

    case Js::OpCode::Add_I4:
        LowererMD::ChangeToAdd(instr, false /* needFlags */);
        legalize = true;
        break;

    case Js::OpCode::Sub_I4:
        LowererMD::ChangeToSub(instr, false /* needFlags */);
        legalize = true;
        break;

    case Js::OpCode::Mul_I4:
        instr->m_opcode = Js::OpCode::IMUL2;
        break;

    case Js::OpCode::Div_I4:
        // x64 DIV/IDIV puts the quotient in RAX...
        instr->SinkDst(Js::OpCode::MOV, RegRAX);
        goto idiv_common;
    case Js::OpCode::Rem_I4:
        // ...and the remainder in RDX.
        instr->SinkDst(Js::OpCode::MOV, RegRDX);
idiv_common:
        // Unsigned operands use DIV; signed use IDIV.
        if (instr->GetSrc1()->GetType() == TyUint32)
        {
            Assert(instr->GetSrc2()->GetType() == TyUint32);
            instr->m_opcode = Js::OpCode::DIV;
        }
        else
        {
            instr->m_opcode = Js::OpCode::IDIV;
        }
        // Dividend must be in RAX.
        instr->HoistSrc1(Js::OpCode::MOV, RegRAX);

        regEDX = IR::RegOpnd::New(TyInt32, instr->m_func);
        regEDX->SetReg(RegRDX);
        if (instr->GetSrc1()->GetType() == TyUint32)
        {
            // Unsigned: upper half of the dividend (RDX) must be zero.
            // we need to ensure that register allocator doesn't muck about with rdx
            instr->HoistSrc2(Js::OpCode::MOV, RegRCX);

            newInstr = IR::Instr::New(Js::OpCode::Ld_I4, regEDX, IR::IntConstOpnd::New(0, TyInt32, instr->m_func), instr->m_func);
            instr->InsertBefore(newInstr);
            LowererMD::ChangeToAssign(newInstr);
            // NOP ensures that the EDX = Ld_I4 0 doesn't get deadstored, will be removed in peeps
            instr->InsertBefore(IR::Instr::New(Js::OpCode::NOP, regEDX, regEDX, instr->m_func));
        }
        else
        {
            // Signed: IDIV cannot take an immediate divisor, and CDQ
            // sign-extends RAX into RDX to form the 64-bit dividend.
            if (instr->GetSrc2()->IsImmediateOpnd())
            {
                instr->HoistSrc2(Js::OpCode::MOV);
            }
            instr->InsertBefore(IR::Instr::New(Js::OpCode::CDQ, regEDX, instr->m_func));
        }
        return;

    case Js::OpCode::Or_I4:
        instr->m_opcode = Js::OpCode::OR;
        break;

    case Js::OpCode::Xor_I4:
        instr->m_opcode = Js::OpCode::XOR;
        break;

    case Js::OpCode::And_I4:
        instr->m_opcode = Js::OpCode::AND;
        break;

    case Js::OpCode::Shl_I4:
    case Js::OpCode::ShrU_I4:
    case Js::OpCode::Shr_I4:
        LowererMD::ChangeToShift(instr, false /* needFlags */);
        legalize = true;
        break;

    // Unary branches: lower to TEST src1, src1 + conditional jump.
    case Js::OpCode::BrTrue_I4:
        instr->m_opcode = Js::OpCode::JNE;
        goto br1_Common;
    case Js::OpCode::BrFalse_I4:
        instr->m_opcode = Js::OpCode::JEQ;
br1_Common:
        src1 = instr->UnlinkSrc1();
        newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src1);
        return;

    // Binary branches: lower to CMP src1, src2 + conditional jump.
    // Unsigned compares map to JA/JAE/JB/JBE, signed to JGT/JGE/JLT/JLE.
    case Js::OpCode::BrEq_I4:
        instr->m_opcode = Js::OpCode::JEQ;
        goto br2_Common;
    case Js::OpCode::BrNeq_I4:
        instr->m_opcode = Js::OpCode::JNE;
        goto br2_Common;
    case Js::OpCode::BrUnGt_I4:
        instr->m_opcode = Js::OpCode::JA;
        goto br2_Common;
    case Js::OpCode::BrUnGe_I4:
        instr->m_opcode = Js::OpCode::JAE;
        goto br2_Common;
    case Js::OpCode::BrUnLe_I4:
        instr->m_opcode = Js::OpCode::JBE;
        goto br2_Common;
    case Js::OpCode::BrUnLt_I4:
        instr->m_opcode = Js::OpCode::JB;
        goto br2_Common;
    case Js::OpCode::BrGt_I4:
        instr->m_opcode = Js::OpCode::JGT;
        goto br2_Common;
    case Js::OpCode::BrGe_I4:
        instr->m_opcode = Js::OpCode::JGE;
        goto br2_Common;
    case Js::OpCode::BrLe_I4:
        instr->m_opcode = Js::OpCode::JLE;
        goto br2_Common;
    case Js::OpCode::BrLt_I4:
        instr->m_opcode = Js::OpCode::JLT;
br2_Common:
        src1 = instr->UnlinkSrc1();
        src2 = instr->UnlinkSrc2();
        newInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src2);
        return;

    default:
        AssertMsg(UNREACHED, "Un-implemented int4 opcode");
    }

    // Optionally sign-extend the 32-bit result into the full 64-bit register.
    // NOTE(review): Div_I4/Rem_I4 and branch opcodes return above and skip this;
    // presumably signExtend is never requested for those -- confirm with callers.
    if (signExtend)
    {
        Assert(instr->GetDst());

        IR::Opnd *dst64 = instr->GetDst()->Copy(instr->m_func);
        dst64->SetType(TyMachReg);
        instr->InsertAfter(IR::Instr::New(Js::OpCode::MOVSXD, dst64, instr->GetDst(), instr->m_func));
    }

    if(legalize)
    {
        LowererMD::Legalize(instr);
    }
    else
    {
        // OpEq's
        LowererMD::MakeDstEquSrc1(instr);
    }
}
  1719. #if !FLOATVAR
void
LowererMDArch::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    // Convert a 32-bit int to a tagged Var (non-FLOATVAR tagging scheme:
    // shift left by VarTag_Shift, then set the low tag bit via INC).
    // Falls back to EmitLoadVarNoCheck (boxing helper) when the value
    // can't be represented as a tagged int.
    //
    //    e1 = MOV e_src1
    //    e1 = SHL e1, Js::VarTag_Shift
    //         JO $ToVar
    //         JB $ToVar [isFromUint32]
    //    e1 = INC e1
    // r_dst = MOVSXD e1
    //         JMP $done
    // $ToVar:
    //         EmitLoadVarNoCheck
    // $Done:

    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetDst()->GetType() == TyVar);

    bool isInt = false;
    bool isNotInt = false;
    IR::Opnd *dst = instrLoad->GetDst();
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *toVar = nullptr;
    IR::LabelInstr *done = nullptr;

    // TODO: Fix bad lowering. We shouldn't get TyVars here.
    // Assert(instrLoad->GetSrc1()->GetType() == TyInt32);
    src1->SetType(TyInt32);

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }

    if (!isNotInt)
    {
        // Fast path: tag the int inline.

        // e1 = MOV e_src1
        IR::RegOpnd *e1 = IR::RegOpnd::New(TyInt32, m_func);
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV, e1, instrLoad->GetSrc1(), m_func));

        // e1 = SHL e1, Js::VarTag_Shift
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::SHL,
            e1,
            e1,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, m_func), m_func));

        if (!isInt)
        {
            // JO $ToVar -- the shift overflowed, value doesn't fit in a tagged int.
            toVar = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JO, toVar, m_func));

            if (isFromUint32)
            {
                // JB $ToVar [isFromUint32] -- large uint32 values are negative
                // when reinterpreted as int32, so they must be boxed.
                instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JB, toVar, this->m_func));
            }
        }

        // e1 = INC e1 -- set the atom tag bit.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::INC, e1, e1, m_func));

        // dst = MOVSXD e1 -- sign-extend the tagged 32-bit value to the full Var width.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOVSXD, dst, e1, m_func));

        if (!isInt)
        {
            // JMP $done
            done = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
            instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));
        }
    }

    IR::Instr *insertInstr = instrLoad;
    if (!isInt)
    {
        // $toVar:
        if (toVar)
        {
            instrLoad->InsertBefore(toVar);
        }

        // ToVar() -- box the value via the helper path.
        this->lowererMD->EmitLoadVarNoCheck(dst->AsRegOpnd(), src1, instrLoad, isFromUint32, isHelper || toVar != nullptr);
    }
    if (done)
    {
        instrLoad->InsertAfter(done);
    }
    instrLoad->Remove();
}
  1801. #else
void
LowererMDArch::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    // Convert a 32-bit int to a tagged Var (FLOATVAR tagging scheme:
    // set the tag with BTS at VarTag_Shift). Only the uint32 source needs
    // a range check here; an int32 source always fits.
    //
    //    MOV e1, e_src1
    //    CMP e1, 0            [uint32]
    //    JLT $Helper          [uint32]  -- overflows?
    //    BTS r1, VarTag_Shift
    //    MOV r_dst, r1
    //    JMP $done            [uint32]
    // $helper                 [uint32]
    //    EmitLoadVarNoCheck
    // $done                   [uint32]

    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetDst()->GetType() == TyVar);

    bool isInt = false;
    IR::Opnd *dst = instrLoad->GetDst();
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelHelper = nullptr;

    // TODO: Fix bad lowering. We shouldn't get TyVars here.
    // Assert(instrLoad->GetSrc1()->GetType() == TyInt32);
    src1->SetType(TyInt32);

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        // Known not to be an int: go straight to the boxing helper.
        // ToVar()
        this->lowererMD->EmitLoadVarNoCheck(dst->AsRegOpnd(), src1, instrLoad, isFromUint32, isHelper);
        return;
    }

    IR::RegOpnd *r1 = IR::RegOpnd::New(TyVar, m_func);

    // e1 = MOV e_src1 (aliases r1's low 32 bits; the 32-bit MOV zeroes the top half)
    IR::RegOpnd *e1 = r1->Copy(m_func)->AsRegOpnd();
    e1->SetType(TyInt32);
    instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV,
        e1,
        src1,
        m_func));

    if (!isInt && isFromUint32)
    {
        // uint32 values >= 2^31 look negative as int32 and can't be tagged inline.
        // CMP e1, 0
        IR::Instr *instr = IR::Instr::New(Js::OpCode::CMP, m_func);
        instr->SetSrc1(e1);
        instr->SetSrc2(IR::IntConstOpnd::New(0, TyInt32, m_func));
        instrLoad->InsertBefore(instr);

        Assert(!labelHelper);
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);

        // JLT $helper
        instr = IR::BranchInstr::New(Js::OpCode::JLT, labelHelper, m_func);
        instrLoad->InsertBefore(instr);
    }

    // The previous operation clears the top 32 bits.
    // BTS r1, VarTag_Shift
    this->lowererMD->GenerateInt32ToVarConversion(r1, instrLoad);

    // REVIEW: We need r1 only if we could generate sn = Ld_A_I4 sn. i.e. the destination and
    // source are the same.

    // r_dst = MOV r1
    instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV,
        dst,
        r1,
        m_func));

    if (labelHelper)
    {
        Assert(isFromUint32);

        // JMP $done
        IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
        instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, labelDone, m_func));

        // $helper
        instrLoad->InsertBefore(labelHelper);

        // ToVar() -- box the out-of-range uint32 via the helper path.
        this->lowererMD->EmitLoadVarNoCheck(dst->AsRegOpnd(), src1, instrLoad, isFromUint32, true);

        // $done
        instrLoad->InsertBefore(labelDone);
    }
    instrLoad->Remove();
}
  1879. #endif
  1880. void
  1881. LowererMDArch::EmitIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  1882. {
  1883. Assert(dst->IsRegOpnd() && dst->IsFloat());
  1884. Assert(src->IsRegOpnd() && src->IsInt32());
  1885. if (dst->IsFloat64())
  1886. {
  1887. // Use MOVD to make sure we sign extended the 32-bit src
  1888. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOVD, dst, src, this->m_func));
  1889. // Convert to float
  1890. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CVTDQ2PD, dst, dst, this->m_func));
  1891. }
  1892. else
  1893. {
  1894. Assert(dst->IsFloat32());
  1895. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CVTSI2SS, dst, src, this->m_func));
  1896. }
  1897. }
  1898. void
  1899. LowererMDArch::EmitUIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  1900. {
  1901. Assert(dst->IsRegOpnd() && dst->IsFloat());
  1902. Assert(src->IsRegOpnd() && (src->IsInt32() || src->IsUInt32()));
  1903. // MOV tempReg.i32, src - make sure the top bits are 0
  1904. IR::RegOpnd * tempReg = IR::RegOpnd::New(TyInt32, this->m_func);
  1905. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC, tempReg, src, this->m_func));
  1906. // CVTSI2SD dst, tempReg.i64 (Use the tempreg as if it is 64 bit without sign extension)
  1907. instrInsert->InsertBefore(IR::Instr::New(dst->IsFloat64() ? Js::OpCode::CVTSI2SD : Js::OpCode::CVTSI2SS, dst,
  1908. tempReg->UseWithNewType(TyInt64, this->m_func), this->m_func));
  1909. }
bool
LowererMDArch::EmitLoadInt32(IR::Instr *instrLoad)
{
    // Lower a Var -> int32 load: inline fast path for tagged ints, optional
    // float-to-int fast path, then a helper call (or bailout) for everything else.
    // Returns true when the caller must emit a bailout instead of the helper call.
    //
    //    r1 = MOV src1
    // rtest = MOV src1
    //         SHR rtest, AtomTag_Shift
    //         CMP rtest, 1
    //         JNE $helper or $float
    // r_dst = MOV_TRUNC e_src1
    //         JMP $done
    //  $float:
    //     dst = ConvertToFloat(r1, $helper)
    // $helper:
    // r_dst = ToInt32()
    //

    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetSrc1()->GetType() == TyVar);

    // TODO: Fix bad lowering. We shouldn't see TyVars here.
    // Assert(instrLoad->GetDst()->GetType() == TyInt32);

    bool isInt = false;
    bool isNotInt = false;
    IR::Opnd *dst = instrLoad->GetDst();
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *helper = nullptr;
    IR::LabelInstr *labelFloat = nullptr;
    IR::LabelInstr *done = nullptr;

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }
    if (src1->IsEqual(instrLoad->GetDst()) == false)
    {
        // Copy src1 to a scratch register so the original value survives
        // the destructive operations below.
        // r1 = MOV src1
        IR::RegOpnd *r1 = IR::RegOpnd::New(TyVar, instrLoad->m_func);
        r1->SetValueType(src1->GetValueType());
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV, r1, src1, instrLoad->m_func));
        src1 = r1;
    }

    // Take the float fast path only when profile data suggests a float and the
    // bailout kind doesn't demand an exact int (those must go through the
    // bailout machinery instead).
    const ValueType src1ValueType(src1->GetValueType());
    const bool doFloatToIntFastPath =
        (src1ValueType.IsLikelyFloat() || src1ValueType.IsLikelyUntaggedInt()) &&
        !(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger));

    if (!isNotInt)
    {
        if (!isInt)
        {
            // Not known statically: test the tag and branch to the float path
            // (if enabled) or directly to the helper.
            if(doFloatToIntFastPath)
            {
                labelFloat = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, false);
            }
            else
            {
                helper = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, true);
            }

            this->lowererMD->GenerateSmIntTest(src1, instrLoad, labelFloat ? labelFloat : helper);
        }

        IR::RegOpnd *src132 = src1->UseWithNewType(TyInt32, instrLoad->m_func)->AsRegOpnd();

#if !INT32VAR
        // Tagged scheme stores the int shifted: untag with an arithmetic shift.
        // src1 = SAR src1, VarTag_Shift
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::SAR,
            src132,
            src132,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrLoad->m_func),
            instrLoad->m_func));

        // r_dst = MOV src1
        // This is only a MOV (and not a MOVSXD) because we do a signed shift right, but we'll copy
        // all 64 bits.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV,
            dst->UseWithNewType(TyMachReg, instrLoad->m_func),
            src1,
            instrLoad->m_func));
#else
        // INT32VAR scheme keeps the int in the low 32 bits: just truncate-copy.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC,
            dst->UseWithNewType(TyInt32, instrLoad->m_func),
            src132,
            instrLoad->m_func));
#endif

        if (!isInt)
        {
            // JMP $done
            done = instrLoad->GetOrCreateContinueLabel();
            instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));
        }
    }

    if (!isInt)
    {
        if(doFloatToIntFastPath)
        {
            if(labelFloat)
            {
                instrLoad->InsertBefore(labelFloat);
            }
            if(!helper)
            {
                helper = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, true);
            }
            if(!done)
            {
                done = instrLoad->GetOrCreateContinueLabel();
            }
#if FLOATVAR
            // NaN-boxed floats: untag in place and get the raw double.
            IR::RegOpnd* floatOpnd = this->lowererMD->CheckFloatAndUntag(src1, instrLoad, helper);
#else
            // Heap-boxed floats: verify it's a JavascriptNumber, then read its value field.
            this->lowererMD->GenerateFloatTest(src1, instrLoad, helper, instrLoad->HasBailOutInfo());
            IR::IndirOpnd* floatOpnd = IR::IndirOpnd::New(src1, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
#endif
            this->lowererMD->ConvertFloatToInt32(instrLoad->GetDst(), floatOpnd, helper, done, instrLoad);
        }

        // $helper:
        if (helper)
        {
            instrLoad->InsertBefore(helper);
        }
        if(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger))
        {
            // Avoid bailout if we have a JavascriptNumber whose value is a signed 32-bit integer
            lowererMD->m_lowerer->LoadInt32FromUntaggedVar(instrLoad);

            // Need to bail out instead of calling a helper
            return true;
        }

        lowererMD->m_lowerer->LowerUnaryHelperMem(instrLoad, IR::HelperConv_ToInt32);
    }
    else
    {
        // Statically known tagged int: the fast path above fully replaced the load.
        instrLoad->Remove();
    }

    return false;
}
IR::Instr *
LowererMDArch::LoadCheckedFloat(IR::RegOpnd *opndOrig, IR::RegOpnd *opndFloat, IR::LabelInstr *labelInline, IR::LabelInstr *labelHelper, IR::Instr *instrInsert, const bool checkForNullInLoopBody)
{
    // Emits code (before instrInsert) that loads a double into opndFloat from
    // a Var that is either a tagged int (sign-extend + CVTSI2SD) or a tagged
    // float (XOR off FloatTag_Value and bit-copy into an XMM reg via MOVD).
    // Any other value branches to labelHelper.  Returns the first instruction
    // emitted (the start of the tagged-int test).
    //
    // if (TaggedInt::Is(opndOrig))
    //     s1 = MOVSXD opndOrig_32
    //     opndFloat = CVTSI2SD s1
    //     JMP $labelInline
    // else
    //     JMP $labelOpndIsNotInt
    //
    // $labelOpndIsNotInt:
    // if (TaggedFloat::Is(opndOrig))
    //     s2 = MOV opndOrig
    //     s2 = XOR FloatTag_Value
    //     opndFloat = MOVD s2
    // else
    //     JMP $labelHelper
    //
    // $labelInline:
    //
    IR::Instr *instrFirst = nullptr;

    IR::LabelInstr *labelOpndIsNotInt = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    lowererMD->GenerateSmIntTest(opndOrig, instrInsert, labelOpndIsNotInt, &instrFirst);

    if (opndOrig->GetValueType().IsLikelyFloat())
    {
        // Make this path helper if value is likely a float
        instrInsert->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true));
    }

    // Tagged-int path: sign-extend the low 32 bits and convert to double.
    IR::RegOpnd *s1 = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Opnd *opndOrig_32 = opndOrig->UseWithNewType(TyInt32, this->m_func);
    IR::Instr *movsxd = IR::Instr::New(Js::OpCode::MOVSXD, s1, opndOrig_32, this->m_func);
    instrInsert->InsertBefore(movsxd);

    IR::Instr *cvtsi2sd = IR::Instr::New(Js::OpCode::CVTSI2SD, opndFloat, s1, this->m_func);
    instrInsert->InsertBefore(cvtsi2sd);

    IR::Instr *jmpInline = IR::BranchInstr::New(Js::OpCode::JMP, labelInline, this->m_func);
    instrInsert->InsertBefore(jmpInline);

    instrInsert->InsertBefore(labelOpndIsNotInt);

    // Not a tagged int: verify it is a tagged float (else jump to helper).
    lowererMD->GenerateFloatTest(opndOrig, instrInsert, labelHelper, checkForNullInLoopBody);

    // Tagged-float path: strip the float tag, then move the raw bits into
    // the XMM destination.
    IR::RegOpnd *s2 = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Instr *mov = IR::Instr::New(Js::OpCode::MOV, s2, opndOrig, this->m_func);
    instrInsert->InsertBefore(mov);

    IR::Instr *xorTag = IR::Instr::New(Js::OpCode::XOR,
                                       s2,
                                       s2,
                                       IR::AddrOpnd::New((Js::Var)Js::FloatTag_Value,
                                                         IR::AddrOpndKindConstantVar,
                                                         this->m_func,
                                                         /* dontEncode = */ true),
                                       this->m_func);
    instrInsert->InsertBefore(xorTag);
    // The 64-bit tag constant may not be encodable as an immediate; Legalize
    // rewrites the XOR into a valid machine form if needed.
    LowererMD::Legalize(xorTag);

    IR::Instr *movFloat = IR::Instr::New(Js::OpCode::MOVD, opndFloat, s2, this->m_func);
    instrInsert->InsertBefore(movFloat);

    return instrFirst;
}
IR::LabelInstr *
LowererMDArch::GetBailOutStackRestoreLabel(BailOutInfo * bailOutInfo, IR::LabelInstr * exitTargetInstr)
{
    // No separate stack-restore label is created on this platform; the exit
    // target itself serves as the restore label.  bailOutInfo is unused here.
    return exitTargetInstr;
}
bool LowererMDArch::GenerateFastAnd(IR::Instr * instrAnd)
{
    // No machine-specific fast path is emitted for And on this platform.
    // NOTE(review): returning true presumably tells the caller to proceed
    // with the generic lowering — confirm against the call site in LowererMD.
    return true;
}
bool LowererMDArch::GenerateFastXor(IR::Instr * instrXor)
{
    // No machine-specific fast path is emitted for Xor on this platform;
    // the caller continues with generic lowering (see GenerateFastAnd).
    return true;
}
bool LowererMDArch::GenerateFastOr(IR::Instr * instrOr)
{
    // No machine-specific fast path is emitted for Or on this platform;
    // the caller continues with generic lowering (see GenerateFastAnd).
    return true;
}
bool LowererMDArch::GenerateFastNot(IR::Instr * instrNot)
{
    // No machine-specific fast path is emitted for Not on this platform;
    // the caller continues with generic lowering (see GenerateFastAnd).
    return true;
}
bool LowererMDArch::GenerateFastShiftLeft(IR::Instr * instrShift)
{
    // No machine-specific fast path is emitted for Shl on this platform;
    // the caller continues with generic lowering (see GenerateFastAnd).
    return true;
}
  2124. bool LowererMDArch::GenerateFastShiftRight(IR::Instr * instrShift)
  2125. {
  2126. // Given:
  2127. //
  2128. // dst = Shr/ShrU src1, src2
  2129. //
  2130. // Generate:
  2131. //
  2132. // (If not 2 Int31's, jump to $helper.)
  2133. // s1 = MOV src1
  2134. //RCX = MOV src2
  2135. // TEST RCX, 0x1F [unsigned only] // Bail if unsigned and not shifting,
  2136. // JEQ $helper [unsigned only] // as we may not end up with a taggable int
  2137. // s1 = SAR/SHR s1, RCX
  2138. // BTS s1, VarTag_Shift
  2139. //dst = MOV s1
  2140. // JMP $fallthru
  2141. // $helper:
  2142. // (caller generates helper call)
  2143. // $fallthru:
  2144. IR::Instr * instr;
  2145. IR::LabelInstr * labelHelper;
  2146. IR::LabelInstr * labelFallThru;
  2147. IR::Opnd * opndReg;
  2148. IR::Opnd * opndSrc1;
  2149. IR::Opnd * opndSrc2;
  2150. Assert(instrShift->m_opcode == Js::OpCode::ShrU_A || instrShift->m_opcode == Js::OpCode::Shr_A);
  2151. bool isUnsigned = (instrShift->m_opcode == Js::OpCode::ShrU_A);
  2152. opndSrc1 = instrShift->GetSrc1();
  2153. opndSrc2 = instrShift->GetSrc2();
  2154. AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Add instruction");
  2155. // Not int?
  2156. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
  2157. {
  2158. return true;
  2159. }
  2160. if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
  2161. {
  2162. return true;
  2163. }
  2164. // Tagged ints?
  2165. bool isTaggedInts = false;
  2166. if (opndSrc1->IsTaggedInt())
  2167. {
  2168. if (opndSrc2->IsTaggedInt())
  2169. {
  2170. isTaggedInts = true;
  2171. }
  2172. }
  2173. IntConstType s2Value = 0;
  2174. bool src2IsIntConst = false;
  2175. if (isUnsigned)
  2176. {
  2177. if (opndSrc2->IsRegOpnd())
  2178. {
  2179. src2IsIntConst = opndSrc2->AsRegOpnd()->m_sym->IsTaggableIntConst();
  2180. if (src2IsIntConst)
  2181. {
  2182. s2Value = opndSrc2->AsRegOpnd()->m_sym->GetIntConstValue();
  2183. }
  2184. }
  2185. else
  2186. {
  2187. AssertMsg(opndSrc2->IsAddrOpnd() && Js::TaggedInt::Is(opndSrc2->AsAddrOpnd()->m_address),
  2188. "Expect src2 of shift right to be reg or Var.");
  2189. src2IsIntConst = true;
  2190. s2Value = Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address);
  2191. }
  2192. // 32-bit Shifts only uses the bottom 5 bits.
  2193. s2Value &= 0x1F;
  2194. // Unsigned shift by 0 could yield a value not encodable as an tagged int.
  2195. if (isUnsigned && src2IsIntConst && s2Value == 0)
  2196. {
  2197. return true;
  2198. }
  2199. }
  2200. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  2201. if (!isTaggedInts)
  2202. {
  2203. // (If not 2 Int31's, jump to $helper.)
  2204. this->lowererMD->GenerateSmIntPairTest(instrShift, opndSrc1, opndSrc2, labelHelper);
  2205. }
  2206. opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
  2207. if (src2IsIntConst)
  2208. {
  2209. opndSrc2 = IR::IntConstOpnd::New(s2Value, TyInt32, this->m_func);
  2210. }
  2211. else
  2212. {
  2213. // RCX = MOV src2
  2214. opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
  2215. opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
  2216. opndReg->AsRegOpnd()->SetReg(this->GetRegShiftCount());
  2217. instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc2, this->m_func);
  2218. instrShift->InsertBefore(instr);
  2219. opndSrc2 = opndReg;
  2220. }
  2221. if (!src2IsIntConst && isUnsigned)
  2222. {
  2223. // TEST RCX, 0x1F [unsigned only] // Bail if unsigned and not shifting,
  2224. instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
  2225. instr->SetSrc1(opndSrc2);
  2226. instr->SetSrc2(IR::IntConstOpnd::New(0x1F, TyInt32, this->m_func));
  2227. instrShift->InsertBefore(instr);
  2228. // JEQ $helper [unsigned only] // as we may not end up with a taggable int
  2229. instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
  2230. instrShift->InsertBefore(instr);
  2231. }
  2232. // s1 = MOV src1
  2233. opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
  2234. instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
  2235. instrShift->InsertBefore(instr);
  2236. // s1 = SAR/SHR s1, RCX
  2237. instr = IR::Instr::New(isUnsigned ? Js::OpCode::SHR : Js::OpCode::SAR, opndReg, opndReg, opndSrc2, this->m_func);
  2238. instrShift->InsertBefore(instr);
  2239. //
  2240. // Convert TyInt32 operand, back to TyMachPtr type.
  2241. //
  2242. if(TyMachReg != opndReg->GetType())
  2243. {
  2244. opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
  2245. }
  2246. // BTS s1, VarTag_Shift
  2247. this->lowererMD->GenerateInt32ToVarConversion(opndReg, instrShift);
  2248. // dst = MOV s1
  2249. instr = IR::Instr::New(Js::OpCode::MOV, instrShift->GetDst(), opndReg, this->m_func);
  2250. instrShift->InsertBefore(instr);
  2251. // JMP $fallthru
  2252. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  2253. instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
  2254. instrShift->InsertBefore(instr);
  2255. // $helper:
  2256. // (caller generates helper call)
  2257. // $fallthru:
  2258. instrShift->InsertBefore(labelHelper);
  2259. instrShift->InsertAfter(labelFallThru);
  2260. return true;
  2261. }
void
LowererMDArch::FinalLower()
{
    // Final machine-dependent pass: walk the instruction stream backwards,
    // expanding the remaining pseudo-ops that could not be resolved until
    // frame layout was known (LdArgSize / LdSpillSize / Leave), and dropping
    // the dummy operand from CMOVcc instructions inserted before regalloc.
    IR::IntConstOpnd *intOpnd;

    FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, this->m_func->m_tailInstr, this->m_func->m_headInstr)
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::LdArgSize:
            // Only functions with try regions emit this pseudo-op; it becomes
            // a MOV of the now-known constant args size.
            Assert(this->m_func->HasTry());
            instr->m_opcode = Js::OpCode::MOV;
            intOpnd = IR::IntConstOpnd::New(this->m_func->GetArgsSize(), TyUint32, this->m_func);
            instr->SetSrc1(intOpnd);
            LowererMD::Legalize(instr);
            break;

        case Js::OpCode::LdSpillSize:
            // Same as LdArgSize, but loads the spill area size.
            Assert(this->m_func->HasTry());
            instr->m_opcode = Js::OpCode::MOV;
            intOpnd = IR::IntConstOpnd::New(this->m_func->GetSpillSize(), TyUint32, this->m_func);
            instr->SetSrc1(intOpnd);
            LowererMD::Legalize(instr);
            break;

        case Js::OpCode::Leave:
            Assert(this->m_func->DoOptimizeTryCatch() && !this->m_func->IsLoopBodyInTry());
            instrPrev = this->lowererMD->LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), true /*fromFinalLower*/);
            break;

        case Js::OpCode::CMOVA:
        case Js::OpCode::CMOVAE:
        case Js::OpCode::CMOVB:
        case Js::OpCode::CMOVBE:
        case Js::OpCode::CMOVE:
        case Js::OpCode::CMOVG:
        case Js::OpCode::CMOVGE:
        case Js::OpCode::CMOVL:
        case Js::OpCode::CMOVLE:
        case Js::OpCode::CMOVNE:
        case Js::OpCode::CMOVNO:
        case Js::OpCode::CMOVNP:
        case Js::OpCode::CMOVNS:
        case Js::OpCode::CMOVO:
        case Js::OpCode::CMOVP:
        case Js::OpCode::CMOVS:
            // Get rid of fake src1.
            if (instr->GetSrc2())
            {
                // CMOV inserted before regalloc have a dummy src1 to simulate the fact that
                // CMOV is not an definite def of the dst.  Swap so the real source becomes
                // src1, then free the dummy (now src2).
                instr->SwapOpnds();
                instr->FreeSrc2();
            }
            break;
        }
    } NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;
}
  2316. IR::Opnd*
  2317. LowererMDArch::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
  2318. {
  2319. return this->lowererMD->m_lowerer->GenerateArgOutForStackArgs(callInstr, stackArgsInstr);
  2320. }
  2321. void
  2322. LowererMDArch::LowerInlineSpreadArgOutLoop(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
  2323. {
  2324. this->lowererMD->m_lowerer->LowerInlineSpreadArgOutLoopUsingRegisters(callInstr, indexOpnd, arrayElementsStartOpnd);
  2325. }
  2326. IR::Instr *
  2327. LowererMDArch::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd)
  2328. {
  2329. IR::RegOpnd *retReg = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  2330. // Load the continuation address into the return register.
  2331. insertBeforeInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, retReg, targetOpnd, this->m_func));
  2332. // MOV r8, spillSize
  2333. IR::Instr *movR8 = IR::Instr::New(Js::OpCode::LdSpillSize,
  2334. IR::RegOpnd::New(nullptr, RegR8, TyMachReg, m_func),
  2335. m_func);
  2336. insertBeforeInstr->InsertBefore(movR8);
  2337. // MOV r9, argsSize
  2338. IR::Instr *movR9 = IR::Instr::New(Js::OpCode::LdArgSize,
  2339. IR::RegOpnd::New(nullptr, RegR9, TyMachReg, m_func),
  2340. m_func);
  2341. insertBeforeInstr->InsertBefore(movR9);
  2342. // MOV rcx, amd64_ReturnFromCallWithFakeFrame
  2343. // PUSH rcx
  2344. // RET
  2345. IR::Opnd *endCallWithFakeFrame = endCallWithFakeFrame = IR::RegOpnd::New(nullptr, RegRCX, TyMachReg, m_func);
  2346. IR::Instr *movTarget = IR::Instr::New(Js::OpCode::MOV,
  2347. endCallWithFakeFrame,
  2348. IR::HelperCallOpnd::New(IR::HelperOp_ReturnFromCallWithFakeFrame, m_func),
  2349. m_func);
  2350. insertBeforeInstr->InsertBefore(movTarget);
  2351. IR::Instr *push = IR::Instr::New(Js::OpCode::PUSH, m_func);
  2352. push->SetSrc1(endCallWithFakeFrame);
  2353. insertBeforeInstr->InsertBefore(push);
  2354. #if 0
  2355. // TODO: This block gets deleted if we emit a JMP instead of a RET.
  2356. IR::BranchInstr *jmp = IR::BranchInstr::New(Js::OpCode::JMP,
  2357. nullptr,
  2358. targetOpnd,
  2359. m_func);
  2360. leaveInstr->InsertBefore(jmp);
  2361. #endif
  2362. IR::IntConstOpnd *intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  2363. IR::Instr * retInstr = IR::Instr::New(Js::OpCode::RET, this->m_func);
  2364. retInstr->SetSrc1(intSrc);
  2365. retInstr->SetSrc2(retReg);
  2366. insertBeforeInstr->InsertBefore(retInstr);
  2367. // return the last instruction inserted
  2368. return retInstr;
  2369. }