LowererMDArch.cpp 111 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "LowererMDArch.h"
  7. #include "Library/JavascriptGeneratorFunction.h"
  8. const Js::OpCode LowererMD::MDExtend32Opcode = Js::OpCode::MOVSXD;
  9. extern const IRType RegTypes[RegNumCount];
BYTE
LowererMDArch::GetDefaultIndirScale()
{
    // Default scale for indirect operands: 8-byte (Var/pointer-sized) elements on amd64.
    return IndirScale8;
}
RegNum
LowererMDArch::GetRegShiftCount()
{
    // x64 variable shift instructions (SHL/SHR/SAR) take their count in CL,
    // so the shift-count register is RCX.
    return RegRCX;
}
  20. RegNum
  21. LowererMDArch::GetRegReturn(IRType type)
  22. {
  23. return ( IRType_IsFloat(type) || IRType_IsSimd128(type) ) ? RegXMM0 : RegRAX;
  24. }
  25. RegNum
  26. LowererMDArch::GetRegReturnAsmJs(IRType type)
  27. {
  28. if (IRType_IsFloat(type))
  29. {
  30. return RegXMM0;
  31. }
  32. else if (IRType_IsSimd128(type))
  33. {
  34. return RegXMM0;
  35. }
  36. else
  37. {
  38. return RegRAX;
  39. }
  40. }
RegNum
LowererMDArch::GetRegStackPointer()
{
    // RSP is the machine stack pointer on amd64.
    return RegRSP;
}
RegNum
LowererMDArch::GetRegBlockPointer()
{
    // RBP doubles as the block pointer (same register as the frame pointer here).
    return RegRBP;
}
RegNum
LowererMDArch::GetRegFramePointer()
{
    // RBP is used as the frame pointer.
    return RegRBP;
}
RegNum
LowererMDArch::GetRegChkStkParam()
{
    // Windows amd64 convention: __chkstk takes the allocation size in RAX.
    return RegRAX;
}
RegNum
LowererMDArch::GetRegIMulDestLower()
{
    // x64 one-operand multiply places the low half of the product in RAX.
    return RegRAX;
}
RegNum
LowererMDArch::GetRegIMulHighDestLower()
{
    // x64 one-operand multiply places the high half of the product in RDX.
    return RegRDX;
}
RegNum
LowererMDArch::GetRegArgI4(int32 argNum)
{
    // No dedicated integer argument register is reported here; argument
    // homing uses GetRegFromArgPosition instead.
    // TODO: decide on registers to use for int
    return RegNOREG;
}
RegNum
LowererMDArch::GetRegArgR8(int32 argNum)
{
    // No dedicated double argument register is reported here; argument
    // homing uses GetRegFromArgPosition instead.
    // TODO: decide on registers to use for double
    return RegNOREG;
}
  83. Js::OpCode
  84. LowererMDArch::GetAssignOp(IRType type)
  85. {
  86. switch (type)
  87. {
  88. case TyFloat64:
  89. return Js::OpCode::MOVSD;
  90. case TyFloat32:
  91. return Js::OpCode::MOVSS;
  92. case TySimd128F4:
  93. case TySimd128I4:
  94. case TySimd128I8:
  95. case TySimd128I16:
  96. case TySimd128U4:
  97. case TySimd128U8:
  98. case TySimd128U16:
  99. case TySimd128B4:
  100. case TySimd128B8:
  101. case TySimd128B16:
  102. case TySimd128D2:
  103. return Js::OpCode::MOVUPS;
  104. default:
  105. return Js::OpCode::MOV;
  106. }
  107. }
  108. void
  109. LowererMDArch::Init(LowererMD *lowererMD)
  110. {
  111. this->lowererMD = lowererMD;
  112. this->helperCallArgsCount = 0;
  113. }
///----------------------------------------------------------------------------
///
/// LowererMD::LoadInputParamPtr
///
/// Load the address of the start of the passed-in parameters not including
/// the this parameter. Returns the instruction that produces the address
/// (an LEA for coroutines, a stack-address load otherwise).
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LoadInputParamPtr(IR::Instr *instrInsert, IR::RegOpnd *optionalDstOpnd /* = nullptr */)
{
    if (this->m_func->GetJITFunctionBody()->IsCoroutine())
    {
        // Generators/async functions don't take their arguments from the
        // machine stack: load the generator's args pointer and skip one
        // MachPtr-sized slot ("this") to reach the first real argument.
        IR::RegOpnd * argPtrRegOpnd = Lowerer::LoadGeneratorArgsPtr(instrInsert);
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(argPtrRegOpnd, 1 * MachPtr, TyMachPtr, this->m_func);
        IR::RegOpnd * dstOpnd = optionalDstOpnd != nullptr ? optionalDstOpnd : IR::RegOpnd::New(TyMachPtr, this->m_func);
        return Lowerer::InsertLea(dstOpnd, indirOpnd, instrInsert);
    }
    else
    {
        // Stack looks like (EBP chain)+0, (return addr)+4, (function object)+8, (arg count)+12, (this)+16, actual args
        // (slot offsets above are in 4-byte units from the x86 layout; the code
        // below uses MachPtr-sized slots, so the first actual arg is slot 5).
        StackSym *paramSym = StackSym::New(TyMachReg, this->m_func);
        this->m_func->SetArgOffset(paramSym, 5 * MachPtr);
        IR::Instr *instr = this->lowererMD->LoadStackAddress(paramSym, optionalDstOpnd);
        instrInsert->InsertBefore(instr);
        return instr;
    }
}
IR::Instr *
LowererMDArch::LoadStackArgPtr(IR::Instr * instrArgPtr)
{
    // Get the args pointer relative to the frame pointer.
    // NOTE: This code is sufficient for the apply-args optimization, but not for StackArguments,
    // if and when that is enabled.
    // dst = LEA &[rbp + "this" offset + sizeof(var)]
    //
    // Reuse instrArgPtr's dst as the destination, then delete the original
    // instruction. Return the predecessor of the inserted load so the
    // caller's iteration resumes before the new code.
    IR::Instr * instr = LoadInputParamPtr(instrArgPtr, instrArgPtr->UnlinkDst()->AsRegOpnd());
    instrArgPtr->Remove();
    return instr->m_prev;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LoadHeapArgsCached
///
/// Lower Ld(Let)HeapArgsCached: either elide the heap arguments object
/// entirely (stack-args optimization) or lower to a HelperOp_LoadHeapArgsCached
/// call. Helper arguments are loaded last-to-first (s7 down to s1).
/// Returns the instruction preceding instrArgs so the caller can resume there.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LoadHeapArgsCached(IR::Instr *instrArgs)
{
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;
    IR::Instr *instrPrev = instrArgs->m_prev;
    if (instrArgs->m_func->IsStackArgsEnabled())
    {
        // Stack-args optimization: no heap object is created; the dst is
        // simply assigned null.
        instrArgs->m_opcode = Js::OpCode::MOV;
        instrArgs->ReplaceSrc1(IR::AddrOpnd::NewNull(func));
        if (PHASE_TRACE1(Js::StackArgFormalsOptPhase) && func->GetJITFunctionBody()->GetInParamsCount() > 1)
        {
            Output::Print(_u("StackArgFormals : %s (%d) :Removing Heap Arguments object creation in Lowerer. \n"), instrArgs->m_func->GetJITFunctionBody()->GetDisplayName(), instrArgs->m_func->GetFunctionNumber());
            Output::Flush();
        }
    }
    else
    {
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = local frame instance
        // s4 = address of first actual argument (after "this")
        // s3 = formal argument count
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadArguments(s1, s2, s3, s4, s5, s6, s7)
        // s7 = formals are let decls
        IR::Opnd * formalsAreLetDecls = IR::IntConstOpnd::New((IntConstType)(instrArgs->m_opcode == Js::OpCode::LdLetHeapArgsCached), TyUint8, func);
        this->LoadHelperArgument(instrArgs, formalsAreLetDecls);
        // s6 = memory context
        this->lowererMD->m_lowerer->LoadScriptContext(instrArgs);
        // s5 = local frame instance
        IR::Opnd *frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);
        if (func->IsInlinee())
        {
            // s4 = address of first actual argument (after "this").
            // Advance the inlinee argv slot by one MachPtr to step over "this".
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->lowererMD->LoadStackAddress(firstRealArgSlotSym);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s3 = formal argument count (without counting "this").
            uint32 formalsCount = func->GetJITFunctionBody()->GetInParamsCount() - 1;
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyUint32, func));
            // s2 = actual argument count (without counting "this").
            // For an inlinee the actual count is a compile-time constant.
            instr = IR::Instr::New(Js::OpCode::MOV,
                IR::RegOpnd::New(TyMachReg, func),
                IR::IntConstOpnd::New(func->actualCount - 1, TyMachReg, func),
                func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());
            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV,
                argObjSlotOpnd,
                instrArgs->GetDst(),
                func);
            instrArgs->InsertAfter(instr);
        }
        else
        {
            // s4 = address of first actual argument (after "this")
            // Stack looks like (EBP chain)+0, (return addr)+4, (function object)+8, (arg count)+12, (this)+16, actual args
            IR::Instr *instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s3 = formal argument count (without counting "this")
            uint32 formalsCount = func->GetInParamsCount() - 1;
            this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(formalsCount, TyInt32, func));
            // s2 = actual argument count (without counting "this")
            // Load the runtime count, then DEC to drop "this".
            instr = this->lowererMD->LoadInputParamCount(instrArgs);
            instr = IR::Instr::New(Js::OpCode::DEC, instr->GetDst(), instr->GetDst(), func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s1 = current function
            // The function object lives two MachPtr slots above the frame base.
            StackSym *paramSym = StackSym::New(TyMachReg, func);
            this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
            IR::Opnd * srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
            this->LoadHelperArgument(instrArgs, srcOpnd);
            // Save the newly-created args object to its dedicated stack slot.
            IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV, opnd, instrArgs->GetDst(), func);
            instrArgs->InsertAfter(instr);
        }
        this->lowererMD->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArgsCached);
    }
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LoadHeapArguments
///
/// Load the arguments object: either elide it entirely (stack-args
/// optimization) or lower to a HelperOp_LoadHeapArguments call, loading
/// helper arguments last-to-first (s7 down to s1).
/// NOTE: The same caveat regarding arguments passed on the stack applies here
/// as in LoadInputParamCount above.
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LoadHeapArguments(IR::Instr *instrArgs)
{
    ASSERT_INLINEE_FUNC(instrArgs);
    Func *func = instrArgs->m_func;
    IR::Instr *instrPrev = instrArgs->m_prev;
    if (func->IsStackArgsEnabled())
    {
        // Stack-args optimization: no heap object is created; the dst is
        // simply assigned null.
        instrArgs->m_opcode = Js::OpCode::MOV;
        instrArgs->ReplaceSrc1(IR::AddrOpnd::NewNull(func));
        if (PHASE_TRACE1(Js::StackArgFormalsOptPhase) && func->GetJITFunctionBody()->GetInParamsCount() > 1)
        {
            Output::Print(_u("StackArgFormals : %s (%d) :Removing Heap Arguments object creation in Lowerer. \n"), instrArgs->m_func->GetJITFunctionBody()->GetDisplayName(), instrArgs->m_func->GetFunctionNumber());
            Output::Flush();
        }
    }
    else
    {
        // s7 = formals are let decls
        // s6 = memory context
        // s5 = array of property ID's
        // s4 = local frame instance
        // s3 = address of first actual argument (after "this")
        // s2 = actual argument count
        // s1 = current function
        // dst = JavascriptOperators::LoadHeapArguments(s1, s2, s3, s4, s5, s6, s7)
        // s7 = formals are let decls
        this->LoadHelperArgument(instrArgs, IR::IntConstOpnd::New(instrArgs->m_opcode == Js::OpCode::LdLetHeapArguments ? TRUE : FALSE, TyUint8, func));
        // s6 = memory context
        instrPrev = this->lowererMD->m_lowerer->LoadScriptContext(instrArgs);
        // s5 = array of property ID's
        // Fall back to the script context's null address when the function
        // body has no formals property-id array.
        intptr_t formalsPropIdArray = instrArgs->m_func->GetJITFunctionBody()->GetFormalsPropIdArrayAddr();
        if (!formalsPropIdArray)
        {
            formalsPropIdArray = instrArgs->m_func->GetScriptContextInfo()->GetNullAddr();
        }
        IR::Opnd * argArray = IR::AddrOpnd::New(formalsPropIdArray, IR::AddrOpndKindDynamicMisc, m_func);
        this->LoadHelperArgument(instrArgs, argArray);
        // s4 = local frame instance
        IR::Opnd *frameObj = instrArgs->UnlinkSrc1();
        this->LoadHelperArgument(instrArgs, frameObj);
        if (func->IsInlinee())
        {
            // s3 = address of first actual argument (after "this").
            // Advance the inlinee argv slot by one MachPtr to step over "this".
            StackSym *firstRealArgSlotSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
            this->m_func->SetArgOffset(firstRealArgSlotSym, firstRealArgSlotSym->m_offset + MachPtr);
            IR::Instr *instr = this->lowererMD->LoadStackAddress(firstRealArgSlotSym);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s2 = actual argument count (without counting "this").
            // For an inlinee the actual count is a compile-time constant.
            instr = IR::Instr::New(Js::OpCode::MOV,
                IR::RegOpnd::New(TyUint32, func),
                IR::IntConstOpnd::New(func->actualCount - 1, TyUint32, func),
                func);
            instrArgs->InsertBefore(instr);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s1 = current function.
            this->LoadHelperArgument(instrArgs, func->GetInlineeFunctionObjectSlotOpnd());
            // Save the newly-created args object to its dedicated stack slot.
            IR::SymOpnd *argObjSlotOpnd = func->GetInlineeArgumentsObjectSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV,
                argObjSlotOpnd,
                instrArgs->GetDst(),
                func);
            instrArgs->InsertAfter(instr);
        }
        else
        {
            // s3 = address of first actual argument (after "this")
            // Stack looks like (EBP chain)+0, (return addr)+4, (function object)+8, (arg count)+12, (this)+16, actual args
            IR::Instr *instr = this->LoadInputParamPtr(instrArgs);
            this->LoadHelperArgument(instrArgs, instr->GetDst());
            // s2 = actual argument count (without counting "this")
            instr = this->lowererMD->LoadInputParamCount(instrArgs, -1);
            IR::Opnd * opndInputParamCount = instr->GetDst();
            this->LoadHelperArgument(instrArgs, opndInputParamCount);
            // s1 = current function
            // The function object lives two MachPtr slots above the frame base.
            StackSym * paramSym = StackSym::New(TyMachReg, func);
            this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
            IR::Opnd * srcOpnd = IR::SymOpnd::New(paramSym, TyMachReg, func);
            if (this->m_func->GetJITFunctionBody()->IsCoroutine())
            {
                // the function object for generator calls is a GeneratorVirtualScriptFunction object
                // and we need to pass the real JavascriptGeneratorFunction object so grab it instead
                IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, func);
                LowererMD::CreateAssign(tmpOpnd, srcOpnd, instrArgs);
                srcOpnd = IR::IndirOpnd::New(tmpOpnd, Js::GeneratorVirtualScriptFunction::GetRealFunctionOffset(), TyMachPtr, func);
            }
            this->LoadHelperArgument(instrArgs, srcOpnd);
            // Save the newly-created args object to its dedicated stack slot.
            IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
            instr = IR::Instr::New(Js::OpCode::MOV, opnd, instrArgs->GetDst(), func);
            instrArgs->InsertAfter(instr);
        }
        this->lowererMD->ChangeToHelperCall(instrArgs, IR::HelperOp_LoadHeapArguments);
    }
    return instrPrev;
}
  349. //
  350. // Load the parameter in the first argument slot
  351. //
  352. IR::Instr *
  353. LowererMDArch::LoadNewScObjFirstArg(IR::Instr * instr, IR::Opnd * dst, ushort extraArgs)
  354. {
  355. // Spread moves down the argument slot by one.
  356. IR::Opnd * argOpnd = this->GetArgSlotOpnd(3 + extraArgs);
  357. IR::Instr * argInstr = LowererMD::CreateAssign(argOpnd, dst, instr);
  358. return argInstr;
  359. }
// Map an argument's 1-based position to the register that carries it, or
// RegNOREG when the argument is passed in memory. Integer and float arguments
// use separate register sequences, generated from the REG_INT_ARG /
// REG_XMM_ARG entries in RegList.h.
inline static RegNum GetRegFromArgPosition(const bool isFloatArg, const uint16 argPosition)
{
    RegNum reg = RegNOREG;
    if (!isFloatArg && argPosition <= IntArgRegsCount)
    {
        // Expand one "case (Index + 1): reg = RegName;" per integer arg register.
        switch (argPosition)
        {
#define REG_INT_ARG(Index, Name) \
case ((Index) + 1): \
reg = Reg ## Name; \
break;
#include "RegList.h"
        default:
            Assume(UNREACHED);
        }
    }
    else if (isFloatArg && argPosition <= XmmArgRegsCount)
    {
        // Expand one "case (Index + 1): reg = RegName;" per XMM arg register.
        switch (argPosition)
        {
#define REG_XMM_ARG(Index, Name) \
case ((Index) + 1): \
reg = Reg ## Name; \
break;
#include "RegList.h"
        default:
            Assume(UNREACHED);
        }
    }
    return reg;
}
// Walk the ArgOut chain of callInstr backwards (dst sym -> defining instr),
// turning each ArgOut into an assignment to its machine argument slot, then
// emit the callInfo constant and lower the StartCall. Returns the total
// number of argument slots the call occupies (args + callInfo + extraParams).
int32
LowererMDArch::LowerCallArgs(IR::Instr *callInstr, ushort callFlags, Js::ArgSlot extraParams, IR::IntConstOpnd **callInfoOpndRef /* = nullptr */)
{
    AssertMsg(this->helperCallArgsCount == 0, "We don't support nested helper calls yet");
    const Js::ArgSlot argOffset = 1;
    uint32 argCount = 0;
    // Lower args and look for StartCall
    IR::Instr * argInstr = callInstr;
    IR::Instr * cfgInsertLoc = callInstr->GetPrevRealInstr();
    // src2 links each ArgOut to the previous one; the chain ends at a RegOpnd
    // defined by StartCall.
    IR::Opnd *src2 = argInstr->UnlinkSrc2();
    while (src2->IsSymOpnd())
    {
        IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
        StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
        AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
        argLinkOpnd->Free(this->m_func);
        argInstr = argLinkSym->m_instrDef;
        src2 = argInstr->UnlinkSrc2();
        this->lowererMD->ChangeToAssign(argInstr);
        // Mov each arg to its argSlot
        Js::ArgSlot argPosition = argInstr->GetDst()->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
        Js::ArgSlot index = argOffset + argPosition;
        // Guard against ArgSlot overflow when adding the offset...
        if(index < argPosition)
        {
            Js::Throw::OutOfMemory();
        }
        index += extraParams;
        // ...and again when adding the extra params.
        if(index < extraParams)
        {
            Js::Throw::OutOfMemory();
        }
        IR::Opnd * dstOpnd = this->GetArgSlotOpnd(index, argLinkSym);
        argInstr->ReplaceDst(dstOpnd);
        cfgInsertLoc = argInstr->GetPrevRealInstr();
        // The arg sym isn't assigned a constant directly anymore
        // TODO: We can just move the instruction down next to the call if it is just a constant assignment
        // but AMD64 doesn't have the MOV mem,imm64 encoding, and we have no code to detect if the value can fit
        // into imm32 and hoist the src if it is not.
        argLinkSym->m_isConst = false;
        argLinkSym->m_isIntConst = false;
        argLinkSym->m_isTaggableIntConst = false;
        // Move the lowered arg store to sit immediately before the call.
        argInstr->Unlink();
        callInstr->InsertBefore(argInstr);
        argCount++;
    }
    // End of the chain: src2 is the RegOpnd defined by StartCall.
    IR::RegOpnd * argLinkOpnd = src2->AsRegOpnd();
    StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(!argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    IR::Instr *startCallInstr = argLinkSym->m_instrDef;
    if (callInstr->m_opcode == Js::OpCode::NewScObject ||
    callInstr->m_opcode == Js::OpCode::NewScObjectSpread ||
    callInstr->m_opcode == Js::OpCode::NewScObjectLiteral ||
    callInstr->m_opcode == Js::OpCode::NewScObjArray ||
    callInstr->m_opcode == Js::OpCode::NewScObjArraySpread)
    {
        // These push an extra arg.
        argCount++;
    }
    AssertMsg(startCallInstr->m_opcode == Js::OpCode::StartCall ||
    startCallInstr->m_opcode == Js::OpCode::LoweredStartCall,
    "Problem with arg chain.");
    AssertMsg(startCallInstr->GetArgOutCount(/*getInterpreterArgOutCount*/ false) == argCount ||
    m_func->GetJITFunctionBody()->IsAsmJsMode(),
    "ArgCount doesn't match StartCall count");
    //
    // Machine dependent lowering
    //
    if (callInstr->m_opcode != Js::OpCode::AsmJsCallI)
    {
        // Push argCount
        IR::IntConstOpnd *argCountOpnd = Lowerer::MakeCallInfoConst(callFlags, argCount, m_func);
        if (callInfoOpndRef)
        {
            // Caller wants to reuse the callInfo constant; bump its use count.
            argCountOpnd->Use(m_func);
            *callInfoOpndRef = argCountOpnd;
        }
        Lowerer::InsertMove(this->GetArgSlotOpnd(1 + extraParams), argCountOpnd, callInstr);
    }
    startCallInstr = this->LowerStartCall(startCallInstr);
    const uint32 argSlots = argCount + 1 + extraParams; // + 1 for call flags
    this->m_func->m_argSlotsForFunctionsCalled = max(this->m_func->m_argSlotsForFunctionsCalled, argSlots);
    if (m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();
        GeneratePreCall(callInstr, functionObjOpnd, cfgInsertLoc->GetNextRealInstr());
    }
    return argSlots;
}
  479. void
  480. LowererMDArch::SetMaxArgSlots(Js::ArgSlot actualCount /*including this*/)
  481. {
  482. Js::ArgSlot offset = 3;//For function object & callInfo & this
  483. if (this->m_func->m_argSlotsForFunctionsCalled < (uint32) (actualCount + offset))
  484. {
  485. this->m_func->m_argSlotsForFunctionsCalled = (uint32)(actualCount + offset);
  486. }
  487. return;
  488. }
  489. void
  490. LowererMDArch::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, size_t value, IR::Instr * insertBeforeInstr, bool isZeroed)
  491. {
  492. IRType type = TyVar;
  493. if (isZeroed)
  494. {
  495. if (value == 0)
  496. {
  497. // Recycler memory are zero initialized
  498. return;
  499. }
  500. type = value <= UINT_MAX ?
  501. (value <= USHORT_MAX ?
  502. (value <= UCHAR_MAX ? TyUint8 : TyUint16) :
  503. TyUint32) :
  504. type;
  505. }
  506. Func * func = this->m_func;
  507. lowererMD->GetLowerer()->InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
  508. }
// Lower a dynamic call (actual arg count known only at runtime): positions the
// "this" ArgOut, stores the runtime arg count into the callInfo slot, emits the
// pre-call sequence, then lowers the CALL itself.
IR::Instr *
LowererMDArch::LowerCallIDynamic(IR::Instr *callInstr, IR::Instr*saveThisArgOutInstr, IR::Opnd *argsLength, ushort callFlags, IR::Instr * insertBeforeInstrForCFG)
{
    callInstr->InsertBefore(saveThisArgOutInstr); //Move this Argout next to call;
    this->LoadDynamicArgument(saveThisArgOutInstr, 3); //this pointer is the 3rd argument
    /*callInfo*/
    if (callInstr->m_func->IsInlinee())
    {
        // For inlinees the actual count is a compile-time constant; verify it.
        Assert(argsLength->AsIntConstOpnd()->GetValue() == callInstr->m_func->actualCount);
        this->SetMaxArgSlots((Js::ArgSlot)callInstr->m_func->actualCount);
    }
    else
    {
        // Runtime count excludes "this"; add 1 so callInfo reflects all actuals.
        callInstr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, argsLength, argsLength, IR::IntConstOpnd::New(1, TyMachReg, this->m_func), this->m_func));
        this->SetMaxArgSlots(Js::InlineeCallInfo::MaxInlineeArgoutCount);
    }
    // Arg slot 2 is the callInfo slot.
    callInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, this->GetArgSlotOpnd(2), argsLength, this->m_func));
    IR::Opnd *funcObjOpnd = callInstr->UnlinkSrc1();
    GeneratePreCall(callInstr, funcObjOpnd, insertBeforeInstrForCFG);
    // Normally for dynamic calls we move 4 args to registers and push remaining
    // args onto stack (Windows convention, and unchanged on xplat). We need to
    // manually home 4 args. Inlinees lower differently and follow platform ABI.
    // So we need to manually home actualArgsCount + 2 args (function, callInfo).
    const uint32 homeArgs = callInstr->m_func->IsInlinee() ?
        callInstr->m_func->actualCount + 2 : 4;
    LowerCall(callInstr, homeArgs);
    return callInstr;
}
// Guard the call target: if the operand might be a tagged value (not a heap
// object), branch to a helper path that raises JSERR_NeedFunction instead of
// calling through it.
void
LowererMDArch::GenerateFunctionObjectTest(IR::Instr * callInstr, IR::RegOpnd *functionObjOpnd, bool isHelper, IR::LabelInstr* continueAfterExLabel /* = nullptr */)
{
    AssertMsg(!m_func->IsJitInDebugMode() || continueAfterExLabel, "When jit is in debug mode, continueAfterExLabel must be provided otherwise continue after exception may cause AV.");
    IR::RegOpnd *functionObjRegOpnd = functionObjOpnd->AsRegOpnd();
    IR::Instr * insertBeforeInstr = callInstr;
    // Need check and error if we are calling a tagged int.
    if (!functionObjRegOpnd->IsNotTaggedValue())
    {
        IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        // GenerateObjectTest returns true when it actually emitted the test.
        if (this->lowererMD->GenerateObjectTest(functionObjRegOpnd, callInstr, helperLabel))
        {
            // Layout before callInstr becomes:
            //   JMP callLabel        ; fast path skips the error block
            //   helperLabel:         ; tagged value -> runtime error
            //   callLabel:
            IR::LabelInstr * callLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            IR::Instr* instr = IR::BranchInstr::New(Js::OpCode::JMP, callLabel, this->m_func);
            callInstr->InsertBefore(instr);
            callInstr->InsertBefore(helperLabel);
            callInstr->InsertBefore(callLabel);
            insertBeforeInstr = callLabel;
            // Error sequence is inserted before callLabel, i.e. under helperLabel.
            lowererMD->m_lowerer->GenerateRuntimeError(insertBeforeInstr, JSERR_NeedFunction);
            if (continueAfterExLabel)
            {
                // Under debugger the RuntimeError (exception) can be ignored, generate branch to jmp to safe place
                // (which would normally be debugger bailout check).
                IR::BranchInstr* continueAfterEx = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueAfterExLabel, this->m_func);
                insertBeforeInstr->InsertBefore(continueAfterEx);
            }
        }
    }
}
// Emit the pre-call sequence: load the target's entry point from the function
// object (directly for fixed/asm.js targets, via its Type otherwise), set it as
// the CALL's src1, optionally emit the CFG check on it, and home the function
// object into arg slot 1.
void
LowererMDArch::GeneratePreCall(IR::Instr * callInstr, IR::Opnd *functionObjOpnd, IR::Instr * insertBeforeInstrForCFGCheck)
{
    if (insertBeforeInstrForCFGCheck == nullptr)
    {
        insertBeforeInstrForCFGCheck = callInstr;
    }
    IR::RegOpnd * functionTypeRegOpnd = nullptr;
    IR::IndirOpnd * entryPointIndirOpnd = nullptr;
    if (callInstr->m_opcode == Js::OpCode::AsmJsCallI)
    {
        // asm.js path: function->type->entryPointInfo->address, reusing one
        // scratch register for each hop of the chain.
        functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
        IR::IndirOpnd* functionInfoIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);
        IR::Instr* instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionInfoIndirOpnd, m_func);
        insertBeforeInstrForCFGCheck->InsertBefore(instr);
        functionInfoIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, Js::ScriptFunctionType::GetEntryPointInfoOffset(), TyMachReg, m_func);
        instr = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionInfoIndirOpnd, m_func);
        insertBeforeInstrForCFGCheck->InsertBefore(instr);
        uint32 entryPointOffset = Js::ProxyEntryPointInfo::GetAddressOffset();
        entryPointIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, entryPointOffset, TyMachReg, m_func);
    }
    else
    {
        // For calls to fixed functions we load the function's type directly from the known (hard-coded) function object address.
        // For other calls, we need to load it from the function object stored in a register operand.
        if (functionObjOpnd->IsAddrOpnd() && functionObjOpnd->AsAddrOpnd()->m_isFunction)
        {
            functionTypeRegOpnd = this->lowererMD->m_lowerer->GenerateFunctionTypeFromFixedFunctionObject(insertBeforeInstrForCFGCheck, functionObjOpnd);
        }
        else if (functionObjOpnd->IsRegOpnd())
        {
            AssertMsg(functionObjOpnd->AsRegOpnd()->m_sym->IsStackSym(), "Expected call target to be a stack symbol.");
            functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
            // functionTypeRegOpnd(RAX) = MOV function->type
            {
                IR::IndirOpnd * functionTypeIndirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
                    Js::DynamicObject::GetOffsetOfType(), TyMachReg, m_func);
                IR::Instr * mov = IR::Instr::New(Js::OpCode::MOV, functionTypeRegOpnd, functionTypeIndirOpnd, m_func);
                insertBeforeInstrForCFGCheck->InsertBefore(mov);
            }
        }
        else
        {
            AnalysisAssertMsg(false, "Unexpected call target operand type.");
        }
        // entryPointRegOpnd(RAX) = MOV type->entryPoint
        entryPointIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, Js::Type::GetOffsetOfEntryPoint(), TyMachPtr, m_func);
    }
    // The type register is dead after the entry-point load, so reuse it to hold
    // the entry point itself.
    IR::RegOpnd *entryPointRegOpnd = functionTypeRegOpnd;
    entryPointRegOpnd->m_isCallArg = true;
    IR::Instr *mov = IR::Instr::New(Js::OpCode::MOV, entryPointRegOpnd, entryPointIndirOpnd, m_func);
    insertBeforeInstrForCFGCheck->InsertBefore(mov);
    // entryPointRegOpnd(RAX) = CALL entryPointRegOpnd(RAX)
    callInstr->SetSrc1(entryPointRegOpnd);
#if defined(_CONTROL_FLOW_GUARD)
    // verify that the call target is valid (CFG Check)
    if (!PHASE_OFF(Js::CFGInJitPhase, this->m_func))
    {
        this->lowererMD->GenerateCFGCheck(entryPointRegOpnd, insertBeforeInstrForCFGCheck);
    }
#endif
    // Setup the first call argument - pointer to the function being called.
    IR::Instr * instrMovArg1 = IR::Instr::New(Js::OpCode::MOV, GetArgSlotOpnd(1), functionObjOpnd, m_func);
    callInstr->InsertBefore(instrMovArg1);
}
// Lower a JS call: verify the target is callable (unless already guaranteed),
// emit the pre-call sequence and arg chain, lower the CALL, and hook up SimpleJit
// call profiling when requested.
IR::Instr *
LowererMDArch::LowerCallI(IR::Instr * callInstr, ushort callFlags, bool isHelper, IR::Instr * insertBeforeInstrForCFG)
{
    AssertMsg(this->helperCallArgsCount == 0, "We don't support nested helper calls yet");
    IR::Opnd * functionObjOpnd = callInstr->UnlinkSrc1();
    IR::Instr * insertBeforeInstrForCFGCheck = callInstr;
    // If this is a call for new, we already pass the function operand through NewScObject,
    // which checks if the function operand is a real function or not, don't need to add a check again
    // If this is a call to a fixed function, we've already verified that the target is, indeed, a function.
    if (callInstr->m_opcode != Js::OpCode::CallIFixed && !(callFlags & Js::CallFlags_New))
    {
        Assert(functionObjOpnd->IsRegOpnd());
        IR::LabelInstr* continueAfterExLabel = Lowerer::InsertContinueAfterExceptionLabelForDebugger(m_func, callInstr, isHelper);
        GenerateFunctionObjectTest(callInstr, functionObjOpnd->AsRegOpnd(), isHelper, continueAfterExLabel);
    }
    else if (insertBeforeInstrForCFG != nullptr)
    {
        RegNum dstReg = insertBeforeInstrForCFG->GetDst()->AsRegOpnd()->GetReg();
        AssertMsg(dstReg == RegArg2 || dstReg == RegArg3, "NewScObject should insert the first Argument in RegArg2/RegArg3 only based on Spread call or not.");
        insertBeforeInstrForCFGCheck = insertBeforeInstrForCFG;
    }
    GeneratePreCall(callInstr, functionObjOpnd, insertBeforeInstrForCFGCheck);
    // We need to get the calculated CallInfo in SimpleJit because that doesn't include any changes for stack alignment
    IR::IntConstOpnd *callInfo = nullptr;
    int32 argCount = LowerCallArgs(callInstr, callFlags, 1, &callInfo);
    IR::Opnd *const finalDst = callInstr->GetDst();
    // x64 keeps track of argCount for us, so pass just an arbitrary value there
    IR::Instr* ret = this->LowerCall(callInstr, argCount);
    IR::AutoReuseOpnd autoReuseSavedFunctionObjOpnd;
    if (callInstr->IsJitProfilingInstr())
    {
        Assert(callInstr->m_func->IsSimpleJit());
        Assert(!CONFIG_FLAG(NewSimpleJit));
        if(finalDst &&
            finalDst->IsRegOpnd() &&
            functionObjOpnd->IsRegOpnd() &&
            finalDst->AsRegOpnd()->m_sym == functionObjOpnd->AsRegOpnd()->m_sym)
        {
            // The function object sym is going to be overwritten, so save it in a temp for profiling
            IR::RegOpnd *const savedFunctionObjOpnd = IR::RegOpnd::New(functionObjOpnd->GetType(), callInstr->m_func);
            autoReuseSavedFunctionObjOpnd.Initialize(savedFunctionObjOpnd, callInstr->m_func);
            Lowerer::InsertMove(savedFunctionObjOpnd, functionObjOpnd, callInstr->m_next);
            functionObjOpnd = savedFunctionObjOpnd;
        }
        auto instr = callInstr->AsJitProfilingInstr();
        ret = this->lowererMD->m_lowerer->GenerateCallProfiling(
            instr->profileId,
            instr->inlineCacheIndex,
            instr->GetDst(),
            functionObjOpnd,
            callInfo,
            instr->isProfiledReturnCall,
            callInstr,
            ret);
    }
    return ret;
}
  688. static inline IRType ExtendHelperArg(IRType type)
  689. {
  690. #ifdef __clang__
  691. // clang expects caller to extend arg size to int
  692. switch (type)
  693. {
  694. case TyInt8:
  695. case TyInt16:
  696. return TyInt32;
  697. case TyUint8:
  698. case TyUint16:
  699. return TyUint32;
  700. }
  701. #endif
  702. return type;
  703. }
// Final machine-dependent lowering of a call: sinks the dst into the return
// register, assigns pending helper args to their ABI positions (with Sys V
// int/xmm register classes handled separately on xplat), manually homes
// register args on xplat, and materializes helper-call targets into a register.
IR::Instr *
LowererMDArch::LowerCall(IR::Instr * callInstr, uint32 argCount)
{
    UNREFERENCED_PARAMETER(argCount);
    IR::Instr *retInstr = callInstr;
    callInstr->m_opcode = Js::OpCode::CALL;
    // This is required here due to calls create during lowering
    callInstr->m_func->SetHasCallsOnSelfAndParents();
    if (callInstr->GetDst())
    {
        IR::Opnd * dstOpnd;
        this->lowererMD->ForceDstToReg(callInstr);
        dstOpnd = callInstr->GetDst();
        IRType dstType = dstOpnd->GetType();
        Js::OpCode assignOp = GetAssignOp(dstType);
        if (callInstr->GetSrc1()->IsHelperCallOpnd())
        {
            // Truncate the result of a conversion to 32-bit int, because the C++ code doesn't.
            IR::HelperCallOpnd *helperOpnd = callInstr->GetSrc1()->AsHelperCallOpnd();
            if (helperOpnd->m_fnHelper == IR::HelperConv_ToInt32 ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToInt32_Full ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToInt32Core ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToUInt32 ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToUInt32_Full ||
                helperOpnd->m_fnHelper == IR::HelperConv_ToUInt32Core)
            {
                assignOp = Js::OpCode::MOV_TRUNC;
            }
        }
        // Split into CALL -> retReg; retReg -> original dst.
        IR::Instr * movInstr = callInstr->SinkDst(assignOp);
        RegNum reg = GetRegReturn(dstType);
        callInstr->GetDst()->AsRegOpnd()->SetReg(reg);
        movInstr->GetSrc1()->AsRegOpnd()->SetReg(reg);
        retInstr = movInstr;
    }
    //
    // assign the arguments to appropriate positions
    //
    AssertMsg(this->helperCallArgsCount >= 0, "Fatal. helper call arguments ought to be positive");
    AssertMsg(this->helperCallArgsCount < MaxArgumentsToHelper && MaxArgumentsToHelper < 255, "Too many helper call arguments");
    uint16 argsLeft = static_cast<uint16>(this->helperCallArgsCount);
    // Sys V x64 ABI assigns int and xmm arg registers separately.
    // e.g. args: int, double, int, double, int, double
    // Windows:   int0, xmm1, int2, xmm3, stack, stack
    // Sys V:     int0, xmm0, int1, xmm1, int2, xmm2
#ifdef _WIN32
#define _V_ARG_INDEX(index) index
#else
    // Precompute each helper arg's Sys V position (1-based), walking args in
    // reverse-push order so _vindex[i] matches argsLeft == i + 1 below.
    uint16 _vindex[MaxArgumentsToHelper];
    {
        uint16 intIndex = 1, doubleIndex = 1, stackIndex = IntArgRegsCount + 1;
        for (int i = 0; i < this->helperCallArgsCount; i++)
        {
            IR::Opnd * helperSrc = this->helperCallArgs[this->helperCallArgsCount - 1 - i];
            IRType type = helperSrc->GetType();
            if (IRType_IsFloat(type) || IRType_IsSimd128(type))
            {
                if (doubleIndex <= XmmArgRegsCount)
                {
                    _vindex[i] = doubleIndex++;
                }
                else
                {
                    _vindex[i] = stackIndex++;
                }
            }
            else
            {
                if (intIndex <= IntArgRegsCount)
                {
                    _vindex[i] = intIndex++;
                }
                else
                {
                    _vindex[i] = stackIndex++;
                }
            }
        }
    }
#define _V_ARG_INDEX(index) _vindex[(index) - 1]
#endif
    // xplat NOTE: Lower often loads "known args" with LoadHelperArgument() and
    // variadic JS runtime args with LowerCallArgs(). So the full args length is
    //   this->helperCallArgsCount + argCount
    // "argCount > 0" indicates we have variadic JS runtime args and needs to
    // manually home registers on xplat.
    const bool shouldHomeParams = argCount > 0;
    while (argsLeft > 0)
    {
        IR::Opnd * helperSrc = this->helperCallArgs[this->helperCallArgsCount - argsLeft];
        uint16 index = _V_ARG_INDEX(argsLeft);
        StackSym * helperSym = m_func->m_symTable->GetArgSlotSym(index);
        helperSym->m_type = ExtendHelperArg(helperSrc->GetType());
        Lowerer::InsertMove(
            this->GetArgSlotOpnd(index, helperSym, /*isHelper*/!shouldHomeParams),
            helperSrc,
            callInstr, false);
        --argsLeft;
    }
#ifndef _WIN32
    // Manually home args
    if (shouldHomeParams)
    {
        const int callArgCount = this->helperCallArgsCount + static_cast<int>(argCount);
        int argRegs = min(callArgCount, static_cast<int>(XmmArgRegsCount));
        for (int i = argRegs; i > 0; i--)
        {
            IRType type = this->xplatCallArgs.args[i];
            bool isFloatArg = this->xplatCallArgs.IsFloat(i);
            // Positions past the int register count can only be xmm args.
            if ( i > IntArgRegsCount && !isFloatArg ) continue;
            StackSym * sym = this->m_func->m_symTable->GetArgSlotSym(static_cast<uint16>(i));
            RegNum reg = GetRegFromArgPosition(isFloatArg, i);
            IR::RegOpnd *regOpnd = IR::RegOpnd::New(nullptr, reg, type, this->m_func);
            regOpnd->m_isCallArg = true;
            Lowerer::InsertMove(
                IR::SymOpnd::New(sym, type, this->m_func),
                regOpnd,
                callInstr, false);
        }
    }
    this->xplatCallArgs.Reset();
#endif // !_WIN32
    //
    // load the address into a register because we cannot directly access 64 bit constants
    // in CALL instruction. Non helper call methods will already be accessed indirectly.
    //
    // Skip this for bailout calls. The register allocator will lower that as appropriate, without affecting spill choices.
    //
    // Also skip this for relocatable helper calls. These will be turned into indirect
    // calls in lower.
    if (callInstr->GetSrc1()->IsHelperCallOpnd() && !callInstr->HasBailOutInfo())
    {
        IR::RegOpnd *targetOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr,m_func), RegRAX, TyMachPtr, this->m_func);
        IR::Instr *movInstr = IR::Instr::New(Js::OpCode::MOV, targetOpnd, callInstr->GetSrc1(), this->m_func);
        targetOpnd->m_isCallArg = true;
        callInstr->UnlinkSrc1();
        callInstr->SetSrc1(targetOpnd);
        callInstr->InsertBefore(movInstr);
    }
    //
    // Reset the call
    //
    this->m_func->m_argSlotsForFunctionsCalled = max(this->m_func->m_argSlotsForFunctionsCalled , (uint32)this->helperCallArgsCount);
    this->helperCallArgsCount = 0;
    return retInstr;
}
//
// Returns the opnd where the corresponding argument would have been stored. On amd64,
// the first 4 arguments go in registers and the rest are on stack.
// 'index' is 1-based; when 'argSym' is supplied its stack offset and type are
// used, otherwise a TyMachReg slot sym is fabricated for stack positions.
//
IR::Opnd *
LowererMDArch::GetArgSlotOpnd(uint16 index, StackSym * argSym, bool isHelper /*= false*/)
{
    Assert(index != 0);
    uint16 argPosition = index;
#ifdef ENABLE_SIMDJS
    // Without SIMD the index is the Var offset and is also the argument index. Since each arg = 1 Var.
    // With SIMD, args are of variable length and we need to use the argument position in the args list.
    if (m_func->IsSIMDEnabled() &&
        m_func->GetJITFunctionBody()->IsAsmJsMode() &&
        argSym != nullptr &&
        argSym->m_argPosition != 0)
    {
        argPosition = (uint16)argSym->m_argPosition;
    }
#endif
    IR::Opnd *argSlotOpnd = nullptr;
    if (argSym != nullptr)
    {
        // Slot offsets are laid out from the raw index, not the SIMD-adjusted position.
        argSym->m_offset = (index - 1) * MachPtr;
        argSym->m_allocated = true;
    }
    IRType type = argSym ? argSym->GetType() : TyMachReg;
    const bool isFloatArg = IRType_IsFloat(type) || IRType_IsSimd128(type);
    RegNum reg = GetRegFromArgPosition(isFloatArg, argPosition);
#ifndef _WIN32
    // Record xmm-register arg positions so LowerCall can home them correctly.
    if (isFloatArg && argPosition <= XmmArgRegsCount)
    {
        this->xplatCallArgs.SetFloat(argPosition);
    }
#endif
    if (reg != RegNOREG)
    {
        // Argument lives in a register for this position.
        IR::RegOpnd *regOpnd = IR::RegOpnd::New(argSym, reg, type, m_func);
        regOpnd->m_isCallArg = true;
        argSlotOpnd = regOpnd;
    }
    else
    {
        // Argument goes on the stack; make sure we have a slot sym for it.
        if (argSym == nullptr)
        {
            argSym = this->m_func->m_symTable->GetArgSlotSym(index);
        }
#ifndef _WIN32
        // helper does not home args, adjust stack offset
        if (isHelper)
        {
            const uint16 argIndex = index - IntArgRegsCount;
            argSym->m_offset = (argIndex - 1) * MachPtr;
        }
#endif
        argSlotOpnd = IR::SymOpnd::New(argSym, type, this->m_func);
    }
    return argSlotOpnd;
}
  909. IR::Instr *
  910. LowererMDArch::LowerAsmJsCallE(IR::Instr *callInstr)
  911. {
  912. IR::IntConstOpnd *callInfo = nullptr;
  913. int32 argCount = this->LowerCallArgs(callInstr, Js::CallFlags_Value, 1, &callInfo);
  914. IR::Instr* ret = this->LowerCall(callInstr, argCount);
  915. return ret;
  916. }
  917. IR::Instr *
  918. LowererMDArch::LowerAsmJsCallI(IR::Instr * callInstr)
  919. {
  920. int32 argCount = this->LowerCallArgs(callInstr, Js::CallFlags_Value, 0);
  921. IR::Instr* ret = this->LowerCall(callInstr, argCount);
  922. return ret;
  923. }
// Insert a bounds check for a wasm memory access: compares (index + access end
// offset) against the buffer length and throws WASMERR_ArrayIndexOutOfRange on
// failure. With fast (guard-page) array buffers enabled, no check is emitted.
IR::Instr *
LowererMDArch::LowerWasmMemOp(IR::Instr * instr, IR::Opnd *addrOpnd)
{
#if ENABLE_FAST_ARRAYBUFFER
    if (CONFIG_FLAG(WasmFastArray))
    {
        // Guard pages handle out-of-bounds accesses; no explicit check needed.
        return instr;
    }
#endif
    Assert(instr->GetSrc2());
    // Label layout before 'instr': helperLabel, loadLabel, doneLabel.
    IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
    IR::LabelInstr * loadLabel = Lowerer::InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);
    // Find array buffer length
    IR::IndirOpnd * indirOpnd = addrOpnd->AsIndirOpnd();
    IR::RegOpnd * indexOpnd = indirOpnd->GetIndexOpnd();
    // NOTE(review): offset is held as uint32 and then widened to int64, so a
    // negative indir offset would zero-extend here — confirm wasm memop offsets
    // are always non-negative.
    uint32 offset = indirOpnd->GetOffset();
    IR::Opnd *arrayLenOpnd = instr->GetSrc2();
    // End offset of the access = access size + constant offset.
    IR::Int64ConstOpnd * constOffsetOpnd = IR::Int64ConstOpnd::New((int64)addrOpnd->GetSize() + (int64)offset, TyInt64, m_func);
    IR::Opnd *cmpOpnd;
    if (indexOpnd != nullptr)
    {
        // Compare index + memop access length and array buffer length, and generate RuntimeError if greater
        cmpOpnd = IR::RegOpnd::New(TyInt64, m_func);
        Lowerer::InsertAdd(true, cmpOpnd, indexOpnd, constOffsetOpnd, helperLabel);
    }
    else
    {
        cmpOpnd = constOffsetOpnd;
    }
    // If the access end is beyond the buffer length, fall into the throw block.
    lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, arrayLenOpnd, Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
    lowererMD->m_lowerer->GenerateThrow(IR::IntConstOpnd::New(WASMERR_ArrayIndexOutOfRange, TyInt32, m_func), loadLabel);
    Lowerer::InsertBranch(Js::OpCode::Br, loadLabel, helperLabel);
    return doneLabel;
}
// Insert bounds-check / out-of-bounds handling around an asm.js load.
// On Windows x64 only SIMD loads need an explicit check (other loads rely on
// AV recovery); on xplat every load is checked. Out-of-bounds non-SIMD loads
// yield NaN (float) or 0 (int); out-of-bounds SIMD loads raise a range error.
IR::Instr*
LowererMDArch::LowerAsmJsLdElemHelper(IR::Instr * instr, bool isSimdLoad /*= false*/, bool checkEndOffset /*= false*/)
{
    IR::Instr* done;
    IR::Opnd * src1 = instr->UnlinkSrc1();
    IRType type = src1->GetType();
    IR::RegOpnd * indexOpnd = src1->AsIndirOpnd()->GetIndexOpnd();
    const uint8 dataWidth = instr->dataWidth;
    Assert(isSimdLoad == false || dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);
#ifdef _WIN32
    // For x64, bound checks are required only for SIMD loads.
    if (isSimdLoad)
#else
    // xplat: Always do bound check. We don't support out-of-bound access violation recovery.
    if (true)
#endif
    {
        // Label layout before 'instr': helperLabel (OOB), loadLabel, doneLabel.
        IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
        IR::LabelInstr * loadLabel = Lowerer::InsertLabel(false, instr);
        IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);
        IR::Opnd *cmpOpnd;
        if (indexOpnd)
        {
            cmpOpnd = indexOpnd;
        }
        else
        {
            // Constant index: compare the indir's offset directly.
            cmpOpnd = IR::IntConstOpnd::New(src1->AsIndirOpnd()->GetOffset(), TyUint32, m_func);
        }
        // if dataWidth != byte per element, we need to check end offset
        if (checkEndOffset)
        {
            IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
            // MOV tmp, cmpOnd
            Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
            // ADD tmp, dataWidth
            Lowerer::InsertAdd(true, tmp, tmp, IR::IntConstOpnd::New((uint32)dataWidth, tmp->GetType(), m_func, true), helperLabel);
            // JB helper -- unsigned overflow of (index + dataWidth) is out of bounds
            Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
            // CMP tmp, size
            // JG $helper
            lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
        }
        else
        {
            lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
        }
        // In-bounds path skips the helper block.
        Lowerer::InsertBranch(Js::OpCode::Br, loadLabel, helperLabel);
        if (isSimdLoad)
        {
            lowererMD->m_lowerer->GenerateRuntimeError(loadLabel, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
        }
        else
        {
            // Non-SIMD out-of-bounds loads produce a default value instead of throwing.
            if (IRType_IsFloat(type))
            {
                Lowerer::InsertMove(instr->UnlinkDst(), IR::FloatConstOpnd::New(Js::NumberConstants::NaN, type, m_func), loadLabel);
            }
            else
            {
                Lowerer::InsertMove(instr->UnlinkDst(), IR::IntConstOpnd::New(0, TyInt8, m_func), loadLabel);
            }
        }
        Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, loadLabel);
        done = doneLabel;
    }
    else
    {
        Assert(!instr->GetSrc2());
        done = instr;
    }
    return done;
}
// Insert bounds-check / out-of-bounds handling around an asm.js store.
// Mirrors LowerAsmJsLdElemHelper, except an out-of-bounds non-SIMD store is
// simply skipped (no default value is written).
IR::Instr*
LowererMDArch::LowerAsmJsStElemHelper(IR::Instr * instr, bool isSimdStore /*= false*/, bool checkEndOffset /*= false*/)
{
    IR::Instr* done;
    IR::Opnd * dst = instr->UnlinkDst();
    IR::RegOpnd * indexOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
    const uint8 dataWidth = instr->dataWidth;
    Assert(isSimdStore == false || dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);
#ifdef _WIN32
    // For x64, bound checks are required only for SIMD stores.
    if (isSimdStore)
#else
    // xplat: Always do bound check. We don't support out-of-bound access violation recovery.
    if (true)
#endif
    {
        // Label layout before 'instr': helperLabel (OOB), storeLabel, doneLabel.
        IR::LabelInstr * helperLabel = Lowerer::InsertLabel(true, instr);
        IR::LabelInstr * storeLabel = Lowerer::InsertLabel(false, instr);
        IR::LabelInstr * doneLabel = Lowerer::InsertLabel(false, instr);
        IR::Opnd * cmpOpnd;
        if (indexOpnd)
        {
            cmpOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
        }
        else
        {
            // Constant index: compare the indir's offset directly.
            cmpOpnd = IR::IntConstOpnd::New(dst->AsIndirOpnd()->GetOffset(), TyUint32, m_func);
        }
        // if dataWidth != byte per element, we need to check end offset
        if (checkEndOffset)
        {
            IR::RegOpnd *tmp = IR::RegOpnd::New(cmpOpnd->GetType(), m_func);
            // MOV tmp, cmpOnd
            Lowerer::InsertMove(tmp, cmpOpnd, helperLabel);
            // ADD tmp, dataWidth
            Lowerer::InsertAdd(true, tmp, tmp, IR::IntConstOpnd::New((uint32)dataWidth, tmp->GetType(), m_func, true), helperLabel);
            // JB helper -- unsigned overflow of (index + dataWidth) is out of bounds
            Lowerer::InsertBranch(Js::OpCode::JB, helperLabel, helperLabel);
            // CMP tmp, size
            // JG $helper
            lowererMD->m_lowerer->InsertCompareBranch(tmp, instr->UnlinkSrc2(), Js::OpCode::BrGt_A, true, helperLabel, helperLabel);
        }
        else
        {
            lowererMD->m_lowerer->InsertCompareBranch(cmpOpnd, instr->UnlinkSrc2(), Js::OpCode::BrGe_A, true, helperLabel, helperLabel);
        }
        // In-bounds path skips the helper block.
        Lowerer::InsertBranch(Js::OpCode::Br, storeLabel, helperLabel);
        if (isSimdStore)
        {
            lowererMD->m_lowerer->GenerateRuntimeError(storeLabel, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
        }
        Lowerer::InsertBranch(Js::OpCode::Br, doneLabel, storeLabel);
        done = doneLabel;
    }
    else
    {
        Assert(!instr->GetSrc2());
        done = instr;
    }
    return done;
}
///----------------------------------------------------------------------------
///
/// LowererMDArch::LowerStartCall
///
///     Mark the StartCall as lowered. On amd64 no machine code is emitted for
///     it; the instruction only serves as a marker in the arg chain.
///
///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LowerStartCall(IR::Instr * startCallInstr)
{
    // Only the opcode changes; the instruction stays in place as a marker.
    startCallInstr->m_opcode = Js::OpCode::LoweredStartCall;
    return startCallInstr;
}
IR::Instr *
LowererMDArch::LoadInt64HelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
{
    // On amd64 an int64 fits in a single argument slot/register, so the
    // generic helper-argument path handles it unchanged.
    return LoadHelperArgument(instrInsert, opndArg);
}
  1110. ///----------------------------------------------------------------------------
  1111. ///
  1112. /// LowererMDArch::LoadHelperArgument
  1113. ///
  1114. /// Assign register or push on stack as per AMD64 calling convention
  1115. ///
  1116. ///----------------------------------------------------------------------------
  1117. IR::Instr *
  1118. LowererMDArch::LoadHelperArgument(IR::Instr *instr, IR::Opnd *opndArg)
  1119. {
  1120. IR::Opnd *destOpnd;
  1121. IR::Instr *instrToReturn;
  1122. if(opndArg->IsImmediateOpnd())
  1123. {
  1124. destOpnd = opndArg;
  1125. instrToReturn = instr;
  1126. }
  1127. else
  1128. {
  1129. destOpnd = IR::RegOpnd::New(opndArg->GetType(), this->m_func);
  1130. instrToReturn = instr->m_prev;
  1131. Lowerer::InsertMove(destOpnd, opndArg, instr, false);
  1132. instrToReturn = instrToReturn->m_next;
  1133. }
  1134. helperCallArgs[helperCallArgsCount++] = destOpnd;
  1135. AssertMsg(helperCallArgsCount < LowererMDArch::MaxArgumentsToHelper,
  1136. "We do not yet support any no. of arguments to the helper");
  1137. return instrToReturn;
  1138. }
  1139. IR::Instr *
  1140. LowererMDArch::LoadDynamicArgument(IR::Instr *instr, uint argNumber)
  1141. {
  1142. Assert(instr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
  1143. Assert(instr->GetSrc2() == nullptr);
  1144. instr->m_opcode = Js::OpCode::MOV;
  1145. IR::Opnd* dst = GetArgSlotOpnd((Js::ArgSlot) argNumber);
  1146. instr->SetDst(dst);
  1147. if (!dst->IsRegOpnd())
  1148. {
  1149. //TODO: Move it to legalizer.
  1150. IR::RegOpnd *tempOpnd = IR::RegOpnd::New(TyMachReg, instr->m_func);
  1151. instr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, tempOpnd, instr->GetSrc1(), instr->m_func));
  1152. instr->ReplaceSrc1(tempOpnd);
  1153. }
  1154. return instr;
  1155. }
// Store an ArgOut whose slot position comes from a runtime length value (src2):
// the value is written at [rsp + (length + 3) * MachPtr].
IR::Instr *
LowererMDArch::LoadDynamicArgumentUsingLength(IR::Instr *instr)
{
    Assert(instr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
    IR::RegOpnd* src2 = instr->UnlinkSrc2()->AsRegOpnd();
    // Copy the length into a scratch register so we can adjust it.
    IR::Instr*mov = IR::Instr::New(Js::OpCode::MOV, IR::RegOpnd::New(TyMachReg, this->m_func), src2, this->m_func);
    instr->InsertBefore(mov);
    //We need store nth actuals, so stack location is after function object, callinfo & this pointer
    instr->InsertBefore(IR::Instr::New(Js::OpCode::ADD, mov->GetDst(), mov->GetDst(), IR::IntConstOpnd::New(3, TyMachReg, this->m_func), this->m_func));
    // Address the slot as [rsp + adjustedIndex * scale].
    IR::RegOpnd *stackPointer = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);
    IR::IndirOpnd *actualsLocation = IR::IndirOpnd::New(stackPointer, mov->GetDst()->AsRegOpnd(), GetDefaultIndirScale(), TyMachReg, this->m_func);
    instr->SetDst(actualsLocation);
    instr->m_opcode = Js::OpCode::MOV;
    return instr;
}
  1171. IR::Instr *
  1172. LowererMDArch::LoadDoubleHelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
  1173. {
  1174. IR::Opnd * float64Opnd;
  1175. if (opndArg->GetType() == TyFloat32)
  1176. {
  1177. float64Opnd = IR::RegOpnd::New(TyFloat64, m_func);
  1178. IR::Instr * instr = IR::Instr::New(Js::OpCode::CVTSS2SD, float64Opnd, opndArg, this->m_func);
  1179. instrInsert->InsertBefore(instr);
  1180. }
  1181. else
  1182. {
  1183. float64Opnd = opndArg;
  1184. }
  1185. Assert(opndArg->IsFloat());
  1186. return LoadHelperArgument(instrInsert, opndArg);
  1187. }
  1188. IR::Instr *
  1189. LowererMDArch::LoadFloatHelperArgument(IR::Instr * instrInsert, IR::Opnd * opndArg)
  1190. {
  1191. Assert(opndArg->IsFloat32());
  1192. return LoadHelperArgument(instrInsert, opndArg);
  1193. }
//
// Emits the code to allocate 'size' amount of space on stack. for values smaller than PAGE_SIZE
// this will just emit sub rsp,size otherwise calls _chkstk.
//
void
LowererMDArch::GenerateStackAllocation(IR::Instr *instr, uint32 size)
{
    Assert(size > 0);
    IR::RegOpnd * rspOpnd = IR::RegOpnd::New(nullptr, RegRSP, TyMachReg, this->m_func);
    //review: size should fit in 32bits
    IR::IntConstOpnd * stackSizeOpnd = IR::IntConstOpnd::New(size, TyMachReg, this->m_func);
    if (size <= PAGESIZE)
    {
        // Generate SUB RSP, stackSize
        IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB,
            rspOpnd, rspOpnd, stackSizeOpnd, this->m_func);
        instr->InsertAfter(subInstr);
    }
    else
    {
        // Generate _chkstk call
        //
        // REVIEW: Call to helper functions assume the address of the variable to be present in
        // RAX. But _chkstk method accepts argument in RAX. Hence handling this one manually.
        // fix this later when CALLHELPER leaved dependency on RAX.
        //
        // NOTE: each InsertAfter below prepends to the sequence following
        // 'instr', so the final emitted order is the reverse of insertion:
        //   MOV RAX, size; MOV RCX, &_chkstk; CALL RCX; SUB RSP, size
        IR::RegOpnd *raxOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, this->m_func);
        IR::RegOpnd *rcxOpnd = IR::RegOpnd::New(nullptr, RegRCX, TyMachReg, this->m_func);
        IR::Instr * subInstr = IR::Instr::New(Js::OpCode::SUB, rspOpnd, rspOpnd, stackSizeOpnd, this->m_func);
        instr->InsertAfter(subInstr);
        // Leave off the src until we've calculated it below.
        IR::Instr * callInstr = IR::Instr::New(Js::OpCode::Call, raxOpnd, rcxOpnd, this->m_func);
        instr->InsertAfter(callInstr);
        this->LowerCall(callInstr, 0);
        {
            IR::Instr *movHelperAddrInstr = IR::Instr::New(
                Js::OpCode::MOV,
                rcxOpnd,
                IR::HelperCallOpnd::New(IR::HelperCRT_chkstk, this->m_func),
                this->m_func);
            instr->InsertAfter(movHelperAddrInstr);
        }
        // _chkstk takes the allocation size in RAX.
        LowererMD::CreateAssign(raxOpnd, stackSizeOpnd, instr->m_next);
    }
}
  1239. void
  1240. LowererMDArch::MovArgFromReg2Stack(IR::Instr * instr, RegNum reg, uint16 slotNumber, IRType type)
  1241. {
  1242. StackSym * slotSym = this->m_func->m_symTable->GetArgSlotSym(slotNumber + 1);
  1243. slotSym->m_type = type;
  1244. IR::SymOpnd * dst = IR::SymOpnd::New(slotSym, type, this->m_func);
  1245. IR::RegOpnd * src = IR::RegOpnd::New(nullptr, reg, type, this->m_func);
  1246. IR::Instr * movInstr = IR::Instr::New(GetAssignOp(type), dst, src, this->m_func);
  1247. instr->InsertAfter(movInstr);
  1248. }
  1249. ///----------------------------------------------------------------------------
  1250. ///
  1251. /// LowererMDArch::LowerEntryInstr
  1252. ///
  1253. /// Emit prolog.
  1254. ///
  1255. ///----------------------------------------------------------------------------
IR::Instr *
LowererMDArch::LowerEntryInstr(IR::EntryInstr * entryInstr)
{
    // Builds the x64 prolog. Instructions are inserted relative to
    // 'entryInstr' (InsertAfter builds the prolog back-to-front) and relative
    // to 'secondInstr' (the instruction that followed the entry on arrival),
    // so insertion order below is deliberate and fragile.
    /*
     * push rbp
     * mov rbp, rsp
     * sub rsp, localVariablesHeight + floatCalleeSavedRegsSize
     * movsdx qword ptr [rsp + 0], xmm6  ------\
     * movsdx qword ptr [rsp + 8], xmm7        |
     * ...                                     |
     * movsdx qword ptr [rsp + (N * 8)], xmmN  |- Callee saved registers.
     * push rsi                                |
     * ...                                     |
     * push rbx                          ------/
     * sub rsp, ArgumentsBacking
     */
    uint savedRegSize = 0;
    IR::Instr *firstPrologInstr = nullptr;
    IR::Instr *lastPrologInstr = nullptr;

    // PUSH used callee-saved registers.
    IR::Instr *secondInstr = entryInstr->m_next;
    AssertMsg(secondInstr, "Instruction chain broken.");

    IR::RegOpnd *stackPointer = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);
    unsigned xmmOffset = 0;

    // PDATA doesn't seem to like two consecutive "SUB RSP, size" instructions. Temporarily save and
    // restore RBX always so that the pattern doesn't occur in the prolog.
    for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg + 1))
    {
        if (LinearScan::IsCalleeSaved(reg) && (this->m_func->HasTry() || this->m_func->m_regsUsed.Test(reg)))
        {
            IRType type = RegTypes[reg];
            IR::RegOpnd *regOpnd = IR::RegOpnd::New(nullptr, reg, type, this->m_func);
            if (type == TyFloat64)
            {
                // XMM callee-saved registers are stored to [rsp + xmmOffset]
                // rather than pushed; unwind info records each save.
                IR::Instr *saveInstr = IR::Instr::New(Js::OpCode::MOVAPS,
                                                      IR::IndirOpnd::New(stackPointer,
                                                                         xmmOffset,
                                                                         type,
                                                                         this->m_func),
                                                      regOpnd,
                                                      this->m_func);
                xmmOffset += (MachDouble * 2);
                entryInstr->InsertAfter(saveInstr);
                m_func->m_prologEncoder.RecordXmmRegSave();
            }
            else
            {
                Assert(type == TyInt64);
                // Integer callee-saved registers are PUSHed.
                IR::Instr *pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
                pushInstr->SetSrc1(regOpnd);
                entryInstr->InsertAfter(pushInstr);
                m_func->m_prologEncoder.RecordNonVolRegSave();
                savedRegSize += MachPtr;
            }
        }
    }

    //
    // Now that we know the exact stack size, lets fix it for alignment
    // The stack on entry would be aligned. VC++ recommends that the stack
    // should always be 16 byte aligned.
    //
    uint32 argSlotsForFunctionsCalled = this->m_func->m_argSlotsForFunctionsCalled;
    if (Lowerer::IsArgSaveRequired(this->m_func))
    {
        // Reserve at least the register-argument home area.
        if (argSlotsForFunctionsCalled < IntArgRegsCount)
            argSlotsForFunctionsCalled = IntArgRegsCount;
    }
    else
    {
        argSlotsForFunctionsCalled = 0;
    }

    // +1 slot — see the comment below about clearing one slot past the frame.
    uint32 stackArgsSize = MachPtr * (argSlotsForFunctionsCalled + 1);
    this->m_func->m_localStackHeight = Math::Align<int32>(this->m_func->m_localStackHeight, 8);

    // Allocate the inlined arg out stack in the locals. Allocate an additional slot so that
    // we can unconditionally clear the first slot past the current frame.
    this->m_func->m_localStackHeight += m_func->GetMaxInlineeArgOutSize() + MachPtr;

    uint32 stackLocalsSize = this->m_func->m_localStackHeight;
    if(xmmOffset != 0)
    {
        // Xmm registers need to be saved to 16-byte-aligned addresses. The stack locals size is aligned here and the total
        // size will be aligned below, which guarantees that the offset from rsp will be 16-byte-aligned.
        stackLocalsSize = ::Math::Align(stackLocalsSize + xmmOffset, static_cast<uint32>(MachDouble * 2));
    }

    uint32 totalStackSize = stackLocalsSize +
                            stackArgsSize +
                            savedRegSize;
    AssertMsg(0 == (totalStackSize % 8), "Stack should always be 8 byte aligned");
    uint32 alignmentPadding = (totalStackSize % 16) ? MachPtr : 0;
    stackArgsSize += alignmentPadding;
    Assert(
        xmmOffset == 0 ||
        ::Math::Align(stackArgsSize + savedRegSize, static_cast<uint32>(MachDouble * 2)) == stackArgsSize + savedRegSize);
    totalStackSize += alignmentPadding;
    if(totalStackSize > (1u << 20)) // 1 MB
    {
        // Total stack size is > 1 MB, let's just bail. There are things that need to be changed to allow using large stack
        // sizes, for instance in the unwind info, the offset to saved xmm registers can be (1 MB - 16) at most for the op-code
        // we're currently using (UWOP_SAVE_XMM128). To support larger offsets, we need to use a FAR version of the op-code.
        throw Js::OperationAbortedException();
    }

    if (m_func->HasInlinee())
    {
        this->m_func->GetJITOutput()->SetFrameHeight(this->m_func->m_localStackHeight);
    }

    //
    // This is the last instruction so should have been emitted before, register saves.
    // But we did not have 'savedRegSize' by then. So we saved secondInstr. We now insert w.r.t that
    // instruction.
    //
    this->m_func->SetArgsSize(stackArgsSize);
    this->m_func->SetSavedRegSize(savedRegSize);
    this->m_func->SetSpillSize(stackLocalsSize);

    if (secondInstr == entryInstr->m_next)
    {
        // There is no register save at all, just combine the stack allocation
        uint combineStackAllocationSize = stackArgsSize + stackLocalsSize;
        this->GenerateStackAllocation(secondInstr->m_prev, combineStackAllocationSize);
        m_func->m_prologEncoder.RecordAlloca(combineStackAllocationSize);
    }
    else
    {
        // Two allocations: args backing below the register saves, locals above.
        this->GenerateStackAllocation(secondInstr->m_prev, stackArgsSize);
        m_func->m_prologEncoder.RecordAlloca(stackArgsSize);

        // Allocate frame.
        if (stackLocalsSize)
        {
            this->GenerateStackAllocation(entryInstr, stackLocalsSize);
            m_func->m_prologEncoder.RecordAlloca(stackLocalsSize);
        }
    }

    lastPrologInstr = secondInstr->m_prev;
    Assert(lastPrologInstr != entryInstr);

    // Zero-initialize dedicated arguments slot.
    IR::Instr *movRax0 = nullptr;
    IR::Opnd *raxOpnd = nullptr;
    if ((this->m_func->HasArgumentSlot() &&
        (this->m_func->IsStackArgsEnabled() ||
        this->m_func->IsJitInDebugMode() ||
        // disabling apply inlining leads to explicit load from the zero-inited slot
        this->m_func->GetJITFunctionBody()->IsInlineApplyDisabled()))
#ifdef BAILOUT_INJECTION
        || Js::Configuration::Global.flags.IsEnabled(Js::BailOutFlag)
        || Js::Configuration::Global.flags.IsEnabled(Js::BailOutAtEveryLineFlag)
        || Js::Configuration::Global.flags.IsEnabled(Js::BailOutAtEveryByteCodeFlag)
        || Js::Configuration::Global.flags.IsEnabled(Js::BailOutByteCodeFlag)
#endif
        )
    {
        // TODO: Support mov [rbp - n], IMM64
        // XOR eax, eax — zero via 32-bit xor (also zeroes the upper half).
        raxOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyUint32, this->m_func);
        movRax0 = IR::Instr::New(Js::OpCode::XOR, raxOpnd, raxOpnd, raxOpnd, this->m_func);
        secondInstr->m_prev->InsertAfter(movRax0);

        IR::Opnd *opnd = this->lowererMD->CreateStackArgumentsSlotOpnd();
        IR::Instr *movNullInstr = IR::Instr::New(Js::OpCode::MOV, opnd, raxOpnd->UseWithNewType(TyMachReg, this->m_func), this->m_func);
        secondInstr->m_prev->InsertAfter(movNullInstr);
    }

    // Zero initialize the first inlinee frames argc.
    if (m_func->HasInlinee())
    {
        if(!movRax0)
        {
            // Reuse the zeroed RAX if we already emitted it above.
            raxOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyUint32, this->m_func);
            movRax0 = IR::Instr::New(Js::OpCode::XOR, raxOpnd, raxOpnd, raxOpnd, this->m_func);
            secondInstr->m_prev->InsertAfter(movRax0);
        }

        StackSym *sym = this->m_func->m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
        sym->m_isInlinedArgSlot = true;
        sym->m_offset = 0;
        IR::Opnd *dst = IR::SymOpnd::New(sym, 0, TyMachReg, this->m_func);
        secondInstr->m_prev->InsertAfter(IR::Instr::New(Js::OpCode::MOV,
                                                        dst,
                                                        raxOpnd->UseWithNewType(TyMachReg, this->m_func),
                                                        this->m_func));
    }

    // Generate MOV RBP, RSP
    IR::RegOpnd * rbpOpnd = IR::RegOpnd::New(nullptr, RegRBP, TyMachReg, this->m_func);
    IR::RegOpnd * rspOpnd = IR::RegOpnd::New(nullptr, RegRSP, TyMachReg, this->m_func);

    IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, rbpOpnd, rspOpnd, this->m_func);
    entryInstr->InsertAfter(movInstr);

    // Generate PUSH RBP — inserted last after entryInstr, so it executes first.
    IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
    pushInstr->SetSrc1(rbpOpnd);
    entryInstr->InsertAfter(pushInstr);
    m_func->m_prologEncoder.RecordNonVolRegSave();
    firstPrologInstr = pushInstr;

    //
    // Insert pragmas that tell the prolog encoder the extent of the prolog.
    //
    firstPrologInstr->InsertBefore(IR::PragmaInstr::New(Js::OpCode::PrologStart, 0, m_func));
    lastPrologInstr->InsertAfter(IR::PragmaInstr::New(Js::OpCode::PrologEnd, 0, m_func));

#ifdef _WIN32 // home registers
    //
    // Now store all the arguments in the register in the stack slots
    //
    if (m_func->GetJITFunctionBody()->IsAsmJsMode() && !m_func->IsLoopBody())
    {
        // asm.js: home each register parameter according to its asm.js type;
        // SIMD values occupy two slots, everything else one.
        uint16 offset = 2;
        this->MovArgFromReg2Stack(entryInstr, RegRCX, 1);
        for (uint16 i = 0; i < m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetArgCount() && i < 3; i++)
        {
            switch (m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetArgType(i))
            {
            case Js::AsmJsVarType::Int:
                this->MovArgFromReg2Stack(entryInstr, i == 0 ? RegRDX : i == 1 ? RegR8 : RegR9, offset, TyInt32);
                offset++;
                break;
            case Js::AsmJsVarType::Int64:
                this->MovArgFromReg2Stack(entryInstr, i == 0 ? RegRDX : i == 1 ? RegR8 : RegR9, offset, TyInt64);
                offset++;
                break;
            case Js::AsmJsVarType::Float:
                // registers we need are contiguous, so calculate it from XMM1
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TyFloat32);
                offset++;
                break;
            case Js::AsmJsVarType::Double:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TyFloat64);
                offset++;
                break;
            case Js::AsmJsVarType::Float32x4:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128F4);
                offset += 2;
                break;
            case Js::AsmJsVarType::Int32x4:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128I4);
                offset += 2;
                break;
            case Js::AsmJsVarType::Int16x8:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128I8);
                offset += 2;
                break;
            case Js::AsmJsVarType::Int8x16:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128I16);
                offset += 2;
                break;
            case Js::AsmJsVarType::Uint32x4:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128U4);
                offset += 2;
                break;
            case Js::AsmJsVarType::Uint16x8:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128U8);
                offset += 2;
                break;
            case Js::AsmJsVarType::Uint8x16:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128U16);
                offset += 2;
                break;
            case Js::AsmJsVarType::Bool32x4:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128B4);
                offset += 2;
                break;
            case Js::AsmJsVarType::Bool16x8:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128B8);
                offset += 2;
                break;
            case Js::AsmJsVarType::Bool8x16:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128B16);
                offset += 2;
                break;
            case Js::AsmJsVarType::Float64x2:
                this->MovArgFromReg2Stack(entryInstr, (RegNum)(RegXMM1 + i), offset, TySimd128D2);
                offset += 2;
                break;
            default:
                Assume(UNREACHED);
            }
        }
    }
    else if (argSlotsForFunctionsCalled)
    {
        // Regular JS: home the four integer parameter registers.
        this->MovArgFromReg2Stack(entryInstr, RegRCX, 1);
        this->MovArgFromReg2Stack(entryInstr, RegRDX, 2);
        this->MovArgFromReg2Stack(entryInstr, RegR8, 3);
        this->MovArgFromReg2Stack(entryInstr, RegR9, 4);
    }
#endif // _WIN32

    IntConstType frameSize = Js::Constants::MinStackJIT + stackArgsSize + stackLocalsSize + savedRegSize;
    this->GeneratePrologueStackProbe(entryInstr, frameSize);

    return entryInstr;
}
void
LowererMDArch::GeneratePrologueStackProbe(IR::Instr *entryInstr, IntConstType frameSize)
{
    //
    // Generate a stack overflow check. Since ProbeCurrentStack throws an exception it needs
    // an unwindable stack. Should we need to call ProbeCurrentStack, instead of creating a new frame here,
    // we make it appear like our caller directly called ProbeCurrentStack.
    //
    // For thread-bound thread context
    //     MOV  rax, ThreadContext::scriptStackLimit + frameSize
    //     CMP  rsp, rax
    //     JG   $done
    //     MOV  rax, ThreadContext::ProbeCurrentStack
    //     MOV  rcx, frameSize
    //     MOV  rdx, scriptContext
    //     JMP  rax
    // $done:
    //
    // For thread-agile thread context
    //     MOV  rax, [ThreadContext::scriptStackLimit]
    //     ADD  rax, frameSize
    //     CMP  rsp, rax
    //     JG   $done
    //     MOV  rax, ThreadContext::ProbeCurrentStack
    //     MOV  rcx, frameSize
    //     MOV  rdx, scriptContext
    //     JMP  rax
    // $done:
    //
    // For thread context with script interrupt enabled
    //     MOV  rax, [ThreadContext::scriptStackLimit]
    //     ADD  rax, frameSize
    //     JO   $helper
    //     CMP  rsp, rax
    //     JG   $done
    // $helper:
    //     MOV  rax, ThreadContext::ProbeCurrentStack
    //     MOV  rcx, frameSize
    //     MOV  rdx, scriptContext
    //     JMP  rax
    // $done:
    //

    // Do not insert stack probe for leaf functions which have low stack footprint
    if (this->m_func->IsTrueLeaf() &&
        frameSize - Js::Constants::MinStackJIT < Js::Constants::MaxStackSizeForNoProbe)
    {
        return;
    }

    IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Instr *insertInstr = entryInstr->m_next;
    IR::Instr *instr;
    IR::Opnd *stackLimitOpnd;
    bool doInterruptProbe = m_func->GetJITFunctionBody()->DoInterruptProbe();

    // MOV rax, ThreadContext::scriptStackLimit + frameSize
    stackLimitOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, this->m_func);
    if (doInterruptProbe || !m_func->GetThreadContextInfo()->IsThreadBound())
    {
        // Load the current stack limit from the ThreadContext and add the current frame size.
        {
            intptr_t pLimit = m_func->GetThreadContextInfo()->GetThreadStackLimitAddr();
            IR::RegOpnd *baseOpnd = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, this->m_func);
            this->lowererMD->CreateAssign(baseOpnd, IR::AddrOpnd::New(pLimit, IR::AddrOpndKindDynamicMisc, this->m_func), insertInstr);

            IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(baseOpnd, 0, TyMachReg, this->m_func);
            this->lowererMD->CreateAssign(stackLimitOpnd, indirOpnd, insertInstr);
        }

        instr = IR::Instr::New(Js::OpCode::ADD, stackLimitOpnd, stackLimitOpnd,
                               IR::IntConstOpnd::New(frameSize, TyMachReg, this->m_func), this->m_func);
        insertInstr->InsertBefore(instr);

        if (doInterruptProbe)
        {
            // If the add overflows, call the probe helper.
            instr = IR::BranchInstr::New(Js::OpCode::JO, helperLabel, this->m_func);
            insertInstr->InsertBefore(instr);
        }
    }
    else
    {
        // Thread-bound context: the limit is a compile-time constant, so the
        // comparison value can be folded into a single immediate load.
        // TODO: michhol, check this math
        size_t scriptStackLimit = m_func->GetThreadContextInfo()->GetScriptStackLimit();
        this->lowererMD->CreateAssign(stackLimitOpnd, IR::IntConstOpnd::New((frameSize + scriptStackLimit), TyMachReg, this->m_func), insertInstr);
    }

    // CMP rsp, rax
    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
    instr->SetSrc1(IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, m_func));
    instr->SetSrc2(stackLimitOpnd);
    insertInstr->InsertBefore(instr);

    IR::LabelInstr * doneLabel = nullptr;
    if (!PHASE_OFF(Js::LayoutPhase, this->m_func))
    {
        // With layout enabled, branch TO the helper (kept out-of-line at the
        // end of the function) instead of branching around it.
        // JLE $helper
        instr = IR::BranchInstr::New(Js::OpCode::JLE, helperLabel, m_func);
        insertInstr->InsertBefore(instr);
        Security::InsertRandomFunctionPad(insertInstr);

        // This is generated after layout. Generate the block at the end of the function manually
        insertInstr = IR::PragmaInstr::New(Js::OpCode::StatementBoundary, Js::Constants::NoStatementIndex, m_func);

        this->m_func->m_tailInstr->InsertAfter(insertInstr);
        this->m_func->m_tailInstr = insertInstr;
    }
    else
    {
        doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        // JGT $done
        instr = IR::BranchInstr::New(Js::OpCode::JGT, doneLabel, m_func);
        insertInstr->InsertBefore(instr);
    }

    insertInstr->InsertBefore(helperLabel);

    IR::RegOpnd *target;
    {
        // MOV RegArg1, scriptContext
        this->lowererMD->CreateAssign(
            IR::RegOpnd::New(nullptr, RegArg1, TyMachReg, m_func),
            this->lowererMD->m_lowerer->LoadScriptContextOpnd(insertInstr), insertInstr);

        // MOV RegArg0, frameSize
        this->lowererMD->CreateAssign(
            IR::RegOpnd::New(nullptr, RegArg0, TyMachReg, this->m_func),
            IR::IntConstOpnd::New(frameSize, TyMachReg, this->m_func), insertInstr);

        // MOV rax, ThreadContext::ProbeCurrentStack
        target = IR::RegOpnd::New(nullptr, RegRAX, TyMachReg, m_func);
        this->lowererMD->CreateAssign(target, IR::HelperCallOpnd::New(IR::HelperProbeCurrentStack, m_func), insertInstr);
    }

    // JMP rax — tail-jump so the probe appears to be called by our caller.
    instr = IR::MultiBranchInstr::New(Js::OpCode::JMP, target, m_func);
    insertInstr->InsertBefore(instr);

    if (doneLabel)
    {
        // $done:
        insertInstr->InsertBefore(doneLabel);
        Security::InsertRandomFunctionPad(doneLabel);
    }
}
  1666. ///----------------------------------------------------------------------------
  1667. ///
  1668. /// LowererMDArch::LowerExitInstr
  1669. ///
  1670. /// Emit epilog.
  1671. ///
  1672. ///----------------------------------------------------------------------------
  1673. IR::Instr *
  1674. LowererMDArch::LowerExitInstr(IR::ExitInstr * exitInstr)
  1675. {
  1676. uint32 savedRegSize = 0;
  1677. // POP used callee-saved registers
  1678. IR::Instr * exitPrevInstr = exitInstr->m_prev;
  1679. AssertMsg(exitPrevInstr, "Can a function have only 1 instr ? Or is the instr chain broken");
  1680. IR::RegOpnd *stackPointer = IR::RegOpnd::New(nullptr, GetRegStackPointer(), TyMachReg, this->m_func);
  1681. unsigned xmmOffset = 0;
  1682. for (RegNum reg = (RegNum)(RegNOREG + 1); reg < RegNumCount; reg = (RegNum)(reg+1))
  1683. {
  1684. if (LinearScan::IsCalleeSaved(reg) && (this->m_func->HasTry() || this->m_func->m_regsUsed.Test(reg)))
  1685. {
  1686. IRType type = RegTypes[reg];
  1687. IR::RegOpnd *regOpnd = IR::RegOpnd::New(nullptr, reg, type, this->m_func);
  1688. if (type == TyFloat64)
  1689. {
  1690. IR::Instr *restoreInstr = IR::Instr::New(Js::OpCode::MOVAPS,
  1691. regOpnd,
  1692. IR::IndirOpnd::New(stackPointer,
  1693. xmmOffset,
  1694. type,
  1695. this->m_func),
  1696. this->m_func);
  1697. xmmOffset += (MachDouble * 2);
  1698. exitInstr->InsertBefore(restoreInstr);
  1699. }
  1700. else
  1701. {
  1702. Assert(type == TyInt64);
  1703. IR::Instr *popInstr = IR::Instr::New(Js::OpCode::POP, regOpnd, this->m_func);
  1704. exitInstr->InsertBefore(popInstr);
  1705. savedRegSize += MachPtr;
  1706. }
  1707. }
  1708. }
  1709. Assert(savedRegSize == (uint)this->m_func->GetSavedRegSize());
  1710. // Generate ADD RSP, argsStackSize before the register restore (if there are any)
  1711. uint32 stackArgsSize = this->m_func->GetArgsSize();
  1712. Assert(stackArgsSize);
  1713. if (savedRegSize || xmmOffset)
  1714. {
  1715. IR::IntConstOpnd *stackSizeOpnd = IR::IntConstOpnd::New(stackArgsSize, TyMachReg, this->m_func);
  1716. IR::Instr *addInstr = IR::Instr::New(Js::OpCode::ADD, stackPointer, stackPointer, stackSizeOpnd, this->m_func);
  1717. exitPrevInstr->InsertAfter(addInstr);
  1718. }
  1719. //
  1720. // useful register operands
  1721. //
  1722. IR::RegOpnd * rspOpnd = IR::RegOpnd::New(nullptr, RegRSP, TyMachReg, this->m_func);
  1723. IR::RegOpnd * rbpOpnd = IR::RegOpnd::New(nullptr, RegRBP, TyMachReg, this->m_func);
  1724. // Restore frame
  1725. // Generate MOV RSP, RBP
  1726. IR::Instr * movInstr = IR::Instr::New(Js::OpCode::MOV, rspOpnd, rbpOpnd, this->m_func);
  1727. exitInstr->InsertBefore(movInstr);
  1728. // Generate POP RBP
  1729. IR::Instr * pushInstr = IR::Instr::New(Js::OpCode::POP, rbpOpnd, this->m_func);
  1730. exitInstr->InsertBefore(pushInstr);
  1731. // Insert RET
  1732. IR::IntConstOpnd * intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  1733. IR::RegOpnd *retReg = nullptr;
  1734. if (m_func->GetJITFunctionBody()->IsAsmJsMode() && !m_func->IsLoopBody())
  1735. {
  1736. switch (m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetRetType())
  1737. {
  1738. case Js::AsmJsRetType::Double:
  1739. case Js::AsmJsRetType::Float:
  1740. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TyMachDouble), TyMachDouble, this->m_func);
  1741. break;
  1742. case Js::AsmJsRetType::Int32x4:
  1743. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128I4), TySimd128I4, this->m_func);
  1744. break;
  1745. case Js::AsmJsRetType::Int16x8:
  1746. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128I8), TySimd128I8, this->m_func);
  1747. break;
  1748. case Js::AsmJsRetType::Int8x16:
  1749. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128I16), TySimd128U16, this->m_func);
  1750. break;
  1751. case Js::AsmJsRetType::Uint32x4:
  1752. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128U4), TySimd128U4, this->m_func);
  1753. break;
  1754. case Js::AsmJsRetType::Uint16x8:
  1755. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128U8), TySimd128U8, this->m_func);
  1756. break;
  1757. case Js::AsmJsRetType::Uint8x16:
  1758. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128U16), TySimd128U16, this->m_func);
  1759. break;
  1760. case Js::AsmJsRetType::Bool32x4:
  1761. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128B4), TySimd128B4, this->m_func);
  1762. break;
  1763. case Js::AsmJsRetType::Bool16x8:
  1764. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128B8), TySimd128B8, this->m_func);
  1765. break;
  1766. case Js::AsmJsRetType::Bool8x16:
  1767. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128B16), TySimd128B16, this->m_func);
  1768. break;
  1769. case Js::AsmJsRetType::Float32x4:
  1770. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128F4), TySimd128F4, this->m_func);
  1771. break;
  1772. case Js::AsmJsRetType::Float64x2:
  1773. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturnAsmJs(TySimd128D2), TySimd128D2, this->m_func);
  1774. break;
  1775. case Js::AsmJsRetType::Int64:
  1776. case Js::AsmJsRetType::Signed:
  1777. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  1778. break;
  1779. case Js::AsmJsRetType::Void:
  1780. break;
  1781. default:
  1782. Assume(UNREACHED);
  1783. }
  1784. }
  1785. else
  1786. {
  1787. retReg = IR::RegOpnd::New(nullptr, this->GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  1788. }
  1789. // Generate RET
  1790. IR::Instr * retInstr = IR::Instr::New(Js::OpCode::RET, this->m_func);
  1791. retInstr->SetSrc1(intSrc);
  1792. if (retReg)
  1793. {
  1794. retInstr->SetSrc2(retReg);
  1795. }
  1796. exitInstr->InsertBefore(retInstr);
  1797. retInstr->m_opcode = Js::OpCode::RET;
  1798. return exitInstr;
  1799. }
  1800. IR::Instr *
  1801. LowererMDArch::LowerExitInstrAsmJs(IR::ExitInstr * exitInstr)
  1802. {
  1803. // epilogue is almost identical on x64, except for return register
  1804. return LowerExitInstr(exitInstr);
  1805. }
void
LowererMDArch::EmitInt4Instr(IR::Instr *instr, bool signExtend /* = false */)
{
    // Lowers an I4 (32-bit integer) opcode to concrete x64 machine opcodes.
    // For div/rem, RAX/RDX are pinned per the x64 IDIV/DIV contract. When
    // 'signExtend' is set, the 32-bit result is sign-extended to 64 bits.
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();
    IR::Instr *newInstr = nullptr;
    IR::RegOpnd *regEDX;

    bool legalize = false;
    bool isInt64Instr = instr->AreAllOpndInt64();
    if (!isInt64Instr)
    {
        // Normalize operand types to TyInt32 (unsigned operands keep TyUint32).
        if (dst && !dst->IsUInt32())
        {
            dst->SetType(TyInt32);
        }
        if (!src1->IsUInt32())
        {
            src1->SetType(TyInt32);
        }
        if (src2 && !src2->IsUInt32())
        {
            src2->SetType(TyInt32);
        }
    }
    else
    {
        // 64-bit forms always go through Legalize at the end.
        legalize = true;
    }

    switch (instr->m_opcode)
    {
    case Js::OpCode::Neg_I4:
        instr->m_opcode = Js::OpCode::NEG;
        break;

    case Js::OpCode::Not_I4:
        instr->m_opcode = Js::OpCode::NOT;
        break;

    case Js::OpCode::Add_I4:
        LowererMD::ChangeToAdd(instr, false /* needFlags */);
        legalize = true;
        break;

    case Js::OpCode::Sub_I4:
        LowererMD::ChangeToSub(instr, false /* needFlags */);
        legalize = true;
        break;

    case Js::OpCode::Mul_I4:
        instr->m_opcode = Js::OpCode::IMUL2;
        legalize = true;
        break;

    case Js::OpCode::DivU_I4:
    case Js::OpCode::Div_I4:
        // Quotient comes back in RAX.
        instr->SinkDst(Js::OpCode::MOV, RegRAX);
        goto idiv_common;

    case Js::OpCode::RemU_I4:
    case Js::OpCode::Rem_I4:
        // Remainder comes back in RDX.
        instr->SinkDst(Js::OpCode::MOV, RegRDX);
idiv_common:
        {
            bool isUnsigned = instr->GetSrc1()->IsUnsigned();
            if (isUnsigned)
            {
                Assert(instr->GetSrc2()->IsUnsigned());
                Assert(instr->m_opcode == Js::OpCode::RemU_I4 || instr->m_opcode == Js::OpCode::DivU_I4);
                instr->m_opcode = Js::OpCode::DIV;
            }
            else
            {
                instr->m_opcode = Js::OpCode::IDIV;
            }

            // Dividend must live in RAX.
            instr->HoistSrc1(Js::OpCode::MOV, RegRAX);

            regEDX = IR::RegOpnd::New(src1->GetType(), instr->m_func);
            regEDX->SetReg(RegRDX);
            if (isUnsigned)
            {
                // Unsigned divide: the high half (RDX) must be zero.
                // we need to ensure that register allocator doesn't muck about with rdx
                instr->HoistSrc2(Js::OpCode::MOV, RegRCX);

                Lowerer::InsertMove(regEDX, IR::IntConstOpnd::New(0, src1->GetType(), instr->m_func), instr);
                // NOP ensures that the EDX = Ld_I4 0 doesn't get deadstored, will be removed in peeps
                instr->InsertBefore(IR::Instr::New(Js::OpCode::NOP, regEDX, regEDX, instr->m_func));
            }
            else
            {
                // IDIV can't take an immediate divisor; hoist it to a register.
                if (instr->GetSrc2()->IsImmediateOpnd())
                {
                    instr->HoistSrc2(Js::OpCode::MOV);
                }
                // Signed divide: sign-extend RAX into RDX (CQO for 64-bit, CDQ for 32-bit).
                instr->InsertBefore(IR::Instr::New(isInt64Instr ? Js::OpCode::CQO : Js::OpCode::CDQ, regEDX, instr->m_func));
            }
            return;
        }

    case Js::OpCode::Or_I4:
        instr->m_opcode = Js::OpCode::OR;
        break;

    case Js::OpCode::Xor_I4:
        instr->m_opcode = Js::OpCode::XOR;
        break;

    case Js::OpCode::And_I4:
        instr->m_opcode = Js::OpCode::AND;
        break;

    case Js::OpCode::Shl_I4:
    case Js::OpCode::ShrU_I4:
    case Js::OpCode::Shr_I4:
    case Js::OpCode::Rol_I4:
    case Js::OpCode::Ror_I4:
        LowererMD::ChangeToShift(instr, false /* needFlags */);
        legalize = true;
        break;

    case Js::OpCode::BrTrue_I4:
        instr->m_opcode = Js::OpCode::JNE;
        goto br1_Common;

    case Js::OpCode::BrFalse_I4:
        instr->m_opcode = Js::OpCode::JEQ;
br1_Common:
        // Single-operand branch: TEST src1, src1 then conditional jump.
        src1 = instr->UnlinkSrc1();
        newInstr = IR::Instr::New(Js::OpCode::TEST, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src1);
        return;

    case Js::OpCode::BrEq_I4:
        instr->m_opcode = Js::OpCode::JEQ;
        goto br2_Common;

    case Js::OpCode::BrNeq_I4:
        instr->m_opcode = Js::OpCode::JNE;
        goto br2_Common;

    case Js::OpCode::BrUnGt_I4:
        instr->m_opcode = Js::OpCode::JA;
        goto br2_Common;

    case Js::OpCode::BrUnGe_I4:
        instr->m_opcode = Js::OpCode::JAE;
        goto br2_Common;

    case Js::OpCode::BrUnLe_I4:
        instr->m_opcode = Js::OpCode::JBE;
        goto br2_Common;

    case Js::OpCode::BrUnLt_I4:
        instr->m_opcode = Js::OpCode::JB;
        goto br2_Common;

    case Js::OpCode::BrGt_I4:
        instr->m_opcode = Js::OpCode::JGT;
        goto br2_Common;

    case Js::OpCode::BrGe_I4:
        instr->m_opcode = Js::OpCode::JGE;
        goto br2_Common;

    case Js::OpCode::BrLe_I4:
        instr->m_opcode = Js::OpCode::JLE;
        goto br2_Common;

    case Js::OpCode::BrLt_I4:
        instr->m_opcode = Js::OpCode::JLT;
br2_Common:
        // Two-operand branch: CMP src1, src2 then conditional jump.
        src1 = instr->UnlinkSrc1();
        src2 = instr->UnlinkSrc2();
        newInstr = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
        instr->InsertBefore(newInstr);
        newInstr->SetSrc1(src1);
        newInstr->SetSrc2(src2);
        return;

    default:
        AssertMsg(UNREACHED, "Un-implemented int4 opcode");
    }

    if (signExtend)
    {
        // MOVSXD the 32-bit result into the full machine-width register.
        Assert(instr->GetDst());
        IR::Opnd *dst64 = instr->GetDst()->Copy(instr->m_func);
        dst64->SetType(TyMachReg);
        instr->InsertAfter(IR::Instr::New(Js::OpCode::MOVSXD, dst64, instr->GetDst(), instr->m_func));
    }

    if(legalize)
    {
        LowererMD::Legalize(instr);
    }
    else
    {
        // OpEq's
        LowererMD::MakeDstEquSrc1(instr);
    }
}
#if !FLOATVAR
// Convert an int32 value into a tagged Var under the shifted-tag (non-FLOATVAR)
// scheme.  Fast path: shift the payload up by VarTag_Shift, set the tag bit via
// INC, and sign-extend into the Var-sized destination.  If the shift overflows
// (JO) -- or, for uint32 sources, if JB fires after the shift -- fall back to
// EmitLoadVarNoCheck, which boxes the value.
void
LowererMDArch::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    // Emitted sequence:
    //      e1 = MOV e_src1
    //      e1 = SHL e1, Js::VarTag_Shift
    //           JO $ToVar
    //           JB $ToVar [isFromUint32]
    //      e1 = INC e1
    //   r_dst = MOVSXD e1
    //           JMP $done
    // $ToVar:
    //           EmitLoadVarNoCheck
    // $Done:

    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetDst()->GetType() == TyVar);

    bool isInt = false;
    bool isNotInt = false;
    IR::Opnd *dst = instrLoad->GetDst();
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *toVar = nullptr;
    IR::LabelInstr *done = nullptr;

    // TODO: Fix bad lowering. We shouldn't get TyVars here.
    // Assert(instrLoad->GetSrc1()->GetType() == TyInt32);
    src1->SetType(TyInt32);

    if (src1->IsTaggedInt())
    {
        // Statically known to fit in a tagged int: no overflow checks needed.
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        // Statically known not to be an int: skip the fast path entirely.
        isNotInt = true;
    }

    if (!isNotInt)
    {
        // e1 = MOV e_src1
        IR::RegOpnd *e1 = IR::RegOpnd::New(TyInt32, m_func);
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV, e1, instrLoad->GetSrc1(), m_func));

        // e1 = SHL e1, Js::VarTag_Shift
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::SHL,
            e1,
            e1,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, m_func), m_func));

        if (!isInt)
        {
            // JO $ToVar -- the shift lost significant bits; the value can't be tagged inline.
            toVar = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JO, toVar, m_func));
            if (isFromUint32)
            {
                // JB $ToVar [isFromUint32]
                // NOTE(review): JB tests CF after the SHL -- presumably catches uint32
                // values whose top bit was shifted out (too large to tag); confirm.
                instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JB, toVar, this->m_func));
            }
        }

        // e1 = INC e1 -- set the low tag bit on the shifted payload.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::INC, e1, e1, m_func));

        // dst = MOVSXD e1 -- sign-extend the tagged 32-bit value into the full Var register.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOVSXD, dst, e1, m_func));

        if (!isInt)
        {
            // JMP $done -- skip the boxing helper on the fast path.
            done = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
            instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));
        }
    }

    IR::Instr *insertInstr = instrLoad; // (unused; kept as-is)
    if (!isInt)
    {
        // $toVar:
        if (toVar)
        {
            instrLoad->InsertBefore(toVar);
        }
        // Slow path: box the value (ToVar()).
        this->lowererMD->EmitLoadVarNoCheck(dst->AsRegOpnd(), src1, instrLoad, isFromUint32, isHelper || toVar != nullptr);
    }
    if (done)
    {
        instrLoad->InsertAfter(done);
    }
    instrLoad->Remove();
}
#else
// Convert an int32 value into a tagged Var under the FLOATVAR scheme, where an
// integer Var is the zero-extended payload with the VarTag bit set (BTS).
// uint32 sources whose sign bit is set can't be represented as a tagged int32
// and take the boxing helper path instead.
void
LowererMDArch::EmitLoadVar(IR::Instr *instrLoad, bool isFromUint32, bool isHelper)
{
    // Emitted sequence:
    //    MOV_TRUNC e1, e_src1
    //    CMP e1, 0            [uint32]
    //    JLT $Helper          [uint32]  -- overflows?
    //    BTS r1, VarTag_Shift
    //    MOV r_dst, r1
    //    JMP $done            [uint32]
    // $helper                 [uint32]
    //    EmitLoadVarNoCheck
    // $done                   [uint32]

    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetDst()->GetType() == TyVar);

    bool isInt = false;
    IR::Opnd *dst = instrLoad->GetDst();
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelHelper = nullptr;

    // TODO: Fix bad lowering. We shouldn't get TyVars here.
    // Assert(instrLoad->GetSrc1()->GetType() == TyInt32);
    src1->SetType(TyInt32);

    if (src1->IsTaggedInt())
    {
        // Known to fit as a tagged int: the uint32 range check can be skipped.
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        // Known not to be an int: go straight to the boxing helper (ToVar()).
        this->lowererMD->EmitLoadVarNoCheck(dst->AsRegOpnd(), src1, instrLoad, isFromUint32, isHelper);
        return;
    }

    IR::RegOpnd *r1 = IR::RegOpnd::New(TyVar, m_func);

    // e1 = MOV_TRUNC e_src1
    // (Use MOV_TRUNC here as we rely on the register copy to clear the upper 32 bits.)
    IR::RegOpnd *e1 = r1->Copy(m_func)->AsRegOpnd();
    e1->SetType(TyInt32);
    instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC,
        e1,
        src1,
        m_func));

    if (!isInt && isFromUint32)
    {
        // CMP e1, 0
        IR::Instr *instr = IR::Instr::New(Js::OpCode::CMP, m_func);
        instr->SetSrc1(e1);
        instr->SetSrc2(IR::IntConstOpnd::New(0, TyInt32, m_func));
        instrLoad->InsertBefore(instr);

        Assert(!labelHelper);
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);

        // JLT $helper -- signed "< 0" means the uint32 has its top bit set and
        // therefore doesn't fit in a tagged int32.
        instr = IR::BranchInstr::New(Js::OpCode::JLT, labelHelper, m_func);
        instrLoad->InsertBefore(instr);
    }

    // The previous operation clears the top 32 bits.
    // BTS r1, VarTag_Shift
    this->lowererMD->GenerateInt32ToVarConversion(r1, instrLoad);

    // REVIEW: We need r1 only if we could generate sn = Ld_A_I4 sn. i.e. the destination and
    // source are the same.

    // r_dst = MOV r1
    instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV,
        dst,
        r1,
        m_func));

    if (labelHelper)
    {
        Assert(isFromUint32);

        // JMP $done
        IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
        instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, labelDone, m_func));

        // $helper
        instrLoad->InsertBefore(labelHelper);
        // ToVar()
        this->lowererMD->EmitLoadVarNoCheck(dst->AsRegOpnd(), src1, instrLoad, isFromUint32, true);
        // $done
        instrLoad->InsertBefore(labelDone);
    }

    instrLoad->Remove();
}
#endif
  2144. void
  2145. LowererMDArch::EmitIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2146. {
  2147. Assert(dst->IsRegOpnd() && dst->IsFloat());
  2148. Assert(src->IsRegOpnd() && src->IsInt32());
  2149. if (dst->IsFloat64())
  2150. {
  2151. // Use MOVD to make sure we sign extended the 32-bit src
  2152. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOVD, dst, src, this->m_func));
  2153. // Convert to float
  2154. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CVTDQ2PD, dst, dst, this->m_func));
  2155. }
  2156. else
  2157. {
  2158. Assert(dst->IsFloat32());
  2159. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::CVTSI2SS, dst, src, this->m_func));
  2160. }
  2161. }
  2162. void
  2163. LowererMDArch::EmitIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2164. {
  2165. Assert(dst->IsRegOpnd() && dst->IsInt64());
  2166. Assert(src->IsInt32());
  2167. Lowerer::InsertMove(dst, src, instrInsert);
  2168. }
  2169. void
  2170. LowererMDArch::EmitUIntToLong(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2171. {
  2172. Assert(dst->IsRegOpnd() && dst->IsInt64());
  2173. Assert(src->IsUInt32());
  2174. Lowerer::InsertMove(dst, src, instrInsert);
  2175. }
  2176. void
  2177. LowererMDArch::EmitLongToInt(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2178. {
  2179. Assert(dst->IsRegOpnd() && dst->IsInt32());
  2180. Assert(src->IsInt64());
  2181. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC, dst, src, instrInsert->m_func));
  2182. }
  2183. void
  2184. LowererMDArch::EmitUIntToFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *instrInsert)
  2185. {
  2186. Assert(dst->IsRegOpnd() && dst->IsFloat());
  2187. Assert(src->IsRegOpnd() && (src->IsInt32() || src->IsUInt32()));
  2188. // MOV tempReg.i32, src - make sure the top bits are 0
  2189. IR::RegOpnd * tempReg = IR::RegOpnd::New(TyInt32, this->m_func);
  2190. instrInsert->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC, tempReg, src, this->m_func));
  2191. // CVTSI2SD dst, tempReg.i64 (Use the tempreg as if it is 64 bit without sign extension)
  2192. instrInsert->InsertBefore(IR::Instr::New(dst->IsFloat64() ? Js::OpCode::CVTSI2SD : Js::OpCode::CVTSI2SS, dst,
  2193. tempReg->UseWithNewType(TyInt64, this->m_func), this->m_func));
  2194. }
// Lower a TyVar -> int32 conversion.
//
// Fast path: test for a tagged int and extract the payload in place.  When the
// profile says the value is likely a float (and the bailout kind permits), an
// inline float-to-int32 conversion is emitted ahead of the slow path.  The slow
// path is, depending on the flags: a branch to labelBailOut (bailOutOnHelper),
// HelperConv_ToInt32 (conversionFromObjectAllowed), or
// HelperConv_ToInt32_NoObjects otherwise.
//
// Returns true when the caller must emit the bailout itself (BailOutIntOnly /
// BailOutExpectingInteger kinds); false otherwise.
bool
LowererMDArch::EmitLoadInt32(IR::Instr *instrLoad, bool conversionFromObjectAllowed, bool bailOutOnHelper, IR::LabelInstr * labelBailOut)
{
    //
    //    r1 = MOV src1
    // rtest = MOV src1
    //         SHR rtest, AtomTag_Shift
    //         CMP rtest, 1
    //         JNE $helper or $float
    // r_dst = MOV_TRUNC e_src1
    //         JMP $done
    // $float:
    //   dst = ConvertToFloat(r1, $helper)
    // $helper:
    // r_dst = ToInt32()
    //
    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetSrc1()->GetType() == TyVar);

    // TODO: Fix bad lowering. We shouldn't see TyVars here.
    // Assert(instrLoad->GetDst()->GetType() == TyInt32);

    bool isInt = false;
    bool isNotInt = false;
    IR::Opnd *dst = instrLoad->GetDst();
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *helper = nullptr;
    IR::LabelInstr *labelFloat = nullptr;
    IR::LabelInstr *done = nullptr;

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }

    if (src1->IsEqual(instrLoad->GetDst()) == false)
    {
        // r1 = MOV src1
        // Copy src1 into a fresh Var register so the original value survives
        // for the float/helper paths below.
        IR::RegOpnd *r1 = IR::RegOpnd::New(TyVar, instrLoad->m_func);
        r1->SetValueType(src1->GetValueType());
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV, r1, src1, instrLoad->m_func));
        src1 = r1;
    }

    const ValueType src1ValueType(src1->GetValueType());
    // Inline float conversion is used only when the value is likely a float or
    // untagged int, and never for int-only bailout kinds (which must bail
    // rather than convert).
    const bool doFloatToIntFastPath =
        (src1ValueType.IsLikelyFloat() || src1ValueType.IsLikelyUntaggedInt()) &&
        !(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger));

    if (isNotInt)
    {
        // Known to be non-integer. If we are required to bail out on helper call, just re-jit.
        if (!doFloatToIntFastPath && bailOutOnHelper)
        {
            if(!GlobOpt::DoEliminateArrayAccessHelperCall(this->m_func))
            {
                // Array access helper call removal is already off for some reason. Prevent trying to rejit again
                // because it won't help and the same thing will happen again. Just abort jitting this function.
                if(PHASE_TRACE(Js::BailOutPhase, this->m_func))
                {
                    Output::Print(_u(" Aborting JIT because EliminateArrayAccessHelperCall is already off\n"));
                    Output::Flush();
                }
                throw Js::OperationAbortedException();
            }
            throw Js::RejitException(RejitReason::ArrayAccessHelperCallEliminationDisabled);
        }
    }
    else
    {
        // It could be an integer in this case.
        if (!isInt)
        {
            if(doFloatToIntFastPath)
            {
                labelFloat = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, false);
            }
            else
            {
                helper = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, true);
            }

            // Branch to $float (or $helper) when src1 is not a tagged int.
            this->lowererMD->GenerateSmIntTest(src1, instrLoad, labelFloat ? labelFloat : helper);
        }

        IR::RegOpnd *src132 = src1->UseWithNewType(TyInt32, instrLoad->m_func)->AsRegOpnd();

#if !INT32VAR
        // src1 = SAR src1, VarTag_Shift -- shift the tag out of the payload.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::SAR,
            src132,
            src132,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, instrLoad->m_func),
            instrLoad->m_func));

        // r_dst = MOV src1
        // This is only a MOV (and not a MOVSXD) because we do a signed shift right, but we'll copy
        // all 64 bits.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV,
            dst->UseWithNewType(TyMachReg, instrLoad->m_func),
            src1,
            instrLoad->m_func));
#else
        // INT32VAR tagging keeps the payload in the low 32 bits: just truncate.
        instrLoad->InsertBefore(IR::Instr::New(Js::OpCode::MOV_TRUNC,
            dst->UseWithNewType(TyInt32, instrLoad->m_func),
            src132,
            instrLoad->m_func));
#endif

        if (!isInt)
        {
            // JMP $done -- fast path complete, skip float/helper code.
            done = instrLoad->GetOrCreateContinueLabel();
            instrLoad->InsertBefore(IR::BranchInstr::New(Js::OpCode::JMP, done, m_func));
        }
    }

    if (!isInt)
    {
        if(doFloatToIntFastPath)
        {
            if(labelFloat)
            {
                instrLoad->InsertBefore(labelFloat);
            }
            if(!helper)
            {
                helper = IR::LabelInstr::New(Js::OpCode::Label, instrLoad->m_func, true);
            }
            if(!done)
            {
                done = instrLoad->GetOrCreateContinueLabel();
            }
#if FLOATVAR
            // FLOATVAR: the double is encoded in the Var bits themselves.
            IR::RegOpnd* floatOpnd = this->lowererMD->CheckFloatAndUntag(src1, instrLoad, helper);
#else
            // Boxed double: verify the Var is a JavascriptNumber, then read its value field.
            this->lowererMD->GenerateFloatTest(src1, instrLoad, helper, instrLoad->HasBailOutInfo());
            IR::IndirOpnd* floatOpnd = IR::IndirOpnd::New(src1, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
#endif
            this->lowererMD->ConvertFloatToInt32(instrLoad->GetDst(), floatOpnd, helper, done, instrLoad);
        }

        // $helper:
        if (helper)
        {
            instrLoad->InsertBefore(helper);
        }
        if(instrLoad->HasBailOutInfo() && (instrLoad->GetBailOutKind() == IR::BailOutIntOnly || instrLoad->GetBailOutKind() == IR::BailOutExpectingInteger))
        {
            // Avoid bailout if we have a JavascriptNumber whose value is a signed 32-bit integer
            lowererMD->m_lowerer->LoadInt32FromUntaggedVar(instrLoad);

            // Need to bail out instead of calling a helper
            return true;
        }

        if (bailOutOnHelper)
        {
            // Caller asked for a bailout instead of a helper call.
            Assert(labelBailOut);
            lowererMD->m_lowerer->InsertBranch(Js::OpCode::Br, labelBailOut, instrLoad);
            instrLoad->Remove();
        }
        else if (conversionFromObjectAllowed)
        {
            lowererMD->m_lowerer->LowerUnaryHelperMem(instrLoad, IR::HelperConv_ToInt32);
        }
        else
        {
            lowererMD->m_lowerer->LowerUnaryHelperMemWithBoolReference(instrLoad, IR::HelperConv_ToInt32_NoObjects, true /*useBoolForBailout*/);
        }
    }
    else
    {
        // Statically tagged int: the fast-path code above is all that's needed.
        instrLoad->Remove();
    }

    return false;
}
// Load a Var into opndFloat as a double, checking its runtime type:
//   tagged int    -> CVTSI2SD of the 32-bit payload, then JMP labelInline;
//   tagged float  -> untag by XORing FloatTag_Value, MOVD the bits into the xmm;
//   anything else -> jump to labelHelper.
// Returns the first instruction inserted (the start of the emitted sequence,
// so the caller can reference/attach things ahead of it).
IR::Instr *
LowererMDArch::LoadCheckedFloat(IR::RegOpnd *opndOrig, IR::RegOpnd *opndFloat, IR::LabelInstr *labelInline, IR::LabelInstr *labelHelper, IR::Instr *instrInsert, const bool checkForNullInLoopBody)
{
    //
    //   if (TaggedInt::Is(opndOrig))
    //       opndFloat = CVTSI2SD opndOrig_32
    //                   JMP $labelInline
    //   else
    //                   JMP $labelOpndIsNotInt
    //
    // $labelOpndIsNotInt:
    //   if (TaggedFloat::Is(opndOrig))
    //       s2        = MOV opndOrig
    //       s2        = XOR FloatTag_Value
    //       opndFloat = MOVD s2
    //   else
    //                   JMP $labelHelper
    //
    // $labelInline:
    //
    IR::Instr *instrFirst = nullptr;

    IR::LabelInstr *labelOpndIsNotInt = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    lowererMD->GenerateSmIntTest(opndOrig, instrInsert, labelOpndIsNotInt, &instrFirst);

    if (opndOrig->GetValueType().IsLikelyFloat())
    {
        // Make this path helper if value is likely a float
        instrInsert->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true));
    }

    // Tagged-int path: convert the low 32 bits directly to a double.
    IR::Opnd *opndOrig_32 = opndOrig->UseWithNewType(TyInt32, this->m_func);
    IR::Instr *cvtsi2sd = IR::Instr::New(Js::OpCode::CVTSI2SD, opndFloat, opndOrig_32, this->m_func);
    instrInsert->InsertBefore(cvtsi2sd);

    IR::Instr *jmpInline = IR::BranchInstr::New(Js::OpCode::JMP, labelInline, this->m_func);
    instrInsert->InsertBefore(jmpInline);

    instrInsert->InsertBefore(labelOpndIsNotInt);

    // Not a tagged int: branch to labelHelper unless it's a (tagged) float.
    lowererMD->GenerateFloatTest(opndOrig, instrInsert, labelHelper, checkForNullInLoopBody);

    // Untag: XOR off FloatTag_Value, then move the raw bits into the xmm register.
    IR::RegOpnd *s2 = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Instr *mov = IR::Instr::New(Js::OpCode::MOV, s2, opndOrig, this->m_func);
    instrInsert->InsertBefore(mov);

    IR::Instr *xorTag = IR::Instr::New(Js::OpCode::XOR,
        s2,
        s2,
        IR::IntConstOpnd::New(Js::FloatTag_Value,
            TyMachReg,
            this->m_func,
            /* dontEncode = */ true),
        this->m_func);
    instrInsert->InsertBefore(xorTag);
    // NOTE(review): Legalize presumably rewrites the machine-reg-sized XOR
    // immediate into an encodable form -- confirm.
    LowererMD::Legalize(xorTag);

    IR::Instr *movFloat = IR::Instr::New(Js::OpCode::MOVD, opndFloat, s2, this->m_func);
    instrInsert->InsertBefore(movFloat);

    return instrFirst;
}
// Returns the label at which bailout processing resumes after any stack
// restore.  On this architecture no separate restore label is created: the
// exit target is returned unchanged, and bailOutInfo is intentionally unused.
IR::LabelInstr *
LowererMDArch::GetBailOutStackRestoreLabel(BailOutInfo * bailOutInfo, IR::LabelInstr * exitTargetInstr)
{
    return exitTargetInstr;
}
// No amd64-specific inline fast path is emitted for And_A; unconditionally
// returns true.  NOTE(review): confirm the caller's contract for "true"
// (presumably "proceed with shared lowering", not "handled").
bool LowererMDArch::GenerateFastAnd(IR::Instr * instrAnd)
{
    return true;
}
// No amd64-specific inline fast path for Xor_A; see GenerateFastAnd for the
// return-value contract.
bool LowererMDArch::GenerateFastXor(IR::Instr * instrXor)
{
    return true;
}
// No amd64-specific inline fast path for Or_A; unconditionally returns true.
bool LowererMDArch::GenerateFastOr(IR::Instr * instrOr)
{
    return true;
}
// No amd64-specific inline fast path for Not_A; unconditionally returns true.
bool LowererMDArch::GenerateFastNot(IR::Instr * instrNot)
{
    return true;
}
// No amd64-specific inline fast path for Shl_A; unconditionally returns true.
// (Contrast with GenerateFastShiftRight below, which does emit one.)
bool LowererMDArch::GenerateFastShiftLeft(IR::Instr * instrShift)
{
    return true;
}
// Emit an inline fast path for Shr_A / ShrU_A when both operands are tagged
// ints: untag, shift (SAR for signed, SHR for unsigned), re-tag, and jump over
// the helper.  Returns true; the caller still emits the helper call at $helper.
bool LowererMDArch::GenerateFastShiftRight(IR::Instr * instrShift)
{
    // Given:
    //        dst = Shr/ShrU src1, src2
    // Generate:
    //        (If not 2 Int31's, jump to $helper.)
    //         s1 = MOV src1
    //        RCX = MOV src2
    //              TEST RCX, 0x1F [unsigned only] // Bail if unsigned and not shifting,
    //              JEQ $helper    [unsigned only] // as we may not end up with a taggable int
    //         s1 = SAR/SHR s1, RCX
    //              BTS s1, VarTag_Shift
    //        dst = MOV s1
    //              JMP $fallthru
    // $helper:
    //              (caller generates helper call)
    // $fallthru:
    IR::Instr *      instr;
    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::Opnd *       opndReg;
    IR::Opnd *       opndSrc1;
    IR::Opnd *       opndSrc2;

    Assert(instrShift->m_opcode == Js::OpCode::ShrU_A || instrShift->m_opcode == Js::OpCode::Shr_A);
    bool isUnsigned = (instrShift->m_opcode == Js::OpCode::ShrU_A);

    opndSrc1 = instrShift->GetSrc1();
    opndSrc2 = instrShift->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "Expected 2 src opnd's on Add instruction");

    // Not int? Then no inline fast path is possible; the caller's helper handles it.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    // Tagged ints? If both are statically tagged, skip the runtime pair test.
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    IntConstType s2Value = 0;
    bool src2IsIntConst = false;

    if (isUnsigned)
    {
        // For unsigned shifts, try to resolve the count to a constant so the
        // shift-by-zero case can be rejected at JIT time instead of at runtime.
        if (opndSrc2->IsRegOpnd())
        {
            src2IsIntConst = opndSrc2->AsRegOpnd()->m_sym->IsTaggableIntConst();
            if (src2IsIntConst)
            {
                s2Value = opndSrc2->AsRegOpnd()->m_sym->GetIntConstValue();
            }
        }
        else
        {
            AssertMsg(opndSrc2->IsAddrOpnd() && Js::TaggedInt::Is(opndSrc2->AsAddrOpnd()->m_address),
                "Expect src2 of shift right to be reg or Var.");
            src2IsIntConst = true;
            s2Value = Js::TaggedInt::ToInt32(opndSrc2->AsAddrOpnd()->m_address);
        }

        // 32-bit Shifts only uses the bottom 5 bits.
        s2Value &= 0x1F;

        // Unsigned shift by 0 could yield a value not encodable as a tagged int.
        // (The isUnsigned test here is redundant -- we're already inside the
        // isUnsigned branch -- but harmless.)
        if (isUnsigned && src2IsIntConst && s2Value == 0)
        {
            return true;
        }
    }

    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (!isTaggedInts)
    {
        // (If not 2 Int31's, jump to $helper.)
        this->lowererMD->GenerateSmIntPairTest(instrShift, opndSrc1, opndSrc2, labelHelper);
    }

    // Work on the 32-bit payloads from here on.
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);

    if (src2IsIntConst)
    {
        opndSrc2 = IR::IntConstOpnd::New(s2Value, TyInt32, this->m_func);
    }
    else
    {
        // RCX = MOV src2 -- a variable shift count must be in the shift-count register.
        opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
        opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
        opndReg->AsRegOpnd()->SetReg(this->GetRegShiftCount());
        instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc2, this->m_func);
        instrShift->InsertBefore(instr);
        opndSrc2 = opndReg;
    }

    if (!src2IsIntConst && isUnsigned)
    {
        // TEST RCX, 0x1F [unsigned only] // Bail if unsigned and not shifting,
        instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
        instr->SetSrc1(opndSrc2);
        instr->SetSrc2(IR::IntConstOpnd::New(0x1F, TyInt32, this->m_func));
        instrShift->InsertBefore(instr);

        // JEQ $helper [unsigned only] // as we may not end up with a taggable int
        instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
        instrShift->InsertBefore(instr);
    }

    // s1 = MOV src1
    opndReg = IR::RegOpnd::New(TyInt32, this->m_func);
    instr = IR::Instr::New(Js::OpCode::MOV, opndReg, opndSrc1, this->m_func);
    instrShift->InsertBefore(instr);

    // s1 = SAR/SHR s1, RCX -- SAR for the signed Shr_A, SHR for ShrU_A.
    instr = IR::Instr::New(isUnsigned ? Js::OpCode::SHR : Js::OpCode::SAR, opndReg, opndReg, opndSrc2, this->m_func);
    instrShift->InsertBefore(instr);

    //
    // Convert TyInt32 operand, back to TyMachPtr type.
    //
    if(TyMachReg != opndReg->GetType())
    {
        opndReg = opndReg->UseWithNewType(TyMachPtr, this->m_func);
    }

    // BTS s1, VarTag_Shift -- re-tag the shifted result as a Var int.
    this->lowererMD->GenerateInt32ToVarConversion(opndReg, instrShift);

    // dst = MOV s1
    instr = IR::Instr::New(Js::OpCode::MOV, instrShift->GetDst(), opndReg, this->m_func);
    instrShift->InsertBefore(instr);

    // JMP $fallthru
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(Js::OpCode::JMP, labelFallThru, this->m_func);
    instrShift->InsertBefore(instr);

    // $helper:
    //      (caller generates helper call)
    // $fallthru:
    instrShift->InsertBefore(labelHelper);
    instrShift->InsertAfter(labelFallThru);

    return true;
}
// Final machine-specific lowering pass: walks the instruction list backward,
// resolving pseudo-opcodes whose operands (frame sizes, CMOV operand shape)
// are only final at this point.
void
LowererMDArch::FinalLower()
{
    IR::IntConstOpnd *intOpnd;

    FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, this->m_func->m_tailInstr, this->m_func->m_headInstr)
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::Ret:
            // The Ret pseudo-op is dropped here.  NOTE(review): presumably the
            // actual machine RET is emitted by the epilog -- confirm.
            instr->Remove();
            break;

        case Js::OpCode::LdArgSize:
            // Materialize the (now final) args size as "MOV dst, #argsSize".
            Assert(this->m_func->HasTry());
            instr->m_opcode = Js::OpCode::MOV;
            intOpnd = IR::IntConstOpnd::New(this->m_func->GetArgsSize(), TyUint32, this->m_func);
            instr->SetSrc1(intOpnd);
            LowererMD::Legalize(instr);
            break;

        case Js::OpCode::LdSpillSize:
            // Materialize the (now final) spill size as "MOV dst, #spillSize".
            Assert(this->m_func->HasTry());
            instr->m_opcode = Js::OpCode::MOV;
            intOpnd = IR::IntConstOpnd::New(this->m_func->GetSpillSize(), TyUint32, this->m_func);
            instr->SetSrc1(intOpnd);
            LowererMD::Legalize(instr);
            break;

        case Js::OpCode::Leave:
            // Late-lowered Leave (optimized-try only; loop bodies handled elsewhere).
            Assert(this->m_func->DoOptimizeTry() && !this->m_func->IsLoopBodyInTry());
            instrPrev = this->lowererMD->LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), true /*fromFinalLower*/);
            break;

        case Js::OpCode::CMOVA:
        case Js::OpCode::CMOVAE:
        case Js::OpCode::CMOVB:
        case Js::OpCode::CMOVBE:
        case Js::OpCode::CMOVE:
        case Js::OpCode::CMOVG:
        case Js::OpCode::CMOVGE:
        case Js::OpCode::CMOVL:
        case Js::OpCode::CMOVLE:
        case Js::OpCode::CMOVNE:
        case Js::OpCode::CMOVNO:
        case Js::OpCode::CMOVNP:
        case Js::OpCode::CMOVNS:
        case Js::OpCode::CMOVO:
        case Js::OpCode::CMOVP:
        case Js::OpCode::CMOVS:
            // Get rid of fake src1.
            if (instr->GetSrc2())
            {
                // CMOV inserted before regalloc have a dummy src1 to simulate the fact that
                // CMOV is not a definite def of the dst.
                instr->SwapOpnds();
                instr->FreeSrc2();
            }
            break;
        }
    } NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;
}
// Thin delegate: amd64 defers ArgOut generation for stack args entirely to the
// shared (machine-independent) lowerer.
IR::Opnd*
LowererMDArch::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
{
    return this->lowererMD->m_lowerer->GenerateArgOutForStackArgs(callInstr, stackArgsInstr);
}
// Thin delegate: amd64 uses the shared register-based implementation of the
// inline-spread ArgOut loop.
void
LowererMDArch::LowerInlineSpreadArgOutLoop(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
{
    this->lowererMD->m_lowerer->LowerInlineSpreadArgOutLoopUsingRegisters(callInstr, indexOpnd, arrayElementsStartOpnd);
}
  2643. IR::Instr *
  2644. LowererMDArch::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd)
  2645. {
  2646. IR::RegOpnd *retReg = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  2647. // Load the continuation address into the return register.
  2648. insertBeforeInstr->InsertBefore(IR::Instr::New(Js::OpCode::MOV, retReg, targetOpnd, this->m_func));
  2649. // MOV REG_EH_SPILL_SIZE, spillSize
  2650. IR::Instr *movSpillSize = IR::Instr::New(Js::OpCode::LdSpillSize,
  2651. IR::RegOpnd::New(nullptr, REG_EH_SPILL_SIZE, TyMachReg, m_func),
  2652. m_func);
  2653. insertBeforeInstr->InsertBefore(movSpillSize);
  2654. // MOV REG_EH_ARGS_SIZE, argsSize
  2655. IR::Instr *movArgsSize = IR::Instr::New(Js::OpCode::LdArgSize,
  2656. IR::RegOpnd::New(nullptr, REG_EH_ARGS_SIZE, TyMachReg, m_func),
  2657. m_func);
  2658. insertBeforeInstr->InsertBefore(movArgsSize);
  2659. // MOV REG_EH_TARGET, amd64_ReturnFromCallWithFakeFrame
  2660. // PUSH REG_EH_TARGET
  2661. // RET
  2662. IR::Opnd *endCallWithFakeFrame = endCallWithFakeFrame =
  2663. IR::RegOpnd::New(nullptr, REG_EH_TARGET, TyMachReg, m_func);
  2664. IR::Instr *movTarget = IR::Instr::New(Js::OpCode::MOV,
  2665. endCallWithFakeFrame,
  2666. IR::HelperCallOpnd::New(IR::HelperOp_ReturnFromCallWithFakeFrame, m_func),
  2667. m_func);
  2668. insertBeforeInstr->InsertBefore(movTarget);
  2669. IR::Instr *push = IR::Instr::New(Js::OpCode::PUSH, m_func);
  2670. push->SetSrc1(endCallWithFakeFrame);
  2671. insertBeforeInstr->InsertBefore(push);
  2672. #if 0
  2673. // TODO: This block gets deleted if we emit a JMP instead of a RET.
  2674. IR::BranchInstr *jmp = IR::BranchInstr::New(Js::OpCode::JMP,
  2675. nullptr,
  2676. targetOpnd,
  2677. m_func);
  2678. leaveInstr->InsertBefore(jmp);
  2679. #endif
  2680. IR::IntConstOpnd *intSrc = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  2681. IR::Instr * retInstr = IR::Instr::New(Js::OpCode::RET, this->m_func);
  2682. retInstr->SetSrc1(intSrc);
  2683. retInstr->SetSrc2(retReg);
  2684. insertBeforeInstr->InsertBefore(retInstr);
  2685. // return the last instruction inserted
  2686. return retInstr;
  2687. }