RecyclerChecker.cpp 18 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "RecyclerChecker.h"
  6. MainVisitor::MainVisitor(
  7. CompilerInstance& compilerInstance, ASTContext& context, bool fix)
  8. : _compilerInstance(compilerInstance), _context(context),
  9. _fix(fix), _fixed(false), _barrierTypeDefined(false)
  10. {
  11. if (_fix)
  12. {
  13. _rewriter.setSourceMgr(compilerInstance.getSourceManager(),
  14. compilerInstance.getLangOpts());
  15. }
  16. }
  17. bool MainVisitor::VisitCXXRecordDecl(CXXRecordDecl* recordDecl)
  18. {
  19. if (Log::GetLevel() < Log::LogLevel::Info)
  20. {
  21. return true; // At least Info level, otherwise this not needed
  22. }
  23. std::string typeName = recordDecl->getQualifiedNameAsString();
  24. // Ignore (system/non-GC types) before seeing "Memory::NoWriteBarrierField"
  25. if (!_barrierTypeDefined)
  26. {
  27. if (typeName != "Memory::NoWriteBarrierField")
  28. {
  29. return true;
  30. }
  31. _barrierTypeDefined = true;
  32. }
  33. if (!recordDecl->hasDefinition())
  34. {
  35. return true;
  36. }
  37. bool hasUnbarrieredPointer = false;
  38. bool hasBarrieredField = false;
  39. for (auto field : recordDecl->fields())
  40. {
  41. const QualType qualType = field->getType();
  42. const Type* type = qualType.getTypePtr();
  43. auto fieldTypeName = qualType.getAsString();
  44. if (StartsWith(fieldTypeName, "typename WriteBarrierFieldTypeTraits") ||
  45. StartsWith(fieldTypeName, "const typename WriteBarrierFieldTypeTraits"))
  46. {
  47. // Note this only indicates the class is write-barrier annotated
  48. hasBarrieredField = true;
  49. }
  50. else if (type->isPointerType())
  51. {
  52. hasUnbarrieredPointer = true;
  53. }
  54. else if (type->isCompoundType())
  55. {
  56. // If the field is a compound type,
  57. // check if it is a fully barriered type or
  58. // has unprotected pointer fields
  59. if (Contains(_pointerClasses, fieldTypeName))
  60. {
  61. hasUnbarrieredPointer = true;
  62. }
  63. else if (Contains(_barrieredClasses, fieldTypeName))
  64. {
  65. hasBarrieredField = true;
  66. }
  67. }
  68. }
  69. if (hasUnbarrieredPointer)
  70. {
  71. _pointerClasses.insert(typeName);
  72. }
  73. else if (hasBarrieredField)
  74. {
  75. _barrieredClasses.insert(typeName);
  76. }
  77. return true;
  78. }
//
// Examine every field of a type that must be fully write-barrier
// annotated. Fields already annotated (WriteBarrierPtr / FieldNoBarrier
// policy) are skipped; annotated record-typed fields are queued through
// pushFieldType so their own fields get checked transitively; every
// other field is reported as unbarriered (and rewritten in -fix mode).
//
// pushFieldType: callable taking (const Type*), returning true when the
//                type was newly queued (used only for logging here).
//
template <class PushFieldType>
void MainVisitor::ProcessUnbarrieredFields(
    CXXRecordDecl* recordDecl, const PushFieldType& pushFieldType)
{
    std::string typeName = recordDecl->getQualifiedNameAsString();
    if (typeName == "Memory::WriteBarrierPtr")
    {
        return; // Skip WriteBarrierPtr itself
    }

    const auto& sourceMgr = _compilerInstance.getSourceManager();
    DiagnosticsEngine& diagEngine = _context.getDiagnostics();
    const unsigned diagID = diagEngine.getCustomDiagID(
        DiagnosticsEngine::Error,
        "Unbarriered field, see "
        "https://github.com/microsoft/ChakraCore/wiki/Software-Write-Barrier#coding-rules");

    for (auto field : recordDecl->fields())
    {
        const QualType qualType = field->getType();
        string fieldTypeName = qualType.getAsString();
        string fieldName = field->getNameAsString();

        // Fields that already carry an annotation need no further work.
        if (StartsWith(fieldTypeName, "WriteBarrierPtr<") || // WriteBarrierPtr fields
            Contains(fieldTypeName, "_no_write_barrier_policy, ")) // FieldNoBarrier
        {
            continue; // skip
        }

        // If an annotated field type is struct/class/union (RecordType), the
        // field type in turn should likely be annotated.
        if (fieldTypeName.back() != '*' // not "... *"
            &&
            (
                StartsWith(fieldTypeName, "typename WriteBarrierFieldTypeTraits") ||
                StartsWith(fieldTypeName, "WriteBarrierFieldTypeTraits") ||
                StartsWith(fieldTypeName, "const typename WriteBarrierFieldTypeTraits") ||
                StartsWith(fieldTypeName, "const WriteBarrierFieldTypeTraits") ||
                fieldName.length() == 0 // anonymous union/struct
            ))
        {
            // Strip typedef sugar and (for arrays) the array dimensions
            // to reach the underlying element/record type.
            auto originalType = qualType->getUnqualifiedDesugaredType();
            if (auto arrayType = dyn_cast<ArrayType>(originalType))
            {
                originalType = arrayType->getElementType()->getUnqualifiedDesugaredType();
            }
            string originalTypeName = QualType(originalType, 0).getAsString();
            if (isa<RecordType>(originalType) &&
                !StartsWith(originalTypeName, "class Memory::WriteBarrierPtr<"))
            {
                if (pushFieldType(originalType))
                {
                    Log::outs() << "Queue field type: " << originalTypeName
                                << " (" << typeName << "::" << fieldName << ")\n";
                }
            }
        }
        else
        {
            // Unbarriered field: rewrite it in -fix mode, otherwise (or
            // if the rewrite fails) emit the diagnostic.
            SourceLocation location = field->getLocStart();
            if (this->_fix)
            {
                // Locate the field's type text in the raw source buffer
                // so it can be wrapped with Field(...)/FieldNoBarrier(...).
                const char* begin = sourceMgr.getCharacterData(location);
                const char* end = begin;
                if (MatchType(fieldTypeName, begin, &end))
                {
                    _rewriter.ReplaceText(
                        location, end - begin,
                        GetFieldTypeAnnotation(qualType) + string(begin, end) +
                        (*end == ' ' ? ")" : ") "));
                    _fixed = true;
                    continue;
                }
                Log::errs() << "Fail to fix: " << fieldTypeName << " "
                            << fieldName << "\n";
            }
            diagEngine.Report(location, diagID);
        }
    }
}
  155. static bool SkipSpace(const char*& p)
  156. {
  157. if (*p == ' ')
  158. {
  159. ++p;
  160. return true;
  161. }
  162. return false;
  163. }
  164. template <size_t N>
  165. static bool SkipPrefix(const char*& p, const char (&prefix)[N])
  166. {
  167. if (StartsWith(p, prefix))
  168. {
  169. p += N - 1; // skip
  170. return true;
  171. }
  172. return false;
  173. }
  174. static bool SkipPrefix(const char*& p, const string& prefix)
  175. {
  176. if (StartsWith(p, prefix))
  177. {
  178. p += prefix.length(); // skip
  179. return true;
  180. }
  181. return false;
  182. }
  183. static bool SkipTemplateParameters(const char*& p)
  184. {
  185. if (*p == '<')
  186. {
  187. ++p;
  188. int left = 1;
  189. while (left && *p)
  190. {
  191. switch (*p++)
  192. {
  193. case '<': ++left; break;
  194. case '>': --left; break;
  195. }
  196. }
  197. return true;
  198. }
  199. return false;
  200. }
//
// Heuristically match the clang-printed type name ("type") against the
// raw source text at "source". On success, *pSourceEnd points one past
// the matched type text in the source buffer. The two spellings can
// differ in whitespace, elaboration keywords (class/struct/union/enum),
// const placement, namespace qualification, template-argument spelling,
// and array-suffix placement, so the comparison advances two cursors
// and skips over those differences on either side.
//
bool MainVisitor::MatchType(const string& type, const char* source, const char** pSourceEnd)
{
    // try match type in source directly (clang "bool" type is "_Bool")
    if (SkipPrefix(source, type) || (type == "_Bool" && SkipPrefix(source, "bool")))
    {
        *pSourceEnd = source;
        return true;
    }

    const char* p = type.c_str();
    while (*p && *source)
    {
        // Whitespace is insignificant on either side.
        if (SkipSpace(p) || SkipSpace(source))
        {
            continue;
        }

// Elaboration/cv keywords may appear on either side independently.
#define SKIP_EITHER_PREFIX(prefix) \
    (SkipPrefix(p, prefix) || SkipPrefix(source, prefix))
        if (SKIP_EITHER_PREFIX("const ") ||
            SKIP_EITHER_PREFIX("class ") ||
            SKIP_EITHER_PREFIX("struct ") ||
            SKIP_EITHER_PREFIX("union ") ||
            SKIP_EITHER_PREFIX("enum "))
        {
            continue;
        }
#undef SKIP_EITHER_PREFIX

        // type may contain [...] array specifier, while source has it after field name
        if (*p == '[')
        {
            while (*p && *p++ != ']');
            continue;
        }

        // skip <...> in both
        if (SkipTemplateParameters(p) || SkipTemplateParameters(source))
        {
            continue;
        }

        // type may contain fully qualified name but source may or may not
        const char* pSkipScopeType = strstr(p, "::");
        if (pSkipScopeType && !memchr(p, ' ', pSkipScopeType - p))
        {
            pSkipScopeType += 2;
            // If the source spells the same qualifier, consume it there too.
            if (strncmp(source, p, pSkipScopeType - p) == 0)
            {
                source += pSkipScopeType - p;
            }
            p = pSkipScopeType;
            continue;
        }

        // Consume a literal run of identical characters, stopping at
        // template brackets (those are handled above).
        if (*p == *source)
        {
            while (*p && *source && *p == *source && !strchr("<>", *p))
            {
                ++p, ++source;
            }
            continue;
        }

        if (*p != *source)
        {
            return false; // mismatch
        }
    }

    if (!*p && *source) // type match completed and having remaining source
    {
        while (*(source - 1) == ' ') --source; // try to stop after a non-space char
        *pSourceEnd = source;
        return true;
    }
    return false;
}
  271. const char* MainVisitor::GetFieldTypeAnnotation(QualType qtype)
  272. {
  273. if (qtype->isPointerType())
  274. {
  275. auto type = qtype->getUnqualifiedDesugaredType()->getPointeeType().getTypePtr();
  276. const auto& i = _allocationTypes.find(type);
  277. if (i != _allocationTypes.end()
  278. && i->second == AllocationTypes::NonRecycler)
  279. {
  280. return "FieldNoBarrier(";
  281. }
  282. }
  283. return "Field(";
  284. }
  285. bool MainVisitor::VisitFunctionDecl(FunctionDecl* functionDecl)
  286. {
  287. if (functionDecl->hasBody())
  288. {
  289. CheckAllocationsInFunctionVisitor visitor(this, functionDecl);
  290. visitor.TraverseDecl(functionDecl);
  291. }
  292. return true;
  293. }
  294. void MainVisitor::RecordAllocation(QualType qtype, AllocationTypes allocationType)
  295. {
  296. auto type = qtype->getCanonicalTypeInternal().getTypePtr();
  297. _allocationTypes[type] |= allocationType;
  298. }
  299. void MainVisitor::RecordRecyclerAllocation(const string& allocationFunction, const string& type)
  300. {
  301. _allocatorTypeMap[allocationFunction].insert(type);
  302. }
  303. template <class Set, class DumpItemFunc>
  304. void MainVisitor::dump(const char* name, const Set& set, const DumpItemFunc& func)
  305. {
  306. Log::outs() << "-------------------------\n\n";
  307. Log::outs() << name << "\n";
  308. Log::outs() << "-------------------------\n\n";
  309. for (auto item : set)
  310. {
  311. func(Log::outs(), item);
  312. }
  313. Log::outs() << "-------------------------\n\n";
  314. }
  315. template <class Item>
  316. void MainVisitor::dump(const char* name, const set<Item>& set)
  317. {
  318. dump(name, set, [](raw_ostream& out, const Item& item)
  319. {
  320. out << " " << item << "\n";
  321. });
  322. }
  323. void MainVisitor::dump(const char* name, const unordered_set<const Type*> set)
  324. {
  325. dump(name, set, [&](raw_ostream& out, const Type* type)
  326. {
  327. out << " " << QualType(type, 0).getAsString() << "\n";
  328. });
  329. }
//
// Post-traversal analysis: dump the collected classifications, then
// walk every type observed in a write-barriered allocation — plus,
// transitively, its annotated field types and base classes — and flag
// any field that is missing a write-barrier annotation.
//
void MainVisitor::Inspect()
{
#define Dump(coll) dump(#coll, _##coll)
    Dump(pointerClasses);
    Dump(barrieredClasses);

    Log::outs() << "Recycler allocations\n";
    for (auto item : _allocatorTypeMap)
    {
        dump(item.first.c_str(), item.second);
    }

    std::queue<const Type*> queue; // queue of types to check
    std::unordered_set<const Type*> barrierTypes; // set of types queued

    // Queue a type exactly once; returns true when it was newly queued.
    auto pushBarrierType = [&](const Type* type) -> bool
    {
        if (barrierTypes.insert(type).second)
        {
            queue.push(type);
            return true;
        }
        return false;
    };

    // Seed the worklist with every type allocated with a write barrier.
    for (auto item : _allocationTypes)
    {
        if (item.second & AllocationTypes::WriteBarrier)
        {
            pushBarrierType(item.first);
        }
    }
    dump("WriteBarrier allocation types", barrierTypes);

    // Examine all barriered types. They should be fully wb annotated.
    while (!queue.empty())
    {
        auto type = queue.front();
        queue.pop();

        auto r = type->getCanonicalTypeInternal()->getAsCXXRecordDecl();
        if (r)
        {
            auto typeName = r->getQualifiedNameAsString();
            ProcessUnbarrieredFields(r, pushBarrierType);

            // queue the type's base classes
            for (const auto& base : r->bases())
            {
                if (pushBarrierType(base.getType().getTypePtr()))
                {
                    Log::outs() << "Queue base type: " << base.getType().getAsString()
                                << " (base of " << typeName << ")\n";
                }
            }
        }
    }
#undef Dump
}
  382. bool MainVisitor::ApplyFix()
  383. {
  384. return _fixed ? _rewriter.overwriteChangedFiles() : false;
  385. }
  386. static AllocationTypes CheckAllocationType(const CXXStaticCastExpr* castNode)
  387. {
  388. QualType targetType = castNode->getTypeAsWritten();
  389. if (const IdentifierInfo* info = targetType.getBaseTypeIdentifier())
  390. {
  391. return info->getName().equals("Recycler") ?
  392. AllocationTypes::Recycler : AllocationTypes::NonRecycler;
  393. }
  394. else
  395. {
  396. // Unknown template dependent allocator types
  397. return AllocationTypes::Unknown;
  398. }
  399. }
  400. template <class A0, class A1, class T>
  401. void CheckAllocationsInFunctionVisitor::VisitAllocate(
  402. const A0& getArg0, const A1& getArg1, const T& getAllocType)
  403. {
  404. const Expr* firstArgNode = getArg0();
  405. // Check if the first argument (to new or AllocateArray) is a static cast
  406. // AllocatorNew/AllocateArray in Chakra always does a static_cast to the AllocatorType
  407. const CXXStaticCastExpr* castNode = nullptr;
  408. if (firstArgNode != nullptr &&
  409. (castNode = dyn_cast<CXXStaticCastExpr>(firstArgNode)))
  410. {
  411. QualType allocatedType = getAllocType();
  412. string allocatedTypeStr = allocatedType.getAsString();
  413. auto allocationType = CheckAllocationType(castNode);
  414. if (allocationType == AllocationTypes::Recycler) // Recycler allocation
  415. {
  416. const Expr* secondArgNode = getArg1();
  417. // Chakra has two types of allocating functions- throwing and non-throwing
  418. // However, recycler allocations are always throwing, so the second parameter
  419. // should be the address of the allocator function
  420. auto unaryNode = cast<UnaryOperator>(secondArgNode);
  421. if (unaryNode != nullptr && unaryNode->getOpcode() == UnaryOperatorKind::UO_AddrOf)
  422. {
  423. Expr* subExpr = unaryNode->getSubExpr();
  424. if (DeclRefExpr* declRef = cast<DeclRefExpr>(subExpr))
  425. {
  426. auto declNameInfo = declRef->getNameInfo();
  427. auto allocationFunctionStr = declNameInfo.getName().getAsString();
  428. _mainVisitor->RecordRecyclerAllocation(allocationFunctionStr, allocatedTypeStr);
  429. if (!Contains(allocationFunctionStr, "Leaf"))
  430. {
  431. // Recycler write barrier allocation -- unless "Leaf" in allocFunc
  432. allocationType = AllocationTypes::RecyclerWriteBarrier;
  433. }
  434. }
  435. else
  436. {
  437. Log::errs() << "ERROR: (internal) Expected DeclRefExpr:\n";
  438. subExpr->dump();
  439. }
  440. }
  441. else if (auto mExpr = cast<MaterializeTemporaryExpr>(secondArgNode))
  442. {
  443. auto name = mExpr->GetTemporaryExpr()->IgnoreImpCasts()->getType().getAsString();
  444. if (StartsWith(name, "InfoBitsWrapper<")) // && Contains(name, "WithBarrierBit"))
  445. {
  446. // RecyclerNewEnumClass, RecyclerNewWithInfoBits -- always have WithBarrier varients
  447. allocationType = AllocationTypes::RecyclerWriteBarrier;
  448. }
  449. }
  450. else
  451. {
  452. Log::errs() << "ERROR: (internal) Expected unary node or MaterializeTemporaryExpr:\n";
  453. secondArgNode->dump();
  454. }
  455. }
  456. if (allocationType & AllocationTypes::WriteBarrier)
  457. {
  458. Log::outs() << "In \"" << _functionDecl->getQualifiedNameAsString() << "\"\n";
  459. Log::outs() << " Allocating \"" << allocatedTypeStr << "\" in write barriered memory\n";
  460. }
  461. _mainVisitor->RecordAllocation(allocatedType, allocationType);
  462. }
  463. }
  464. bool CheckAllocationsInFunctionVisitor::VisitCXXNewExpr(CXXNewExpr* newExpr)
  465. {
  466. if (newExpr->getNumPlacementArgs() > 1)
  467. {
  468. VisitAllocate(
  469. [=]() { return newExpr->getPlacementArg(0); },
  470. [=]() { return newExpr->getPlacementArg(1); },
  471. [=]() { return newExpr->getAllocatedType(); }
  472. );
  473. }
  474. return true;
  475. }
  476. bool CheckAllocationsInFunctionVisitor::VisitCallExpr(CallExpr* callExpr)
  477. {
  478. // Check callExpr for AllocateArray
  479. auto callee = callExpr->getDirectCallee();
  480. if (callExpr->getNumArgs() == 3 &&
  481. callee &&
  482. callee->getName().equals("AllocateArray"))
  483. {
  484. VisitAllocate(
  485. [=]() { return callExpr->getArg(0); },
  486. [=]() { return callExpr->getArg(1); },
  487. [=]()
  488. {
  489. auto retType = callExpr->getCallReturnType(_mainVisitor->getContext());
  490. return QualType(retType->getAs<PointerType>()->getPointeeType());
  491. }
  492. );
  493. }
  494. return true;
  495. }
  496. void RecyclerCheckerConsumer::HandleTranslationUnit(ASTContext& context)
  497. {
  498. MainVisitor mainVisitor(_compilerInstance, context, _fix);
  499. mainVisitor.TraverseDecl(context.getTranslationUnitDecl());
  500. mainVisitor.Inspect();
  501. mainVisitor.ApplyFix();
  502. }
// Create the AST consumer that runs the recycler checker for this
// compiler instance, forwarding the -fix flag parsed in ParseArgs.
std::unique_ptr<ASTConsumer> RecyclerCheckerAction::CreateASTConsumer(
    CompilerInstance& compilerInstance, llvm::StringRef)
{
    return llvm::make_unique<RecyclerCheckerConsumer>(compilerInstance, _fix);
}
  508. bool RecyclerCheckerAction::ParseArgs(
  509. const CompilerInstance& compilerInstance, const std::vector<std::string>& args)
  510. {
  511. for (auto i = args.begin(); i != args.end(); i++)
  512. {
  513. if (*i == "-verbose")
  514. {
  515. Log::SetLevel(Log::LogLevel::Verbose);
  516. }
  517. else if (*i == "-fix")
  518. {
  519. this->_fix = true;
  520. }
  521. else
  522. {
  523. Log::errs()
  524. << "ERROR: Unrecognized check-recycler option: " << *i << "\n"
  525. << "Supported options:\n"
  526. << " -fix Fix missing write barrier annotations"
  527. << " -verbose Log verbose messages\n";
  528. return false;
  529. }
  530. }
  531. return true;
  532. }
// Register this action with clang's plugin registry so it can be
// invoked as "-plugin check-recycler".
static FrontendPluginRegistry::Add<RecyclerCheckerAction> recyclerPlugin(
    "check-recycler", "Checks the recycler allocations");