//===- OperationSupport.cpp -----------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains out-of-line implementations of the support types that
// Operation and related classes build on top of.
//
//===----------------------------------------------------------------------===//

#include "mlir/IR/OperationSupport.h"
#include "mlir/IR/Block.h"
#include "mlir/IR/OpDefinition.h"
#include "mlir/IR/Operation.h"
#include "mlir/IR/StandardTypes.h"
using namespace mlir;

//===----------------------------------------------------------------------===//
// NamedAttrList
//===----------------------------------------------------------------------===//
// NamedAttrList maintains a vector of NamedAttributes together with a cached
// DictionaryAttr and a "sorted" flag, packed into `dictionarySorted`
// (pointer = cached dictionary, int = is-sorted). Every mutation below must
// keep that cache consistent: any change to the contents drops the cached
// pointer, and the sorted bit is updated based on what the mutation did.

/// Construct the list from an array of attributes. The elements are sorted
/// as part of the assignment.
NamedAttrList::NamedAttrList(ArrayRef<NamedAttribute> attributes) {
  assign(attributes.begin(), attributes.end());
}

/// Construct the list from an iterator range of attributes.
NamedAttrList::NamedAttrList(const_iterator in_start, const_iterator in_end) {
  assign(in_start, in_end);
}

/// Return the current attribute storage.
ArrayRef<NamedAttribute> NamedAttrList::getAttrs() const { return attrs; }

/// Return a duplicated attribute entry, if any (delegates the search to
/// DictionaryAttr::findDuplicate).
Optional<NamedAttribute> NamedAttrList::findDuplicate() const {
  Optional<NamedAttribute> duplicate =
      DictionaryAttr::findDuplicate(attrs, isSorted());
  // DictionaryAttr::findDuplicate will sort the list, so reset the sorted
  // state: the list is now sorted, and any cached dictionary built from the
  // previous element order is dropped.
  if (!isSorted())
    dictionarySorted.setPointerAndInt(nullptr, true);
  return duplicate;
}

/// Return a DictionaryAttr for this list, building and caching it on first
/// use. Sorts the list in place if it is not already sorted.
DictionaryAttr NamedAttrList::getDictionary(MLIRContext *context) const {
  if (!isSorted()) {
    DictionaryAttr::sortInPlace(attrs);
    dictionarySorted.setPointerAndInt(nullptr, true);
  }
  // Build the dictionary only if it is not already cached.
  if (!dictionarySorted.getPointer())
    dictionarySorted.setPointer(DictionaryAttr::getWithSorted(attrs, context));
  return dictionarySorted.getPointer().cast<DictionaryAttr>();
}

/// Allow implicit conversion to a MutableDictionaryAttr.
NamedAttrList::operator MutableDictionaryAttr() const {
  if (attrs.empty())
    return MutableDictionaryAttr();
  // A non-empty list can reach an MLIRContext through its first value.
  return getDictionary(attrs.front().second.getContext());
}

/// Add an attribute with the specified name.
void NamedAttrList::append(StringRef name, Attribute attr) {
  append(Identifier::get(name, attr.getContext()), attr);
}

/// Add an attribute with the specified name.
void NamedAttrList::append(Identifier name, Attribute attr) {
  push_back({name, attr});
}

/// Add an array of named attributes.
void NamedAttrList::append(ArrayRef<NamedAttribute> newAttributes) {
  append(newAttributes.begin(), newAttributes.end());
}

/// Add a range of named attributes.
void NamedAttrList::append(const_iterator in_start, const_iterator in_end) {
  // TODO: expand to handle case where values appended are in order & after
  // end of current list.
  // Conservatively mark the list unsorted and drop the cached dictionary.
  dictionarySorted.setPointerAndInt(nullptr, false);
  attrs.append(in_start, in_end);
}

/// Replaces the attributes with a new list of attributes.
void NamedAttrList::assign(const_iterator in_start, const_iterator in_end) {
  // Sort the incoming range directly into the storage vector; the cached
  // dictionary is dropped but the result is known to be sorted.
  DictionaryAttr::sort(ArrayRef<NamedAttribute>{in_start, in_end}, attrs);
  dictionarySorted.setPointerAndInt(nullptr, true);
}

/// Append a single attribute to the end of the list.
void NamedAttrList::push_back(NamedAttribute newAttribute) {
  // The list stays sorted only if it was sorted before and the new name
  // compares strictly after the current last element's name.
  if (isSorted())
    dictionarySorted.setInt(
        attrs.empty() ||
        strcmp(attrs.back().first.data(), newAttribute.first.data()) < 0);
  // Any cached dictionary no longer matches the contents.
  dictionarySorted.setPointer(nullptr);
  attrs.push_back(newAttribute);
}

/// Helper function to find attribute in possible sorted vector of
/// NamedAttributes. Uses a linear scan when the vector is unsorted and a
/// binary search when it is sorted; returns attrs.end() if not found.
template <typename T>
static auto *findAttr(SmallVectorImpl<NamedAttribute> &attrs, T name,
                      bool sorted) {
  if (!sorted) {
    return llvm::find_if(
        attrs, [name](NamedAttribute attr) { return attr.first == name; });
  }

  auto *it = llvm::lower_bound(attrs, name);
  if (it == attrs.end() || it->first != name)
    return attrs.end();
  return it;
}

/// Return the specified attribute if present, null otherwise.
Attribute NamedAttrList::get(StringRef name) const {
  auto *it = findAttr(attrs, name, isSorted());
  return it != attrs.end() ? it->second : nullptr;
}

/// Return the specified attribute if present, null otherwise.
Attribute NamedAttrList::get(Identifier name) const {
  auto *it = findAttr(attrs, name, isSorted());
  return it != attrs.end() ? it->second : nullptr;
}

/// Return the specified named attribute if present, None otherwise.
Optional<NamedAttribute> NamedAttrList::getNamed(StringRef name) const {
  auto *it = findAttr(attrs, name, isSorted());
  return it != attrs.end() ? *it : Optional<NamedAttribute>();
}

/// Return the specified named attribute if present, None otherwise.
Optional<NamedAttribute> NamedAttrList::getNamed(Identifier name) const {
  auto *it = findAttr(attrs, name, isSorted());
  return it != attrs.end() ? *it : Optional<NamedAttribute>();
}

/// If the an attribute exists with the specified name, change it to the new
/// value. Otherwise, add a new attribute with the specified name/value.
void NamedAttrList::set(Identifier name, Attribute value) {
  assert(value && "attributes may never be null");

  // Look for an existing value for the given name, and set it in-place.
  auto *it = findAttr(attrs, name, isSorted());
  if (it != attrs.end()) {
    // Bail out early if the value is the same as what we already have.
    if (it->second == value)
      return;
    // Only the cached dictionary is stale; the name order is unchanged.
    dictionarySorted.setPointer(nullptr);
    it->second = value;
    return;
  }

  // Otherwise, insert the new attribute into its sorted position.
  // NOTE(review): lower_bound assumes the list is sorted here even though
  // findAttr may have taken the unsorted path — confirm callers' expectations.
  it = llvm::lower_bound(attrs, name);
  dictionarySorted.setPointer(nullptr);
  attrs.insert(it, {name, value});
}

void NamedAttrList::set(StringRef name, Attribute value) {
  assert(value && "setting null attribute not supported");
  return set(mlir::Identifier::get(name, value.getContext()), value);
}

/// Erase the attribute at the given iterator position and return its value,
/// or null if the iterator is at the end.
Attribute
NamedAttrList::eraseImpl(SmallVectorImpl<NamedAttribute>::iterator it) {
  if (it == attrs.end())
    return nullptr;

  // Erasing does not affect the sorted property, but invalidates any cached
  // dictionary.
  Attribute attr = it->second;
  attrs.erase(it);
  dictionarySorted.setPointer(nullptr);
  return attr;
}

/// Erase the attribute with the given name and return its value, or null if
/// no such attribute exists.
Attribute NamedAttrList::erase(Identifier name) {
  return eraseImpl(findAttr(attrs, name, isSorted()));
}

/// Erase the attribute with the given name and return its value, or null if
/// no such attribute exists.
Attribute NamedAttrList::erase(StringRef name) {
  return eraseImpl(findAttr(attrs, name, isSorted()));
}

/// Replace the contents of the list with the given vector of attributes.
NamedAttrList &
NamedAttrList::operator=(const SmallVectorImpl<NamedAttribute> &rhs) {
  assign(rhs.begin(), rhs.end());
  return *this;
}

/// Allow implicit conversion to an ArrayRef over the underlying storage.
NamedAttrList::operator ArrayRef<NamedAttribute>() const { return attrs; }

//===----------------------------------------------------------------------===//
// OperationState
//===----------------------------------------------------------------------===//

OperationState::OperationState(Location location, StringRef name)
    : location(location), name(name, location->getContext()) {}

OperationState::OperationState(Location location, OperationName name)
    : location(location), name(name) {}

OperationState::OperationState(Location location, StringRef name,
                               ValueRange operands, TypeRange types,
                               ArrayRef<NamedAttribute> attributes,
                               BlockRange successors,
                               MutableArrayRef<std::unique_ptr<Region>> regions)
    : location(location), name(name, location->getContext()),
      operands(operands.begin(), operands.end()),
      types(types.begin(), types.end()),
      attributes(attributes.begin(), attributes.end()),
      successors(successors.begin(), successors.end()) {
  // Regions are move-only, so they cannot be copied in the initializer list.
  for (std::unique_ptr<Region> &r : regions)
    this->regions.push_back(std::move(r));
}

/// Append the given operands to the state.
void OperationState::addOperands(ValueRange newOperands) {
  operands.append(newOperands.begin(), newOperands.end());
}

/// Append the given successor blocks to the state.
void OperationState::addSuccessors(BlockRange newSuccessors) {
  successors.append(newSuccessors.begin(), newSuccessors.end());
}

/// Create a new empty region, add it to the state, and return a pointer to it.
Region *OperationState::addRegion() {
  regions.emplace_back(new Region);
  return regions.back().get();
}

/// Take ownership of the given region and add it to the state.
void OperationState::addRegion(std::unique_ptr<Region> &&region) {
  regions.push_back(std::move(region));
}

/// Take ownership of each of the given regions.
void OperationState::addRegions(
    MutableArrayRef<std::unique_ptr<Region>> regions) {
  for (std::unique_ptr<Region> &region : regions)
    addRegion(std::move(region));
}

//===----------------------------------------------------------------------===//
// OperandStorage
//===----------------------------------------------------------------------===//

/// Construct the initial (inline) operand storage, placement-constructing one
/// OpOperand per value into the trailing-objects area.
detail::OperandStorage::OperandStorage(Operation *owner, ValueRange values)
    : representation(0) {
  auto &inlineStorage = getInlineStorage();
  inlineStorage.numOperands = inlineStorage.capacity = values.size();
  auto *operandPtrBegin = getTrailingObjects<OpOperand>();
  for (unsigned i = 0, e = inlineStorage.numOperands; i < e; ++i)
    new (&operandPtrBegin[i]) OpOperand(owner, values[i]);
}

detail::OperandStorage::~OperandStorage() {
  // Destruct the current storage container. Dynamic storage was allocated
  // with malloc (see resize), so it is destroyed manually and released with
  // free; inline storage only needs its destructor run.
  if (isDynamicStorage()) {
    TrailingOperandStorage &storage = getDynamicStorage();
    storage.~TrailingOperandStorage();
    free(&storage);
  } else {
    getInlineStorage().~TrailingOperandStorage();
  }
}

/// Replace the operands contained in the storage with the ones provided in
/// 'values'.
void detail::OperandStorage::setOperands(Operation *owner, ValueRange values) {
  // Resize first (constructing/destroying OpOperands as needed), then assign
  // each slot its new value.
  MutableArrayRef<OpOperand> storageOperands = resize(owner, values.size());
  for (unsigned i = 0, e = values.size(); i != e; ++i)
    storageOperands[i].set(values[i]);
}

/// Replace the operands beginning at 'start' and ending at 'start' + 'length'
/// with the ones provided in 'operands'. 'operands' may be smaller or larger
/// than the range pointed to by 'start'+'length'.
void detail::OperandStorage::setOperands(Operation *owner, unsigned start,
                                         unsigned length, ValueRange operands) {
  // If the new size is the same, we can update inplace.
  unsigned newSize = operands.size();
  if (newSize == length) {
    MutableArrayRef<OpOperand> storageOperands = getOperands();
    for (unsigned i = 0, e = length; i != e; ++i)
      storageOperands[start + i].set(operands[i]);
    return;
  }
  // If the new size is smaller, remove the extra operands and set the rest
  // inplace (recursing into the equal-size case above).
  if (newSize < length) {
    eraseOperands(start + operands.size(), length - newSize);
    setOperands(owner, start, newSize, operands);
    return;
  }
  // Otherwise, the new size is greater so we need to grow the storage.
  auto storageOperands = resize(owner, size() + (newSize - length));

  // Shift operands to the right to make space for the new operands. The
  // rotate works from the back so that the tail past 'start + length' ends up
  // after the newly opened gap.
  unsigned rotateSize = storageOperands.size() - (start + length);
  auto rbegin = storageOperands.rbegin();
  std::rotate(rbegin, std::next(rbegin, newSize - length), rbegin + rotateSize);

  // Update the operands inplace.
  for (unsigned i = 0, e = operands.size(); i != e; ++i)
    storageOperands[start + i].set(operands[i]);
}

/// Erase 'length' operands starting at 'start'.
void detail::OperandStorage::eraseOperands(unsigned start, unsigned length) {
  TrailingOperandStorage &storage = getStorage();
  MutableArrayRef<OpOperand> operands = storage.getOperands();
  assert((start + length) <= operands.size());
  storage.numOperands -= length;

  // Shift all operands down if the operands to remove are not at the end.
  if (start != storage.numOperands) {
    auto *indexIt = std::next(operands.begin(), start);
    std::rotate(indexIt, std::next(indexIt, length), operands.end());
  }
  // Destroy the now-unused trailing operand slots.
  for (unsigned i = 0; i != length; ++i)
    operands[storage.numOperands + i].~OpOperand();
}

/// Resize the storage to the given size. Returns the array containing the new
/// operands.
MutableArrayRef<OpOperand> detail::OperandStorage::resize(Operation *owner,
                                                          unsigned newSize) {
  TrailingOperandStorage &storage = getStorage();

  // If the number of operands is less than or equal to the current amount, we
  // can just update in place.
  unsigned &numOperands = storage.numOperands;
  MutableArrayRef<OpOperand> operands = storage.getOperands();
  if (newSize <= numOperands) {
    // If the new size is less than the current, destroy any extra operands.
    for (unsigned i = newSize; i != numOperands; ++i)
      operands[i].~OpOperand();
    numOperands = newSize;
    return operands.take_front(newSize);
  }

  // If the new size is within the current storage's capacity, grow inplace.
  if (newSize <= storage.capacity) {
    OpOperand *opBegin = operands.data();
    for (unsigned e = newSize; numOperands != e; ++numOperands)
      new (&opBegin[numOperands]) OpOperand(owner);
    return MutableArrayRef<OpOperand>(opBegin, newSize);
  }

  // Otherwise, we need to allocate a new storage. Grow the capacity
  // geometrically so repeated resizes are amortized.
  unsigned newCapacity =
      std::max(unsigned(llvm::NextPowerOf2(storage.capacity + 2)), newSize);
  auto *newStorageMem =
      malloc(TrailingOperandStorage::totalSizeToAlloc<OpOperand>(newCapacity));
  auto *newStorage = ::new (newStorageMem) TrailingOperandStorage();
  newStorage->numOperands = newSize;
  newStorage->capacity = newCapacity;

  // Move the current operands to the new storage.
  MutableArrayRef<OpOperand> newOperands = newStorage->getOperands();
  std::uninitialized_copy(std::make_move_iterator(operands.begin()),
                          std::make_move_iterator(operands.end()),
                          newOperands.begin());

  // Destroy the original operands.
  for (auto &operand : operands)
    operand.~OpOperand();

  // Initialize any new operands. Note that 'numOperands' still refers to the
  // old storage's count here and only serves as the running construction
  // index; the new storage's count was already set to newSize above.
  for (unsigned e = newSize; numOperands != e; ++numOperands)
    new (&newOperands[numOperands]) OpOperand(owner);

  // If the current storage is also dynamic, free it.
  if (isDynamicStorage())
    free(&storage);

  // Update the storage representation to use the new dynamic storage.
  representation = reinterpret_cast<intptr_t>(newStorage);
  representation |= DynamicStorageBit;
  return newOperands;
}

//===----------------------------------------------------------------------===//
// ResultStorage
//===----------------------------------------------------------------------===//

/// Returns the parent operation of this trailing result.
Operation *detail::TrailingOpResult::getOwner() {
  // We need to do some arithmetic to get the operation pointer. Move the
  // trailing owner to the start of the array.
  TrailingOpResult *trailingIt = this - trailingResultNumber;

  // Move the owner past the inline op results to get to the operation.
  // NOTE(review): this arithmetic mirrors the result/operation allocation
  // layout defined in Operation's allocation code — confirm against
  // Operation::create if that layout ever changes.
  auto *inlineResultIt = reinterpret_cast<InLineOpResult *>(trailingIt) -
                         OpResult::getMaxInlineResults();
  return reinterpret_cast<Operation *>(inlineResultIt) - 1;
}

//===----------------------------------------------------------------------===//
// Operation Value-Iterators
//===----------------------------------------------------------------------===//

//===----------------------------------------------------------------------===//
// OperandRange

/// Construct a range covering all operands of the given operation.
OperandRange::OperandRange(Operation *op)
    : OperandRange(op->getOpOperands().data(), op->getNumOperands()) {}

/// Return the operand index of the first element of this range. The range
/// must not be empty.
unsigned OperandRange::getBeginOperandIndex() const {
  assert(!empty() && "range must not be empty");
  return base->getOperandNumber();
}

//===----------------------------------------------------------------------===//
// MutableOperandRange

/// Construct a new mutable range from the given operand, operand start index,
/// and range length. 'operandSegments' identifies segment-size attributes on
/// the owner that must be kept in sync as the range grows or shrinks (see
/// updateLength).
MutableOperandRange::MutableOperandRange(
    Operation *owner, unsigned start, unsigned length,
    ArrayRef<OperandSegment> operandSegments)
    : owner(owner), start(start), length(length),
      operandSegments(operandSegments.begin(), operandSegments.end()) {
  assert((start + length) <= owner->getNumOperands() && "invalid range");
}

/// Construct a mutable range covering all of the operation's operands.
MutableOperandRange::MutableOperandRange(Operation *owner)
    : MutableOperandRange(owner, /*start=*/0, owner->getNumOperands()) {}

/// Slice this range into a sub range, with the additional operand segment.
MutableOperandRange
MutableOperandRange::slice(unsigned subStart, unsigned subLen,
                           Optional<OperandSegment> segment) {
  assert((subStart + subLen) <= length && "invalid sub-range");
  // The slice inherits this range's segment attributes so mutations through
  // it still keep them in sync.
  MutableOperandRange subSlice(owner, start + subStart, subLen,
                               operandSegments);
  if (segment)
    subSlice.operandSegments.push_back(*segment);
  return subSlice;
}

/// Append the given values to the range.
void MutableOperandRange::append(ValueRange values) {
  if (values.empty())
    return;
  owner->insertOperands(start + length, values);
  updateLength(length + values.size());
}

/// Assign this range to the given values.
void MutableOperandRange::assign(ValueRange values) {
  owner->setOperands(start, length, values);
  // Only update the length (and segment attributes) if it actually changed.
  if (length != values.size())
    updateLength(/*newLength=*/values.size());
}

/// Assign the range to the given single value.
void MutableOperandRange::assign(Value value) {
  if (length == 1) {
    // Fast path: one existing operand can be set directly, and the length
    // (and thus any segment attribute) is unchanged.
    owner->setOperand(start, value);
  } else {
    owner->setOperands(start, length, value);
    updateLength(/*newLength=*/1);
  }
}

/// Erase the operands within the given sub-range.
void MutableOperandRange::erase(unsigned subStart, unsigned subLen) {
  assert((subStart + subLen) <= length && "invalid sub-range");
  if (length == 0)
    return;
  owner->eraseOperands(start + subStart, subLen);
  updateLength(length - subLen);
}

/// Clear this range and erase all of the operands.
void MutableOperandRange::clear() {
  if (length != 0) {
    owner->eraseOperands(start, length);
    updateLength(/*newLength=*/0);
  }
}

/// Allow implicit conversion to an OperandRange.
MutableOperandRange::operator OperandRange() const {
  return owner->getOperands().slice(start, length);
}

/// Update the length of this range to the one provided.
void MutableOperandRange::updateLength(unsigned newLength) {
  int32_t diff = int32_t(newLength) - int32_t(length);
  length = newLength;

  // Update any of the provided segment attributes: rebuild each dense
  // int-elements attribute with this range's segment adjusted by the size
  // delta, and write it back onto the owning operation.
  for (OperandSegment &segment : operandSegments) {
    auto attr = segment.second.second.cast<DenseIntElementsAttr>();
    SmallVector<int32_t, 8> segments(attr.getValues<int32_t>());
    segments[segment.first] += diff;
    segment.second.second = DenseIntElementsAttr::get(attr.getType(), segments);
    owner->setAttr(segment.second.first, segment.second.second);
  }
}

//===----------------------------------------------------------------------===//
// ResultRange

/// Construct a range covering all results of the given operation.
ResultRange::ResultRange(Operation *op)
    : ResultRange(op, /*startIndex=*/0, op->getNumResults()) {}

/// Return the types of the results in this range.
ArrayRef<Type> ResultRange::getTypes() const {
  return getBase()->getResultTypes().slice(getStartIndex(), size());
}

/// See `llvm::indexed_accessor_range` for details.
OpResult ResultRange::dereference(Operation *op, ptrdiff_t index) {
  return op->getResult(index);
}

//===----------------------------------------------------------------------===//
// ValueRange

ValueRange::ValueRange(ArrayRef<Value> values)
    : ValueRange(values.data(), values.size()) {}
ValueRange::ValueRange(OperandRange values)
    : ValueRange(values.begin().getBase(), values.size()) {}
ValueRange::ValueRange(ResultRange values)
    : ValueRange(
          {values.getBase(), static_cast<unsigned>(values.getStartIndex())},
          values.size()) {}

/// See `llvm::detail::indexed_accessor_range_base` for details.
ValueRange::OwnerT ValueRange::offset_base(const OwnerT &owner,
                                           ptrdiff_t index) {
  // The owner may hold a raw Value array, an OpOperand array, or an Operation
  // (for result ranges); offset whichever representation is active.
  if (auto *value = owner.ptr.dyn_cast<const Value *>())
    return {value + index};
  if (auto *operand = owner.ptr.dyn_cast<OpOperand *>())
    return {operand + index};
  Operation *operation = reinterpret_cast<Operation *>(owner.ptr.get<void *>());
  return {operation, owner.startIndex + static_cast<unsigned>(index)};
}

/// See `llvm::detail::indexed_accessor_range_base` for details.
Value ValueRange::dereference_iterator(const OwnerT &owner, ptrdiff_t index) {
  // Mirror the three owner representations handled in offset_base: a raw
  // Value array, an OpOperand array, or an Operation plus result offset.
  if (auto *value = owner.ptr.dyn_cast<const Value *>())
    return value[index];
  if (auto *operand = owner.ptr.dyn_cast<OpOperand *>())
    return operand[index].get();
  Operation *operation = reinterpret_cast<Operation *>(owner.ptr.get<void *>());
  return operation->getResult(owner.startIndex + index);
}

//===----------------------------------------------------------------------===//
// Operation Equivalency
//===----------------------------------------------------------------------===//

/// Compute a hash over the operation's name, attributes, result types, and
/// (unless Flags::IgnoreOperands is set) operands, using the same components
/// that isEquivalentTo compares so that equivalent operations hash equal.
llvm::hash_code OperationEquivalence::computeHash(Operation *op, Flags flags) {
  // Hash operations based upon their:
  // - Operation Name
  // - Attributes
  llvm::hash_code hash =
      llvm::hash_combine(op->getName(), op->getMutableAttrDict());

  // - Result Types
  ArrayRef<Type> resultTypes = op->getResultTypes();
  switch (resultTypes.size()) {
  case 0:
    // We don't need to add anything to the hash.
    break;
  case 1:
    // Add in the result type.
    hash = llvm::hash_combine(hash, resultTypes.front());
    break;
  default:
    // Use the type buffer as the hash, as we can guarantee it is the same for
    // any given range of result types. This takes advantage of the fact the
    // result types >1 are stored in a TupleType and uniqued.
    hash = llvm::hash_combine(hash, resultTypes.data());
    break;
  }

  // - Operands
  bool ignoreOperands = flags & Flags::IgnoreOperands;
  if (!ignoreOperands) {
    // TODO: Allow commutative operations to have different ordering.
    hash = llvm::hash_combine(
        hash, llvm::hash_combine_range(op->operand_begin(), op->operand_end()));
  }
  return hash;
}

/// Return true if the two operations are equivalent: same name, attribute
/// dictionary, result types, and (unless Flags::IgnoreOperands is set) the
/// same operands in the same order.
bool OperationEquivalence::isEquivalentTo(Operation *lhs, Operation *rhs,
                                          Flags flags) {
  // An operation is always equivalent to itself.
  if (lhs == rhs)
    return true;

  // Compare the operation name.
  if (lhs->getName() != rhs->getName())
    return false;
  // Check operand counts.
  if (lhs->getNumOperands() != rhs->getNumOperands())
    return false;
  // Compare attributes.
  if (lhs->getMutableAttrDict() != rhs->getMutableAttrDict())
    return false;
  // Compare result types.
  ArrayRef<Type> lhsResultTypes = lhs->getResultTypes();
  ArrayRef<Type> rhsResultTypes = rhs->getResultTypes();
  if (lhsResultTypes.size() != rhsResultTypes.size())
    return false;
  switch (lhsResultTypes.size()) {
  case 0:
    break;
  case 1:
    // Compare the single result type.
    if (lhsResultTypes.front() != rhsResultTypes.front())
      return false;
    break;
  default:
    // Use the type buffer for the comparison, as we can guarantee it is the
    // same for any given range of result types. This takes advantage of the
    // fact the result types >1 are stored in a TupleType and uniqued.
    if (lhsResultTypes.data() != rhsResultTypes.data())
      return false;
    break;
  }
  // Compare operands.
  bool ignoreOperands = flags & Flags::IgnoreOperands;
  if (ignoreOperands)
    return true;
  // TODO: Allow commutative operations to have different ordering.
  return std::equal(lhs->operand_begin(), lhs->operand_end(),
                    rhs->operand_begin());
}