Lines Matching defs:end
63 // greater than or equal to the *end* of {new_region}.
65 DCHECK(above == regions_.end() || above->begin() >= new_region.end());
68 if (above != regions_.end() && new_region.end() == above->begin()) {
71 DCHECK_EQ(merged_region.end(), above->end());
76 if (below->end() == new_region.begin()) {
95 DCHECK(above == regions_.end() || below->end() < above->begin());
98 if (below->end() == new_region.begin()) {
101 DCHECK_EQ(merged_region.end(), new_region.end());
108 DCHECK_LT(below->end(), new_region.begin());
126 for (auto end = regions_.end(); it != end; ++it) {
137 } else if (ret.end() == old.end()) {
138 // We return a region at the end --> shrink remaining region.
144 regions_.insert(insert_pos, {ret.end(), old.end() - ret.end()});
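
The matches from lines 63 through 144 appear to come from a disjoint address-region pool: merging a region coalesces it with the neighbor above and/or below when their boundaries touch exactly, and allocating from an existing region shrinks or splits it. Below is a minimal sketch of the coalescing step only, using an illustrative Region type and a std::set ordered by begin address; it is not the actual V8 DisjointAllocationPool code.

#include <cstdint>
#include <iterator>
#include <set>

// Illustrative region type; V8's base::AddressRegion has a similar shape.
struct Region {
  uintptr_t begin;
  uintptr_t size;
  uintptr_t end() const { return begin + size; }
  bool operator<(const Region& other) const { return begin < other.begin; }
};

// Merge {new_region} into a set of disjoint regions, coalescing with the
// neighbors above and below when the boundaries touch exactly.
Region Merge(std::set<Region>& regions, Region new_region) {
  Region merged = new_region;
  // First region whose begin is not below {new_region.begin}; since regions
  // are disjoint, its begin is at or above the *end* of {new_region}.
  auto above = regions.lower_bound(new_region);
  if (above != regions.end() && new_region.end() == above->begin) {
    merged.size += above->size;
    above = regions.erase(above);
  }
  if (above != regions.begin()) {
    auto below = std::prev(above);
    if (below->end() == new_region.begin) {
      merged.begin = below->begin;
      merged.size += below->size;
      regions.erase(below);
    }
  }
  regions.insert(merged);
  return merged;
}
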
252 name_buffer.append(name.begin(), name.end());
542 // memory, we append that memory at the end of the owned_code_space_ list, we
558 size_t missing_end = range.end();
561 Address overlap_end = std::min(missing_end, vmem.end());
657 writable_memory_.begin(), writable_memory_.end(), size_t{0},
665 writable_memory_.begin(), writable_memory_.end(),
671 writable_memory_.begin(), writable_memory_.end(), size_t{0},
676 DCHECK(std::none_of(writable_memory_.begin(), writable_memory_.end(),
681 USE(std::accumulate(writable_memory_.begin(), writable_memory_.end(),
684 return region.end();
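
Lines 657 through 684 suggest std::accumulate and std::none_of being used, presumably inside debug checks, to walk the ordered collection of writable regions, verifying that they are sorted and disjoint and summing their sizes. A small illustration of that idiom with a placeholder Region type, not the V8 code itself:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <numeric>
#include <set>

struct Region {
  uintptr_t begin;
  uintptr_t size;
  uintptr_t end() const { return begin + size; }
  bool operator<(const Region& other) const { return begin < other.begin; }
};

// Sum the sizes of all regions.
size_t TotalSize(const std::set<Region>& regions) {
  return std::accumulate(
      regions.begin(), regions.end(), size_t{0},
      [](size_t total, const Region& r) { return total + r.size; });
}

// Walk the regions in order, asserting that each one starts at or after the
// end of the previous one; the accumulator carries the previous end address.
uintptr_t CheckSortedAndDisjoint(const std::set<Region>& regions) {
  return std::accumulate(regions.begin(), regions.end(), uintptr_t{0},
                         [](uintptr_t prev_end, const Region& r) {
                           assert(prev_end <= r.begin);
                           return r.end();
                         });
}
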
724 : owned_code_space_.back().end();
760 Address commit_end = RoundUp(code_space.end(), commit_page_size);
768 // The end needs to be committed all the way through the end of the page.
814 split_range.begin(), split_range.end());
832 Address end = RoundUp(region.end(), commit_page_size);
833 region = base::AddressRegion(begin, end - begin);
860 std::min(RoundDown(merged_region.end(), commit_page_size),
861 RoundUp(region.end(), commit_page_size));
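
Lines 760 and 832 through 861 show region boundaries being rounded to the commit page size before permissions are changed. Here is a sketch of expanding a region outward to page boundaries, with illustrative RoundDown/RoundUp helpers (assuming a power-of-two page size); the names are placeholders, not V8's utilities.

#include <cstdint>

// Assumes {alignment} is a power of two.
constexpr uintptr_t RoundDown(uintptr_t value, uintptr_t alignment) {
  return value & ~(alignment - 1);
}
constexpr uintptr_t RoundUp(uintptr_t value, uintptr_t alignment) {
  return RoundDown(value + alignment - 1, alignment);
}

struct Region {
  uintptr_t begin;
  uintptr_t size;
  uintptr_t end() const { return begin + size; }
};

// Expand {region} so that both ends lie on commit page boundaries: the start
// is rounded down and the end is rounded up.
Region ExpandToCommitPages(Region region, uintptr_t commit_page_size) {
  uintptr_t begin = RoundDown(region.begin, commit_page_size);
  uintptr_t end = RoundUp(region.end(), commit_page_size);
  return {begin, end - begin};
}
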
899 split_range.begin(), split_range.end());
911 if (previous->end() == region.begin()) {
916 if (insert_pos != writable_memory_.end() &&
917 region.end() == insert_pos->begin()) {
931 if (it == writable_memory_.end() || it->begin() >= region.end()) {
936 if (it->end() <= region.begin()) continue; // Continue after {it}.
940 if (overlap.end() == region.end()) return; // Fully contained already.
942 region = {overlap.end(), region.end() - overlap.end()};
945 if (overlap.end() == region.end()) {
956 region = {overlap.end(), region.end() - overlap.end()};
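
The matches around lines 931 through 956 look like a loop that walks already-writable regions in address order and repeatedly shrinks the requested region to the part not yet covered. A sketch of that overlap-skipping pattern follows; Region, the set, and MakeWritableRaw() are placeholders for illustration, not V8's API.

#include <cstdint>
#include <set>

struct Region {
  uintptr_t begin;
  uintptr_t size;
  uintptr_t end() const { return begin + size; }
  bool operator<(const Region& other) const { return begin < other.begin; }
};

// Placeholder for the platform-specific permission change.
void MakeWritableRaw(Region) {}

// Make {region} writable, skipping the parts already covered by entries in
// {writable} (kept sorted by begin address and disjoint).
void MakeWritable(const std::set<Region>& writable, Region region) {
  for (const Region& it : writable) {
    if (it.begin >= region.end()) break;     // No further overlap possible.
    if (it.end() <= region.begin) continue;  // Entirely before {region}.
    if (it.begin > region.begin) {
      // Uncovered gap before {it}: make it writable, then continue after it.
      MakeWritableRaw({region.begin, it.begin - region.begin});
    }
    if (it.end() >= region.end()) return;    // Remainder fully covered.
    region = {it.end(), region.end() - it.end()};  // Drop the covered prefix.
  }
  MakeWritableRaw(region);  // Whatever is left is not covered at all.
}
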
1441 WasmCode** end = start + module_->num_declared_functions;
1442 for (WasmCode* code : base::VectorOf(start, end - start)) {
1445 return std::vector<WasmCode*>{start, end};
1453 std::transform(owned_code_.begin(), owned_code_.end(), all_code.begin(),
1455 std::for_each(all_code.begin(), all_code.end(), WasmCodeRefScope::AddRef);
1545 if (code_space_data.jump_table->instructions().end() ==
1731 std::sort(new_owned_code_.begin(), new_owned_code_.end(),
1736 auto insertion_hint = owned_code_.end();
1740 DCHECK(insertion_hint == owned_code_.end() ||
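
Lines 1731 through 1740 suggest a batch of newly created code objects being sorted and then moved into an address-keyed container using an insertion hint, so each insertion is amortized constant time. The sketch below shows that pattern with illustrative types, sorting in descending address order so the previous insertion position stays a valid hint; it is a sketch of the technique, not the V8 implementation.

#include <algorithm>
#include <cstdint>
#include <map>
#include <memory>
#include <utility>
#include <vector>

struct Code {
  uintptr_t instruction_start;
};

// Move all entries of {new_owned} into {owned}, keyed by start address.
void TransferNewOwnedCode(
    std::map<uintptr_t, std::unique_ptr<Code>>& owned,
    std::vector<std::unique_ptr<Code>>& new_owned) {
  // Sort by descending start address: each key is smaller than the previously
  // inserted one, so the last insertion position is always a correct hint.
  std::sort(new_owned.begin(), new_owned.end(),
            [](const std::unique_ptr<Code>& a, const std::unique_ptr<Code>& b) {
              return a->instruction_start > b->instruction_start;
            });
  auto insertion_hint = owned.end();
  for (std::unique_ptr<Code>& code : new_owned) {
    uintptr_t key = code->instruction_start;
    insertion_hint = owned.emplace_hint(insertion_hint, key, std::move(code));
  }
  new_owned.clear();
}
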
1799 code_region.end() > table_start ? code_region.end() - table_start : 0,
1803 // exactly the end of the region. So all occurring offsets are actually
1951 memory_protection_key_, region.begin(), region.end());
1956 region.begin(), region.end());
1979 region.begin(), region.end());
1988 region.begin(), std::make_pair(region.end(), native_module)));
2005 mem.end(), mem.size());
2234 Address end = code_space.end();
2247 lookup_map_.insert(std::make_pair(start, std::make_pair(end, ret.get())));
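
Lines 2234 through 2247 show a lookup map being populated with entries from a code space's start address to a pair of its end address and owning module. A sketch of resolving an arbitrary code address against such a map; the types and names here are illustrative assumptions, not the V8 API.

#include <cstdint>
#include <map>
#include <utility>

struct NativeModule;  // Opaque owner type for illustration.

// Maps the start address of each code space to its end address and owner.
using LookupMap = std::map<uintptr_t, std::pair<uintptr_t, NativeModule*>>;

// Return the module whose code space contains {pc}, or nullptr.
NativeModule* Lookup(const LookupMap& map, uintptr_t pc) {
  auto it = map.upper_bound(pc);          // First entry starting beyond {pc}.
  if (it == map.begin()) return nullptr;  // {pc} is below every code space.
  --it;                                   // Candidate: last start <= {pc}.
  uintptr_t end = it->second.first;
  if (pc >= end) return nullptr;          // {pc} falls in a gap.
  return it->second.second;
}
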
2412 if (cache_it != cached_code_->end()) {
2470 code_space.address(), code_space.end(), code_space.size());