1// Copyright 2019 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/csa-load-elimination.h"
6
7#include "src/compiler/common-operator.h"
8#include "src/compiler/node-matchers.h"
9#include "src/compiler/node-properties.h"
10#include "src/compiler/simplified-operator.h"
11
12namespace v8 {
13namespace internal {
14namespace compiler {
15
Reduction CsaLoadElimination::Reduce(Node* node) {
  // Optional tracing: dump the node, its value inputs, and the abstract state
  // recorded for each of its effect inputs before reducing it.
  if (FLAG_trace_turbo_load_elimination) {
    if (node->op()->EffectInputCount() > 0) {
      PrintF(" visit #%d:%s", node->id(), node->op()->mnemonic());
      if (node->op()->ValueInputCount() > 0) {
        PrintF("(");
        for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
          if (i > 0) PrintF(", ");
          Node* const value = NodeProperties::GetValueInput(node, i);
          PrintF("#%d:%s", value->id(), value->op()->mnemonic());
        }
        PrintF(")");
      }
      PrintF("\n");
      for (int i = 0; i < node->op()->EffectInputCount(); ++i) {
        Node* const effect = NodeProperties::GetEffectInput(node, i);
        // A null state means the effect input has not been visited yet.
        if (AbstractState const* const state = node_states_.Get(effect)) {
          PrintF("  state[%i]: #%d:%s\n", i, effect->id(),
                 effect->op()->mnemonic());
          state->mutable_state.Print();
          state->immutable_state.Print();
        } else {
          PrintF("  no state[%i]: #%d:%s\n", i, effect->id(),
                 effect->op()->mnemonic());
        }
      }
    }
  }
  // Dispatch to the opcode-specific reducer.
  switch (node->opcode()) {
    case IrOpcode::kLoadFromObject:
    case IrOpcode::kLoadImmutableFromObject:
      return ReduceLoadFromObject(node, ObjectAccessOf(node->op()));
    case IrOpcode::kStoreToObject:
    case IrOpcode::kInitializeImmutableInObject:
      return ReduceStoreToObject(node, ObjectAccessOf(node->op()));
    case IrOpcode::kDebugBreak:
    case IrOpcode::kAbortCSADcheck:
      // Avoid changing optimizations in the presence of debug instructions.
      return PropagateInputState(node);
    case IrOpcode::kCall:
      return ReduceCall(node);
    case IrOpcode::kEffectPhi:
      return ReduceEffectPhi(node);
    case IrOpcode::kDead:
      return NoChange();
    case IrOpcode::kStart:
      return ReduceStart(node);
    default:
      return ReduceOtherNode(node);
  }
  UNREACHABLE();
}
68
69namespace CsaLoadEliminationHelpers {
70
71bool Subsumes(MachineRepresentation from, MachineRepresentation to) {
72  if (from == to) return true;
73  if (IsAnyTagged(from)) return IsAnyTagged(to);
74  if (IsIntegral(from)) {
75    return IsIntegral(to) && ElementSizeInBytes(from) >= ElementSizeInBytes(to);
76  }
77  return false;
78}
79
80bool IsConstantObject(Node* object) {
81  return object->opcode() == IrOpcode::kParameter ||
82         object->opcode() == IrOpcode::kLoadImmutable ||
83         NodeProperties::IsConstant(object);
84}
85
86bool IsFreshObject(Node* object) {
87  DCHECK_IMPLIES(NodeProperties::IsFreshObject(object),
88                 !IsConstantObject(object));
89  return NodeProperties::IsFreshObject(object);
90}
91
92}  // namespace CsaLoadEliminationHelpers
93
94namespace Helpers = CsaLoadEliminationHelpers;
95
// static
// Pointwise intersection of {to} with {from}: an entry of {to} survives only
// if {from} records the exact same FieldInfo for the same (outer key, node)
// pair; any disagreeing entry is reset to the empty FieldInfo.
template <typename OuterKey>
void CsaLoadElimination::HalfState::IntersectWith(
    OuterMap<OuterKey>& to, const OuterMap<OuterKey>& from) {
  FieldInfo empty_info;
  for (const std::pair<OuterKey, InnerMap>& to_map : to) {
    InnerMap to_map_copy(to_map.second);
    OuterKey key = to_map.first;
    InnerMap current_map = from.Get(key);
    for (std::pair<Node*, FieldInfo> info : to_map.second) {
      // Invalidate entries on which the two states disagree.
      if (current_map.Get(info.first) != info.second) {
        to_map_copy.Set(info.first, empty_info);
      }
    }
    to.Set(key, to_map_copy);
  }
}
113
// Intersects this half-state with {that}, keeping only facts that hold in
// both. Used when merging states coming from different control-flow paths.
void CsaLoadElimination::HalfState::IntersectWith(HalfState const* that) {
  IntersectWith(fresh_entries_, that->fresh_entries_);
  IntersectWith(constant_entries_, that->constant_entries_);
  IntersectWith(arbitrary_entries_, that->arbitrary_entries_);
  IntersectWith(fresh_unknown_entries_, that->fresh_unknown_entries_);
  IntersectWith(constant_unknown_entries_, that->constant_unknown_entries_);
  IntersectWith(arbitrary_unknown_entries_, that->arbitrary_unknown_entries_);
}
122
// Returns a copy of this half-state in which every entry that may alias with
// a write of representation {repr} to {object} at {offset} has been
// invalidated. Which maps must be cleared depends on the aliasing class of
// {object} (fresh / constant / arbitrary) and on whether {offset} is a
// compile-time constant.
CsaLoadElimination::HalfState const* CsaLoadElimination::HalfState::KillField(
    Node* object, Node* offset, MachineRepresentation repr) const {
  HalfState* result = zone_->New<HalfState>(*this);
  UnknownOffsetInfos empty_unknown(zone_, InnerMap(zone_));
  IntPtrMatcher m(offset);
  if (m.HasResolvedValue()) {
    uint32_t num_offset = static_cast<uint32_t>(m.ResolvedValue());
    if (Helpers::IsFreshObject(object)) {
      // May alias with:
      // - The same object/offset
      // - Arbitrary objects with the same offset
      // - The same object, unknown offset
      // - Arbitrary objects with unknown offset
      result->KillOffsetInFresh(object, num_offset, repr);
      KillOffset(result->arbitrary_entries_, num_offset, repr, zone_);
      result->fresh_unknown_entries_.Set(object, InnerMap(zone_));
      result->arbitrary_unknown_entries_ = empty_unknown;
    } else if (Helpers::IsConstantObject(object)) {
      // May alias with:
      // - Constant/arbitrary objects with the same offset
      // - Constant/arbitrary objects with unknown offset
      KillOffset(result->constant_entries_, num_offset, repr, zone_);
      KillOffset(result->arbitrary_entries_, num_offset, repr, zone_);
      result->constant_unknown_entries_ = empty_unknown;
      result->arbitrary_unknown_entries_ = empty_unknown;
    } else {
      // May alias with:
      // - Any object with the same or unknown offset
      KillOffset(result->fresh_entries_, num_offset, repr, zone_);
      KillOffset(result->constant_entries_, num_offset, repr, zone_);
      KillOffset(result->arbitrary_entries_, num_offset, repr, zone_);
      result->fresh_unknown_entries_ = empty_unknown;
      result->constant_unknown_entries_ = empty_unknown;
      result->arbitrary_unknown_entries_ = empty_unknown;
    }
  } else {
    ConstantOffsetInfos empty_constant(zone_, InnerMap(zone_));
    if (Helpers::IsFreshObject(object)) {
      // May alias with:
      // - The same object with any known/unknown offset
      // - Arbitrary objects with any known/unknown offset
      for (auto map : result->fresh_entries_) {
        // TODO(manoskouk): Consider adding a map from fresh objects to offsets
        // to implement this efficiently.
        InnerMap map_copy(map.second);
        map_copy.Set(object, FieldInfo());
        result->fresh_entries_.Set(map.first, map_copy);
      }
      result->fresh_unknown_entries_.Set(object, InnerMap(zone_));
      result->arbitrary_entries_ = empty_constant;
      result->arbitrary_unknown_entries_ = empty_unknown;
    } else if (Helpers::IsConstantObject(object)) {
      // May alias with:
      // - Constant/arbitrary objects with any known/unknown offset
      result->constant_entries_ = empty_constant;
      result->constant_unknown_entries_ = empty_unknown;
      result->arbitrary_entries_ = empty_constant;
      result->arbitrary_unknown_entries_ = empty_unknown;
    } else {
      // May alias with anything. Clear the state.
      return zone_->New<HalfState>(zone_);
    }
  }

  return result;
}
189
190CsaLoadElimination::HalfState const* CsaLoadElimination::HalfState::AddField(
191    Node* object, Node* offset, Node* value, MachineRepresentation repr) const {
192  HalfState* new_state = zone_->New<HalfState>(*this);
193  IntPtrMatcher m(offset);
194  if (m.HasResolvedValue()) {
195    uint32_t offset_num = static_cast<uint32_t>(m.ResolvedValue());
196    ConstantOffsetInfos& infos = Helpers::IsFreshObject(object)
197                                     ? new_state->fresh_entries_
198                                     : Helpers::IsConstantObject(object)
199                                           ? new_state->constant_entries_
200                                           : new_state->arbitrary_entries_;
201    Update(infos, offset_num, object, FieldInfo(value, repr));
202  } else {
203    UnknownOffsetInfos& infos =
204        Helpers::IsFreshObject(object)
205            ? new_state->fresh_unknown_entries_
206            : Helpers::IsConstantObject(object)
207                  ? new_state->constant_unknown_entries_
208                  : new_state->arbitrary_unknown_entries_;
209    Update(infos, object, offset, FieldInfo(value, repr));
210  }
211  return new_state;
212}
213
214CsaLoadElimination::FieldInfo CsaLoadElimination::HalfState::Lookup(
215    Node* object, Node* offset) const {
216  IntPtrMatcher m(offset);
217  if (m.HasResolvedValue()) {
218    uint32_t num_offset = static_cast<uint32_t>(m.ResolvedValue());
219    const ConstantOffsetInfos& infos = Helpers::IsFreshObject(object)
220                                           ? fresh_entries_
221                                           : Helpers::IsConstantObject(object)
222                                                 ? constant_entries_
223                                                 : arbitrary_entries_;
224    return infos.Get(num_offset).Get(object);
225  } else {
226    const UnknownOffsetInfos& infos = Helpers::IsFreshObject(object)
227                                          ? fresh_unknown_entries_
228                                          : Helpers::IsConstantObject(object)
229                                                ? constant_unknown_entries_
230                                                : arbitrary_unknown_entries_;
231    return infos.Get(object).Get(offset);
232  }
233}
234
// static
// Kill all elements in {infos} that overlap with an element with {offset} and
// size {ElementSizeInBytes(repr)}.
void CsaLoadElimination::HalfState::KillOffset(ConstantOffsetInfos& infos,
                                               uint32_t offset,
                                               MachineRepresentation repr,
                                               Zone* zone) {
  // All elements in the range [{offset}, {offset + ElementSizeInBytes(repr)})
  // are in the killed range. We do not need to traverse the inner maps, we can
  // just clear them.
  for (int i = 0; i < ElementSizeInBytes(repr); i++) {
    infos.Set(offset + i, InnerMap(zone));
  }

  // Now we have to remove all elements in earlier offsets that overlap with an
  // element in {offset}.
  // The earliest offset that may overlap with {offset} is
  // {kMaximumReprSizeInBytes - 1} before.
  uint32_t initial_offset = offset >= kMaximumReprSizeInBytes - 1
                                ? offset - (kMaximumReprSizeInBytes - 1)
                                : 0;
  // For all offsets from {initial_offset} to {offset}, we traverse the
  // respective inner map, and reset all elements that are large enough to
  // overlap with {offset}.
  for (uint32_t i = initial_offset; i < offset; i++) {
    InnerMap map_copy(infos.Get(i));
    for (const std::pair<Node*, FieldInfo> info : infos.Get(i)) {
      // An entry at offset {i} with size > {offset - i} extends into the
      // killed range; a kNone representation marks an already-empty entry.
      if (info.second.representation != MachineRepresentation::kNone &&
          ElementSizeInBytes(info.second.representation) >
              static_cast<int>(offset - i)) {
        map_copy.Set(info.first, {});
      }
    }
    infos.Set(i, map_copy);
  }
}
271
// Like KillOffset, but specialized for a single fresh {object}: only entries
// recorded for that exact object can alias, so we reset just those instead of
// clearing whole inner maps.
void CsaLoadElimination::HalfState::KillOffsetInFresh(
    Node* const object, uint32_t offset, MachineRepresentation repr) {
  // Kill the entries for all bytes covered by the store itself.
  for (int i = 0; i < ElementSizeInBytes(repr); i++) {
    Update(fresh_entries_, offset + i, object, {});
  }
  // Kill entries at earlier offsets that are wide enough to reach into
  // {offset}. The earliest such offset is {kMaximumReprSizeInBytes - 1}
  // bytes before.
  uint32_t initial_offset = offset >= kMaximumReprSizeInBytes - 1
                                ? offset - (kMaximumReprSizeInBytes - 1)
                                : 0;
  for (uint32_t i = initial_offset; i < offset; i++) {
    const FieldInfo& info = fresh_entries_.Get(i).Get(object);
    // kNone marks an empty entry; otherwise reset the entry if its size makes
    // it overlap with {offset}.
    if (info.representation != MachineRepresentation::kNone &&
        ElementSizeInBytes(info.representation) >
            static_cast<int>(offset - i)) {
      Update(fresh_entries_, i, object, {});
    }
  }
}
289
290// static
291void CsaLoadElimination::HalfState::Print(
292    const CsaLoadElimination::HalfState::ConstantOffsetInfos& infos) {
293  for (const auto outer_entry : infos) {
294    for (const auto inner_entry : outer_entry.second) {
295      Node* object = inner_entry.first;
296      uint32_t offset = outer_entry.first;
297      FieldInfo info = inner_entry.second;
298      PrintF("    #%d:%s+(%d) -> #%d:%s [repr=%s]\n", object->id(),
299             object->op()->mnemonic(), offset, info.value->id(),
300             info.value->op()->mnemonic(),
301             MachineReprToString(info.representation));
302    }
303  }
304}
305
306// static
307void CsaLoadElimination::HalfState::Print(
308    const CsaLoadElimination::HalfState::UnknownOffsetInfos& infos) {
309  for (const auto outer_entry : infos) {
310    for (const auto inner_entry : outer_entry.second) {
311      Node* object = outer_entry.first;
312      Node* offset = inner_entry.first;
313      FieldInfo info = inner_entry.second;
314      PrintF("    #%d:%s+#%d:%s -> #%d:%s [repr=%s]\n", object->id(),
315             object->op()->mnemonic(), offset->id(), offset->op()->mnemonic(),
316             info.value->id(), info.value->op()->mnemonic(),
317             MachineReprToString(info.representation));
318    }
319  }
320}
321
// Debug helper: dumps all six maps of this half-state.
void CsaLoadElimination::HalfState::Print() const {
  Print(fresh_entries_);
  Print(constant_entries_);
  Print(arbitrary_entries_);
  Print(fresh_unknown_entries_);
  Print(constant_unknown_entries_);
  Print(arbitrary_unknown_entries_);
}
330
// Tries to replace a load from {object} at the given offset with a value
// recorded in the matching half-state (mutable for kLoadFromObject, immutable
// for kLoadImmutableFromObject). If no compatible value is recorded, records
// this load's result so later loads can be eliminated.
Reduction CsaLoadElimination::ReduceLoadFromObject(Node* node,
                                                   ObjectAccess const& access) {
  DCHECK(node->opcode() == IrOpcode::kLoadFromObject ||
         node->opcode() == IrOpcode::kLoadImmutableFromObject);
  Node* object = NodeProperties::GetValueInput(node, 0);
  Node* offset = NodeProperties::GetValueInput(node, 1);
  Node* effect = NodeProperties::GetEffectInput(node);
  AbstractState const* state = node_states_.Get(effect);
  // No state yet for the effect input: revisit once it has been computed.
  if (state == nullptr) return NoChange();
  bool is_mutable = node->opcode() == IrOpcode::kLoadFromObject;
  // We should never find a field in the wrong half-state.
  DCHECK((is_mutable ? &state->immutable_state : &state->mutable_state)
             ->Lookup(object, offset)
             .IsEmpty());
  HalfState const* half_state =
      is_mutable ? &state->mutable_state : &state->immutable_state;

  MachineRepresentation representation = access.machine_type.representation();
  FieldInfo lookup_result = half_state->Lookup(object, offset);
  if (!lookup_result.IsEmpty()) {
    // Make sure we don't reuse values that were recorded with a different
    // representation or resurrect dead {replacement} nodes.
    MachineRepresentation from = lookup_result.representation;
    if (Helpers::Subsumes(from, representation) &&
        !lookup_result.value->IsDead()) {
      Node* replacement =
          TruncateAndExtend(lookup_result.value, from, access.machine_type);
      ReplaceWithValue(node, replacement, effect);
      // This might have opened an opportunity for escape analysis to eliminate
      // the object altogether.
      Revisit(object);
      return Replace(replacement);
    }
  }
  // Record the load's own result for elimination of subsequent loads.
  half_state = half_state->AddField(object, offset, node, representation);

  AbstractState const* new_state =
      is_mutable
          ? zone()->New<AbstractState>(*half_state, state->immutable_state)
          : zone()->New<AbstractState>(state->mutable_state, *half_state);

  return UpdateState(node, new_state);
}
374
// Updates the abstract state for a store: a mutable store first kills all
// potentially-aliasing entries and then records the stored value; an
// immutable initialization only records the value (it may not overwrite an
// existing immutable field).
Reduction CsaLoadElimination::ReduceStoreToObject(Node* node,
                                                  ObjectAccess const& access) {
  DCHECK(node->opcode() == IrOpcode::kStoreToObject ||
         node->opcode() == IrOpcode::kInitializeImmutableInObject);
  Node* object = NodeProperties::GetValueInput(node, 0);
  Node* offset = NodeProperties::GetValueInput(node, 1);
  Node* value = NodeProperties::GetValueInput(node, 2);
  Node* effect = NodeProperties::GetEffectInput(node);
  AbstractState const* state = node_states_.Get(effect);
  // No state yet for the effect input: revisit once it has been computed.
  if (state == nullptr) return NoChange();
  MachineRepresentation repr = access.machine_type.representation();
  if (node->opcode() == IrOpcode::kStoreToObject) {
    // We should not find the field in the wrong half-state.
    DCHECK(state->immutable_state.Lookup(object, offset).IsEmpty());
    HalfState const* mutable_state =
        state->mutable_state.KillField(object, offset, repr);
    mutable_state = mutable_state->AddField(object, offset, value, repr);
    AbstractState const* new_state =
        zone()->New<AbstractState>(*mutable_state, state->immutable_state);
    return UpdateState(node, new_state);
  } else {
    // We should not find the field in the wrong half-state.
    DCHECK(state->mutable_state.Lookup(object, offset).IsEmpty());
    // We should not initialize the same immutable field twice.
    DCHECK(state->immutable_state.Lookup(object, offset).IsEmpty());
    HalfState const* immutable_state =
        state->immutable_state.AddField(object, offset, value, repr);
    AbstractState const* new_state =
        zone()->New<AbstractState>(state->mutable_state, *immutable_state);
    return UpdateState(node, new_state);
  }
}
407
// Merges the states flowing into an EffectPhi. For loop headers, the state is
// computed conservatively from the loop entry; for merges, the input states
// are intersected.
Reduction CsaLoadElimination::ReduceEffectPhi(Node* node) {
  Node* const effect0 = NodeProperties::GetEffectInput(node, 0);
  Node* const control = NodeProperties::GetControlInput(node);
  AbstractState const* state0 = node_states_.Get(effect0);
  if (state0 == nullptr) return NoChange();
  if (control->opcode() == IrOpcode::kLoop) {
    // Here we rely on having only reducible loops:
    // The loop entry edge always dominates the header, so we can just take
    // the state from the first input, and compute the loop state based on it.
    AbstractState const* state = ComputeLoopState(node, state0);
    return UpdateState(node, state);
  }
  DCHECK_EQ(IrOpcode::kMerge, control->opcode());

  // Shortcut for the case when we do not know anything about some input.
  int const input_count = node->op()->EffectInputCount();
  for (int i = 1; i < input_count; ++i) {
    Node* const effect = NodeProperties::GetEffectInput(node, i);
    if (node_states_.Get(effect) == nullptr) return NoChange();
  }

  // Make a copy of the first input's state and intersect it with the state
  // from other inputs.
  // TODO(manoskouk): Consider computing phis for at least a subset of the
  // state.
  AbstractState* state = zone()->New<AbstractState>(*state0);
  for (int i = 1; i < input_count; ++i) {
    Node* const input = NodeProperties::GetEffectInput(node, i);
    state->IntersectWith(node_states_.Get(input));
  }
  return UpdateState(node, state);
}
440
// The Start node has no incoming effects, so it gets the empty state.
Reduction CsaLoadElimination::ReduceStart(Node* node) {
  return UpdateState(node, empty_state());
}
444
445Reduction CsaLoadElimination::ReduceCall(Node* node) {
446  Node* value = NodeProperties::GetValueInput(node, 0);
447  ExternalReferenceMatcher m(value);
448  if (m.Is(ExternalReference::check_object_type())) {
449    return PropagateInputState(node);
450  }
451  return ReduceOtherNode(node);
452}
453
// Fallback for nodes without a dedicated reducer. A node that is part of the
// effect chain either passes the state through (if it cannot write) or wipes
// the mutable half-state while keeping the immutable one.
Reduction CsaLoadElimination::ReduceOtherNode(Node* node) {
  if (node->op()->EffectInputCount() == 1 &&
      node->op()->EffectOutputCount() == 1) {
    Node* const effect = NodeProperties::GetEffectInput(node);
    AbstractState const* state = node_states_.Get(effect);
    // If we do not know anything about the predecessor, do not propagate just
    // yet because we will have to recompute anyway once we compute the
    // predecessor.
    if (state == nullptr) return NoChange();
    // If this {node} has some uncontrolled side effects, set its state to
    // the immutable half-state of its input state, otherwise to its input
    // state.
    return UpdateState(
        node, node->op()->HasProperty(Operator::kNoWrite)
                  ? state
                  : zone()->New<AbstractState>(HalfState(zone()),
                                               state->immutable_state));
  }
  // Nodes outside the effect chain do not need a state.
  DCHECK_EQ(0, node->op()->EffectOutputCount());
  return NoChange();
}
475
476Reduction CsaLoadElimination::UpdateState(Node* node,
477                                          AbstractState const* state) {
478  AbstractState const* original = node_states_.Get(node);
479  // Only signal that the {node} has Changed, if the information about {state}
480  // has changed wrt. the {original}.
481  if (state != original) {
482    if (original == nullptr || !state->Equals(original)) {
483      node_states_.Set(node, state);
484      return Changed(node);
485    }
486  }
487  return NoChange();
488}
489
490Reduction CsaLoadElimination::PropagateInputState(Node* node) {
491  Node* const effect = NodeProperties::GetEffectInput(node);
492  AbstractState const* state = node_states_.Get(effect);
493  if (state == nullptr) return NoChange();
494  return UpdateState(node, state);
495}
496
// Computes a conservative state for a loop header starting from the loop
// entry state {state}: walks the effect chains backwards from the loop's
// back-edges and invalidates everything the loop body might overwrite.
CsaLoadElimination::AbstractState const* CsaLoadElimination::ComputeLoopState(
    Node* node, AbstractState const* state) const {
  DCHECK_EQ(node->opcode(), IrOpcode::kEffectPhi);
  std::queue<Node*> queue;
  std::unordered_set<Node*> visited;
  visited.insert(node);
  // Seed the walk with the back-edge effect inputs: input 0 is the loop entry
  // edge and the last input is the control input, so both are skipped.
  for (int i = 1; i < node->InputCount() - 1; ++i) {
    queue.push(node->InputAt(i));
  }
  while (!queue.empty()) {
    Node* const current = queue.front();
    queue.pop();
    if (visited.insert(current).second) {
      if (current->opcode() == IrOpcode::kStoreToObject) {
        // A store inside the loop kills the aliasing mutable entries.
        Node* object = NodeProperties::GetValueInput(current, 0);
        Node* offset = NodeProperties::GetValueInput(current, 1);
        MachineRepresentation repr =
            ObjectAccessOf(current->op()).machine_type.representation();
        const HalfState* new_mutable_state =
            state->mutable_state.KillField(object, offset, repr);
        state = zone()->New<AbstractState>(*new_mutable_state,
                                           state->immutable_state);
      } else if (current->opcode() == IrOpcode::kInitializeImmutableInObject) {
#if DEBUG
        // We are not allowed to reset an immutable (object, offset) pair.
        Node* object = NodeProperties::GetValueInput(current, 0);
        Node* offset = NodeProperties::GetValueInput(current, 1);
        CHECK(state->immutable_state.Lookup(object, offset).IsEmpty());
#endif
      } else if (!current->op()->HasProperty(Operator::kNoWrite)) {
        // An arbitrary write inside the loop: drop the whole mutable
        // half-state, keeping only the immutable one.
        return zone()->New<AbstractState>(HalfState(zone()),
                                          state->immutable_state);
      }
      // Continue the backwards walk through all effect inputs.
      for (int i = 0; i < current->op()->EffectInputCount(); ++i) {
        queue.push(NodeProperties::GetEffectInput(current, i));
      }
    }
  }
  return state;
}
537
// Adapts {node}, which was recorded with representation {from}, so that it
// can stand in for a load of machine type {to}: truncates 64-bit values to
// 32 bits and truncates/extends to signed or unsigned 8/16-bit ranges as
// required. {from} must subsume {to}'s representation.
Node* CsaLoadElimination::TruncateAndExtend(Node* node,
                                            MachineRepresentation from,
                                            MachineType to) {
  DCHECK(Helpers::Subsumes(from, to.representation()));
  DCHECK_GE(ElementSizeInBytes(from), ElementSizeInBytes(to.representation()));

  if (to == MachineType::Int8() || to == MachineType::Int16()) {
    // 1st case: We want to eliminate a signed 8/16-bit load using the value
    // from a previous subsuming load or store. Since that value might be
    // outside 8/16-bit range, we first truncate it accordingly. Then we
    // sign-extend the result to 32-bit.
    DCHECK_EQ(to.semantic(), MachineSemantic::kInt32);
    if (from == MachineRepresentation::kWord64) {
      node = graph()->NewNode(machine()->TruncateInt64ToInt32(), node);
    }
    // Shift left then arithmetic-shift right to sign-extend the low bits.
    int shift = 32 - 8 * ElementSizeInBytes(to.representation());
    return graph()->NewNode(machine()->Word32Sar(),
                            graph()->NewNode(machine()->Word32Shl(), node,
                                             jsgraph()->Int32Constant(shift)),
                            jsgraph()->Int32Constant(shift));
  } else if (to == MachineType::Uint8() || to == MachineType::Uint16()) {
    // 2nd case: We want to eliminate an unsigned 8/16-bit load using the value
    // from a previous subsuming load or store. Since that value might be
    // outside 8/16-bit range, we first truncate it accordingly.
    if (from == MachineRepresentation::kWord64) {
      node = graph()->NewNode(machine()->TruncateInt64ToInt32(), node);
    }
    // Mask off everything above the loaded width.
    int mask = (1 << 8 * ElementSizeInBytes(to.representation())) - 1;
    return graph()->NewNode(machine()->Word32And(), node,
                            jsgraph()->Int32Constant(mask));
  } else if (from == MachineRepresentation::kWord64 &&
             to.representation() == MachineRepresentation::kWord32) {
    // 3rd case: Truncate 64-bits into 32-bits.
    return graph()->NewNode(machine()->TruncateInt64ToInt32(), node);
  } else {
    // 4th case: No need for truncation.
    DCHECK((from == to.representation() &&
            (from == MachineRepresentation::kWord32 ||
             from == MachineRepresentation::kWord64 || !IsIntegral(from))) ||
           (IsAnyTagged(from) && IsAnyTagged(to.representation())));
    return node;
  }
}
581
// Trivial accessors forwarding to the underlying JSGraph.

CommonOperatorBuilder* CsaLoadElimination::common() const {
  return jsgraph()->common();
}

MachineOperatorBuilder* CsaLoadElimination::machine() const {
  return jsgraph()->machine();
}

Graph* CsaLoadElimination::graph() const { return jsgraph()->graph(); }

Isolate* CsaLoadElimination::isolate() const { return jsgraph()->isolate(); }
593
594}  // namespace compiler
595}  // namespace internal
596}  // namespace v8
597