Chromium Code Reviews

Side by Side Diff: src/compiler/x64/instruction-selector-x64.cc

Issue 2516413003: [wasm] Add ProtectedStore instruction (Closed)
Patch Set: Formatting, created 4 years ago
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include <algorithm> 5 #include <algorithm>
6 6
7 #include "src/base/adapters.h" 7 #include "src/base/adapters.h"
8 #include "src/compiler/instruction-selector-impl.h" 8 #include "src/compiler/instruction-selector-impl.h"
9 #include "src/compiler/node-matchers.h" 9 #include "src/compiler/node-matchers.h"
10 #include "src/compiler/node-properties.h" 10 #include "src/compiler/node-properties.h"
(...skipping 162 matching lines...)
173 } 173 }
174 } 174 }
175 175
176 bool CanBeBetterLeftOperand(Node* node) const { 176 bool CanBeBetterLeftOperand(Node* node) const {
177 return !selector()->IsLive(node); 177 return !selector()->IsLive(node);
178 } 178 }
179 }; 179 };
180 180
181 namespace { 181 namespace {
182 182
183 ArchOpcode GetLoadOpcode(LoadRepresentation load_rep) { 183 ArchOpcode GetLoadOpcode(LoadRepresentation load_rep, bool protect) {
184 ArchOpcode opcode = kArchNop; 184 ArchOpcode opcode = kArchNop;
185 switch (load_rep.representation()) { 185 switch (load_rep.representation()) {
186 case MachineRepresentation::kFloat32: 186 case MachineRepresentation::kFloat32:
187 DCHECK(!protect);
titzer 2016/11/22 10:33:20 We'll need to support all of these types with prot
Eric Holk 2016/11/22 23:16:25 I think this is a better idea. It looks like I can
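
A minimal sketch of the direction suggested in the thread above: have GetLoadOpcode map every representation to a trapping variant when protect is set, rather than DCHECK-ing. Only kX64TrapMovl is defined in this patch; the other kX64Trap* opcodes below are hypothetical placeholders.

    // Sketch only: kX64TrapMovss, kX64TrapMovsd, kX64TrapMovsxbl, kX64TrapMovzxbl,
    // kX64TrapMovsxwl, kX64TrapMovzxwl and kX64TrapMovq are hypothetical opcodes,
    // not defined in this patch.
    ArchOpcode GetLoadOpcode(LoadRepresentation load_rep, bool protect) {
      switch (load_rep.representation()) {
        case MachineRepresentation::kFloat32:
          return protect ? kX64TrapMovss : kX64Movss;
        case MachineRepresentation::kFloat64:
          return protect ? kX64TrapMovsd : kX64Movsd;
        case MachineRepresentation::kBit:  // Fall through.
        case MachineRepresentation::kWord8:
          if (load_rep.IsSigned()) return protect ? kX64TrapMovsxbl : kX64Movsxbl;
          return protect ? kX64TrapMovzxbl : kX64Movzxbl;
        case MachineRepresentation::kWord16:
          if (load_rep.IsSigned()) return protect ? kX64TrapMovsxwl : kX64Movsxwl;
          return protect ? kX64TrapMovzxwl : kX64Movzxwl;
        case MachineRepresentation::kWord32:
          return protect ? kX64TrapMovl : kX64Movl;
        case MachineRepresentation::kTaggedSigned:   // Fall through.
        case MachineRepresentation::kTaggedPointer:  // Fall through.
        case MachineRepresentation::kTagged:         // Fall through.
        case MachineRepresentation::kWord64:
          return protect ? kX64TrapMovq : kX64Movq;
        case MachineRepresentation::kSimd128:  // Fall through.
        case MachineRepresentation::kNone:
          UNREACHABLE();
          return kArchNop;
      }
      UNREACHABLE();
      return kArchNop;
    }

Returning from each case directly would also avoid the kArchNop initialization, matching the style of the new GetStoreOpcode added below.
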
187 opcode = kX64Movss; 188 opcode = kX64Movss;
188 break; 189 break;
189 case MachineRepresentation::kFloat64: 190 case MachineRepresentation::kFloat64:
191 DCHECK(!protect);
190 opcode = kX64Movsd; 192 opcode = kX64Movsd;
191 break; 193 break;
192 case MachineRepresentation::kBit: // Fall through. 194 case MachineRepresentation::kBit: // Fall through.
193 case MachineRepresentation::kWord8: 195 case MachineRepresentation::kWord8:
196 DCHECK(!protect);
194 opcode = load_rep.IsSigned() ? kX64Movsxbl : kX64Movzxbl; 197 opcode = load_rep.IsSigned() ? kX64Movsxbl : kX64Movzxbl;
195 break; 198 break;
196 case MachineRepresentation::kWord16: 199 case MachineRepresentation::kWord16:
200 DCHECK(!protect);
197 opcode = load_rep.IsSigned() ? kX64Movsxwl : kX64Movzxwl; 201 opcode = load_rep.IsSigned() ? kX64Movsxwl : kX64Movzxwl;
198 break; 202 break;
199 case MachineRepresentation::kWord32: 203 case MachineRepresentation::kWord32:
200 opcode = kX64Movl; 204 opcode = protect ? kX64TrapMovl : kX64Movl;
201 break; 205 break;
202 case MachineRepresentation::kTaggedSigned: // Fall through. 206 case MachineRepresentation::kTaggedSigned: // Fall through.
203 case MachineRepresentation::kTaggedPointer: // Fall through. 207 case MachineRepresentation::kTaggedPointer: // Fall through.
204 case MachineRepresentation::kTagged: // Fall through. 208 case MachineRepresentation::kTagged: // Fall through.
205 case MachineRepresentation::kWord64: 209 case MachineRepresentation::kWord64:
210 DCHECK(!protect);
206 opcode = kX64Movq; 211 opcode = kX64Movq;
207 break; 212 break;
208 case MachineRepresentation::kSimd128: // Fall through. 213 case MachineRepresentation::kSimd128: // Fall through.
209 case MachineRepresentation::kNone: 214 case MachineRepresentation::kNone:
210 UNREACHABLE(); 215 UNREACHABLE();
211 break; 216 break;
212 } 217 }
213 return opcode; 218 return opcode;
214 } 219 }
215 220
221 ArchOpcode GetStoreOpcode(StoreRepresentation store_rep, bool protect) {
222 switch (store_rep.representation()) {
223 case MachineRepresentation::kFloat32:
224 DCHECK(!protect);
225 return kX64Movss;
226 break;
227 case MachineRepresentation::kFloat64:
228 DCHECK(!protect);
229 return kX64Movsd;
230 break;
231 case MachineRepresentation::kBit: // Fall through.
232 case MachineRepresentation::kWord8:
233 DCHECK(!protect);
234 return kX64Movb;
235 break;
236 case MachineRepresentation::kWord16:
237 DCHECK(!protect);
238 return kX64Movw;
239 break;
240 case MachineRepresentation::kWord32:
241 return protect ? kX64TrapMovl : kX64Movl;
242 break;
243 case MachineRepresentation::kTaggedSigned: // Fall through.
244 case MachineRepresentation::kTaggedPointer: // Fall through.
245 case MachineRepresentation::kTagged: // Fall through.
246 case MachineRepresentation::kWord64:
247 DCHECK(!protect);
248 return kX64Movq;
249 break;
250 case MachineRepresentation::kSimd128: // Fall through.
251 case MachineRepresentation::kNone:
252 UNREACHABLE();
253 return kArchNop;
254 }
255 }
256
216 } // namespace 257 } // namespace
217 258
218 void InstructionSelector::VisitLoad(Node* node) { 259 void InstructionSelector::VisitLoad(Node* node) {
219 LoadRepresentation load_rep = LoadRepresentationOf(node->op()); 260 LoadRepresentation load_rep = LoadRepresentationOf(node->op());
220 X64OperandGenerator g(this); 261 X64OperandGenerator g(this);
221 262
222 ArchOpcode opcode = GetLoadOpcode(load_rep); 263 const bool protect = false;
264 ArchOpcode opcode = GetLoadOpcode(load_rep, protect);
223 InstructionOperand outputs[1]; 265 InstructionOperand outputs[1];
224 outputs[0] = g.DefineAsRegister(node); 266 outputs[0] = g.DefineAsRegister(node);
225 InstructionOperand inputs[3]; 267 InstructionOperand inputs[3];
226 size_t input_count = 0; 268 size_t input_count = 0;
227 AddressingMode mode = 269 AddressingMode mode =
228 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count); 270 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
229 InstructionCode code = opcode | AddressingModeField::encode(mode); 271 InstructionCode code = opcode | AddressingModeField::encode(mode);
230 Emit(code, 1, outputs, input_count, inputs); 272 Emit(code, 1, outputs, input_count, inputs);
231 } 273 }
232 274
233 void InstructionSelector::VisitProtectedLoad(Node* node) { 275 void InstructionSelector::VisitProtectedLoad(Node* node) {
234 LoadRepresentation load_rep = LoadRepresentationOf(node->op()); 276 LoadRepresentation load_rep = LoadRepresentationOf(node->op());
235 X64OperandGenerator g(this); 277 X64OperandGenerator g(this);
236 278
237 ArchOpcode opcode = GetLoadOpcode(load_rep); 279 const bool protect = true;
280 ArchOpcode opcode = GetLoadOpcode(load_rep, protect);
238 InstructionOperand outputs[1]; 281 InstructionOperand outputs[1];
239 outputs[0] = g.DefineAsRegister(node); 282 outputs[0] = g.DefineAsRegister(node);
240 InstructionOperand inputs[4]; 283 InstructionOperand inputs[4];
241 size_t input_count = 0; 284 size_t input_count = 0;
242 AddressingMode mode = 285 AddressingMode mode =
243 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count); 286 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
244 // Add the context parameter as an input. 287 // Add the context parameter as an input.
245 inputs[input_count++] = g.UseUniqueRegister(node->InputAt(2)); 288 inputs[input_count++] = g.UseUniqueRegister(node->InputAt(2));
246 // Add the source position as an input 289 // Add the source position as an input
247 inputs[input_count++] = g.UseImmediate(node->InputAt(3)); 290 inputs[input_count++] = g.UseImmediate(node->InputAt(3));
(...skipping 40 matching lines...)
288 record_write_mode = RecordWriteMode::kValueIsAny; 331 record_write_mode = RecordWriteMode::kValueIsAny;
289 break; 332 break;
290 } 333 }
291 InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()}; 334 InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
292 size_t const temp_count = arraysize(temps); 335 size_t const temp_count = arraysize(temps);
293 InstructionCode code = kArchStoreWithWriteBarrier; 336 InstructionCode code = kArchStoreWithWriteBarrier;
294 code |= AddressingModeField::encode(addressing_mode); 337 code |= AddressingModeField::encode(addressing_mode);
295 code |= MiscField::encode(static_cast<int>(record_write_mode)); 338 code |= MiscField::encode(static_cast<int>(record_write_mode));
296 Emit(code, 0, nullptr, input_count, inputs, temp_count, temps); 339 Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
297 } else { 340 } else {
298 ArchOpcode opcode = kArchNop; 341 const bool protect = false;
299 switch (rep) { 342 ArchOpcode opcode = GetStoreOpcode(store_rep, protect);
300 case MachineRepresentation::kFloat32:
301 opcode = kX64Movss;
302 break;
303 case MachineRepresentation::kFloat64:
304 opcode = kX64Movsd;
305 break;
306 case MachineRepresentation::kBit: // Fall through.
307 case MachineRepresentation::kWord8:
308 opcode = kX64Movb;
309 break;
310 case MachineRepresentation::kWord16:
311 opcode = kX64Movw;
312 break;
313 case MachineRepresentation::kWord32:
314 opcode = kX64Movl;
315 break;
316 case MachineRepresentation::kTaggedSigned: // Fall through.
317 case MachineRepresentation::kTaggedPointer: // Fall through.
318 case MachineRepresentation::kTagged: // Fall through.
319 case MachineRepresentation::kWord64:
320 opcode = kX64Movq;
321 break;
322 case MachineRepresentation::kSimd128: // Fall through.
323 case MachineRepresentation::kNone:
324 UNREACHABLE();
325 return;
326 }
327 InstructionOperand inputs[4]; 343 InstructionOperand inputs[4];
328 size_t input_count = 0; 344 size_t input_count = 0;
329 AddressingMode addressing_mode = 345 AddressingMode addressing_mode =
330 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count); 346 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
331 InstructionCode code = 347 InstructionCode code =
332 opcode | AddressingModeField::encode(addressing_mode); 348 opcode | AddressingModeField::encode(addressing_mode);
333 InstructionOperand value_operand = 349 InstructionOperand value_operand =
334 g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value); 350 g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
335 inputs[input_count++] = value_operand; 351 inputs[input_count++] = value_operand;
336 Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count, 352 Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count,
337 inputs); 353 inputs);
338 } 354 }
339 } 355 }
340 356
357 void InstructionSelector::VisitProtectedStore(Node* node) {
358 X64OperandGenerator g(this);
359 Node* base = node->InputAt(0);
360 Node* index = node->InputAt(1);
361 Node* value = node->InputAt(2);
362 Node* context = node->InputAt(3);
363 Node* position = node->InputAt(4);
364
365 StoreRepresentation store_rep = StoreRepresentationOf(node->op());
366 WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
367 MachineRepresentation rep = store_rep.representation();
368
369 const bool protect = true;
370 ArchOpcode opcode = GetStoreOpcode(store_rep, protect);
371 InstructionOperand inputs[6];
372 size_t input_count = 0;
373 AddressingMode addressing_mode =
374 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
375 InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
376 InstructionOperand value_operand =
377 g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
378 inputs[input_count++] = value_operand;
379 inputs[input_count++] = g.UseRegister(context);
380 inputs[input_count++] = g.UseImmediate(position);
381 Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count, inputs);
382 }
383
341 // Architecture supports unaligned access, therefore VisitLoad is used instead 384 // Architecture supports unaligned access, therefore VisitLoad is used instead
342 void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); } 385 void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); }
343 386
344 // Architecture supports unaligned access, therefore VisitStore is used instead 387 // Architecture supports unaligned access, therefore VisitStore is used instead
345 void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); } 388 void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); }
346 389
347 void InstructionSelector::VisitCheckedLoad(Node* node) { 390 void InstructionSelector::VisitCheckedLoad(Node* node) {
348 CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op()); 391 CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
349 X64OperandGenerator g(this); 392 X64OperandGenerator g(this);
350 Node* const buffer = node->InputAt(0); 393 Node* const buffer = node->InputAt(0);
(...skipping 2034 matching lines...)
2385 // static 2428 // static
2386 MachineOperatorBuilder::AlignmentRequirements 2429 MachineOperatorBuilder::AlignmentRequirements
2387 InstructionSelector::AlignmentRequirements() { 2430 InstructionSelector::AlignmentRequirements() {
2388 return MachineOperatorBuilder::AlignmentRequirements:: 2431 return MachineOperatorBuilder::AlignmentRequirements::
2389 FullUnalignedAccessSupport(); 2432 FullUnalignedAccessSupport();
2390 } 2433 }
2391 2434
2392 } // namespace compiler 2435 } // namespace compiler
2393 } // namespace internal 2436 } // namespace internal
2394 } // namespace v8 2437 } // namespace v8