Chromium Code Reviews

Diff: src/mips/macro-assembler-mips.cc

Issue 20843012: Extract hardcoded error strings into a single place and replace them with an enum. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: "styles fixed", created 7 years, 4 months ago
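The kCamelCase identifiers on the new side of this diff are values of a BailoutReason enum that the CL introduces in a shared header; this file only consumes them. Below is a minimal sketch of the likely pattern, with the enumerator names inferred from the call sites in this diff rather than copied from the actual header:

// A sketch of the single-list pattern this CL introduces -- illustrative,
// not the actual V8 header. Each former hardcoded string becomes one
// entry, pairing an enumerator with its message.
#define ERROR_MESSAGES_LIST(V)                                        \
  V(kNoReason, "no reason")                                           \
  V(kUnexpectedAllocationTop, "Unexpected allocation top")            \
  V(kWrongAddressOrValuePassedToRecordWrite,                         \
    "Wrong address or value passed to RecordWrite")

#define ERROR_MESSAGES_CONSTANTS(C, T) C,
enum BailoutReason {
  ERROR_MESSAGES_LIST(ERROR_MESSAGES_CONSTANTS)
  kLastErrorMessage
};
#undef ERROR_MESSAGES_CONSTANTS

// Recovers the human-readable message when Abort() needs to print it.
const char* GetBailoutReason(BailoutReason reason);

Keeping each enumerator and its message in one X-macro list means call sites stay string-free while Abort() can still recover a readable message via GetBailoutReason().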
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 238 matching lines...)
   ASSERT(!AreAliased(object, address, value, t8));
   ASSERT(!AreAliased(object, address, value, t9));
   // The compiled code assumes that record write doesn't change the
   // context register, so we check that none of the clobbered
   // registers are cp.
   ASSERT(!address.is(cp) && !value.is(cp));

   if (emit_debug_code()) {
     lw(at, MemOperand(address));
     Assert(
-        eq, "Wrong address or value passed to RecordWrite", at, Operand(value));
+        eq, kWrongAddressOrValuePassedToRecordWrite, at, Operand(value));
   }

   Label done;

   if (smi_check == INLINE_SMI_CHECK) {
     ASSERT_EQ(0, kSmiTag);
     JumpIfSmi(value, &done);
   }

   CheckPageFlag(value,
(...skipping 81 matching lines...)
   Label same_contexts;

   ASSERT(!holder_reg.is(scratch));
   ASSERT(!holder_reg.is(at));
   ASSERT(!scratch.is(at));

   // Load current lexical context from the stack frame.
   lw(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
   // In debug mode, make sure the lexical context is set.
 #ifdef DEBUG
-  Check(ne, "we should not have an empty lexical context",
+  Check(ne, kWeShouldNotHaveAnEmptyLexicalContext,
         scratch, Operand(zero_reg));
 #endif

   // Load the native context of the current context.
   int offset =
       Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
   lw(scratch, FieldMemOperand(scratch, offset));
   lw(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));

   // Check the context is a native context.
   if (emit_debug_code()) {
     push(holder_reg);  // Temporarily save holder on the stack.
     // Read the first word and compare to the native_context_map.
     lw(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
     LoadRoot(at, Heap::kNativeContextMapRootIndex);
-    Check(eq, "JSGlobalObject::native_context should be a native context.",
+    Check(eq, kJSGlobalObjectNative_contextShouldBeANativeContext,
           holder_reg, Operand(at));
     pop(holder_reg);  // Restore holder.
   }

   // Check if both contexts are the same.
   lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
   Branch(&same_contexts, eq, scratch, Operand(at));

   // Check the context is a native context.
   if (emit_debug_code()) {
     push(holder_reg);  // Temporarily save holder on the stack.
     mov(holder_reg, at);  // Move at to its holding place.
     LoadRoot(at, Heap::kNullValueRootIndex);
-    Check(ne, "JSGlobalProxy::context() should not be null.",
+    Check(ne, kJSGlobalProxyContextShouldNotBeNull,
           holder_reg, Operand(at));

     lw(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
     LoadRoot(at, Heap::kNativeContextMapRootIndex);
-    Check(eq, "JSGlobalObject::native_context should be a native context.",
+    Check(eq, kJSGlobalObjectNative_contextShouldBeANativeContext,
           holder_reg, Operand(at));
     // Restore at is not needed. at is reloaded below.
     pop(holder_reg);  // Restore holder.
     // Restore at to holder's context.
     lw(at, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
   }

   // Check that the security token in the calling global object is
   // compatible with the security token in the receiving global
   // object.
(...skipping 2524 matching lines...)
   if ((flags & RESULT_CONTAINS_TOP) == 0) {
     // Load allocation top into result and allocation limit into t9.
     lw(result, MemOperand(topaddr));
     lw(t9, MemOperand(topaddr, kPointerSize));
   } else {
     if (emit_debug_code()) {
       // Assert that result actually contains top on entry. t9 is used
       // immediately below so this use of t9 does not cause difference with
       // respect to register content between debug and release mode.
       lw(t9, MemOperand(topaddr));
-      Check(eq, "Unexpected allocation top", result, Operand(t9));
+      Check(eq, kUnexpectedAllocationTop, result, Operand(t9));
     }
     // Load allocation limit into t9. Result already contains allocation top.
     lw(t9, MemOperand(topaddr, limit - top));
   }

   // Calculate new top and bail out if new space is exhausted. Use result
   // to calculate the new top.
   Addu(scratch2, result, Operand(obj_size_reg));
   Branch(gc_required, Ugreater, scratch2, Operand(t9));
   sw(scratch2, MemOperand(topaddr));
(...skipping 49 matching lines...)
   if ((flags & RESULT_CONTAINS_TOP) == 0) {
     // Load allocation top into result and allocation limit into t9.
     lw(result, MemOperand(topaddr));
     lw(t9, MemOperand(topaddr, kPointerSize));
   } else {
     if (emit_debug_code()) {
       // Assert that result actually contains top on entry. t9 is used
       // immediately below so this use of t9 does not cause difference with
       // respect to register content between debug and release mode.
       lw(t9, MemOperand(topaddr));
-      Check(eq, "Unexpected allocation top", result, Operand(t9));
+      Check(eq, kUnexpectedAllocationTop, result, Operand(t9));
     }
     // Load allocation limit into t9. Result already contains allocation top.
     lw(t9, MemOperand(topaddr, limit - top));
   }

   // Calculate new top and bail out if new space is exhausted. Use result
   // to calculate the new top. Object size may be in words so a shift is
   // required to get the number of bytes.
   if ((flags & SIZE_IN_WORDS) != 0) {
     sll(scratch2, object_size, kPointerSizeLog2);
     Addu(scratch2, result, scratch2);
   } else {
     Addu(scratch2, result, Operand(object_size));
   }
   Branch(gc_required, Ugreater, scratch2, Operand(t9));

   // Update allocation top. result temporarily holds the new top.
   if (emit_debug_code()) {
     And(t9, scratch2, Operand(kObjectAlignmentMask));
-    Check(eq, "Unaligned allocation in new space", t9, Operand(zero_reg));
+    Check(eq, kUnalignedAllocationInNewSpace, t9, Operand(zero_reg));
   }
   sw(scratch2, MemOperand(topaddr));

   // Tag object if requested.
   if ((flags & TAG_OBJECT) != 0) {
     Addu(result, result, Operand(kHeapObjectTag));
   }
 }

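The two Allocate variants above implement the same bump-pointer fast path. As an illustrative C++ rendering of the logic the assembly encodes (the struct and function names are invented for this sketch and are not V8 API):

#include <cstddef>
#include <cstdint>

// Illustrative model of the bump-pointer allocation the assembly performs.
struct AllocationSpace {
  uintptr_t top;    // next free address (what MemOperand(topaddr) loads)
  uintptr_t limit;  // end of the space (loaded into t9 above)
};

// Returns the untagged address of the new object, or 0 if GC is required.
uintptr_t AllocateRaw(AllocationSpace* space, size_t size_in_bytes) {
  uintptr_t result = space->top;
  uintptr_t new_top = result + size_in_bytes;  // Addu(scratch2, result, ...)
  if (new_top > space->limit) return 0;        // Branch(gc_required, Ugreater, ...)
  space->top = new_top;                        // sw(scratch2, MemOperand(topaddr))
  return result;  // caller adds kHeapObjectTag if TAG_OBJECT is set
}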
 void MacroAssembler::UndoAllocationInNewSpace(Register object,
                                               Register scratch) {
   ExternalReference new_space_allocation_top =
       ExternalReference::new_space_allocation_top_address(isolate());

   // Make sure the object has no tag before resetting top.
   And(object, object, Operand(~kHeapObjectTagMask));
 #ifdef DEBUG
   // Check that the object un-allocated is below the current top.
   li(scratch, Operand(new_space_allocation_top));
   lw(scratch, MemOperand(scratch));
-  Check(less, "Undo allocation of non allocated memory",
+  Check(less, kUndoAllocationOfNonAllocatedMemory,
         object, Operand(scratch));
 #endif
   // Write the address of the object to un-allocate as the current top.
   li(scratch, Operand(new_space_allocation_top));
   sw(object, MemOperand(scratch));
 }

 void MacroAssembler::AllocateTwoByteString(Register result,
                                            Register length,
(...skipping 232 matching lines...)
   Addu(src, src, 1);
   sb(scratch, MemOperand(dst));
   Addu(dst, dst, 1);
   Subu(length, length, Operand(1));
   Branch(&byte_loop_1, ne, length, Operand(zero_reg));

   // Copy bytes in word size chunks.
   bind(&word_loop);
   if (emit_debug_code()) {
     And(scratch, src, kPointerSize - 1);
-    Assert(eq, "Expecting alignment for CopyBytes",
+    Assert(eq, kExpectingAlignmentForCopyBytes,
            scratch, Operand(zero_reg));
   }
   Branch(&byte_loop, lt, length, Operand(kPointerSize));
   lw(scratch, MemOperand(src));
   Addu(src, src, kPointerSize);

   // TODO(kalmard) check if this can be optimized to use sw in most cases.
   // Can't use unaligned access - copy byte by byte.
   sb(scratch, MemOperand(dst, 0));
   srl(scratch, scratch, 8);
(...skipping 705 matching lines...)
   }
   // Load value from ReturnValue.
   lw(v0, MemOperand(fp, return_value_offset_from_fp*kPointerSize));
   bind(&return_value_loaded);

   // No more valid handles (the result handle was the last one). Restore
   // previous handle scope.
   sw(s0, MemOperand(s3, kNextOffset));
   if (emit_debug_code()) {
     lw(a1, MemOperand(s3, kLevelOffset));
-    Check(eq, "Unexpected level after return from api call", a1, Operand(s2));
+    Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2));
   }
   Subu(s2, s2, Operand(1));
   sw(s2, MemOperand(s3, kLevelOffset));
   lw(at, MemOperand(s3, kLimitOffset));
   Branch(&delete_allocated_handles, ne, s1, Operand(at));

   // Check if the function scheduled an exception.
   bind(&leave_exit_frame);
   LoadRoot(t0, Heap::kTheHoleValueRootIndex);
   li(at, Operand(ExternalReference::scheduled_exception_address(isolate())));
(...skipping 333 matching lines...)
     lw(scratch1, MemOperand(scratch2));
     Subu(scratch1, scratch1, Operand(value));
     sw(scratch1, MemOperand(scratch2));
   }
 }


 // -----------------------------------------------------------------------------
 // Debugging.

-void MacroAssembler::Assert(Condition cc, const char* msg,
+void MacroAssembler::Assert(Condition cc, BailoutReason reason,
                             Register rs, Operand rt) {
   if (emit_debug_code())
-    Check(cc, msg, rs, rt);
+    Check(cc, reason, rs, rt);
 }


 void MacroAssembler::AssertRegisterIsRoot(Register reg,
                                           Heap::RootListIndex index) {
   if (emit_debug_code()) {
     LoadRoot(at, index);
-    Check(eq, "Register did not match expected root", reg, Operand(at));
+    Check(eq, kRegisterDidNotMatchExpectedRoot, reg, Operand(at));
   }
 }


 void MacroAssembler::AssertFastElements(Register elements) {
   if (emit_debug_code()) {
     ASSERT(!elements.is(at));
     Label ok;
     push(elements);
     lw(elements, FieldMemOperand(elements, HeapObject::kMapOffset));
     LoadRoot(at, Heap::kFixedArrayMapRootIndex);
     Branch(&ok, eq, elements, Operand(at));
     LoadRoot(at, Heap::kFixedDoubleArrayMapRootIndex);
     Branch(&ok, eq, elements, Operand(at));
     LoadRoot(at, Heap::kFixedCOWArrayMapRootIndex);
     Branch(&ok, eq, elements, Operand(at));
-    Abort("JSObject with fast elements map has slow elements");
+    Abort(kJSObjectWithFastElementsMapHasSlowElements);
     bind(&ok);
     pop(elements);
   }
 }


-void MacroAssembler::Check(Condition cc, const char* msg,
+void MacroAssembler::Check(Condition cc, BailoutReason reason,
                            Register rs, Operand rt) {
   Label L;
   Branch(&L, cc, rs, rt);
-  Abort(msg);
+  Abort(reason);
   // Will not return here.
   bind(&L);
 }


-void MacroAssembler::Abort(const char* msg) {
+void MacroAssembler::Abort(BailoutReason reason) {
   Label abort_start;
   bind(&abort_start);
   // We want to pass the msg string like a smi to avoid GC
   // problems, however msg is not guaranteed to be aligned
   // properly. Instead, we pass an aligned pointer that is
   // a proper v8 smi, but also pass the alignment difference
   // from the real pointer as a smi.
+  const char* msg = GetBailoutReason(reason);
   intptr_t p1 = reinterpret_cast<intptr_t>(msg);
   intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
   ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
 #ifdef DEBUG
   if (msg != NULL) {
     RecordComment("Abort message: ");
     RecordComment(msg);
   }
 #endif

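Abort() above smuggles the message pointer into generated code as GC-safe values: an aligned pointer that reads as a valid smi, plus the alignment remainder as a second smi. A standalone sketch of that masking trick, assuming V8's 32-bit smi encoding with a zero tag bit (constants redefined locally for the sketch):

#include <cassert>
#include <cstdint>

// Mirrors V8's 32-bit smi encoding (tag in bit 0, tag value 0); these
// are local stand-ins, not the real headers.
const intptr_t kSmiTagMask = 1;
const intptr_t kSmiTag = 0;

// Splits a message pointer into two GC-safe values: an aligned part
// that reads as a valid smi, plus the small alignment remainder.
void SplitForAbort(const char* msg, intptr_t* p0, intptr_t* delta) {
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  *p0 = (p1 & ~kSmiTagMask) + kSmiTag;  // clear the tag bit: now a smi
  *delta = p1 - *p0;                    // 0 or 1, itself smi-representable
  assert(*p0 + *delta == p1);           // the runtime re-adds them
}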
(...skipping 123 matching lines...)
 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                   Register map,
                                                   Register scratch) {
   // Load the initial map. The global functions all have initial maps.
   lw(map, FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
   if (emit_debug_code()) {
     Label ok, fail;
     CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
     Branch(&ok);
     bind(&fail);
-    Abort("Global functions must have initial map");
+    Abort(kGlobalFunctionsMustHaveInitialMap);
     bind(&ok);
   }
 }


 void MacroAssembler::EnterFrame(StackFrame::Type type) {
   addiu(sp, sp, -5 * kPointerSize);
   li(t8, Operand(Smi::FromInt(type)));
   li(t9, Operand(CodeObject()), CONSTANT_SIZE);
   sw(ra, MemOperand(sp, 4 * kPointerSize));
(...skipping 262 matching lines...)
   // Both Smi tags must be 1 (not Smi).
   and_(at, reg1, reg2);
   JumpIfSmi(at, on_either_smi);
 }


 void MacroAssembler::AssertNotSmi(Register object) {
   if (emit_debug_code()) {
     STATIC_ASSERT(kSmiTag == 0);
     andi(at, object, kSmiTagMask);
-    Check(ne, "Operand is a smi", at, Operand(zero_reg));
+    Check(ne, kOperandIsASmi, at, Operand(zero_reg));
   }
 }


 void MacroAssembler::AssertSmi(Register object) {
   if (emit_debug_code()) {
     STATIC_ASSERT(kSmiTag == 0);
     andi(at, object, kSmiTagMask);
-    Check(eq, "Operand is a smi", at, Operand(zero_reg));
+    Check(eq, kOperandIsASmi, at, Operand(zero_reg));
   }
 }

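The smi asserts above and the type asserts below only test the low tag bit. For readers unfamiliar with the encoding, here is an illustrative plain-C++ model of 32-bit smi tagging (not V8's actual headers; the constants are redefined locally):

#include <cstdint>

// Illustrative model of the 32-bit smi encoding these asserts rely on.
const intptr_t kSmiTag = 0;      // tagged smis have bit 0 clear
const intptr_t kSmiTagMask = 1;  // heap object pointers have bit 0 set
const int kSmiTagSize = 1;

bool IsSmi(intptr_t value) {
  // The same test "andi(at, object, kSmiTagMask)" performs in assembly.
  return (value & kSmiTagMask) == kSmiTag;
}

// A smi stores the integer shifted left by the tag size.
intptr_t SmiFromInt(int v) { return static_cast<intptr_t>(v) << kSmiTagSize; }
int SmiToInt(intptr_t smi) { return static_cast<int>(smi >> kSmiTagSize); }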
 void MacroAssembler::AssertString(Register object) {
   if (emit_debug_code()) {
     STATIC_ASSERT(kSmiTag == 0);
     And(t0, object, Operand(kSmiTagMask));
-    Check(ne, "Operand is a smi and not a string", t0, Operand(zero_reg));
+    Check(ne, kOperandIsASmiAndNotAString, t0, Operand(zero_reg));
     push(object);
     lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
     lbu(object, FieldMemOperand(object, Map::kInstanceTypeOffset));
-    Check(lo, "Operand is not a string", object, Operand(FIRST_NONSTRING_TYPE));
+    Check(lo, kOperandIsNotAString, object, Operand(FIRST_NONSTRING_TYPE));
     pop(object);
   }
 }


 void MacroAssembler::AssertName(Register object) {
   if (emit_debug_code()) {
     STATIC_ASSERT(kSmiTag == 0);
     And(t0, object, Operand(kSmiTagMask));
-    Check(ne, "Operand is a smi and not a name", t0, Operand(zero_reg));
+    Check(ne, kOperandIsASmiAndNotAName, t0, Operand(zero_reg));
     push(object);
     lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
     lbu(object, FieldMemOperand(object, Map::kInstanceTypeOffset));
-    Check(le, "Operand is not a name", object, Operand(LAST_NAME_TYPE));
+    Check(le, kOperandIsNotAName, object, Operand(LAST_NAME_TYPE));
     pop(object);
   }
 }


 void MacroAssembler::AssertRootValue(Register src,
                                      Heap::RootListIndex root_value_index,
-                                     const char* message) {
+                                     BailoutReason reason) {
   if (emit_debug_code()) {
     ASSERT(!src.is(at));
     LoadRoot(at, root_value_index);
-    Check(eq, message, src, Operand(at));
+    Check(eq, reason, src, Operand(at));
   }
 }

 void MacroAssembler::JumpIfNotHeapNumber(Register object,
                                          Register heap_number_map,
                                          Register scratch,
                                          Label* on_not_heap_number) {
   lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
   AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
(...skipping 196 matching lines...)
 #undef BRANCH_ARGS_CHECK


 void MacroAssembler::PatchRelocatedValue(Register li_location,
                                          Register scratch,
                                          Register new_value) {
   lw(scratch, MemOperand(li_location));
   // At this point scratch is a lui(at, ...) instruction.
   if (emit_debug_code()) {
     And(scratch, scratch, kOpcodeMask);
-    Check(eq, "The instruction to patch should be a lui.",
+    Check(eq, kTheInstructionToPatchShouldBeALui,
           scratch, Operand(LUI));
     lw(scratch, MemOperand(li_location));
   }
   srl(t9, new_value, kImm16Bits);
   Ins(scratch, t9, 0, kImm16Bits);
   sw(scratch, MemOperand(li_location));

   lw(scratch, MemOperand(li_location, kInstrSize));
   // scratch is now ori(at, ...).
   if (emit_debug_code()) {
     And(scratch, scratch, kOpcodeMask);
-    Check(eq, "The instruction to patch should be an ori.",
+    Check(eq, kTheInstructionToPatchShouldBeAnOri,
           scratch, Operand(ORI));
     lw(scratch, MemOperand(li_location, kInstrSize));
   }
   Ins(scratch, new_value, 0, kImm16Bits);
   sw(scratch, MemOperand(li_location, kInstrSize));

   // Update the I-cache so the new lui and ori can be executed.
   FlushICache(li_location, 2);
 }

 void MacroAssembler::GetRelocatedValue(Register li_location,
                                        Register value,
                                        Register scratch) {
   lw(value, MemOperand(li_location));
   if (emit_debug_code()) {
     And(value, value, kOpcodeMask);
-    Check(eq, "The instruction should be a lui.",
+    Check(eq, kTheInstructionShouldBeALui,
           value, Operand(LUI));
     lw(value, MemOperand(li_location));
   }

   // value now holds a lui instruction. Extract the immediate.
   sll(value, value, kImm16Bits);

   lw(scratch, MemOperand(li_location, kInstrSize));
   if (emit_debug_code()) {
     And(scratch, scratch, kOpcodeMask);
-    Check(eq, "The instruction should be an ori.",
+    Check(eq, kTheInstructionShouldBeAnOri,
           scratch, Operand(ORI));
     lw(scratch, MemOperand(li_location, kInstrSize));
   }
   // "scratch" now holds an ori instruction. Extract the immediate.
   andi(scratch, scratch, kImm16Mask);

   // Merge the results.
   or_(value, value, scratch);
 }

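Both helpers above treat a relocated li pseudo-instruction as a lui/ori pair holding the upper and lower 16 bits of a 32-bit constant. A small self-contained sketch of that split and merge (constants mirrored from the code above; the instruction encodings themselves are simplified down to their immediate fields):

#include <cstdint>

const int kImm16Bits = 16;
const uint32_t kImm16Mask = 0xffff;

// Split a 32-bit constant across the lui/ori immediates, as
// PatchRelocatedValue does with srl + Ins.
void Split(uint32_t value, uint32_t* lui_imm, uint32_t* ori_imm) {
  *lui_imm = value >> kImm16Bits;
  *ori_imm = value & kImm16Mask;
}

// Rebuild the constant from the two immediates, as GetRelocatedValue
// does with sll + andi + or_.
uint32_t Merge(uint32_t lui_imm, uint32_t ori_imm) {
  return (lui_imm << kImm16Bits) | (ori_imm & kImm16Mask);
}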
(...skipping 387 matching lines...)
          opcode == BGTZL);
   opcode = (cond == eq) ? BEQ : BNE;
   instr = (instr & ~kOpcodeMask) | opcode;
   masm_.emit(instr);
 }


 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_MIPS
