Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(568)

Side by Side Diff: src/x64/assembler-x64.cc

Issue 11359127: Refactoring: Make predictable code flag handling architecture-independent. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebased Created 8 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/assembler-x64.h ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 332 matching lines...) Expand 10 before | Expand all | Expand 10 after
343 // Implementation of Assembler. 343 // Implementation of Assembler.
344 344
345 #ifdef GENERATED_CODE_COVERAGE 345 #ifdef GENERATED_CODE_COVERAGE
346 static void InitCoverageLog(); 346 static void InitCoverageLog();
347 #endif 347 #endif
348 348
349 Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size) 349 Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
350 : AssemblerBase(arg_isolate), 350 : AssemblerBase(arg_isolate),
351 code_targets_(100), 351 code_targets_(100),
352 positions_recorder_(this), 352 positions_recorder_(this),
353 emit_debug_code_(FLAG_debug_code), 353 emit_debug_code_(FLAG_debug_code) {
354 predictable_code_size_(false) {
355 if (buffer == NULL) { 354 if (buffer == NULL) {
356 // Do our own buffer management. 355 // Do our own buffer management.
357 if (buffer_size <= kMinimalBufferSize) { 356 if (buffer_size <= kMinimalBufferSize) {
358 buffer_size = kMinimalBufferSize; 357 buffer_size = kMinimalBufferSize;
359 358
360 if (isolate() != NULL && isolate()->assembler_spare_buffer() != NULL) { 359 if (isolate() != NULL && isolate()->assembler_spare_buffer() != NULL) {
361 buffer = isolate()->assembler_spare_buffer(); 360 buffer = isolate()->assembler_spare_buffer();
362 isolate()->set_assembler_spare_buffer(NULL); 361 isolate()->set_assembler_spare_buffer(NULL);
363 } 362 }
364 } 363 }
(...skipping 866 matching lines...) Expand 10 before | Expand all | Expand 10 after
1231 EnsureSpace ensure_space(this); 1230 EnsureSpace ensure_space(this);
1232 ASSERT(is_uint4(cc)); 1231 ASSERT(is_uint4(cc));
1233 if (L->is_bound()) { 1232 if (L->is_bound()) {
1234 const int short_size = 2; 1233 const int short_size = 2;
1235 const int long_size = 6; 1234 const int long_size = 6;
1236 int offs = L->pos() - pc_offset(); 1235 int offs = L->pos() - pc_offset();
1237 ASSERT(offs <= 0); 1236 ASSERT(offs <= 0);
1238 // Determine whether we can use 1-byte offsets for backwards branches, 1237 // Determine whether we can use 1-byte offsets for backwards branches,
1239 // which have a max range of 128 bytes. 1238 // which have a max range of 128 bytes.
1240 1239
1241 // We also need to check the predictable_code_size_ flag here, because 1240 // We also need to check the predictable_code_size() flag here, because on x64,
1242 // on x64, when the full code generator recompiles code for debugging, some 1241 // when the full code generator recompiles code for debugging, some places
1243 // places need to be padded out to a certain size. The debugger is keeping 1242 // need to be padded out to a certain size. The debugger is keeping track of
1244 // track of how often it did this so that it can adjust return addresses on 1243 // how often it did this so that it can adjust return addresses on the
1245 // the stack, but if the size of jump instructions can also change, that's 1244 // stack, but if the size of jump instructions can also change, that's not
1246 // not enough and the calculated offsets would be incorrect. 1245 // enough and the calculated offsets would be incorrect.
1247 if (is_int8(offs - short_size) && !predictable_code_size_) { 1246 if (is_int8(offs - short_size) && !predictable_code_size()) {
1248 // 0111 tttn #8-bit disp. 1247 // 0111 tttn #8-bit disp.
1249 emit(0x70 | cc); 1248 emit(0x70 | cc);
1250 emit((offs - short_size) & 0xFF); 1249 emit((offs - short_size) & 0xFF);
1251 } else { 1250 } else {
1252 // 0000 1111 1000 tttn #32-bit disp. 1251 // 0000 1111 1000 tttn #32-bit disp.
1253 emit(0x0F); 1252 emit(0x0F);
1254 emit(0x80 | cc); 1253 emit(0x80 | cc);
1255 emitl(offs - long_size); 1254 emitl(offs - long_size);
1256 } 1255 }
1257 } else if (distance == Label::kNear) { 1256 } else if (distance == Label::kNear) {
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
1294 } 1293 }
1295 1294
1296 1295
1297 void Assembler::jmp(Label* L, Label::Distance distance) { 1296 void Assembler::jmp(Label* L, Label::Distance distance) {
1298 EnsureSpace ensure_space(this); 1297 EnsureSpace ensure_space(this);
1299 const int short_size = sizeof(int8_t); 1298 const int short_size = sizeof(int8_t);
1300 const int long_size = sizeof(int32_t); 1299 const int long_size = sizeof(int32_t);
1301 if (L->is_bound()) { 1300 if (L->is_bound()) {
1302 int offs = L->pos() - pc_offset() - 1; 1301 int offs = L->pos() - pc_offset() - 1;
1303 ASSERT(offs <= 0); 1302 ASSERT(offs <= 0);
1304 if (is_int8(offs - short_size) && !predictable_code_size_) { 1303 if (is_int8(offs - short_size) && !predictable_code_size()) {
1305 // 1110 1011 #8-bit disp. 1304 // 1110 1011 #8-bit disp.
1306 emit(0xEB); 1305 emit(0xEB);
1307 emit((offs - short_size) & 0xFF); 1306 emit((offs - short_size) & 0xFF);
1308 } else { 1307 } else {
1309 // 1110 1001 #32-bit disp. 1308 // 1110 1001 #32-bit disp.
1310 emit(0xE9); 1309 emit(0xE9);
1311 emitl(offs - long_size); 1310 emitl(offs - long_size);
1312 } 1311 }
1313 } else if (distance == Label::kNear) { 1312 } else if (distance == Label::kNear) {
1314 emit(0xEB); 1313 emit(0xEB);
(...skipping 1739 matching lines...) Expand 10 before | Expand all | Expand 10 after
3054 bool RelocInfo::IsCodedSpecially() { 3053 bool RelocInfo::IsCodedSpecially() {
3055 // The deserializer needs to know whether a pointer is specially coded. Being 3054 // The deserializer needs to know whether a pointer is specially coded. Being
3056 // specially coded on x64 means that it is a relative 32 bit address, as used 3055 // specially coded on x64 means that it is a relative 32 bit address, as used
3057 // by branch instructions. 3056 // by branch instructions.
3058 return (1 << rmode_) & kApplyMask; 3057 return (1 << rmode_) & kApplyMask;
3059 } 3058 }
3060 3059
3061 } } // namespace v8::internal 3060 } } // namespace v8::internal
3062 3061
3063 #endif // V8_TARGET_ARCH_X64 3062 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/assembler-x64.h ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698