Chromium Code Reviews

Unified Diff: test/cctest/test-alloc.cc

Issue 6685088: Merge isolates to bleeding_edge. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 9 months ago
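Note on the pattern in this file: the isolates merge turns the formerly static entry points used here (Heap::, Factory::, Top::, Builtins::, CodeRange::) into per-isolate objects reached through Isolate::Current(), or through the ISOLATE and FACTORY shorthands that appear in the new code. A minimal sketch of the before/after calling convention, using an allocation call that occurs in this test; the helper function below is illustrative only and is not part of the patch:

// Illustrative sketch only -- AllocateExample() is not part of this patch.
#include "v8.h"

using namespace v8::internal;

static MaybeObject* AllocateExample() {
  // Before the merge: a single global heap with static entry points.
  //   return Heap::AllocateByteArray(100);
  // After the merge: each isolate owns its heap, reached via Isolate::Current().
  Heap* heap = Isolate::Current()->heap();
  return heap->AllocateByteArray(100);
}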
--- test/cctest/test-alloc.cc (OLD)
+++ test/cctest/test-alloc.cc (NEW)
 // Copyright 2007-2008 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
 //       with the distribution.
 //     * Neither the name of Google Inc. nor the names of its
 //       contributors may be used to endorse or promote products derived
 //       from this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 #include "v8.h"
 #include "accessors.h"
-#include "top.h"
 
 #include "cctest.h"
 
 
 using namespace v8::internal;
 
 
 static MaybeObject* AllocateAfterFailures() {
   static int attempts = 0;
   if (++attempts < 3) return Failure::RetryAfterGC();
+  Heap* heap = Isolate::Current()->heap();
 
   // New space.
-  NewSpace* new_space = Heap::new_space();
+  NewSpace* new_space = heap->new_space();
   static const int kNewSpaceFillerSize = ByteArray::SizeFor(0);
   while (new_space->Available() > kNewSpaceFillerSize) {
     int available_before = static_cast<int>(new_space->Available());
-    CHECK(!Heap::AllocateByteArray(0)->IsFailure());
+    CHECK(!heap->AllocateByteArray(0)->IsFailure());
     if (available_before == new_space->Available()) {
       // It seems that we are avoiding new space allocations when
       // allocation is forced, so no need to fill up new space
       // in order to make the test harder.
       break;
     }
   }
-  CHECK(!Heap::AllocateByteArray(100)->IsFailure());
-  CHECK(!Heap::AllocateFixedArray(100, NOT_TENURED)->IsFailure());
+  CHECK(!heap->AllocateByteArray(100)->IsFailure());
+  CHECK(!heap->AllocateFixedArray(100, NOT_TENURED)->IsFailure());
 
   // Make sure we can allocate through optimized allocation functions
   // for specific kinds.
-  CHECK(!Heap::AllocateFixedArray(100)->IsFailure());
-  CHECK(!Heap::AllocateHeapNumber(0.42)->IsFailure());
-  CHECK(!Heap::AllocateArgumentsObject(Smi::FromInt(87), 10)->IsFailure());
-  Object* object =
-      Heap::AllocateJSObject(*Top::object_function())->ToObjectChecked();
-  CHECK(!Heap::CopyJSObject(JSObject::cast(object))->IsFailure());
+  CHECK(!heap->AllocateFixedArray(100)->IsFailure());
+  CHECK(!heap->AllocateHeapNumber(0.42)->IsFailure());
+  CHECK(!heap->AllocateArgumentsObject(Smi::FromInt(87), 10)->IsFailure());
+  Object* object = heap->AllocateJSObject(
+      *Isolate::Current()->object_function())->ToObjectChecked();
+  CHECK(!heap->CopyJSObject(JSObject::cast(object))->IsFailure());
 
   // Old data space.
-  OldSpace* old_data_space = Heap::old_data_space();
+  OldSpace* old_data_space = heap->old_data_space();
   static const int kOldDataSpaceFillerSize = ByteArray::SizeFor(0);
   while (old_data_space->Available() > kOldDataSpaceFillerSize) {
-    CHECK(!Heap::AllocateByteArray(0, TENURED)->IsFailure());
+    CHECK(!heap->AllocateByteArray(0, TENURED)->IsFailure());
   }
-  CHECK(!Heap::AllocateRawAsciiString(100, TENURED)->IsFailure());
+  CHECK(!heap->AllocateRawAsciiString(100, TENURED)->IsFailure());
 
   // Large object space.
-  while (!Heap::OldGenerationAllocationLimitReached()) {
-    CHECK(!Heap::AllocateFixedArray(10000, TENURED)->IsFailure());
+  while (!heap->OldGenerationAllocationLimitReached()) {
+    CHECK(!heap->AllocateFixedArray(10000, TENURED)->IsFailure());
   }
-  CHECK(!Heap::AllocateFixedArray(10000, TENURED)->IsFailure());
+  CHECK(!heap->AllocateFixedArray(10000, TENURED)->IsFailure());
 
   // Map space.
-  MapSpace* map_space = Heap::map_space();
+  MapSpace* map_space = heap->map_space();
   static const int kMapSpaceFillerSize = Map::kSize;
   InstanceType instance_type = JS_OBJECT_TYPE;
   int instance_size = JSObject::kHeaderSize;
   while (map_space->Available() > kMapSpaceFillerSize) {
-    CHECK(!Heap::AllocateMap(instance_type, instance_size)->IsFailure());
+    CHECK(!heap->AllocateMap(instance_type, instance_size)->IsFailure());
   }
-  CHECK(!Heap::AllocateMap(instance_type, instance_size)->IsFailure());
+  CHECK(!heap->AllocateMap(instance_type, instance_size)->IsFailure());
 
   // Test that we can allocate in old pointer space and code space.
-  CHECK(!Heap::AllocateFixedArray(100, TENURED)->IsFailure());
-  CHECK(!Heap::CopyCode(Builtins::builtin(Builtins::Illegal))->IsFailure());
+  CHECK(!heap->AllocateFixedArray(100, TENURED)->IsFailure());
+  CHECK(!heap->CopyCode(Isolate::Current()->builtins()->builtin(
+      Builtins::Illegal))->IsFailure());
 
   // Return success.
   return Smi::FromInt(42);
 }
 
 
 static Handle<Object> Test() {
-  CALL_HEAP_FUNCTION(AllocateAfterFailures(), Object);
+  CALL_HEAP_FUNCTION(ISOLATE, AllocateAfterFailures(), Object);
 }
 
 
 TEST(StressHandles) {
   v8::Persistent<v8::Context> env = v8::Context::New();
   v8::HandleScope scope;
   env->Enter();
   Handle<Object> o = Test();
   CHECK(o->IsSmi() && Smi::cast(*o)->value() == 42);
   env->Exit();
(...skipping 10 matching lines...)
   0,
   0
 };
 
 
 TEST(StressJS) {
   v8::Persistent<v8::Context> env = v8::Context::New();
   v8::HandleScope scope;
   env->Enter();
   Handle<JSFunction> function =
-      Factory::NewFunction(Factory::function_symbol(), Factory::null_value());
+      FACTORY->NewFunction(FACTORY->function_symbol(), FACTORY->null_value());
   // Force the creation of an initial map and set the code to
   // something empty.
-  Factory::NewJSObject(function);
-  function->ReplaceCode(Builtins::builtin(Builtins::EmptyFunction));
+  FACTORY->NewJSObject(function);
+  function->ReplaceCode(Isolate::Current()->builtins()->builtin(
+      Builtins::EmptyFunction));
   // Patch the map to have an accessor for "get".
   Handle<Map> map(function->initial_map());
   Handle<DescriptorArray> instance_descriptors(map->instance_descriptors());
-  Handle<Proxy> proxy = Factory::NewProxy(&kDescriptor);
-  instance_descriptors = Factory::CopyAppendProxyDescriptor(
+  Handle<Proxy> proxy = FACTORY->NewProxy(&kDescriptor);
+  instance_descriptors = FACTORY->CopyAppendProxyDescriptor(
       instance_descriptors,
-      Factory::NewStringFromAscii(Vector<const char>("get", 3)),
+      FACTORY->NewStringFromAscii(Vector<const char>("get", 3)),
       proxy,
       static_cast<PropertyAttributes>(0));
   map->set_instance_descriptors(*instance_descriptors);
   // Add the Foo constructor the global object.
   env->Global()->Set(v8::String::New("Foo"), v8::Utils::ToLocal(function));
   // Call the accessor through JavaScript.
   v8::Handle<v8::Value> result =
       v8::Script::Compile(v8::String::New("(new Foo).get"))->Run();
   CHECK_EQ(42, result->Int32Value());
   env->Exit();
(...skipping 22 matching lines...)
   Block(void* base_arg, int size_arg)
       : base(base_arg), size(size_arg) {}
 
   void *base;
   int size;
 };
 
 
 TEST(CodeRange) {
   const int code_range_size = 16*MB;
-  CodeRange::Setup(code_range_size);
+  OS::Setup();
+  Isolate::Current()->code_range()->Setup(code_range_size);
   int current_allocated = 0;
   int total_allocated = 0;
   List<Block> blocks(1000);
 
   while (total_allocated < 5 * code_range_size) {
     if (current_allocated < code_range_size / 10) {
       // Allocate a block.
       // Geometrically distributed sizes, greater than Page::kPageSize.
       size_t requested = (Page::kPageSize << (Pseudorandom() % 6)) +
           Pseudorandom() % 5000 + 1;
       size_t allocated = 0;
-      void* base = CodeRange::AllocateRawMemory(requested, &allocated);
+      void* base = Isolate::Current()->code_range()->
+          AllocateRawMemory(requested, &allocated);
       blocks.Add(Block(base, static_cast<int>(allocated)));
       current_allocated += static_cast<int>(allocated);
       total_allocated += static_cast<int>(allocated);
     } else {
       // Free a block.
       int index = Pseudorandom() % blocks.length();
-      CodeRange::FreeRawMemory(blocks[index].base, blocks[index].size);
+      Isolate::Current()->code_range()->FreeRawMemory(
+          blocks[index].base, blocks[index].size);
       current_allocated -= blocks[index].size;
       if (index < blocks.length() - 1) {
         blocks[index] = blocks.RemoveLast();
       } else {
         blocks.RemoveLast();
       }
     }
   }
 
-  CodeRange::TearDown();
+  Isolate::Current()->code_range()->TearDown();
 }
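The ISOLATE argument added to CALL_HEAP_FUNCTION is the visible part of the same change, presumably so the macro knows which isolate's heap to use when it retries the allocation. StressHandles exercises exactly that contract: AllocateAfterFailures answers Failure::RetryAfterGC() on its first two attempts and only then returns the Smi 42, which the test expects back in a handle. A rough, hand-written sketch of the retry idea follows; it is not the body of the real macro, and the exact garbage-collection call is an assumption:

// Rough sketch of the retry-after-GC contract exercised by StressHandles.
// This is NOT the real CALL_HEAP_FUNCTION; CollectAllGarbage(false) is an
// assumed GC entry point standing in for "collect garbage and try again".
static Object* AllocateWithRetry(Isolate* isolate) {
  for (int attempt = 0; attempt < 10; ++attempt) {
    MaybeObject* maybe = AllocateAfterFailures();
    if (!maybe->IsFailure()) return maybe->ToObjectChecked();
    isolate->heap()->CollectAllGarbage(false);  // assumed GC call
  }
  return NULL;  // the real macro handles genuine out-of-memory more carefully
}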
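The CodeRange test shows the same migration for raw code memory: the static CodeRange::Setup/AllocateRawMemory/FreeRawMemory/TearDown calls become calls on Isolate::Current()->code_range(), with OS::Setup() now required first. Condensed to a single round trip, under the assumption that code_range() returns a CodeRange*; the block size and checks are illustrative, the real test stresses many randomly sized blocks:

// Minimal round trip through the per-isolate CodeRange, mirroring the calls
// made in TEST(CodeRange); the single Page::kPageSize block is illustrative.
static void CodeRangeRoundTrip() {
  OS::Setup();
  CodeRange* code_range = Isolate::Current()->code_range();
  code_range->Setup(16 * MB);

  size_t allocated = 0;
  void* base = code_range->AllocateRawMemory(Page::kPageSize, &allocated);
  CHECK(base != NULL);
  code_range->FreeRawMemory(base, static_cast<int>(allocated));

  code_range->TearDown();
}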
