My favorites | Sign in
v8
Project Home Downloads Wiki Issues Source Code Search
Checkout   Browse   Changes  
Changes to /trunk/src/arm/stub-cache-arm.cc
r12643 vs. r12661 Compare: vs.  Format:
Revision r12661
Go to: 
Project members, sign in to write a code review
/trunk/src/arm/stub-cache-arm.cc   r12643 /trunk/src/arm/stub-cache-arm.cc   r12661
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its 12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived 13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission. 14 // from this software without specific prior written permission.
15 // 15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 #include "v8.h" 28 #include "v8.h"
29 29
30 #if defined(V8_TARGET_ARCH_ARM) 30 #if defined(V8_TARGET_ARCH_ARM)
31 31
32 #include "ic-inl.h" 32 #include "ic-inl.h"
33 #include "codegen.h" 33 #include "codegen.h"
34 #include "stub-cache.h" 34 #include "stub-cache.h"
35 35
36 namespace v8 { 36 namespace v8 {
37 namespace internal { 37 namespace internal {
38 38
39 #define __ ACCESS_MASM(masm) 39 #define __ ACCESS_MASM(masm)
40 40
41 41
42 static void ProbeTable(Isolate* isolate, 42 static void ProbeTable(Isolate* isolate,
43 MacroAssembler* masm, 43 MacroAssembler* masm,
44 Code::Flags flags, 44 Code::Flags flags,
45 StubCache::Table table, 45 StubCache::Table table,
46 Register receiver, 46 Register receiver,
47 Register name, 47 Register name,
48 // Number of the cache entry, not scaled. 48 // Number of the cache entry, not scaled.
49 Register offset, 49 Register offset,
50 Register scratch, 50 Register scratch,
51 Register scratch2, 51 Register scratch2,
52 Register offset_scratch) { 52 Register offset_scratch) {
53 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); 53 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
54 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); 54 ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
55 ExternalReference map_offset(isolate->stub_cache()->map_reference(table)); 55 ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
56 56
57 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address()); 57 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
58 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address()); 58 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
59 uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address()); 59 uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());
60 60
61 // Check the relative positions of the address fields. 61 // Check the relative positions of the address fields.
62 ASSERT(value_off_addr > key_off_addr); 62 ASSERT(value_off_addr > key_off_addr);
63 ASSERT((value_off_addr - key_off_addr) % 4 == 0); 63 ASSERT((value_off_addr - key_off_addr) % 4 == 0);
64 ASSERT((value_off_addr - key_off_addr) < (256 * 4)); 64 ASSERT((value_off_addr - key_off_addr) < (256 * 4));
65 ASSERT(map_off_addr > key_off_addr); 65 ASSERT(map_off_addr > key_off_addr);
66 ASSERT((map_off_addr - key_off_addr) % 4 == 0); 66 ASSERT((map_off_addr - key_off_addr) % 4 == 0);
67 ASSERT((map_off_addr - key_off_addr) < (256 * 4)); 67 ASSERT((map_off_addr - key_off_addr) < (256 * 4));
68 68
69 Label miss; 69 Label miss;
70 Register base_addr = scratch; 70 Register base_addr = scratch;
71 scratch = no_reg; 71 scratch = no_reg;
72 72
73 // Multiply by 3 because there are 3 fields per entry (name, code, map). 73 // Multiply by 3 because there are 3 fields per entry (name, code, map).
74 __ add(offset_scratch, offset, Operand(offset, LSL, 1)); 74 __ add(offset_scratch, offset, Operand(offset, LSL, 1));
75 75
76 // Calculate the base address of the entry. 76 // Calculate the base address of the entry.
77 __ mov(base_addr, Operand(key_offset)); 77 __ mov(base_addr, Operand(key_offset));
78 __ add(base_addr, base_addr, Operand(offset_scratch, LSL, kPointerSizeLog2)); 78 __ add(base_addr, base_addr, Operand(offset_scratch, LSL, kPointerSizeLog2));
79 79
80 // Check that the key in the entry matches the name. 80 // Check that the key in the entry matches the name.
81 __ ldr(ip, MemOperand(base_addr, 0)); 81 __ ldr(ip, MemOperand(base_addr, 0));
82 __ cmp(name, ip); 82 __ cmp(name, ip);
83 __ b(ne, &miss); 83 __ b(ne, &miss);
84 84
85 // Check the map matches. 85 // Check the map matches.
86 __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr)); 86 __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr));
87 __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); 87 __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
88 __ cmp(ip, scratch2); 88 __ cmp(ip, scratch2);
89 __ b(ne, &miss); 89 __ b(ne, &miss);
90 90
91 // Get the code entry from the cache. 91 // Get the code entry from the cache.
92 Register code = scratch2; 92 Register code = scratch2;
93 scratch2 = no_reg; 93 scratch2 = no_reg;
94 __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr)); 94 __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr));
95 95
96 // Check that the flags match what we're looking for. 96 // Check that the flags match what we're looking for.
97 Register flags_reg = base_addr; 97 Register flags_reg = base_addr;
98 base_addr = no_reg; 98 base_addr = no_reg;
99 __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset)); 99 __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
100 // It's a nice optimization if this constant is encodable in the bic insn. 100 // It's a nice optimization if this constant is encodable in the bic insn.
101 101
102 uint32_t mask = Code::kFlagsNotUsedInLookup; 102 uint32_t mask = Code::kFlagsNotUsedInLookup;
103 ASSERT(__ ImmediateFitsAddrMode1Instruction(mask)); 103 ASSERT(__ ImmediateFitsAddrMode1Instruction(mask));
104 __ bic(flags_reg, flags_reg, Operand(mask)); 104 __ bic(flags_reg, flags_reg, Operand(mask));
105 // Using cmn and the negative instead of cmp means we can use movw. 105 // Using cmn and the negative instead of cmp means we can use movw.
106 if (flags < 0) { 106 if (flags < 0) {
107 __ cmn(flags_reg, Operand(-flags)); 107 __ cmn(flags_reg, Operand(-flags));
108 } else { 108 } else {
109 __ cmp(flags_reg, Operand(flags)); 109 __ cmp(flags_reg, Operand(flags));
110 } 110 }
111 __ b(ne, &miss); 111 __ b(ne, &miss);
112 112
113 #ifdef DEBUG 113 #ifdef DEBUG
114 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { 114 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
115 __ jmp(&miss); 115 __ jmp(&miss);
116 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { 116 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
117 __ jmp(&miss); 117 __ jmp(&miss);
118 } 118 }
119 #endif 119 #endif
120 120
121 // Jump to the first instruction in the code stub. 121 // Jump to the first instruction in the code stub.
122 __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag)); 122 __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag));
123 123
124 // Miss: fall through. 124 // Miss: fall through.
125 __ bind(&miss); 125 __ bind(&miss);
126 } 126 }
127 127
128 128
129 // Helper function used to check that the dictionary doesn't contain 129 // Helper function used to check that the dictionary doesn't contain
130 // the property. This function may return false negatives, so miss_label 130 // the property. This function may return false negatives, so miss_label
131 // must always call a backup property check that is complete. 131 // must always call a backup property check that is complete.
132 // This function is safe to call if the receiver has fast properties. 132 // This function is safe to call if the receiver has fast properties.
133 // Name must be a symbol and receiver must be a heap object. 133 // Name must be a symbol and receiver must be a heap object.
134 static void GenerateDictionaryNegativeLookup(MacroAssembler* masm, 134 static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
135 Label* miss_label, 135 Label* miss_label,
136 Register receiver, 136 Register receiver,
137 Handle<String> name, 137 Handle<String> name,
138 Register scratch0, 138 Register scratch0,
139 Register scratch1) { 139 Register scratch1) {
140 ASSERT(name->IsSymbol()); 140 ASSERT(name->IsSymbol());
141 Counters* counters = masm->isolate()->counters(); 141 Counters* counters = masm->isolate()->counters();
142 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1); 142 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
143 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); 143 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
144 144
145 Label done; 145 Label done;
146 146
147 const int kInterceptorOrAccessCheckNeededMask = 147 const int kInterceptorOrAccessCheckNeededMask =
148 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); 148 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
149 149
150 // Bail out if the receiver has a named interceptor or requires access checks. 150 // Bail out if the receiver has a named interceptor or requires access checks.
151 Register map = scratch1; 151 Register map = scratch1;
152 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); 152 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
153 __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset)); 153 __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
154 __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask)); 154 __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
155 __ b(ne, miss_label); 155 __ b(ne, miss_label);
156 156
157 // Check that receiver is a JSObject. 157 // Check that receiver is a JSObject.
158 __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset)); 158 __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
159 __ cmp(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE)); 159 __ cmp(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
160 __ b(lt, miss_label); 160 __ b(lt, miss_label);
161 161
162 // Load properties array. 162 // Load properties array.
163 Register properties = scratch0; 163 Register properties = scratch0;
164 __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); 164 __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
165 // Check that the properties array is a dictionary. 165 // Check that the properties array is a dictionary.
166 __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset)); 166 __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
167 Register tmp = properties; 167 Register tmp = properties;
168 __ LoadRoot(tmp, Heap::kHashTableMapRootIndex); 168 __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
169 __ cmp(map, tmp); 169 __ cmp(map, tmp);
170 __ b(ne, miss_label); 170 __ b(ne, miss_label);
171 171
172 // Restore the temporarily used register. 172 // Restore the temporarily used register.
173 __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); 173 __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
174 174
175 175
176 StringDictionaryLookupStub::GenerateNegativeLookup(masm, 176 StringDictionaryLookupStub::GenerateNegativeLookup(masm,
177 miss_label, 177 miss_label,
178 &done, 178 &done,
179 receiver, 179 receiver,
180 properties, 180 properties,
181 name, 181 name,
182 scratch1); 182 scratch1);
183 __ bind(&done); 183 __ bind(&done);
184 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); 184 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
185 } 185 }
186 186
187 187
188 void StubCache::GenerateProbe(MacroAssembler* masm, 188 void StubCache::GenerateProbe(MacroAssembler* masm,
189 Code::Flags flags, 189 Code::Flags flags,
190 Register receiver, 190 Register receiver,
191 Register name, 191 Register name,
192 Register scratch, 192 Register scratch,
193 Register extra, 193 Register extra,
194 Register extra2, 194 Register extra2,
195 Register extra3) { 195 Register extra3) {
196 Isolate* isolate = masm->isolate(); 196 Isolate* isolate = masm->isolate();
197 Label miss; 197 Label miss;
198 198
199 // Make sure that code is valid. The multiplying code relies on the 199 // Make sure that code is valid. The multiplying code relies on the
200 // entry size being 12. 200 // entry size being 12.
201 ASSERT(sizeof(Entry) == 12); 201 ASSERT(sizeof(Entry) == 12);
202 202
203 // Make sure the flags does not name a specific type. 203 // Make sure the flags does not name a specific type.
204 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); 204 ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
205 205
206 // Make sure that there are no register conflicts. 206 // Make sure that there are no register conflicts.
207 ASSERT(!scratch.is(receiver)); 207 ASSERT(!scratch.is(receiver));
208 ASSERT(!scratch.is(name)); 208 ASSERT(!scratch.is(name));
209 ASSERT(!extra.is(receiver)); 209 ASSERT(!extra.is(receiver));
210 ASSERT(!extra.is(name)); 210 ASSERT(!extra.is(name));
211 ASSERT(!extra.is(scratch)); 211 ASSERT(!extra.is(scratch));
212 ASSERT(!extra2.is(receiver)); 212 ASSERT(!extra2.is(receiver));
213 ASSERT(!extra2.is(name)); 213 ASSERT(!extra2.is(name));
214 ASSERT(!extra2.is(scratch)); 214 ASSERT(!extra2.is(scratch));
215 ASSERT(!extra2.is(extra)); 215 ASSERT(!extra2.is(extra));
216 216
217 // Check scratch, extra and extra2 registers are valid. 217 // Check scratch, extra and extra2 registers are valid.
218 ASSERT(!scratch.is(no_reg)); 218 ASSERT(!scratch.is(no_reg));
219 ASSERT(!extra.is(no_reg)); 219 ASSERT(!extra.is(no_reg));
220 ASSERT(!extra2.is(no_reg)); 220 ASSERT(!extra2.is(no_reg));
221 ASSERT(!extra3.is(no_reg)); 221 ASSERT(!extra3.is(no_reg));
222 222
223 Counters* counters = masm->isolate()->counters(); 223 Counters* counters = masm->isolate()->counters();
224 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, 224 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
225 extra2, extra3); 225 extra2, extra3);
226 226
227 // Check that the receiver isn't a smi. 227 // Check that the receiver isn't a smi.
228 __ JumpIfSmi(receiver, &miss); 228 __ JumpIfSmi(receiver, &miss);
229 229
230 // Get the map of the receiver and compute the hash. 230 // Get the map of the receiver and compute the hash.
231 __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset)); 231 __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset));
232 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); 232 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
233 __ add(scratch, scratch, Operand(ip)); 233 __ add(scratch, scratch, Operand(ip));
234 uint32_t mask = kPrimaryTableSize - 1; 234 uint32_t mask = kPrimaryTableSize - 1;
235 // We shift out the last two bits because they are not part of the hash and 235 // We shift out the last two bits because they are not part of the hash and
236 // they are always 01 for maps. 236 // they are always 01 for maps.
237 __ mov(scratch, Operand(scratch, LSR, kHeapObjectTagSize)); 237 __ mov(scratch, Operand(scratch, LSR, kHeapObjectTagSize));
238 // Mask down the eor argument to the minimum to keep the immediate 238 // Mask down the eor argument to the minimum to keep the immediate
239 // ARM-encodable. 239 // ARM-encodable.
240 __ eor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask)); 240 __ eor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
241 // Prefer and_ to ubfx here because ubfx takes 2 cycles. 241 // Prefer and_ to ubfx here because ubfx takes 2 cycles.
242 __ and_(scratch, scratch, Operand(mask)); 242 __ and_(scratch, scratch, Operand(mask));
243 243
244 // Probe the primary table. 244 // Probe the primary table.
245 ProbeTable(isolate, 245 ProbeTable(isolate,
246 masm, 246 masm,
247 flags, 247 flags,
248 kPrimary, 248 kPrimary,
249 receiver, 249 receiver,
250 name, 250 name,
251 scratch, 251 scratch,
252 extra, 252 extra,
253 extra2, 253 extra2,
254 extra3); 254 extra3);
255 255
256 // Primary miss: Compute hash for secondary probe. 256 // Primary miss: Compute hash for secondary probe.
257 __ sub(scratch, scratch, Operand(name, LSR, kHeapObjectTagSize)); 257 __ sub(scratch, scratch, Operand(name, LSR, kHeapObjectTagSize));
258 uint32_t mask2 = kSecondaryTableSize - 1; 258 uint32_t mask2 = kSecondaryTableSize - 1;
259 __ add(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2)); 259 __ add(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
260 __ and_(scratch, scratch, Operand(mask2)); 260 __ and_(scratch, scratch, Operand(mask2));
261 261
262 // Probe the secondary table. 262 // Probe the secondary table.
263 ProbeTable(isolate, 263 ProbeTable(isolate,
264 masm, 264 masm,
265 flags, 265 flags,
266 kSecondary, 266 kSecondary,
267 receiver, 267 receiver,
268 name, 268 name,
269 scratch, 269 scratch,
270 extra, 270 extra,
271 extra2, 271 extra2,
272 extra3); 272 extra3);
273 273
274 // Cache miss: Fall-through and let caller handle the miss by 274 // Cache miss: Fall-through and let caller handle the miss by
275 // entering the runtime system. 275 // entering the runtime system.
276 __ bind(&miss); 276 __ bind(&miss);
277 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, 277 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
278 extra2, extra3); 278 extra2, extra3);
279 } 279 }
280 280
281 281
282 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm, 282 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
283 int index, 283 int index,
284 Register prototype) { 284 Register prototype) {
285 // Load the global or builtins object from the current context. 285 // Load the global or builtins object from the current context.
286 __ ldr(prototype, 286 __ ldr(prototype,
287 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 287 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
288 // Load the native context from the global or builtins object. 288 // Load the native context from the global or builtins object.
289 __ ldr(prototype, 289 __ ldr(prototype,
290 FieldMemOperand(prototype, GlobalObject::kNativeContextOffset)); 290 FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
291 // Load the function from the native context. 291 // Load the function from the native context.
292 __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index))); 292 __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
293 // Load the initial map. The global functions all have initial maps. 293 // Load the initial map. The global functions all have initial maps.
294 __ ldr(prototype, 294 __ ldr(prototype,
295 FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset)); 295 FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
296 // Load the prototype from the initial map. 296 // Load the prototype from the initial map.
297 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); 297 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
298 } 298 }
299 299
300 300
301 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( 301 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
302 MacroAssembler* masm, 302 MacroAssembler* masm,
303 int index, 303 int index,
304 Register prototype, 304 Register prototype,
305 Label* miss) { 305 Label* miss) {
306 Isolate* isolate = masm->isolate(); 306 Isolate* isolate = masm->isolate();
307 // Check we're still in the same context. 307 // Check we're still in the same context.
308 __ ldr(prototype, 308 __ ldr(prototype,
309 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 309 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
310 __ Move(ip, isolate->global_object()); 310 __ Move(ip, isolate->global_object());
311 __ cmp(prototype, ip); 311 __ cmp(prototype, ip);
312 __ b(ne, miss); 312 __ b(ne, miss);
313 // Get the global function with the given index. 313 // Get the global function with the given index.
314 Handle<JSFunction> function( 314 Handle<JSFunction> function(
315 JSFunction::cast(isolate->native_context()->get(index))); 315 JSFunction::cast(isolate->native_context()->get(index)));
316 // Load its initial map. The global functions all have initial maps. 316 // Load its initial map. The global functions all have initial maps.
317 __ Move(prototype, Handle<Map>(function->initial_map())); 317 __ Move(prototype, Handle<Map>(function->initial_map()));
318 // Load the prototype from the initial map. 318 // Load the prototype from the initial map.
319 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); 319 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
320 } 320 }
321 321
322 322
323 // Load a fast property out of a holder object (src). In-object properties 323 // Load a fast property out of a holder object (src). In-object properties
324 // are loaded directly otherwise the property is loaded from the properties 324 // are loaded directly otherwise the property is loaded from the properties
325 // fixed array. 325 // fixed array.
326 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, 326 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
327 Register dst, 327 Register dst,
328 Register src, 328 Register src,
329 Handle<JSObject> holder, 329 Handle<JSObject> holder,
330 int index) { 330 int index) {
331 // Adjust for the number of properties stored in the holder. 331 // Adjust for the number of properties stored in the holder.
332 index -= holder->map()->inobject_properties(); 332 index -= holder->map()->inobject_properties();
333 if (index < 0) { 333 if (index < 0) {
334 // Get the property straight out of the holder. 334 // Get the property straight out of the holder.
335 int offset = holder->map()->instance_size() + (index * kPointerSize); 335 int offset = holder->map()->instance_size() + (index * kPointerSize);
336 __ ldr(dst, FieldMemOperand(src, offset)); 336 __ ldr(dst, FieldMemOperand(src, offset));
337 } else { 337 } else {
338 // Calculate the offset into the properties array. 338 // Calculate the offset into the properties array.
339 int offset = index * kPointerSize + FixedArray::kHeaderSize; 339 int offset = index * kPointerSize + FixedArray::kHeaderSize;
340 __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset)); 340 __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
341 __ ldr(dst, FieldMemOperand(dst, offset)); 341 __ ldr(dst, FieldMemOperand(dst, offset));
342 } 342 }
343 } 343 }
344 344
345 345
346 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm, 346 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
347 Register receiver, 347 Register receiver,
348 Register scratch, 348 Register scratch,
349 Label* miss_label) { 349 Label* miss_label) {
350 // Check that the receiver isn't a smi. 350 // Check that the receiver isn't a smi.
351 __ JumpIfSmi(receiver, miss_label); 351 __ JumpIfSmi(receiver, miss_label);
352 352
353 // Check that the object is a JS array. 353 // Check that the object is a JS array.
354 __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE); 354 __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
355 __ b(ne, miss_label); 355 __ b(ne, miss_label);
356 356
357 // Load length directly from the JS array. 357 // Load length directly from the JS array.
358 __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset)); 358 __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
359 __ Ret(); 359 __ Ret();
360 } 360 }
361 361
362 362
363 // Generate code to check if an object is a string. If the object is a 363 // Generate code to check if an object is a string. If the object is a
364 // heap object, its map's instance type is left in the scratch1 register. 364 // heap object, its map's instance type is left in the scratch1 register.
365 // If this is not needed, scratch1 and scratch2 may be the same register. 365 // If this is not needed, scratch1 and scratch2 may be the same register.
366 static void GenerateStringCheck(MacroAssembler* masm, 366 static void GenerateStringCheck(MacroAssembler* masm,
367 Register receiver, 367 Register receiver,
368 Register scratch1, 368 Register scratch1,
369 Register scratch2, 369 Register scratch2,
370 Label* smi, 370 Label* smi,
371 Label* non_string_object) { 371 Label* non_string_object) {
372 // Check that the receiver isn't a smi. 372 // Check that the receiver isn't a smi.
373 __ JumpIfSmi(receiver, smi); 373 __ JumpIfSmi(receiver, smi);
374 374
375 // Check that the object is a string. 375 // Check that the object is a string.
376 __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset)); 376 __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
377 __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); 377 __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
378 __ and_(scratch2, scratch1, Operand(kIsNotStringMask)); 378 __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
379 // The cast is to resolve the overload for the argument of 0x0. 379 // The cast is to resolve the overload for the argument of 0x0.
380 __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag))); 380 __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
381 __ b(ne, non_string_object); 381 __ b(ne, non_string_object);
382 } 382 }
383 383
384 384
385 // Generate code to load the length from a string object and return the length. 385 // Generate code to load the length from a string object and return the length.
386 // If the receiver object is not a string or a wrapped string object the 386 // If the receiver object is not a string or a wrapped string object the
387 // execution continues at the miss label. The register containing the 387 // execution continues at the miss label. The register containing the
388 // receiver is potentially clobbered. 388 // receiver is potentially clobbered.
389 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm, 389 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
390 Register receiver, 390 Register receiver,
391 Register scratch1, 391 Register scratch1,
392 Register scratch2, 392 Register scratch2,
393 Label* miss, 393 Label* miss,
394 bool support_wrappers) { 394 bool support_wrappers) {
395 Label check_wrapper; 395 Label check_wrapper;
396 396
397 // Check if the object is a string leaving the instance type in the 397 // Check if the object is a string leaving the instance type in the
398 // scratch1 register. 398 // scratch1 register.
399 GenerateStringCheck(masm, receiver, scratch1, scratch2, miss, 399 GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
400 support_wrappers ? &check_wrapper : miss); 400 support_wrappers ? &check_wrapper : miss);
401 401
402 // Load length directly from the string. 402 // Load length directly from the string.
403 __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset)); 403 __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
404 __ Ret(); 404 __ Ret();
405 405
406 if (support_wrappers) { 406 if (support_wrappers) {
407 // Check if the object is a JSValue wrapper. 407 // Check if the object is a JSValue wrapper.
408 __ bind(&check_wrapper); 408 __ bind(&check_wrapper);
409 __ cmp(scratch1, Operand(JS_VALUE_TYPE)); 409 __ cmp(scratch1, Operand(JS_VALUE_TYPE));
410 __ b(ne, miss); 410 __ b(ne, miss);
411 411
412 // Unwrap the value and check if the wrapped value is a string. 412 // Unwrap the value and check if the wrapped value is a string.
413 __ ldr(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset)); 413 __ ldr(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
414 GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss); 414 GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
415 __ ldr(r0, FieldMemOperand(scratch1, String::kLengthOffset)); 415 __ ldr(r0, FieldMemOperand(scratch1, String::kLengthOffset));
416 __ Ret(); 416 __ Ret();
417 } 417 }
418 } 418 }
419 419
420 420
421 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, 421 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
422 Register receiver, 422 Register receiver,
423 Register scratch1, 423 Register scratch1,
424 Register scratch2, 424 Register scratch2,
425 Label* miss_label) { 425 Label* miss_label) {
426 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); 426 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
427 __ mov(r0, scratch1); 427 __ mov(r0, scratch1);
428 __ Ret(); 428 __ Ret();
429 } 429 }
430 430
431 431
// Generate StoreField code, value is passed in r0 register.
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered. Upon branch to miss_label, the receiver and name
// registers have their original values.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      Handle<JSObject> object,
                                      int index,
                                      Handle<Map> transition,
                                      Handle<String> name,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch1,
                                      Register scratch2,
                                      Label* miss_label) {
  // r0 : value
  Label exit;

  // Compile-time lookup of the property: stubs are never generated for
  // read-only or non-cacheable properties.
  LookupResult lookup(masm->isolate());
  object->Lookup(*name, &lookup);
  if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
    // In sloppy mode, we could just return the value and be done. However, we
    // might be in strict mode, where we have to throw. Since we cannot tell,
    // go into slow case unconditionally.
    __ jmp(miss_label);
    return;
  }

  // Check that the map of the object hasn't changed. A transitioning store
  // requires the exact map; a plain store also accepts element-transitioned
  // maps.
  CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
                                             : REQUIRE_EXACT_MAP;
  __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,
              DO_SMI_CHECK, mode);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
  }

  // Check that we are allowed to write this: walk the prototype chain at
  // runtime so a property added to a prototype after stub compilation
  // causes a miss instead of an incorrect transition.
  if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
    JSObject* holder;
    if (lookup.IsFound()) {
      holder = lookup.holder();
    } else {
      // Find the top object.
      holder = *object;
      do {
        holder = JSObject::cast(holder->GetPrototype());
      } while (holder->GetPrototype()->IsJSObject());
    }
    // We need an extra register, push
    __ push(name_reg);
    Label miss_pop, done_check;
    CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
                    scratch1, scratch2, name, &miss_pop);
    __ jmp(&done_check);
    __ bind(&miss_pop);
    // Restore name_reg before taking the miss so the IC sees the original
    // register contents (see the contract in the function comment above).
    __ pop(name_reg);
    __ jmp(miss_label);
    __ bind(&done_check);
    __ pop(name_reg);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ mov(r2, Operand(transition));
    __ Push(r2, r0);
    // Tail call with 3 arguments (receiver, transition map, value),
    // returning 1 value.
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  if (!transition.is_null()) {
    // Update the map of the object.
    __ mov(scratch1, Operand(transition));
    __ str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));

    // Update the write barrier for the map field and pass the now unused
    // name_reg as scratch register.
    __ RecordWriteField(receiver_reg,
                        HeapObject::kMapOffset,
                        scratch1,
                        name_reg,
                        kLRHasNotBeenSaved,
                        kDontSaveFPRegs,
                        OMIT_REMEMBERED_SET,
                        OMIT_SMI_CHECK);
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Negative index after the adjustment above means an in-object property:
    // set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ str(r0, FieldMemOperand(receiver_reg, offset));

    // Skip updating write barrier if storing a smi.
    __ JumpIfSmi(r0, &exit);

    // Update the write barrier for the array address.
    // Pass the now unused name_reg as a scratch register.
    __ mov(name_reg, r0);
    __ RecordWriteField(receiver_reg,
                        offset,
                        name_reg,
                        scratch1,
                        kLRHasNotBeenSaved,
                        kDontSaveFPRegs);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array
    __ ldr(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ str(r0, FieldMemOperand(scratch1, offset));

    // Skip updating write barrier if storing a smi.
    __ JumpIfSmi(r0, &exit);

    // Update the write barrier for the array address.
    // Ok to clobber receiver_reg and name_reg, since we return.
    __ mov(name_reg, r0);
    __ RecordWriteField(scratch1,
                        offset,
                        name_reg,
                        receiver_reg,
                        kLRHasNotBeenSaved,
                        kDontSaveFPRegs);
  }

  // Return the value (register r0).
  __ bind(&exit);
  __ Ret();
}
579 579
580 580
581 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { 581 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
582 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC); 582 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
583 Handle<Code> code = (kind == Code::LOAD_IC) 583 Handle<Code> code = (kind == Code::LOAD_IC)
584 ? masm->isolate()->builtins()->LoadIC_Miss() 584 ? masm->isolate()->builtins()->LoadIC_Miss()
585 : masm->isolate()->builtins()->KeyedLoadIC_Miss(); 585 : masm->isolate()->builtins()->KeyedLoadIC_Miss();
586 __ Jump(code, RelocInfo::CODE_TARGET); 586 __ Jump(code, RelocInfo::CODE_TARGET);
587 } 587 }
588 588
589 589
// Type-checks the callee and tail-calls it.  Bails out to |miss| when r1 does
// not hold a JSFunction.  For global receivers, patches the receiver slot on
// the stack with the global proxy's "global receiver" object first.
static void GenerateCallFunction(MacroAssembler* masm,
                                 Handle<Object> object,
                                 const ParameterCount& arguments,
                                 Label* miss,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- r0: receiver
  //  -- r1: function to call
  // -----------------------------------

  // Check that the function really is a function.
  __ JumpIfSmi(r1, miss);
  __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.  The receiver slot sits above the arguments.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize));
  }

  // Invoke the function.  The call kind (function vs. method) is decoded
  // from the IC's extra state.
  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(r1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
}
618 618
619 619
// Pushes the six arguments expected by the interceptor runtime entries, in
// order: name, interceptor info, receiver, holder, interceptor data, isolate.
// Note: |name| is reused as the scratch register, so its original contents
// are only preserved on the stack (first push).
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  // The interceptor must live in old space so the stub can embed the handle.
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ mov(scratch, Operand(interceptor));
  __ push(scratch);
  __ push(receiver);
  __ push(holder);
  __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
  __ push(scratch);
  __ mov(scratch, Operand(ExternalReference::isolate_address()));
  __ push(scratch);
}
638 638
639 639
// Calls the LoadPropertyWithInterceptorOnly runtime entry with the six
// arguments pushed by PushInterceptorArguments.  The result is returned in
// r0 (per the CEntryStub calling convention).
static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
                        masm->isolate());
  // CEntryStub expects the argument count in r0 and the target in r1.
  __ mov(r0, Operand(6));
  __ mov(r1, Operand(ref));

  CEntryStub stub(1);
  __ CallStub(&stub);
}
657 657
658 658
// Number of extra stack slots used by a fast API call: holder, callee
// function, call data, and isolate (see GenerateFastApiDirectCall).
static const int kFastApiCallArguments = 4;

// Reserves space for the extra arguments to API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
                                       Register scratch) {
  // Fill the reserved slots with Smi zero so the GC never sees garbage.
  __ mov(scratch, Operand(Smi::FromInt(0)));
  for (int i = 0; i < kFastApiCallArguments; i++) {
    __ push(scratch);
  }
}
672 672
673 673
// Undoes the effects of ReserveSpaceForFastApiCall by dropping the four
// reserved stack slots.
static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);
}
678 678
679 679
// Emits a direct call to a C++ API callback, bypassing the normal JS call
// sequence.  Fills in the stack slots reserved by ReserveSpaceForFastApiCall,
// builds a v8::Arguments structure in the exit frame, and jumps to the
// callback via CallApiFunctionAndReturn.
static void GenerateFastApiDirectCall(MacroAssembler* masm,
                                      const CallOptimization& optimization,
                                      int argc) {
  // ----------- S t a t e -------------
  //  -- sp[0]              : holder (set by CheckPrototypes)
  //  -- sp[4]              : callee JS function
  //  -- sp[8]              : call data
  //  -- sp[12]             : isolate
  //  -- sp[16]             : last JS argument
  //  -- ...
  //  -- sp[(argc + 3) * 4] : first JS argument
  //  -- sp[(argc + 4) * 4] : receiver
  // -----------------------------------
  // Get the function and setup the context.
  Handle<JSFunction> function = optimization.constant_function();
  __ LoadHeapObject(r5, function);
  __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset));

  // Pass the additional arguments.  Call data that may move (new space) is
  // loaded from the CallHandlerInfo at runtime instead of being embedded.
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    __ Move(r0, api_call_info);
    __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset));
  } else {
    __ Move(r6, call_data);
  }
  __ mov(r7, Operand(ExternalReference::isolate_address()));
  // Store JS function, call data and isolate.  stm(ib) writes r5/r6/r7 to
  // sp[4], sp[8], sp[12] -- the slots shown in the state comment above.
  __ stm(ib, sp, r5.bit() | r6.bit() | r7.bit());

  // Prepare arguments.  r2 points at the implicit_args block (sp[12]).
  __ add(r2, sp, Operand(3 * kPointerSize));

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.  Four words: implicit_args_, values_,
  // length_, is_construct_call.
  const int kApiStackSpace = 4;

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // r0 = v8::Arguments&
  // Arguments is after the return address.
  __ add(r0, sp, Operand(1 * kPointerSize));
  // v8::Arguments::implicit_args_
  __ str(r2, MemOperand(r0, 0 * kPointerSize));
  // v8::Arguments::values_
  __ add(ip, r2, Operand(argc * kPointerSize));
  __ str(ip, MemOperand(r0, 1 * kPointerSize));
  // v8::Arguments::length_ = argc
  __ mov(ip, Operand(argc));
  __ str(ip, MemOperand(r0, 2 * kPointerSize));
  // v8::Arguments::is_construct_call = 0
  __ mov(ip, Operand(0));
  __ str(ip, MemOperand(r0, 3 * kPointerSize));

  // Unwind the JS arguments, the four extra slots, and the receiver.
  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference ref = ExternalReference(&fun,
                                            ExternalReference::DIRECT_API_CALL,
                                            masm->isolate());
  AllowExternalCallThatCantCauseGC scope(masm);

  __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
}
746 746
747 747
748 class CallInterceptorCompiler BASE_EMBEDDED { 748 class CallInterceptorCompiler BASE_EMBEDDED {
749 public: 749 public:
750 CallInterceptorCompiler(StubCompiler* stub_compiler, 750 CallInterceptorCompiler(StubCompiler* stub_compiler,
751 const ParameterCount& arguments, 751 const ParameterCount& arguments,
752 Register name, 752 Register name,
753 Code::ExtraICState extra_ic_state) 753 Code::ExtraICState extra_ic_state)
754 : stub_compiler_(stub_compiler), 754 : stub_compiler_(stub_compiler),
755 arguments_(arguments), 755 arguments_(arguments),
756 name_(name), 756 name_(name),
757 extra_ic_state_(extra_ic_state) {} 757 extra_ic_state_(extra_ic_state) {}
758 758
759 void Compile(MacroAssembler* masm, 759 void Compile(MacroAssembler* masm,
760 Handle<JSObject> object, 760 Handle<JSObject> object,
761 Handle<JSObject> holder, 761 Handle<JSObject> holder,
762 Handle<String> name, 762 Handle<String> name,
763 LookupResult* lookup, 763 LookupResult* lookup,
764 Register receiver, 764 Register receiver,
765 Register scratch1, 765 Register scratch1,
766 Register scratch2, 766 Register scratch2,
767 Register scratch3, 767 Register scratch3,
768 Label* miss) { 768 Label* miss) {
769 ASSERT(holder->HasNamedInterceptor()); 769 ASSERT(holder->HasNamedInterceptor());
770 ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined()); 770 ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
771 771
772 // Check that the receiver isn't a smi. 772 // Check that the receiver isn't a smi.
773 __ JumpIfSmi(receiver, miss); 773 __ JumpIfSmi(receiver, miss);
774 CallOptimization optimization(lookup); 774 CallOptimization optimization(lookup);
775 if (optimization.is_constant_call()) { 775 if (optimization.is_constant_call()) {
776 CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3, 776 CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
777 holder, lookup, name, optimization, miss); 777 holder, lookup, name, optimization, miss);
778 } else { 778 } else {
779 CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3, 779 CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
780 name, holder, miss); 780 name, holder, miss);
781 } 781 }
782 } 782 }
783 783
784 private: 784 private:
785 void CompileCacheable(MacroAssembler* masm, 785 void CompileCacheable(MacroAssembler* masm,
786 Handle<JSObject> object, 786 Handle<JSObject> object,
787 Register receiver, 787 Register receiver,
788 Register scratch1, 788 Register scratch1,
789 Register scratch2, 789 Register scratch2,
790 Register scratch3, 790 Register scratch3,
791 Handle<JSObject> interceptor_holder, 791 Handle<JSObject> interceptor_holder,
792 LookupResult* lookup, 792 LookupResult* lookup,
793 Handle<String> name, 793 Handle<String> name,
794 const CallOptimization& optimization, 794 const CallOptimization& optimization,
795 Label* miss_label) { 795 Label* miss_label) {
796 ASSERT(optimization.is_constant_call()); 796 ASSERT(optimization.is_constant_call());
797 ASSERT(!lookup->holder()->IsGlobalObject()); 797 ASSERT(!lookup->holder()->IsGlobalObject());
798 Counters* counters = masm->isolate()->counters(); 798 Counters* counters = masm->isolate()->counters();
799 int depth1 = kInvalidProtoDepth; 799 int depth1 = kInvalidProtoDepth;
800 int depth2 = kInvalidProtoDepth; 800 int depth2 = kInvalidProtoDepth;
801 bool can_do_fast_api_call = false; 801 bool can_do_fast_api_call = false;
802 if (optimization.is_simple_api_call() && 802 if (optimization.is_simple_api_call() &&
803 !lookup->holder()->IsGlobalObject()) { 803 !lookup->holder()->IsGlobalObject()) {
804 depth1 = optimization.GetPrototypeDepthOfExpectedType( 804 depth1 = optimization.GetPrototypeDepthOfExpectedType(
805 object, interceptor_holder); 805 object, interceptor_holder);
806 if (depth1 == kInvalidProtoDepth) { 806 if (depth1 == kInvalidProtoDepth) {
807 depth2 = optimization.GetPrototypeDepthOfExpectedType( 807 depth2 = optimization.GetPrototypeDepthOfExpectedType(
808 interceptor_holder, Handle<JSObject>(lookup->holder())); 808 interceptor_holder, Handle<JSObject>(lookup->holder()));
809 } 809 }
810 can_do_fast_api_call = 810 can_do_fast_api_call =
811 depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth; 811 depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
812 } 812 }
813 813
814 __ IncrementCounter(counters->call_const_interceptor(), 1, 814 __ IncrementCounter(counters->call_const_interceptor(), 1,
815 scratch1, scratch2); 815 scratch1, scratch2);
816 816
817 if (can_do_fast_api_call) { 817 if (can_do_fast_api_call) {
818 __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1, 818 __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
819 scratch1, scratch2); 819 scratch1, scratch2);
820 ReserveSpaceForFastApiCall(masm, scratch1); 820 ReserveSpaceForFastApiCall(masm, scratch1);
821 } 821 }
822 822
823 // Check that the maps from receiver to interceptor's holder 823 // Check that the maps from receiver to interceptor's holder
824 // haven't changed and thus we can invoke interceptor. 824 // haven't changed and thus we can invoke interceptor.
825 Label miss_cleanup; 825 Label miss_cleanup;
826 Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label; 826 Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
827 Register holder = 827 Register holder =
828 stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder, 828 stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
829 scratch1, scratch2, scratch3, 829 scratch1, scratch2, scratch3,
830 name, depth1, miss); 830 name, depth1, miss);
831 831
832 // Invoke an interceptor and if it provides a value, 832 // Invoke an interceptor and if it provides a value,
833 // branch to |regular_invoke|. 833 // branch to |regular_invoke|.
834 Label regular_invoke; 834 Label regular_invoke;
835 LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2, 835 LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
836 &regular_invoke); 836 &regular_invoke);
837 837
838 // Interceptor returned nothing for this property. Try to use cached 838 // Interceptor returned nothing for this property. Try to use cached
839 // constant function. 839 // constant function.
840 840
841 // Check that the maps from interceptor's holder to constant function's 841 // Check that the maps from interceptor's holder to constant function's
842 // holder haven't changed and thus we can use cached constant function. 842 // holder haven't changed and thus we can use cached constant function.
843 if (*interceptor_holder != lookup->holder()) { 843 if (*interceptor_holder != lookup->holder()) {
844 stub_compiler_->CheckPrototypes(interceptor_holder, receiver, 844 stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
845 Handle<JSObject>(lookup->holder()), 845 Handle<JSObject>(lookup->holder()),
846 scratch1, scratch2, scratch3, 846 scratch1, scratch2, scratch3,
847 name, depth2, miss); 847 name, depth2, miss);
848 } else { 848 } else {
849 // CheckPrototypes has a side effect of fetching a 'holder' 849 // CheckPrototypes has a side effect of fetching a 'holder'
850 // for API (object which is instanceof for the signature). It's 850 // for API (object which is instanceof for the signature). It's
851 // safe to omit it here, as if present, it should be fetched 851 // safe to omit it here, as if present, it should be fetched
852 // by the previous CheckPrototypes. 852 // by the previous CheckPrototypes.
853 ASSERT(depth2 == kInvalidProtoDepth); 853 ASSERT(depth2 == kInvalidProtoDepth);
854 } 854 }
855 855
856 // Invoke function. 856 // Invoke function.
857 if (can_do_fast_api_call) { 857 if (can_do_fast_api_call) {
858 GenerateFastApiDirectCall(masm, optimization, arguments_.immediate()); 858 GenerateFastApiDirectCall(masm, optimization, arguments_.immediate());
859 } else { 859 } else {
860 CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_) 860 CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
861 ? CALL_AS_FUNCTION 861 ? CALL_AS_FUNCTION
862 : CALL_AS_METHOD; 862 : CALL_AS_METHOD;
863 __ InvokeFunction(optimization.constant_function(), arguments_, 863 __ InvokeFunction(optimization.constant_function(), arguments_,
864 JUMP_FUNCTION, NullCallWrapper(), call_kind); 864 JUMP_FUNCTION, NullCallWrapper(), call_kind);
865 } 865 }
866 866
867 // Deferred code for fast API call case---clean preallocated space. 867 // Deferred code for fast API call case---clean preallocated space.
868 if (can_do_fast_api_call) { 868 if (can_do_fast_api_call) {
869 __ bind(&miss_cleanup); 869 __ bind(&miss_cleanup);
870 FreeSpaceForFastApiCall(masm); 870 FreeSpaceForFastApiCall(masm);
871 __ b(miss_label); 871 __ b(miss_label);
872 } 872 }
873 873
874 // Invoke a regular function. 874 // Invoke a regular function.
875 __ bind(&regular_invoke); 875 __ bind(&regular_invoke);
876 if (can_do_fast_api_call) { 876 if (can_do_fast_api_call) {
877 FreeSpaceForFastApiCall(masm); 877 FreeSpaceForFastApiCall(masm);
878 } 878 }
879 } 879 }
880 880
  // Compiles the generic (non-inlined) path for a call through an
  // interceptor: verifies the prototype chain from |object| up to
  // |interceptor_holder|, then calls into the runtime to load the
  // interceptor property.  Jumps to |miss_label| if any check fails.
  // The name_ register is preserved across the runtime call.
  void CompileRegular(MacroAssembler* masm,
                      Handle<JSObject> object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      Handle<String> name,
                      Handle<JSObject> interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, miss_label);

    // Call a runtime function to load the interceptor property.
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Save the name_ register across the call.
    __ push(name_);
    PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);
    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
                          masm->isolate()),
        6);  // Argument count — presumably matches PushInterceptorArguments;
             // confirm against its definition.
    // Restore the name_ register.
    __ pop(name_);
    // Leave the internal frame.
  }
908 908
  // Emits a call to the interceptor's getter for property name_ on
  // |holder_obj|.  On return, r0 holds the interceptor's result.  Falls
  // through when the interceptor produced no result (the no-result
  // sentinel); branches to |interceptor_succeeded| when it did.
  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           Handle<JSObject> holder_obj,
                           Register scratch,
                           Label* interceptor_succeeded) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      // Preserve holder and name across the interceptor call.
      __ Push(holder, name_);
      CompileCallLoadPropertyWithInterceptor(masm,
                                             receiver,
                                             holder,
                                             name_,
                                             holder_obj);
      __ pop(name_);  // Restore the name.
      __ pop(receiver);  // Pop the saved holder value into |receiver|.
    }
    // If interceptor returns no-result sentinel, call the constant function.
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch);
    __ b(ne, interceptor_succeeded);
  }
931 931
  // Owning stub compiler; used above for CheckPrototypes.
  StubCompiler* stub_compiler_;
  // Argument count of the call being compiled (used elsewhere in this class).
  const ParameterCount& arguments_;
  // Register holding the property name; saved/restored around runtime calls.
  Register name_;
  // Extra IC state carried by the call site — usage not visible in this chunk.
  Code::ExtraICState extra_ic_state_;
936 }; 936 };
937 937
938 938
// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
//
// |scratch| is clobbered.  Compares the cell's value against the-hole and
// jumps to |miss| if the cell is no longer empty (i.e. the property has
// been added to |global| since this stub was compiled).
static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                      Handle<GlobalObject> global,
                                      Handle<String> name,
                                      Register scratch,
                                      Label* miss) {
  Handle<JSGlobalPropertyCell> cell =
      GlobalObject::EnsurePropertyCell(global, name);
  // At compile time the cell must be empty; the generated code re-checks
  // this at run time below.
  ASSERT(cell->value()->IsTheHole());
  __ mov(scratch, Operand(cell));
  __ ldr(scratch,
         FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch, ip);
  __ b(ne, miss);
}
957 957
958 958
959 // Calls GenerateCheckPropertyCell for each global object in the prototype chain 959 // Calls GenerateCheckPropertyCell for each global object in the prototype chain
960 // from object to (but not including) holder. 960 // from object to (but not including) holder.
961 static void GenerateCheckPropertyCells(MacroAssembler* masm, 961 static void GenerateCheckPropertyCells(MacroAssembler* masm,
962 Handle<JSObject> object, 962 Handle<JSObject> object,
963 Handle<JSObject> holder, 963 Handle<JSObject> holder,
964 Handle<String> name, 964 Handle<String> name,
965 Register scratch, 965 Register scratch,
966 Label* miss) { 966 Label* miss) {
967 Handle<JSObject> current = object; 967 Handle<JSObject> current = object;
968 while (!current.is_identical_to(holder)) { 968 while (!current.is_identical_to(holder)) {
969 if (current->IsGlobalObject()) { 969 if (current->IsGlobalObject()) {
970 GenerateCheckPropertyCell(masm, 970 GenerateCheckPropertyCell(masm,
971 Handle<GlobalObject>::cast(current), 971 Handle<GlobalObject>::cast(current),
972 name, 972 name,
973 scratch, 973 scratch,
974 miss); 974 miss);
975 } 975 }
976 current = Handle<JSObject>(JSObject::cast(current->GetPrototype())); 976 current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
977 } 977 }
978 } 978 }
979 979
980 980
// Convert and store int passed in register ival to IEEE 754 single precision
// floating point value at memory location (dst + 4 * wordoffset).
// If VFP2 is available use it for conversion; otherwise assemble the
// binary32 bit pattern manually in integer registers.
// Clobbers ival, fval, scratch1 and scratch2 on the non-VFP path.
static void StoreIntAsFloat(MacroAssembler* masm,
                            Register dst,
                            Register wordoffset,
                            Register ival,
                            Register fval,
                            Register scratch1,
                            Register scratch2) {
  if (CpuFeatures::IsSupported(VFP2)) {
    CpuFeatures::Scope scope(VFP2);
    // Hardware path: signed int -> float conversion, then store.
    __ vmov(s0, ival);
    __ add(scratch1, dst, Operand(wordoffset, LSL, 2));
    __ vcvt_f32_s32(s0, s0);
    __ vstr(s0, scratch1, 0);
  } else {
    Label not_special, done;
    // Move sign bit from source to destination. This works because the sign
    // bit in the exponent word of the double has the same position and polarity
    // as the 2's complement sign bit in a Smi.
    ASSERT(kBinary32SignMask == 0x80000000u);

    __ and_(fval, ival, Operand(kBinary32SignMask), SetCC);
    // Negate value if it is negative.
    __ rsb(ival, ival, Operand(0, RelocInfo::NONE), LeaveCC, ne);

    // We have -1, 0 or 1, which we treat specially. Register ival contains
    // absolute value: it is either equal to 1 (special case of -1 and 1),
    // greater than 1 (not a special case) or less than 1 (special case of 0).
    __ cmp(ival, Operand(1));
    __ b(gt, &not_special);

    // For 1 or -1 we need to or in the 0 exponent (biased).
    static const uint32_t exponent_word_for_1 =
        kBinary32ExponentBias << kBinary32ExponentShift;

    // For 0, fval already holds +0.0f (sign bit only was or'ed in above).
    __ orr(fval, fval, Operand(exponent_word_for_1), LeaveCC, eq);
    __ b(&done);

    __ bind(&not_special);
    // Count leading zeros.
    // Gets the wrong answer for 0, but we already checked for that case above.
    Register zeros = scratch2;
    __ CountLeadingZeros(zeros, ival, scratch1);

    // Compute exponent and or it into the exponent register.
    // Exponent = (31 - zeros) + bias, i.e. position of the highest set bit.
    __ rsb(scratch1,
           zeros,
           Operand((kBitsPerInt - 1) + kBinary32ExponentBias));

    __ orr(fval,
           fval,
           Operand(scratch1, LSL, kBinary32ExponentShift));

    // Shift up the source chopping the top bit off (the implicit leading
    // 1 of the mantissa is not stored).
    __ add(zeros, zeros, Operand(1));
    // This wouldn't work for 1 and -1 as the shift would be 32 which means 0.
    __ mov(ival, Operand(ival, LSL, zeros));
    // Or in the mantissa: the top kBinary32MantissaBits (23) bits of ival.
    __ orr(fval,
           fval,
           Operand(ival, LSR, kBitsPerInt - kBinary32MantissaBits));

    __ bind(&done);
    __ str(fval, MemOperand(dst, wordoffset, LSL, 2));
  }
}
1049 1049
1050 1050
// Convert unsigned integer with specified number of leading zeroes in binary
// representation to IEEE 754 double.
// Integer to convert is passed in register hiword.
// Resulting double is returned in registers hiword:loword.
// This functions does not work correctly for 0.
// |leading_zeroes| is a compile-time constant, so all shift amounts below
// are immediates.  |scratch| is clobbered.
static void GenerateUInt2Double(MacroAssembler* masm,
                                Register hiword,
                                Register loword,
                                Register scratch,
                                int leading_zeroes) {
  // Number of mantissa bits below the implicit leading 1.
  const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
  const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;

  // How far the mantissa must shift right to fit in the high word
  // (negative or zero means it fits with room to spare).
  const int mantissa_shift_for_hi_word =
      meaningful_bits - HeapNumber::kMantissaBitsInTopWord;

  const int mantissa_shift_for_lo_word =
      kBitsPerInt - mantissa_shift_for_hi_word;

  __ mov(scratch, Operand(biased_exponent << HeapNumber::kExponentShift));
  if (mantissa_shift_for_hi_word > 0) {
    // Mantissa spans both words: low bits go to loword, the rest is or'ed
    // under the exponent in hiword.
    __ mov(loword, Operand(hiword, LSL, mantissa_shift_for_lo_word));
    __ orr(hiword, scratch, Operand(hiword, LSR, mantissa_shift_for_hi_word));
  } else {
    // Mantissa fits entirely in the high word; low word is zero.
    // NOTE(review): shift amount is <= 0 on this path — confirm callers
    // only reach here with mantissa_shift_for_hi_word == 0.
    __ mov(loword, Operand(0, RelocInfo::NONE));
    __ orr(hiword, scratch, Operand(hiword, LSL, mantissa_shift_for_hi_word));
  }

  // If least significant bit of biased exponent was not 1 it was corrupted
  // by most significant bit of mantissa so we should fix that.
  if (!(biased_exponent & 1)) {
    __ bic(hiword, hiword, Operand(1 << HeapNumber::kExponentShift));
  }
}
1085 1085
1086 1086
1087 #undef __ 1087 #undef __
1088 #define __ ACCESS_MASM(masm()) 1088 #define __ ACCESS_MASM(masm())
1089 1089
1090 1090
// Emits code verifying that the prototype chain from |object| up to
// |holder| is unchanged since compile time, by checking the map of every
// object along the chain.  Jumps to |miss| on any mismatch.  Returns the
// register that holds the holder object when the checks pass.  If
// |save_at_depth| matches a chain depth, the object at that depth is also
// stored to MemOperand(sp).  scratch1 and scratch2 are clobbered.
Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<String> name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    __ str(reg, MemOperand(sp));
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      // Slow-mode object: instead of a map check, prove the property is
      // absent from its dictionary (a negative lookup).
      if (!name->IsSymbol()) {
        name = factory()->LookupSymbol(name);
      }
      ASSERT(current->property_dictionary()->FindEntry(*name) ==
             StringDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      // Load the map and advance to its prototype.
      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      Handle<Map> current_map(current->map());
      __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK,
                  ALLOW_ELEMENT_TRANSITION_MAPS);

      // Check access rights to the global object. This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      }
      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (heap()->InNewSpace(*prototype)) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code. Load it from the map.
        __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ mov(reg, Operand(prototype));
      }
    }

    if (save_at_depth == depth) {
      __ str(reg, MemOperand(sp));
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Log the check depth.
  LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));

  // Check the holder map.
  __ CheckMap(reg, scratch1, Handle<Map>(current->map()), miss,
              DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);

  // Perform security check for access to the global object.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify that
  // their maps haven't changed. We also need to check that the property
  // cell for the property is still empty.
  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);

  // Return the register containing the holder.
  return reg;
}
1191 1191
1192 1192
// Emits code that validates the prototype chain from |object| to |holder|
// and then loads the fast-mode property at |index| from the holder into
// r0 before returning.  Jumps to |miss| if the receiver is a smi or any
// map check fails.
void StubCompiler::GenerateLoadField(Handle<JSObject> object,
                                     Handle<JSObject> holder,
                                     Register receiver,
                                     Register scratch1,
                                     Register scratch2,
                                     Register scratch3,
                                     int index,
                                     Handle<String> name,
                                     Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg = CheckPrototypes(
      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
  // Load the property value into r0 (the IC result register) and return.
  GenerateFastPropertyLoad(masm(), r0, reg, holder, index);
  __ Ret();
}
1211 1211
1212 1212
// Emits code that validates the prototype chain from |object| to |holder|
// and then returns the compile-time-constant function |value| in r0.
// Jumps to |miss| if the receiver is a smi or any map check fails.
void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Handle<JSFunction> value,
                                        Handle<String> name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  CheckPrototypes(
      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);

  // Return the constant value.
  __ LoadHeapObject(r0, value);
  __ Ret();
}
1233 1233
1234 1234
// Emits code verifying that the slow-mode (dictionary) property |name| on
// |receiver| is still bound to exactly |callback|.  Probes the property
// dictionary; jumps to |miss| if the entry is missing or its value is not
// the expected callback.  All three scratch registers are clobbered.
void StubCompiler::GenerateDictionaryLoadCallback(Register receiver,
                                                  Register name_reg,
                                                  Register scratch1,
                                                  Register scratch2,
                                                  Register scratch3,
                                                  Handle<AccessorInfo> callback,
                                                  Handle<String> name,
                                                  Label* miss) {
  ASSERT(!receiver.is(scratch1));
  ASSERT(!receiver.is(scratch2));
  ASSERT(!receiver.is(scratch3));

  // Load the properties dictionary.
  Register dictionary = scratch1;
  __ ldr(dictionary, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  // Probe the dictionary.
  Label probe_done;
  StringDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     miss,
                                                     &probe_done,
                                                     dictionary,
                                                     name_reg,
                                                     scratch2,
                                                     scratch3);
  __ bind(&probe_done);

  // If probing finds an entry in the dictionary, scratch3 contains the
  // pointer into the dictionary. Check that the value is the callback.
  Register pointer = scratch3;
  // Offset of the value slot relative to the entry pointer: one word past
  // the key within the elements area.
  const int kElementsStartOffset = StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ ldr(scratch2, FieldMemOperand(pointer, kValueOffset));
  __ cmp(scratch2, Operand(callback));
  __ b(ne, miss);
}
1272 1272
1273 1273
// Emits code that loads property |name| through an AccessorInfo getter:
// validates the prototype chain, builds the AccessorInfo argument block on
// the stack, and performs a direct call into the C++ getter, returning its
// result.  Jumps to |miss| if the receiver is a smi or any check fails.
void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Register receiver,
                                        Register name_reg,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Register scratch4,
                                        Handle<AccessorInfo> callback,
                                        Handle<String> name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg = CheckPrototypes(object, receiver, holder, scratch1,
                                 scratch2, scratch3, name, miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    // Slow-mode holder: additionally verify the dictionary still maps
    // |name| to this exact callback.
    GenerateDictionaryLoadCallback(
        reg, name_reg, scratch2, scratch3, scratch4, callback, name, miss);
  }

  // Build AccessorInfo::args_ list on the stack and push property name below
  // the exit frame to make GC aware of them and store pointers to them.
  // Stack layout after the pushes (5 words, matching kStackUnwindSpace):
  // name, isolate, data, holder, receiver.
  __ push(receiver);
  __ mov(scratch2, sp);  // scratch2 = AccessorInfo::args_
  if (heap()->InNewSpace(callback->data())) {
    // The data object may move; load it indirectly via the callback.
    __ Move(scratch3, callback);
    __ ldr(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
  } else {
    __ Move(scratch3, Handle<Object>(callback->data()));
  }
  __ Push(reg, scratch3);
  __ mov(scratch3, Operand(ExternalReference::isolate_address()));
  __ Push(scratch3, name_reg);
  __ mov(r0, sp);  // r0 = Handle<String>

  const int kApiStackSpace = 1;
  FrameScope frame_scope(masm(), StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // Create AccessorInfo instance on the stack above the exit frame with
  // scratch2 (internal::Object** args_) as the data.
  __ str(scratch2, MemOperand(sp, 1 * kPointerSize));
  __ add(r1, sp, Operand(1 * kPointerSize));  // r1 = AccessorInfo&

  // Five words pushed above are popped by CallApiFunctionAndReturn.
  const int kStackUnwindSpace = 5;
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference ref =
      ExternalReference(&fun,
                        ExternalReference::DIRECT_GETTER_CALL,
                        masm()->isolate());
  __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
}
1330 1330
1331 1331
1332 void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object, 1332 void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
1333 Handle<JSObject> interceptor_holder, 1333 Handle<JSObject> interceptor_holder,
1334 LookupResult* lookup, 1334 LookupResult* lookup,
1335 Register receiver, 1335 Register receiver,
1336 Register name_reg, 1336 Register name_reg,
1337 Register scratch1, 1337 Register scratch1,
1338 Register scratch2, 1338 Register scratch2,
1339 Register scratch3, 1339 Register scratch3,
1340 Handle<String> name, 1340 Handle<String> name,
1341 Label* miss) { 1341 Label* miss) {
1342 ASSERT(interceptor_holder->HasNamedInterceptor()); 1342 ASSERT(interceptor_holder->HasNamedInterceptor());
1343 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined()); 1343 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1344 1344
1345 // Check that the receiver isn't a smi. 1345 // Check that the receiver isn't a smi.
1346 __ JumpIfSmi(receiver, miss); 1346 __ JumpIfSmi(receiver, miss);
1347 1347
1348 // So far the most popular follow ups for interceptor loads are FIELD 1348 // So far the most popular follow ups for interceptor loads are FIELD
1349 // and CALLBACKS, so inline only them, other cases may be added 1349 // and CALLBACKS, so inline only them, other cases may be added
1350 // later. 1350 // later.
1351 bool compile_followup_inline = false; 1351 bool compile_followup_inline = false;
1352 if (lookup->IsFound() && lookup->IsCacheable()) { 1352 if (lookup->IsFound() && lookup->IsCacheable()) {
1353 if (lookup->IsField()) { 1353 if (lookup->IsField()) {
1354 compile_followup_inline = true; 1354 compile_followup_inline = true;
1355 } else if (lookup->type() == CALLBACKS && 1355 } else if (lookup->type() == CALLBACKS &&
1356 lookup->GetCallbackObject()->IsAccessorInfo()) { 1356 lookup->GetCallbackObject()->IsAccessorInfo()) {
1357 AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject()); 1357 AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
1358 compile_followup_inline = callback->getter() != NULL && 1358 compile_followup_inline = callback->getter() != NULL &&
1359 callback->IsCompatibleReceiver(*object); 1359 callback->IsCompatibleReceiver(*object);
1360 } 1360 }
1361 } 1361 }
1362 1362
1363 if (compile_followup_inline) { 1363 if (compile_followup_inline) {
1364 // Compile the interceptor call, followed by inline code to load the 1364 // Compile the interceptor call, followed by inline code to load the
1365 // property from further up the prototype chain if the call fails. 1365 // property from further up the prototype chain if the call fails.
1366 // Check that the maps haven't changed. 1366 // Check that the maps haven't changed.
1367 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder, 1367 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1368 scratch1, scratch2, scratch3, 1368 scratch1, scratch2, scratch3,
1369 name, miss); 1369 name, miss);
1370 ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1)); 1370 ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
1371 1371
1372 // Preserve the receiver register explicitly whenever it is different from 1372 // Preserve the receiver register explicitly whenever it is different from
1373 // the holder and it is needed should the interceptor return without any 1373 // the holder and it is needed should the interceptor return without any
1374 // result. The CALLBACKS case needs the receiver to be passed into C++ code, 1374 // result. The CALLBACKS case needs the receiver to be passed into C++ code,
1375 // the FIELD case might cause a miss during the prototype check. 1375 // the FIELD case might cause a miss during the prototype check.
1376 bool must_perfrom_prototype_check = *interceptor_holder != lookup->holder(); 1376 bool must_perfrom_prototype_check = *interceptor_holder != lookup->holder();
1377 bool must_preserve_receiver_reg = !receiver.is(holder_reg) && 1377 bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
1378 (lookup->type() == CALLBACKS || must_perfrom_prototype_check); 1378 (lookup->type() == CALLBACKS || must_perfrom_prototype_check);
1379 1379
1380 // Save necessary data before invoking an interceptor. 1380 // Save necessary data before invoking an interceptor.
1381 // Requires a frame to make GC aware of pushed pointers. 1381 // Requires a frame to make GC aware of pushed pointers.
1382 { 1382 {
1383 FrameScope frame_scope(masm(), StackFrame::INTERNAL); 1383 FrameScope frame_scope(masm(), StackFrame::INTERNAL);
1384 if (must_preserve_receiver_reg) { 1384 if (must_preserve_receiver_reg) {
1385 __ Push(receiver, holder_reg, name_reg); 1385 __ Push(receiver, holder_reg, name_reg);
1386 } else { 1386 } else {
1387 __ Push(holder_reg, name_reg); 1387 __ Push(holder_reg, name_reg);
1388 } 1388 }
1389 // Invoke an interceptor. Note: map checks from receiver to 1389 // Invoke an interceptor. Note: map checks from receiver to
1390 // interceptor's holder has been compiled before (see a caller 1390 // interceptor's holder has been compiled before (see a caller
1391 // of this method.) 1391 // of this method.)
1392 CompileCallLoadPropertyWithInterceptor(masm(), 1392 CompileCallLoadPropertyWithInterceptor(masm(),
1393 receiver, 1393 receiver,
1394 holder_reg, 1394 holder_reg,
1395 name_reg, 1395 name_reg,
1396 interceptor_holder); 1396 interceptor_holder);
1397 // Check if interceptor provided a value for property. If it's 1397 // Check if interceptor provided a value for property. If it's
1398 // the case, return immediately. 1398 // the case, return immediately.
1399 Label interceptor_failed; 1399 Label interceptor_failed;
1400 __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex); 1400 __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
1401 __ cmp(r0, scratch1); 1401 __ cmp(r0, scratch1);
1402 __ b(eq, &interceptor_failed); 1402 __ b(eq, &interceptor_failed);
1403 frame_scope.GenerateLeaveFrame(); 1403 frame_scope.GenerateLeaveFrame();
1404 __ Ret(); 1404 __ Ret();
1405 1405
1406 __ bind(&interceptor_failed); 1406 __ bind(&interceptor_failed);
1407 __ pop(name_reg); 1407 __ pop(name_reg);
1408 __ pop(holder_reg); 1408 __ pop(holder_reg);
1409 if (must_preserve_receiver_reg) { 1409 if (must_preserve_receiver_reg) {
1410 __ pop(receiver); 1410 __ pop(receiver);
1411 } 1411 }
1412 // Leave the internal frame. 1412 // Leave the internal frame.
1413 } 1413 }
1414 // Check that the maps from interceptor's holder to lookup's holder 1414 // Check that the maps from interceptor's holder to lookup's holder
1415 // haven't changed. And load lookup's holder into |holder| register. 1415 // haven't changed. And load lookup's holder into |holder| register.
1416 if (must_perfrom_prototype_check) { 1416 if (must_perfrom_prototype_check) {
1417 holder_reg = CheckPrototypes(interceptor_holder, 1417 holder_reg = CheckPrototypes(interceptor_holder,
1418 holder_reg, 1418 holder_reg,
1419 Handle<JSObject>(lookup->holder()), 1419 Handle<JSObject>(lookup->holder()),
1420 scratch1, 1420 scratch1,
1421 scratch2, 1421 scratch2,
1422 scratch3, 1422 scratch3,
1423 name, 1423 name,
1424 miss); 1424 miss);
1425 } 1425 }
1426 1426
1427 if (lookup->IsField()) { 1427 if (lookup->IsField()) {
1428 // We found FIELD property in prototype chain of interceptor's holder. 1428 // We found FIELD property in prototype chain of interceptor's holder.
1429 // Retrieve a field from field's holder. 1429 // Retrieve a field from field's holder.
1430 GenerateFastPropertyLoad(masm(), r0, holder_reg, 1430 GenerateFastPropertyLoad(masm(), r0, holder_reg,
1431 Handle<JSObject>(lookup->holder()), 1431 Handle<JSObject>(lookup->holder()),
1432 lookup->GetFieldIndex()); 1432 lookup->GetFieldIndex());
1433 __ Ret(); 1433 __ Ret();
1434 } else { 1434 } else {
1435 // We found CALLBACKS property in prototype chain of interceptor's 1435 // We found CALLBACKS property in prototype chain of interceptor's
1436 // holder. 1436 // holder.
1437 ASSERT(lookup->type() == CALLBACKS); 1437 ASSERT(lookup->type() == CALLBACKS);
1438 Handle<AccessorInfo> callback( 1438 Handle<AccessorInfo> callback(
1439 AccessorInfo::cast(lookup->GetCallbackObject())); 1439 AccessorInfo::cast(lookup->GetCallbackObject()));
1440 ASSERT(callback->getter() != NULL); 1440 ASSERT(callback->getter() != NULL);
1441 1441
1442 // Tail call to runtime. 1442 // Tail call to runtime.
1443 // Important invariant in CALLBACKS case: the code above must be 1443 // Important invariant in CALLBACKS case: the code above must be
1444 // structured to never clobber |receiver| register. 1444 // structured to never clobber |receiver| register.
1445 __ Move(scratch2, callback); 1445 __ Move(scratch2, callback);
1446 // holder_reg is either receiver or scratch1. 1446 // holder_reg is either receiver or scratch1.
1447 if (!receiver.is(holder_reg)) { 1447 if (!receiver.is(holder_reg)) {
1448 ASSERT(scratch1.is(holder_reg)); 1448 ASSERT(scratch1.is(holder_reg));
1449 __ Push(receiver, holder_reg); 1449 __ Push(receiver, holder_reg);
1450 } else { 1450 } else {
1451 __ push(receiver); 1451 __ push(receiver);
1452 __ push(holder_reg); 1452 __ push(holder_reg);
1453 } 1453 }
1454 __ ldr(scratch3, 1454 __ ldr(scratch3,
1455 FieldMemOperand(scratch2, AccessorInfo::kDataOffset)); 1455 FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
1456 __ mov(scratch1, Operand(ExternalReference::isolate_address())); 1456 __ mov(scratch1, Operand(ExternalReference::isolate_address()));
1457 __ Push(scratch3, scratch1, scratch2, name_reg); 1457 __ Push(scratch3, scratch1, scratch2, name_reg);
1458 1458
1459 ExternalReference ref = 1459 ExternalReference ref =
1460 ExternalReference(IC_Utility(IC::kLoadCallbackProperty), 1460 ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
1461 masm()->isolate()); 1461 masm()->isolate());
1462 __ TailCallExternalReference(ref, 6, 1); 1462 __ TailCallExternalReference(ref, 6, 1);
1463 } 1463 }
1464 } else { // !compile_followup_inline 1464 } else { // !compile_followup_inline
1465 // Call the runtime system to load the interceptor. 1465 // Call the runtime system to load the interceptor.
1466 // Check that the maps haven't changed. 1466 // Check that the maps haven't changed.
1467 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder, 1467 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1468 scratch1, scratch2, scratch3, 1468 scratch1, scratch2, scratch3,
1469 name, miss); 1469 name, miss);
1470 PushInterceptorArguments(masm(), receiver, holder_reg, 1470 PushInterceptorArguments(masm(), receiver, holder_reg,
1471 name_reg, interceptor_holder); 1471 name_reg, interceptor_holder);
1472 1472
1473 ExternalReference ref = 1473 ExternalReference ref =
1474 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), 1474 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
1475 masm()->isolate()); 1475 masm()->isolate());
1476 __ TailCallExternalReference(ref, 6, 1); 1476 __ TailCallExternalReference(ref, 6, 1);
1477 } 1477 }
1478 } 1478 }
1479 1479
1480 1480
1481 void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) { 1481 void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
1482 if (kind_ == Code::KEYED_CALL_IC) { 1482 if (kind_ == Code::KEYED_CALL_IC) {
1483 __ cmp(r2, Operand(name)); 1483 __ cmp(r2, Operand(name));
1484 __ b(ne, miss); 1484 __ b(ne, miss);
1485 } 1485 }
1486 } 1486 }
1487 1487
1488 1488
1489 void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object, 1489 void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
1490 Handle<JSObject> holder, 1490 Handle<JSObject> holder,
1491 Handle<String> name, 1491 Handle<String> name,
1492 Label* miss) { 1492 Label* miss) {
1493 ASSERT(holder->IsGlobalObject()); 1493 ASSERT(holder->IsGlobalObject());
1494 1494
1495 // Get the number of arguments. 1495 // Get the number of arguments.
1496 const int argc = arguments().immediate(); 1496 const int argc = arguments().immediate();
1497 1497
1498 // Get the receiver from the stack. 1498 // Get the receiver from the stack.
1499 __ ldr(r0, MemOperand(sp, argc * kPointerSize)); 1499 __ ldr(r0, MemOperand(sp, argc * kPointerSize));
1500 1500
1501 // Check that the maps haven't changed. 1501 // Check that the maps haven't changed.
1502 __ JumpIfSmi(r0, miss); 1502 __ JumpIfSmi(r0, miss);
1503 CheckPrototypes(object, r0, holder, r3, r1, r4, name, miss); 1503 CheckPrototypes(object, r0, holder, r3, r1, r4, name, miss);
1504 } 1504 }
1505 1505
1506 1506
1507 void CallStubCompiler::GenerateLoadFunctionFromCell( 1507 void CallStubCompiler::GenerateLoadFunctionFromCell(
1508 Handle<JSGlobalPropertyCell> cell, 1508 Handle<JSGlobalPropertyCell> cell,
1509 Handle<JSFunction> function, 1509 Handle<JSFunction> function,
1510 Label* miss) { 1510 Label* miss) {
1511 // Get the value from the cell. 1511 // Get the value from the cell.
1512 __ mov(r3, Operand(cell)); 1512 __ mov(r3, Operand(cell));
1513 __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset)); 1513 __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
1514 1514
1515 // Check that the cell contains the same function. 1515 // Check that the cell contains the same function.
1516 if (heap()->InNewSpace(*function)) { 1516 if (heap()->InNewSpace(*function)) {
1517 // We can't embed a pointer to a function in new space so we have 1517 // We can't embed a pointer to a function in new space so we have
1518 // to verify that the shared function info is unchanged. This has 1518 // to verify that the shared function info is unchanged. This has
1519 // the nice side effect that multiple closures based on the same 1519 // the nice side effect that multiple closures based on the same
1520 // function can all use this call IC. Before we load through the 1520 // function can all use this call IC. Before we load through the
1521 // function, we have to verify that it still is a function. 1521 // function, we have to verify that it still is a function.
1522 __ JumpIfSmi(r1, miss); 1522 __ JumpIfSmi(r1, miss);
1523 __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE); 1523 __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
1524 __ b(ne, miss); 1524 __ b(ne, miss);
1525 1525
1526 // Check the shared function info. Make sure it hasn't changed. 1526 // Check the shared function info. Make sure it hasn't changed.
1527 __ Move(r3, Handle<SharedFunctionInfo>(function->shared())); 1527 __ Move(r3, Handle<SharedFunctionInfo>(function->shared()));
1528 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 1528 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1529 __ cmp(r4, r3); 1529 __ cmp(r4, r3);
1530 } else { 1530 } else {
1531 __ cmp(r1, Operand(function)); 1531 __ cmp(r1, Operand(function));
1532 } 1532 }
1533 __ b(ne, miss); 1533 __ b(ne, miss);
1534 } 1534 }
1535 1535
1536 1536
1537 void CallStubCompiler::GenerateMissBranch() { 1537 void CallStubCompiler::GenerateMissBranch() {
1538 Handle<Code> code = 1538 Handle<Code> code =
1539 isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(), 1539 isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
1540 kind_, 1540 kind_,
1541 extra_state_); 1541 extra_state_);
1542 __ Jump(code, RelocInfo::CODE_TARGET); 1542 __ Jump(code, RelocInfo::CODE_TARGET);
1543 } 1543 }
1544 1544
1545 1545
1546 Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object, 1546 Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
1547 Handle<JSObject> holder, 1547 Handle<JSObject> holder,
1548 int index, 1548 int index,
1549 Handle<String> name) { 1549 Handle<String> name) {
1550 // ----------- S t a t e ------------- 1550 // ----------- S t a t e -------------
1551 // -- r2 : name 1551 // -- r2 : name
1552 // -- lr : return address 1552 // -- lr : return address
1553 // ----------------------------------- 1553 // -----------------------------------
1554 Label miss; 1554 Label miss;
1555 1555
1556 GenerateNameCheck(name, &miss); 1556 GenerateNameCheck(name, &miss);
1557 1557
1558 const int argc = arguments().immediate(); 1558 const int argc = arguments().immediate();
1559 1559
1560 // Get the receiver of the function from the stack into r0. 1560 // Get the receiver of the function from the stack into r0.
1561 __ ldr(r0, MemOperand(sp, argc * kPointerSize)); 1561 __ ldr(r0, MemOperand(sp, argc * kPointerSize));
1562 // Check that the receiver isn't a smi. 1562 // Check that the receiver isn't a smi.
1563 __ JumpIfSmi(r0, &miss); 1563 __ JumpIfSmi(r0, &miss);
1564 1564
1565 // Do the right check and compute the holder register. 1565 // Do the right check and compute the holder register.
1566 Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss); 1566 Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss);
1567 GenerateFastPropertyLoad(masm(), r1, reg, holder, index); 1567 GenerateFastPropertyLoad(masm(), r1, reg, holder, index);
1568 1568
1569 GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_); 1569 GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
1570 1570
1571 // Handle call cache miss. 1571 // Handle call cache miss.
1572 __ bind(&miss); 1572 __ bind(&miss);
1573 GenerateMissBranch(); 1573 GenerateMissBranch();
1574 1574
1575 // Return the generated code. 1575 // Return the generated code.
1576 return GetCode(Code::FIELD, name); 1576 return GetCode(Code::FIELD, name);
1577 } 1577 }
1578 1578
1579 1579
1580 Handle<Code> CallStubCompiler::CompileArrayPushCall( 1580 Handle<Code> CallStubCompiler::CompileArrayPushCall(
1581 Handle<Object> object, 1581 Handle<Object> object,
1582 Handle<JSObject> holder, 1582 Handle<JSObject> holder,
1583 Handle<JSGlobalPropertyCell> cell, 1583 Handle<JSGlobalPropertyCell> cell,
1584 Handle<JSFunction> function, 1584 Handle<JSFunction> function,
1585 Handle<String> name) { 1585 Handle<String> name) {
1586 // ----------- S t a t e ------------- 1586 // ----------- S t a t e -------------
1587 // -- r2 : name 1587 // -- r2 : name
1588 // -- lr : return address 1588 // -- lr : return address
1589 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) 1589 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1590 // -- ... 1590 // -- ...
1591 // -- sp[argc * 4] : receiver 1591 // -- sp[argc * 4] : receiver
1592 // ----------------------------------- 1592 // -----------------------------------
1593 1593
1594 // If object is not an array, bail out to regular call. 1594 // If object is not an array, bail out to regular call.
1595 if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null(); 1595 if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
1596 1596
1597 Label miss; 1597 Label miss;
1598 GenerateNameCheck(name, &miss); 1598 GenerateNameCheck(name, &miss);
1599 1599
1600 Register receiver = r1; 1600 Register receiver = r1;
1601 // Get the receiver from the stack 1601 // Get the receiver from the stack
1602 const int argc = arguments().immediate(); 1602 const int argc = arguments().immediate();
1603 __ ldr(receiver, MemOperand(sp, argc * kPointerSize)); 1603 __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
1604 1604
1605 // Check that the receiver isn't a smi. 1605 // Check that the receiver isn't a smi.
1606 __ JumpIfSmi(receiver, &miss); 1606 __ JumpIfSmi(receiver, &miss);
1607 1607
1608 // Check that the maps haven't changed. 1608 // Check that the maps haven't changed.
1609 CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, r3, r0, r4, 1609 CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, r3, r0, r4,
1610 name, &miss); 1610 name, &miss);
1611 1611
1612 if (argc == 0) { 1612 if (argc == 0) {
1613 // Nothing to do, just return the length. 1613 // Nothing to do, just return the length.
1614 __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset)); 1614 __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1615 __ Drop(argc + 1); 1615 __ Drop(argc + 1);
1616 __ Ret(); 1616 __ Ret();
1617 } else { 1617 } else {
1618 Label call_builtin; 1618 Label call_builtin;
1619 1619
1620 if (argc == 1) { // Otherwise fall through to call the builtin. 1620 if (argc == 1) { // Otherwise fall through to call the builtin.
1621 Label attempt_to_grow_elements; 1621 Label attempt_to_grow_elements;
1622 1622
1623 Register elements = r6; 1623 Register elements = r6;
1624 Register end_elements = r5; 1624 Register end_elements = r5;
1625 // Get the elements array of the object. 1625 // Get the elements array of the object.
1626 __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset)); 1626 __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1627 1627
1628 // Check that the elements are in fast mode and writable. 1628 // Check that the elements are in fast mode and writable.
1629 __ CheckMap(elements, 1629 __ CheckMap(elements,
1630 r0, 1630 r0,
1631 Heap::kFixedArrayMapRootIndex, 1631 Heap::kFixedArrayMapRootIndex,
1632 &call_builtin, 1632 &call_builtin,
1633 DONT_DO_SMI_CHECK); 1633 DONT_DO_SMI_CHECK);
1634 1634
1635 1635
1636 // Get the array's length into r0 and calculate new length. 1636 // Get the array's length into r0 and calculate new length.
1637 __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset)); 1637 __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1638 STATIC_ASSERT(kSmiTagSize == 1); 1638 STATIC_ASSERT(kSmiTagSize == 1);
1639 STATIC_ASSERT(kSmiTag == 0); 1639 STATIC_ASSERT(kSmiTag == 0);
1640 __ add(r0, r0, Operand(Smi::FromInt(argc))); 1640 __ add(r0, r0, Operand(Smi::FromInt(argc)));
1641 1641
1642 // Get the elements' length. 1642 // Get the elements' length.
1643 __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset)); 1643 __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
1644 1644
1645 // Check if we could survive without allocation. 1645 // Check if we could survive without allocation.
1646 __ cmp(r0, r4); 1646 __ cmp(r0, r4);
1647 __ b(gt, &attempt_to_grow_elements); 1647 __ b(gt, &attempt_to_grow_elements);
1648 1648
1649 // Check if value is a smi. 1649 // Check if value is a smi.
1650 Label with_write_barrier; 1650 Label with_write_barrier;
1651 __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize)); 1651 __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
1652 __ JumpIfNotSmi(r4, &with_write_barrier); 1652 __ JumpIfNotSmi(r4, &with_write_barrier);
1653 1653
1654 // Save new length. 1654 // Save new length.
1655 __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset)); 1655 __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1656 1656
1657 // Store the value. 1657 // Store the value.
1658 // We may need a register containing the address end_elements below, 1658 // We may need a register containing the address end_elements below,
1659 // so write back the value in end_elements. 1659 // so write back the value in end_elements.
1660 __ add(end_elements, elements, 1660 __ add(end_elements, elements,
1661 Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize)); 1661 Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1662 const int kEndElementsOffset = 1662 const int kEndElementsOffset =
1663 FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize; 1663 FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
1664 __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex)); 1664 __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
1665 1665
1666 // Check for a smi. 1666 // Check for a smi.
1667 __ Drop(argc + 1); 1667 __ Drop(argc + 1);
1668 __ Ret(); 1668 __ Ret();
1669 1669
1670 __ bind(&with_write_barrier); 1670 __ bind(&with_write_barrier);
1671 1671
1672 __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset)); 1672 __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));
1673 1673
1674 if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) { 1674 if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
1675 Label fast_object, not_fast_object; 1675 Label fast_object, not_fast_object;
1676 __ CheckFastObjectElements(r3, r7, &not_fast_object); 1676 __ CheckFastObjectElements(r3, r7, &not_fast_object);
1677 __ jmp(&fast_object); 1677 __ jmp(&fast_object);
1678 // In case of fast smi-only, convert to fast object, otherwise bail out. 1678 // In case of fast smi-only, convert to fast object, otherwise bail out.
1679 __ bind(&not_fast_object); 1679 __ bind(&not_fast_object);
1680 __ CheckFastSmiElements(r3, r7, &call_builtin); 1680 __ CheckFastSmiElements(r3, r7, &call_builtin);
1681 // edx: receiver 1681 // edx: receiver
1682 // r3: map 1682 // r3: map
1683 Label try_holey_map; 1683 Label try_holey_map;
1684 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, 1684 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1685 FAST_ELEMENTS, 1685 FAST_ELEMENTS,
1686 r3, 1686 r3,
1687 r7, 1687 r7,
1688 &try_holey_map); 1688 &try_holey_map);
1689 __ mov(r2, receiver); 1689 __ mov(r2, receiver);
1690 ElementsTransitionGenerator:: 1690 ElementsTransitionGenerator::
1691 GenerateMapChangeElementsTransition(masm()); 1691 GenerateMapChangeElementsTransition(masm());
1692 __ jmp(&fast_object); 1692 __ jmp(&fast_object);
1693 1693
1694 __ bind(&try_holey_map); 1694 __ bind(&try_holey_map);
1695 __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS, 1695 __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
1696 FAST_HOLEY_ELEMENTS, 1696 FAST_HOLEY_ELEMENTS,
1697 r3, 1697 r3,
1698 r7, 1698 r7,
1699 &call_builtin); 1699 &call_builtin);
1700 __ mov(r2, receiver); 1700 __ mov(r2, receiver);
1701 ElementsTransitionGenerator:: 1701 ElementsTransitionGenerator::
1702 GenerateMapChangeElementsTransition(masm()); 1702 GenerateMapChangeElementsTransition(masm());
1703 __ bind(&fast_object); 1703 __ bind(&fast_object);
1704 } else { 1704 } else {
1705 __ CheckFastObjectElements(r3, r3, &call_builtin); 1705 __ CheckFastObjectElements(r3, r3, &call_builtin);
1706 } 1706 }
1707 1707
1708 // Save new length. 1708 // Save new length.
1709 __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset)); 1709 __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1710 1710
1711 // Store the value. 1711 // Store the value.
1712 // We may need a register containing the address end_elements below, 1712 // We may need a register containing the address end_elements below,
1713 // so write back the value in end_elements. 1713 // so write back the value in end_elements.
1714 __ add(end_elements, elements, 1714 __ add(end_elements, elements,
1715 Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize)); 1715 Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1716 __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex)); 1716 __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
1717 1717
1718 __ RecordWrite(elements, 1718 __ RecordWrite(elements,
1719 end_elements, 1719 end_elements,
1720 r4, 1720 r4,
1721 kLRHasNotBeenSaved, 1721 kLRHasNotBeenSaved,
1722 kDontSaveFPRegs, 1722 kDontSaveFPRegs,
1723 EMIT_REMEMBERED_SET, 1723 EMIT_REMEMBERED_SET,
1724 OMIT_SMI_CHECK); 1724 OMIT_SMI_CHECK);
1725 __ Drop(argc + 1); 1725 __ Drop(argc + 1);
1726 __ Ret(); 1726 __ Ret();
1727 1727
1728 __ bind(&attempt_to_grow_elements); 1728 __ bind(&attempt_to_grow_elements);
1729 // r0: array's length + 1. 1729 // r0: array's length + 1.
1730 // r4: elements' length. 1730 // r4: elements' length.
1731 1731
1732 if (!FLAG_inline_new) { 1732 if (!FLAG_inline_new) {
1733 __ b(&call_builtin); 1733 __ b(&call_builtin);
1734 } 1734 }
1735 1735
1736 __ ldr(r2, MemOperand(sp, (argc - 1) * kPointerSize)); 1736 __ ldr(r2, MemOperand(sp, (argc - 1) * kPointerSize));
1737 // Growing elements that are SMI-only requires special handling in case 1737 // Growing elements that are SMI-only requires special handling in case
1738 // the new element is non-Smi. For now, delegate to the builtin. 1738 // the new element is non-Smi. For now, delegate to the builtin.
1739 Label no_fast_elements_check; 1739 Label no_fast_elements_check;
1740 __ JumpIfSmi(r2, &no_fast_elements_check); 1740 __ JumpIfSmi(r2, &no_fast_elements_check);
1741 __ ldr(r7, FieldMemOperand(receiver, HeapObject::kMapOffset)); 1741 __ ldr(r7, FieldMemOperand(receiver, HeapObject::kMapOffset));
1742 __ CheckFastObjectElements(r7, r7, &call_builtin); 1742 __ CheckFastObjectElements(r7, r7, &call_builtin);
1743 __ bind(&no_fast_elements_check); 1743 __ bind(&no_fast_elements_check);
1744 1744
1745 Isolate* isolate = masm()->isolate(); 1745 Isolate* isolate = masm()->isolate();
1746 ExternalReference new_space_allocation_top = 1746 ExternalReference new_space_allocation_top =
1747 ExternalReference::new_space_allocation_top_address(isolate); 1747 ExternalReference::new_space_allocation_top_address(isolate);
1748 ExternalReference new_space_allocation_limit = 1748 ExternalReference new_space_allocation_limit =
1749 ExternalReference::new_space_allocation_limit_address(isolate); 1749 ExternalReference::new_space_allocation_limit_address(isolate);
1750 1750
1751 const int kAllocationDelta = 4; 1751 const int kAllocationDelta = 4;
1752 // Load top and check if it is the end of elements. 1752 // Load top and check if it is the end of elements.
1753 __ add(end_elements, elements, 1753 __ add(end_elements, elements,
1754 Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize)); 1754 Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1755 __ add(end_elements, end_elements, Operand(kEndElementsOffset)); 1755 __ add(end_elements, end_elements, Operand(kEndElementsOffset));
1756 __ mov(r7, Operand(new_space_allocation_top)); 1756 __ mov(r7, Operand(new_space_allocation_top));
1757 __ ldr(r3, MemOperand(r7)); 1757 __ ldr(r3, MemOperand(r7));
1758 __ cmp(end_elements, r3); 1758 __ cmp(end_elements, r3);
1759 __ b(ne, &call_builtin); 1759 __ b(ne, &call_builtin);
1760 1760
1761 __ mov(r9, Operand(new_space_allocation_limit)); 1761 __ mov(r9, Operand(new_space_allocation_limit));
1762 __ ldr(r9, MemOperand(r9)); 1762 __ ldr(r9, MemOperand(r9));
1763 __ add(r3, r3, Operand(kAllocationDelta * kPointerSize)); 1763 __ add(r3, r3, Operand(kAllocationDelta * kPointerSize));
1764 __ cmp(r3, r9); 1764 __ cmp(r3, r9);
1765 __ b(hi, &call_builtin); 1765 __ b(hi, &call_builtin);
1766 1766
1767 // We fit and could grow elements. 1767 // We fit and could grow elements.
1768 // Update new_space_allocation_top. 1768 // Update new_space_allocation_top.
1769 __ str(r3, MemOperand(r7)); 1769 __ str(r3, MemOperand(r7));
1770 // Push the argument. 1770 // Push the argument.
1771 __ str(r2, MemOperand(end_elements)); 1771 __ str(r2, MemOperand(end_elements));
1772 // Fill the rest with holes. 1772 // Fill the rest with holes.
1773 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex); 1773 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
1774 for (int i = 1; i < kAllocationDelta; i++) { 1774 for (int i = 1; i < kAllocationDelta; i++) {
1775 __ str(r3, MemOperand(end_elements, i * kPointerSize)); 1775 __ str(r3, MemOperand(end_elements, i * kPointerSize));
1776 } 1776 }
1777 1777
1778 // Update elements' and array's sizes. 1778 // Update elements' and array's sizes.
1779 __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset)); 1779 __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1780 __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta))); 1780 __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta)));
1781 __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset)); 1781 __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
1782 1782
1783 // Elements are in new space, so write barrier is not required. 1783 // Elements are in new space, so write barrier is not required.
1784 __ Drop(argc + 1); 1784 __ Drop(argc + 1);
1785 __ Ret(); 1785 __ Ret();
1786 } 1786 }
1787 __ bind(&call_builtin); 1787 __ bind(&call_builtin);
1788 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush, 1788 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1789 masm()->isolate()), 1789 masm()->isolate()),
1790 argc + 1, 1790 argc + 1,
1791 1); 1791 1);
1792 } 1792 }
1793 1793
1794 // Handle call cache miss. 1794 // Handle call cache miss.
1795 __ bind(&miss); 1795 __ bind(&miss);
1796 GenerateMissBranch(); 1796 GenerateMissBranch();
1797 1797
1798 // Return the generated code. 1798 // Return the generated code.
1799 return GetCode(function); 1799 return GetCode(function);
1800 } 1800 }
1801 1801
1802 1802
// Compiles a specialized call stub for Array.prototype.pop on a fast-mode
// JSArray receiver. Returns a null handle to signal "no custom stub" when the
// receiver is not a JSArray or a global-property cell is involved.
Handle<Code> CallStubCompiler::CompileArrayPopCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- r2                     : name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  Label miss, return_undefined, call_builtin;
  Register receiver = r1;
  Register elements = r3;
  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, elements,
                  r4, r0, name, &miss);

  // Get the elements array of the object.
  __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));

  // Check that the elements are in fast mode and writable; otherwise the
  // generic builtin handles the pop.
  __ CheckMap(elements,
              r0,
              Heap::kFixedArrayMapRootIndex,
              &call_builtin,
              DONT_DO_SMI_CHECK);

  // Get the array's length into r4 and calculate new length (length is a
  // smi, so smi arithmetic works directly). A negative result means the
  // array was empty.
  __ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
  __ b(lt, &return_undefined);

  // Get the last element.
  __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  // We can't address the last element in one operation. Compute the more
  // expensive shift first, and use an offset later on.
  __ add(elements, elements, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  // If the last slot holds the hole, defer to the builtin (presumably the
  // value must then come from the prototype chain -- TODO confirm).
  __ cmp(r0, r6);
  __ b(eq, &call_builtin);

  // Set the array's length.
  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));

  // Fill the vacated slot with the hole, then return the popped value (r0).
  __ str(r6, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ Drop(argc + 1);
  __ Ret();

  // Empty array: pop() yields undefined.
  __ bind(&return_undefined);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ Drop(argc + 1);
  __ Ret();

  // Slow path: tail-call the C++ ArrayPop builtin with the original args.
  __ bind(&call_builtin);
  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
                                                 masm()->isolate()),
                               argc + 1,
                               1);

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
}
1887 1887
1888 1888
// Compiles a specialized call stub for String.prototype.charCodeAt on a
// string receiver. Returns a null handle to signal "no custom stub" when the
// receiver is not a string or a global-property cell is involved.
Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();

  const int argc = arguments().immediate();
  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  // For the default string stub of a CALL_IC, treat an out-of-range index as
  // a miss instead of emitting a dedicated out-of-range path.
  if (kind_ == Code::CALL_IC &&
      (CallICBase::StringStubState::decode(extra_state_) ==
       DEFAULT_STRING_STUB)) {
    index_out_of_range_label = &miss;
  }
  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            r0,
                                            &miss);
  // A string primitive can never be its own holder, so start the prototype
  // walk at its (object) prototype.
  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
                  r0, holder, r1, r3, r4, name, &miss);

  Register receiver = r1;
  Register index = r4;
  Register result = r0;
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
  // charCodeAt called with no argument: index defaults to undefined.
  if (argc > 0) {
    __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharCodeAtGenerator generator(receiver,
                                      index,
                                      result,
                                      &miss,  // When not a string.
                                      &miss,  // When not a number.
                                      index_out_of_range_label,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  // Slow-path fallbacks emitted by the generator (e.g. non-flat strings).
  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  // Only emit the out-of-range handler if anything actually jumps to it
  // (i.e. it was not redirected to the miss label above).
  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    // charCodeAt with an out-of-range index returns NaN.
    __ LoadRoot(r0, Heap::kNanValueRootIndex);
    __ Drop(argc + 1);
    __ Ret();
  }

  __ bind(&miss);
  // Restore function name in r2.
  __ Move(r2, name);
  __ bind(&name_miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
}
1968 1968
1969 1969
// Compiles a specialized call stub for String.prototype.charAt on a string
// receiver. Mirrors CompileStringCharCodeAtCall, but produces a one-character
// string (via StringCharAtGenerator) and returns the empty string -- not NaN
// -- for an out-of-range index. Returns a null handle when no custom stub
// applies.
Handle<Code> CallStubCompiler::CompileStringCharAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();

  const int argc = arguments().immediate();
  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;
  // For the default string stub of a CALL_IC, treat an out-of-range index as
  // a miss instead of emitting a dedicated out-of-range path.
  if (kind_ == Code::CALL_IC &&
      (CallICBase::StringStubState::decode(extra_state_) ==
       DEFAULT_STRING_STUB)) {
    index_out_of_range_label = &miss;
  }
  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            r0,
                                            &miss);
  // A string primitive can never be its own holder, so start the prototype
  // walk at its (object) prototype.
  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
                  r0, holder, r1, r3, r4, name, &miss);

  Register receiver = r0;
  Register index = r4;
  Register scratch = r3;
  Register result = r0;
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
  // charAt called with no argument: index defaults to undefined.
  if (argc > 0) {
    __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharAtGenerator generator(receiver,
                                  index,
                                  scratch,
                                  result,
                                  &miss,  // When not a string.
                                  &miss,  // When not a number.
                                  index_out_of_range_label,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  // Slow-path fallbacks emitted by the generator (e.g. non-flat strings).
  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  // Only emit the out-of-range handler if anything actually jumps to it
  // (i.e. it was not redirected to the miss label above).
  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    // charAt with an out-of-range index returns the empty string.
    __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
    __ Drop(argc + 1);
    __ Ret();
  }

  __ bind(&miss);
  // Restore function name in r2.
  __ Move(r2, name);
  __ bind(&name_miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
}
2050 2050
2051 2051
// Compiles a specialized call stub for String.fromCharCode with exactly one
// argument. Fast path handles a smi char code (masked to uint16); anything
// else tail-calls the full JS function. Returns a null handle when no custom
// stub applies.
Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    // Non-global case: load the receiver and verify its map chain.
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(r1, &miss);

    CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
                    name, &miss);
  } else {
    // Global case: check the receiver and that the cell still holds the
    // expected function.
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the char code argument.
  Register code = r1;
  __ ldr(code, MemOperand(sp, 0 * kPointerSize));

  // Check the code is a smi.
  Label slow;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(code, &slow);

  // Convert the smi code to uint16 (the smi-tagged mask keeps the value a
  // valid smi while clearing the upper payload bits).
  __ and_(code, code, Operand(Smi::FromInt(0xffff)));

  StringCharFromCodeGenerator generator(code, r0);
  generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  // Slow-path fallbacks emitted by the generator.
  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(
      function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);

  __ bind(&miss);
  // r2: function name.
  GenerateMissBranch();

  // Return the generated code. Global-cell stubs are cached as NORMAL under
  // the property name; otherwise under the function.
  return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
}
2123 2123
2124 2124
// Compiles a specialized call stub for Math.floor with exactly one argument,
// using VFP2 round-towards-minus-infinity conversion. Smis are returned
// unchanged; heap numbers are converted when the result fits in a smi;
// everything else (NaN, infinities, overflow, -0 handling via the original
// argument) falls through to appropriate paths or the full function call.
// Returns a null handle when VFP2 is unavailable or no custom stub applies.
Handle<Code> CallStubCompiler::CompileMathFloorCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // The fast path below requires VFP2 hardware rounding.
  if (!CpuFeatures::IsSupported(VFP2)) {
    return Handle<Code>::null();
  }

  CpuFeatures::Scope scope_vfp2(VFP2);
  const int argc = arguments().immediate();
  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();

  Label miss, slow;
  GenerateNameCheck(name, &miss);

  if (cell.is_null()) {
    // Non-global case: load the receiver and verify its map chain.
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(r1, &miss);
    CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
                    name, &miss);
  } else {
    // Global case: check the receiver and that the cell still holds the
    // expected function.
    ASSERT(cell->value() == *function);
    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into r0.
  __ ldr(r0, MemOperand(sp, 0 * kPointerSize));

  // If the argument is a smi, just return it -- floor of an integer is the
  // integer itself. Drop and Ret are conditional on eq (smi tag clear).
  STATIC_ASSERT(kSmiTag == 0);
  __ tst(r0, Operand(kSmiTagMask));
  __ Drop(argc + 1, eq);
  __ Ret(eq);

  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);

  Label wont_fit_smi, no_vfp_exception, restore_fpscr_and_return;

  // If vfp3 is enabled, we use the fpu rounding with the RM (round towards
  // minus infinity) mode.

  // Load the HeapNumber value.
  // We will need access to the value in the core registers, so we load it
  // with ldrd and move it to the fpu. It also spares a sub instruction for
  // updating the HeapNumber value address, as vldr expects a multiple
  // of 4 offset.
  __ Ldrd(r4, r5, FieldMemOperand(r0, HeapNumber::kValueOffset));
  __ vmov(d1, r4, r5);

  // Backup FPSCR.
  __ vmrs(r3);
  // Set custom FPCSR:
  //  - Set rounding mode to "Round towards Minus Infinity"
  //    (i.e. bits [23:22] = 0b10).
  //  - Clear vfp cumulative exception flags (bits [3:0]).
  //  - Make sure Flush-to-zero mode control bit is unset (bit 22).
  __ bic(r9, r3,
      Operand(kVFPExceptionMask | kVFPRoundingModeMask | kVFPFlushToZeroMask));
  __ orr(r9, r9, Operand(kRoundToMinusInf));
  __ vmsr(r9);

  // Convert the argument to an integer.
  __ vcvt_s32_f64(s0, d1, kFPSCRRounding);

  // Use vcvt latency to start checking for special cases.
  // Get the argument exponent and clear the sign bit.
  __ bic(r6, r5, Operand(HeapNumber::kSignMask));
  __ mov(r6, Operand(r6, LSR, HeapNumber::kMantissaBitsInTopWord));

  // Retrieve FPSCR and check for vfp exceptions.
  __ vmrs(r9);
  __ tst(r9, Operand(kVFPExceptionMask));
  __ b(&no_vfp_exception, eq);

  // Check for NaN, Infinity, and -Infinity.
  // They are invariant through a Math.Floor call, so just
  // return the original argument.
  __ sub(r7, r6, Operand(HeapNumber::kExponentMask
        >> HeapNumber::kMantissaBitsInTopWord), SetCC);
  __ b(&restore_fpscr_and_return, eq);
  // We had an overflow or underflow in the conversion. Check if we
  // have a big exponent.
  __ cmp(r7, Operand(HeapNumber::kMantissaBits));
  // If greater or equal, the argument is already round and in r0.
  __ b(&restore_fpscr_and_return, ge);
  __ b(&wont_fit_smi);

  __ bind(&no_vfp_exception);
  // Move the result back to general purpose register r0.
  __ vmov(r0, s0);
  // Check if the result fits into a smi (i.e. no overflow when shifting
  // left by one for the smi tag).
  __ add(r1, r0, Operand(0x40000000), SetCC);
  __ b(&wont_fit_smi, mi);
  // Tag the result.
  STATIC_ASSERT(kSmiTag == 0);
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));

  // Check for -0: a zero result from a negative input must return the
  // original heap number (-0), not the smi 0.
  __ cmp(r0, Operand(0, RelocInfo::NONE));
  __ b(&restore_fpscr_and_return, ne);
  // r5 already holds the HeapNumber exponent.
  __ tst(r5, Operand(HeapNumber::kSignMask));
  // If our HeapNumber is negative it was -0, so load its address and return.
  // Else r0 is loaded with 0, so we can also just return.
  __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne);

  __ bind(&restore_fpscr_and_return);
  // Restore FPSCR and return.
  __ vmsr(r3);
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&wont_fit_smi);
  // Restore FPCSR and fall to slow case.
  __ vmsr(r3);

  __ bind(&slow);
  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ InvokeFunction(
      function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);

  __ bind(&miss);
  // r2: function name.
  GenerateMissBranch();

  // Return the generated code. Global-cell stubs are cached as NORMAL under
  // the property name; otherwise under the function.
  return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
}
2269 2269
2270 2270
2271 Handle<Code> CallStubCompiler::CompileMathAbsCall( 2271 Handle<Code> CallStubCompiler::CompileMathAbsCall(
2272 Handle<Object> object, 2272 Handle<Object> object,
2273 Handle<JSObject> holder, 2273 Handle<JSObject> holder,
2274 Handle<JSGlobalPropertyCell> cell, 2274 Handle<JSGlobalPropertyCell> cell,
2275 Handle<JSFunction> function, 2275 Handle<JSFunction> function,
2276 Handle<String> name) { 2276 Handle<String> name) {
2277 // ----------- S t a t e ------------- 2277 // ----------- S t a t e -------------
2278 // -- r2 : function name 2278 // -- r2 : function name
2279 // -- lr : return address 2279 // -- lr : return address
2280 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based) 2280 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2281 // -- ... 2281 // -- ...
2282 // -- sp[argc * 4] : receiver 2282 // -- sp[argc * 4] : receiver
2283 // ----------------------------------- 2283 // -----------------------------------
2284 2284
2285 const int argc = arguments().immediate(); 2285 const int argc = arguments().immediate();
2286 // If the object is not a JSObject or we got an unexpected number of 2286 // If the object is not a JSObject or we got an unexpected number of
2287 // arguments, bail out to the regular call. 2287 // arguments, bail out to the regular call.
2288 if (!object->IsJSObject() || argc != 1) return Handle<Code>::null(); 2288 if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2289 2289
2290 Label miss; 2290 Label miss;
2291 GenerateNameCheck(name, &miss); 2291 GenerateNameCheck(name, &miss);
2292 if (cell.is_null()) { 2292 if (cell.is_null()) {
2293 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); 2293 __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
2294 STATIC_ASSERT(kSmiTag == 0); 2294 STATIC_ASSERT(kSmiTag == 0);
2295 __ JumpIfSmi(r1, &miss); 2295 __ JumpIfSmi(r1, &miss);
2296 CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4, 2296 CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
2297 name, &miss); 2297 name, &miss);
2298 } else { 2298 } else {
2299 ASSERT(cell->value() == *function); 2299 ASSERT(cell->value() == *function);
2300 GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name, 2300 GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
2301 &miss); 2301 &miss);
2302 GenerateLoadFunctionFromCell(cell, function, &miss); 2302 GenerateLoadFunctionFromCell(cell, function, &miss);
2303 } 2303 }
2304 2304
2305 // Load the (only) argument into r0. 2305 // Load the (only) argument into r0.
2306 __ ldr(r0, MemOperand(sp, 0 * kPointerSize)); 2306 __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
2307 2307
2308 // Check if the argument is a smi. 2308 // Check if the argument is a smi.
2309 Label not_smi; 2309 Label not_smi;
2310 STATIC_ASSERT(kSmiTag == 0); 2310 STATIC_ASSERT(kSmiTag == 0);
2311 __ JumpIfNotSmi(r0, &not_smi); 2311 __ JumpIfNotSmi(r0, &not_smi);
2312 2312
2313 // Do bitwise not or do nothing depending on the sign of the 2313 // Do bitwise not or do nothing depending on the sign of the
2314 // argument. 2314 // argument.
2315 __ eor(r1, r0, Operand(r0, ASR, kBitsPerInt - 1)); 2315 __ eor(r1, r0, Operand(r0, ASR, kBitsPerInt - 1));
2316 2316
2317 // Add 1 or do nothing depending on the sign of the argument. 2317 // Add 1 or do nothing depending on the sign of the argument.
2318 __ sub(r0, r1, Operand(r0, ASR, kBitsPerInt - 1), SetCC); 2318 __ sub(r0, r1, Operand(r0, ASR, kBitsPerInt - 1), SetCC);
2319 2319
2320 // If the result is still negative, go to the slow case. 2320 // If the result is still negative, go to the slow case.
2321 // This only happens for the most negative smi. 2321 // This only happens for the most negative smi.
2322 Label slow; 2322 Label slow;
2323 __ b(mi, &slow); 2323 __ b(mi, &slow);
2324 2324
2325 // Smi case done. 2325 // Smi case done.
2326 __ Drop(argc + 1); 2326 __ Drop(argc + 1);
2327 __ Ret(); 2327 __ Ret();
2328 2328
2329 // Check if the argument is a heap number and load its exponent and 2329 // Check if the argument is a heap number and load its exponent and
2330 // sign. 2330 // sign.
2331 __ bind(&not_smi); 2331 __ bind(&not_smi);
2332 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK); 2332 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
2333 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset)); 2333 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
2334 2334
2335 // Check the sign of the argument. If the argument is positive, 2335 // Check the sign of the argument. If the argument is positive,
2336 // just return it. 2336 // just return it.
2337 Label negative_sign; 2337 Label negative_sign;
2338 __ tst(r1, Operand(HeapNumber::kSignMask)); 2338 __ tst(r1, Operand(HeapNumber::kSignMask));
2339 __ b(ne, &negative_sign); 2339 __ b(ne, &negative_sign);
2340 __ Drop(argc + 1); 2340 __ Drop(argc + 1);
2341 __ Ret(); 2341 __ Ret();
2342 2342
2343 // If the argument is negative, clear the sign, and return a new 2343 // If the argument is negative, clear the sign, and return a new
2344 // number. 2344 // number.
2345 __ bind(&negative_sign); 2345 __ bind(&negative_sign);
2346 __ eor(r1, r1, Operand(HeapNumber::kSignMask)); 2346 __ eor(r1, r1, Operand(HeapNumber::kSignMask));
2347 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); 2347 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
2348 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); 2348 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
2349 __ AllocateHeapNumber(r0, r4, r5, r6, &slow); 2349 __ AllocateHeapNumber(r0, r4, r5, r6, &slow);
2350 __ str(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset)); 2350 __ str(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
2351 __ str(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); 2351 __ str(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
2352 __ Drop(argc + 1); 2352 __ Drop(argc + 1);
2353 __ Ret(); 2353 __ Ret();
2354 2354
2355 // Tail call the full function. We do not have to patch the receiver 2355 // Tail call the full function. We do not have to patch the receiver
2356 // because the function makes no use of it. 2356 // because the function makes no use of it.
2357 __ bind(&slow); 2357 __ bind(&slow);
2358 __ InvokeFunction( 2358 __ InvokeFunction(
2359 function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD); 2359 function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
2360 2360
2361 __ bind(&miss); 2361 __ bind(&miss);
2362 // r2: function name. 2362 // r2: function name.
2363 GenerateMissBranch(); 2363 GenerateMissBranch();
2364 2364
2365 // Return the generated code. 2365 // Return the generated code.
2366 return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name); 2366 return cell.is_null() ? GetCode(function) : GetCode(Code::NORMAL, name);
2367 } 2367 }
2368 2368
2369 2369
// Compiles a monomorphic call stub that invokes a "simple" API callback
// directly, skipping the generic call machinery. Returns a null handle to
// signal "bail out to the regular call stub" when the fast path does not
// apply (global receiver, cell-based call, non-JSObject receiver, or an
// unexpected prototype depth).
Handle<Code> CallStubCompiler::CompileFastApiCall(
    const CallOptimization& optimization,
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  Counters* counters = isolate()->counters();

  ASSERT(optimization.is_simple_api_call());
  // Bail out if object is a global object as we don't want to
  // repatch it to global receiver.
  if (object->IsGlobalObject()) return Handle<Code>::null();
  // Cell-based (global property) calls take the regular path.
  if (!cell.is_null()) return Handle<Code>::null();
  if (!object->IsJSObject()) return Handle<Code>::null();
  int depth = optimization.GetPrototypeDepthOfExpectedType(
      Handle<JSObject>::cast(object), holder);
  if (depth == kInvalidProtoDepth) return Handle<Code>::null();

  // Two miss labels: one for misses taken before stack space for the API
  // call has been reserved, and one for after (which must free that space).
  Label miss, miss_before_stack_reserved;
  GenerateNameCheck(name, &miss_before_stack_reserved);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(r1, &miss_before_stack_reserved);

  __ IncrementCounter(counters->call_const(), 1, r0, r3);
  __ IncrementCounter(counters->call_const_fast_api(), 1, r0, r3);

  // Reserve stack slots needed by GenerateFastApiDirectCall below.
  ReserveSpaceForFastApiCall(masm(), r0);

  // Check that the maps haven't changed and find a Holder as a side effect.
  CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4, name,
                  depth, &miss);

  GenerateFastApiDirectCall(masm(), optimization, argc);

  // Miss after reservation: undo the stack adjustment before the miss jump.
  __ bind(&miss);
  FreeSpaceForFastApiCall(masm());

  __ bind(&miss_before_stack_reserved);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
}
2419 2419
2420 2420
// Compiles a call stub for a constant (known) target function. Depending on
// |check|, the stub verifies the receiver is of the expected kind (receiver
// map, string, number, or boolean) and that the relevant prototype chain is
// unchanged before jumping straight into the known function.
Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
                                                   Handle<JSObject> holder,
                                                   Handle<JSFunction> function,
                                                   Handle<String> name,
                                                   CheckType check) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  // Prefer a specialized custom-call stub (e.g. for certain builtins) if one
  // exists for this function.
  if (HasCustomCallGenerator(function)) {
    Handle<Code> code = CompileCustomCall(object, holder,
                                          Handle<JSGlobalPropertyCell>::null(),
                                          function, name);
    // A null handle means bail out to the regular compiler code below.
    if (!code.is_null()) return code;
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack
  const int argc = arguments().immediate();
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  // (A smi receiver is acceptable for NUMBER_CHECK, so skip the test there.)
  if (check != NUMBER_CHECK) {
    __ JumpIfSmi(r1, &miss);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(masm()->isolate()->counters()->call_const(),
                          1, r0, r3);

      // Check that the maps haven't changed.
      CheckPrototypes(Handle<JSObject>::cast(object), r1, holder, r0, r3, r4,
                      name, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
        __ str(r3, MemOperand(sp, argc * kPointerSize));
      }
      break;

    case STRING_CHECK:
      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
        // Check that the object is a two-byte string or a symbol.
        __ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
        __ b(ge, &miss);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::STRING_FUNCTION_INDEX, r0, &miss);
        CheckPrototypes(
            Handle<JSObject>(JSObject::cast(object->GetPrototype())),
            r0, holder, r3, r1, r4, name, &miss);
      } else {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      }
      break;

    case NUMBER_CHECK:
      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ JumpIfSmi(r1, &fast);
        __ CompareObjectType(r1, r0, r0, HEAP_NUMBER_TYPE);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::NUMBER_FUNCTION_INDEX, r0, &miss);
        CheckPrototypes(
            Handle<JSObject>(JSObject::cast(object->GetPrototype())),
            r0, holder, r3, r1, r4, name, &miss);
      } else {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      }
      break;

    case BOOLEAN_CHECK:
      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
        Label fast;
        // Check that the object is a boolean (i.e. exactly true or false).
        __ LoadRoot(ip, Heap::kTrueValueRootIndex);
        __ cmp(r1, ip);
        __ b(eq, &fast);
        __ LoadRoot(ip, Heap::kFalseValueRootIndex);
        __ cmp(r1, ip);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::BOOLEAN_FUNCTION_INDEX, r0, &miss);
        CheckPrototypes(
            Handle<JSObject>(JSObject::cast(object->GetPrototype())),
            r0, holder, r3, r1, r4, name, &miss);
      } else {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      }
      break;
  }

  // Contextual calls (e.g. `f()`) use CALL_AS_FUNCTION; property calls
  // (e.g. `o.f()`) use CALL_AS_METHOD.
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(
      function, arguments(), JUMP_FUNCTION, NullCallWrapper(), call_kind);

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
}
2547 2547
2548 2548
// Compiles a call stub for a property that is backed by an interceptor.
// The interceptor (via CallInterceptorCompiler) produces the function to
// call; the stub then performs a regular function call on it.
Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
                                                      Handle<JSObject> holder,
                                                      Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;
  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();
  // Look the property up behind the interceptor, so the compiler below can
  // specialize on what (if anything) is found there.
  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  CallInterceptorCompiler compiler(this, arguments(), r2, extra_state_);
  compiler.Compile(masm(), object, holder, name, &lookup, r1, r3, r4, r0,
                   &miss);

  // Move returned value, the function to call, to r1.
  __ mov(r1, r0);
  // Restore receiver.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(Code::INTERCEPTOR, name);
}
2585 2585
2586 2586
// Compiles a call stub for a function stored in a global property cell
// (e.g. a call to a global function). Verifies the receiver and loads the
// function from the cell, then tail-calls the function's code.
Handle<Code> CallStubCompiler::CompileCallGlobal(
    Handle<JSObject> object,
    Handle<GlobalObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  // Prefer a specialized custom-call stub if one exists for this function.
  if (HasCustomCallGenerator(function)) {
    Handle<Code> code = CompileCustomCall(object, holder, cell, function, name);
    // A null handle means bail out to the regular compiler code below.
    if (!code.is_null()) return code;
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();
  GenerateGlobalReceiverCheck(object, holder, name, &miss);
  GenerateLoadFunctionFromCell(cell, function, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, argc * kPointerSize));
  }

  // Set up the context (function already in r1).
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->call_global_inline(), 1, r3, r4);
  ParameterCount expected(function->shared()->formal_parameter_count());
  // Contextual calls use CALL_AS_FUNCTION; property calls CALL_AS_METHOD.
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  // We call indirectly through the code field in the function to
  // allow recompilation to take effect without changing any of the
  // call sites.
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION,
                NullCallWrapper(), call_kind);

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(Code::NORMAL, name);
}
2643 2643
2644 2644
// Compiles a store stub that writes directly into an in-object/backing-store
// field at |index|. If |transition| is non-null, the store also transitions
// the receiver to the new map (MAP_TRANSITION); otherwise it is a plain
// FIELD store.
Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
                                                  int index,
                                                  Handle<Map> transition,
                                                  Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     name,
                     r1, r2, r3, r4,
                     &miss);
  // On miss, fall back to the generic store-IC miss handler.
  __ bind(&miss);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition.is_null()
                 ? Code::FIELD
                 : Code::MAP_TRANSITION, name);
}
2673 2673
2674 2674
// Compiles a store stub for a property backed by an AccessorInfo callback.
// After verifying the receiver's map chain, it tail-calls into the runtime
// (kStoreCallbackProperty) with receiver, callback info, name, and value.
Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<AccessorInfo> callback) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;
  // Check that the maps haven't changed.
  __ JumpIfSmi(r1, &miss);
  CheckPrototypes(receiver, r1, holder, r3, r4, r5, name, &miss);

  // Stub never generated for non-global objects that require access checks.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());

  // Push the 4 runtime arguments: receiver, callback info, name, value.
  __ push(r1);  // receiver
  __ mov(ip, Operand(callback));  // callback info
  __ Push(ip, r2, r0);

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
                        masm()->isolate());
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(Code::CALLBACKS, name);
}
2712 2712
2713 2713
2714 #undef __ 2714 #undef __
2715 #define __ ACCESS_MASM(masm) 2715 #define __ ACCESS_MASM(masm)
2716 2716
2717 2717
// Emits code that stores a property by calling a JavaScript setter function.
// If |setter| is null, only a deopt continuation point is recorded instead of
// an actual call (used when this snippet exists solely for deoptimization).
// The stub returns the stored value (r0), not the setter's return value.
void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  {
    // Entering an internal frame so the setter call has a proper frame to
    // run (and deoptimize) in.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ push(r0);

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      __ Push(r1, r0);
      ParameterCount actual(1);
      __ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
                        CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(r0);

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}
2753 2753
2754 2754
2755 #undef __ 2755 #undef __
2756 #define __ ACCESS_MASM(masm()) 2756 #define __ ACCESS_MASM(masm())
2757 2757
2758 2758
// Compiles a store stub that dispatches to a JavaScript setter. Verifies the
// receiver's map chain, then delegates the actual call to
// GenerateStoreViaSetter; misses fall back to the generic StoreIC miss stub.
Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the maps haven't changed.
  __ JumpIfSmi(r1, &miss);
  CheckPrototypes(receiver, r1, holder, r3, r4, r5, name, &miss);

  GenerateStoreViaSetter(masm(), setter);

  __ bind(&miss);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(Code::CALLBACKS, name);
}
2785 2785
2786 2786
// Compiles a store IC stub for a receiver with a store interceptor.  After
// validating the receiver's map (and the global security token for global
// proxies) it tail-calls the kStoreInterceptorProperty runtime function
// with four arguments: receiver, name, value, and strict-mode flag.
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> receiver,
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the object hasn't changed.  Element-transitioned
  // maps are still accepted.
  __ CheckMap(r1, r3, Handle<Map>(receiver->map()), &miss,
              DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);

  // Perform global security token check if needed.
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(r1, r3, &miss);
  }

  // Stub is never generated for non-global objects that require access
  // checks.
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  __ Push(r1, r2, r0);  // Receiver, name, value.

  __ mov(r0, Operand(Smi::FromInt(strict_mode_)));
  __ push(r0);  // strict mode

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
                        masm()->isolate());
  __ TailCallExternalReference(store_ic_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(Code::INTERCEPTOR, name);
}
2830 2830
2831 2831
// Compiles a store IC stub that writes directly into a global property
// cell.  Misses if the global's map changed or the cell holds the hole
// (i.e. the property was deleted), in which case the runtime must update
// the property details.
Handle<Code> StoreStubCompiler::CompileStoreGlobal(
    Handle<GlobalObject> object,
    Handle<JSGlobalPropertyCell> cell,
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Check that the value in the cell is not the hole. If it is, this
  // cell could have been deleted and reintroducing the global needs
  // to update the property details in the property dictionary of the
  // global object. We bail out to the runtime system to do that.
  __ mov(r4, Operand(cell));
  __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
  __ ldr(r6, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
  __ cmp(r5, r6);
  __ b(eq, &miss);

  // Store the value in the cell.
  __ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
  // Cells are always rescanned, so no write barrier here.

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->named_store_global_inline(), 1, r4, r3);
  __ Ret();

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->named_store_global_inline_miss(), 1, r4, r3);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(Code::NORMAL, name);
}
2876 2876
2877 2877
// Compiles a load IC stub for a property known not to exist anywhere on
// the receiver's prototype chain.  If the whole chain's maps are unchanged
// (and, for a global terminus, the property cell is still empty) the stub
// returns undefined without entering the runtime.
Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
                                                      Handle<JSObject> object,
                                                      Handle<JSObject> last) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that receiver is not a smi.
  __ JumpIfSmi(r0, &miss);

  // Check the maps of the full prototype chain.
  CheckPrototypes(object, r0, last, r3, r1, r4, name, &miss);

  // If the last object in the prototype chain is a global object,
  // check that the global property cell is empty.
  if (last->IsGlobalObject()) {
    GenerateCheckPropertyCell(
        masm(), Handle<GlobalObject>::cast(last), name, r1, &miss);
  }

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ Ret();

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.  NONEXISTENT stubs are name-independent,
  // hence the empty-string key.
  return GetCode(Code::NONEXISTENT, factory()->empty_string());
}
2911 2911
2912 2912
// Compiles a load IC stub that reads an in-object/backing-store field at
// the given descriptor index from the holder.
Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
                                                Handle<JSObject> holder,
                                                int index,
                                                Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateLoadField(object, holder, r0, r3, r1, r4, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(Code::FIELD, name);
}
2931 2931
2932 2932
// Compiles a load IC stub that fetches a property through a native
// AccessorInfo callback.
Handle<Code> LoadStubCompiler::CompileLoadCallback(
    Handle<String> name,
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<AccessorInfo> callback) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;
  GenerateLoadCallback(object, holder, r0, r2, r3, r1, r4, r5, callback, name,
                       &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(Code::CALLBACKS, name);
}
2952 2952
2953 2953
2954 #undef __ 2954 #undef __
2955 #define __ ACCESS_MASM(masm) 2955 #define __ ACCESS_MASM(masm)
2956 2956
2957 2957
// Emits the code that invokes a JavaScript getter for the receiver in r0.
// If |getter| is null, no call is emitted; instead the current pc offset is
// recorded as the getter-stub deopt continuation point (this variant is
// generated for deoptimization only).
void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Handle<JSFunction> getter) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  {
    // The getter runs in an internal frame so the caller's frame stays
    // intact across the call.
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      __ push(r0);
      ParameterCount actual(0);
      __ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
                        CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}
2985 2985
2986 2986
2987 #undef __ 2987 #undef __
2988 #define __ ACCESS_MASM(masm()) 2988 #define __ ACCESS_MASM(masm())
2989 2989
2990 2990
// Compiles a load IC stub that reads a named property by calling its
// JavaScript getter.  Misses fall through to the generic LoadIC miss
// handler.
Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<JSFunction> getter) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the maps haven't changed.
  __ JumpIfSmi(r0, &miss);
  CheckPrototypes(receiver, r0, holder, r3, r4, r1, name, &miss);

  GenerateLoadViaGetter(masm(), getter);

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(Code::CALLBACKS, name);
}
3015 3015
3016 3016
// Compiles a load IC stub for a property whose value is a constant
// function; the stub returns the function directly once the prototype
// chain checks pass.
Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
                                                   Handle<JSObject> holder,
                                                   Handle<JSFunction> value,
                                                   Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateLoadConstant(object, holder, r0, r3, r1, r4, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(Code::CONSTANT_FUNCTION, name);
}
3035 3035
3036 3036
// Compiles a load IC stub for a receiver with a named-property
// interceptor.  A post-interceptor lookup is performed at compile time so
// the generated code can take a fast path when the property is found
// behind the interceptor.
Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> object,
                                                      Handle<JSObject> holder,
                                                      Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(object, holder, &lookup, r0, r2, r3, r1, r4, name,
                          &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(Code::INTERCEPTOR, name);
}
3057 3057
3058 3058
// Compiles a load IC stub that reads a global property straight from its
// property cell.  When the property is deletable (!is_dont_delete) the
// stub additionally misses on the hole value, since the property may have
// been removed.
Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<JSObject> object,
    Handle<GlobalObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<String> name,
    bool is_dont_delete) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ JumpIfSmi(r0, &miss);
  CheckPrototypes(object, r0, holder, r3, r4, r1, name, &miss);

  // Get the value from the cell.
  __ mov(r3, Operand(cell));
  __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(r4, ip);
    __ b(eq, &miss);
  }

  __ mov(r0, r4);
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(counters->named_load_global_stub_miss(), 1, r1, r3);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(Code::NORMAL, name);
}
3099 3099
3100 3100
// Compiles a keyed load IC stub specialized for one string key: it first
// verifies that the incoming key equals the cached |name|, then loads the
// field like the non-keyed variant.
Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
                                                     Handle<JSObject> receiver,
                                                     Handle<JSObject> holder,
                                                     int index) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(name));
  __ b(ne, &miss);

  GenerateLoadField(receiver, holder, r1, r2, r3, r4, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(Code::FIELD, name);
}
3122 3122
3123 3123
// Compiles a keyed load IC stub for a native accessor callback, guarded by
// an exact match against the cached key |name|.
Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<AccessorInfo> callback) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(name));
  __ b(ne, &miss);

  GenerateLoadCallback(receiver, holder, r1, r0, r2, r3, r4, r5, callback, name,
                       &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(Code::CALLBACKS, name);
}
3147 3147
3148 3148
// Compiles a keyed load IC stub for a constant-function property, guarded
// by an exact match against the cached key |name|.
Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
    Handle<String> name,
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<JSFunction> value) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(name));
  __ b(ne, &miss);

  GenerateLoadConstant(receiver, holder, r1, r2, r3, r4, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(Code::CONSTANT_FUNCTION, name);
}
3172 3172
3173 3173
// Compiles a keyed load IC stub for a receiver with a named-property
// interceptor, guarded by an exact match against the cached key |name|.
Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
    Handle<JSObject> receiver,
    Handle<JSObject> holder,
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(name));
  __ b(ne, &miss);

  // Look the property up behind the interceptor so the generated code can
  // fast-path a post-interceptor hit.
  LookupResult lookup(isolate());
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(receiver, holder, &lookup, r1, r0, r2, r3, r4, name,
                          &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(Code::INTERCEPTOR, name);
}
3198 3198
3199 3199
// Compiles a keyed load IC stub for the "length" property of a JSArray,
// guarded by an exact match against the cached key |name|.
Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(name));
  __ b(ne, &miss);

  GenerateLoadArrayLength(masm(), r1, r2, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(Code::CALLBACKS, name);
}
3219 3219
3220 3220
// Compiles a keyed load IC stub for the "length" property of a string,
// guarded by an exact match against the cached key |name|.  The stub
// counter is decremented again on the miss path so it only tallies hits.
Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_string_length(), 1, r2, r3);

  // Check the key is the cached one.
  __ cmp(r0, Operand(name));
  __ b(ne, &miss);

  GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true);
  __ bind(&miss);
  __ DecrementCounter(counters->keyed_load_string_length(), 1, r2, r3);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(Code::CALLBACKS, name);
}
3245 3245
3246 3246
// Compiles a keyed load IC stub for the "prototype" property of a
// function, guarded by an exact match against the cached key |name|.  The
// stub counter is decremented again on the miss path so it only tallies
// hits.
Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);

  // Check the name hasn't changed.
  __ cmp(r0, Operand(name));
  __ b(ne, &miss);

  GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss);
  __ bind(&miss);
  __ DecrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(Code::CALLBACKS, name);
}
3270 3270
3271 3271
// Compiles a keyed load IC stub for elements of a single receiver map:
// DispatchMap jumps into the shared KeyedLoadElementStub for the map's
// elements kind; everything else goes to the KeyedLoadIC miss builtin.
Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
    Handle<Map> receiver_map) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  ElementsKind elements_kind = receiver_map->elements_kind();
  Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();

  __ DispatchMap(r1, r2, receiver_map, stub, DO_SMI_CHECK);

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.  Element stubs are name-independent, hence
  // the empty-string key.
  return GetCode(Code::NORMAL, factory()->empty_string());
}
3290 3290
3291 3291
// Compiles a megamorphic keyed load IC stub: compares the receiver's map
// against each entry of |receiver_maps| in order and jumps to the paired
// handler IC from |handler_ics| on the first match; no match (or a smi
// receiver) goes to the KeyedLoadIC miss builtin.
Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_ics) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;
  __ JumpIfSmi(r1, &miss);

  int receiver_count = receiver_maps->length();
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  for (int current = 0; current < receiver_count; ++current) {
    __ mov(ip, Operand(receiver_maps->at(current)));
    __ cmp(r2, ip);
    // Conditional jump: taken only when the maps matched above.
    __ Jump(handler_ics->at(current), RelocInfo::CODE_TARGET, eq);
  }

  __ bind(&miss);
  Handle<Code> miss_ic = isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET, al);

  // Return the generated code.
  return GetCode(Code::NORMAL, factory()->empty_string(), MEGAMORPHIC);
}
3318 3318
3319 3319
Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
                                                       int index,
                                                       Handle<Map> transition,
                                                       Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : name
  //  -- r2    : receiver
  //  -- lr    : return address
  // -----------------------------------
  // Keyed store specialized for a fixed property |name|: verifies the key
  // still equals |name|, then stores into field |index| (optionally
  // installing |transition| as the new map).
  Label miss;

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_store_field(), 1, r3, r4);

  // Check that the name has not changed.
  __ cmp(r1, Operand(name));
  __ b(ne, &miss);

  // r3 is used as scratch register. r1 and r2 keep their values if a jump to
  // the miss label is generated.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     name,
                     r2, r1, r3, r4,
                     &miss);
  __ bind(&miss);

  // Undo the counter bump before delegating to the generic miss builtin.
  __ DecrementCounter(counters->keyed_store_field(), 1, r3, r4);
  Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code. A non-null |transition| means this stub
  // performs a map transition rather than a plain field write.
  return GetCode(transition.is_null()
                 ? Code::FIELD
                 : Code::MAP_TRANSITION, name);
}
3359 3359
3360 3360
Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
    Handle<Map> receiver_map) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : key
  //  -- r2    : receiver
  //  -- lr    : return address
  //  -- r3    : scratch
  // -----------------------------------
  // Monomorphic keyed element store: dispatch to the elements-kind-specific
  // store stub when the receiver's map matches |receiver_map|, otherwise
  // fall through to the generic KeyedStoreIC miss handler.
  ElementsKind elements_kind = receiver_map->elements_kind();
  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
  Handle<Code> stub =
      KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode();

  // Compares r2's map (r3 as scratch) against |receiver_map|; jumps to
  // |stub| on match, falls through otherwise (smi receivers also miss).
  __ DispatchMap(r2, r3, receiver_map, stub, DO_SMI_CHECK);

  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(Code::NORMAL, factory()->empty_string());
}
3383 3383
3384 3384
Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : key
  //  -- r2    : receiver
  //  -- lr    : return address
  //  -- r3    : scratch
  // -----------------------------------
  // Polymorphic keyed store. The three lists are parallel: for a receiver
  // whose map equals receiver_maps->at(i), control transfers to
  // handler_stubs->at(i). When transitioned_maps->at(i) is non-null, that
  // target map is passed to the handler in r3 so it can transition the
  // receiver's elements kind before storing.
  Label miss;
  __ JumpIfSmi(r2, &miss);

  int receiver_count = receiver_maps->length();
  __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
  for (int i = 0; i < receiver_count; ++i) {
    __ mov(ip, Operand(receiver_maps->at(i)));
    __ cmp(r3, ip);
    if (transitioned_maps->at(i).is_null()) {
      // Plain store: conditional tail call on map equality.
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq);
    } else {
      Label next_map;
      __ b(ne, &next_map);
      // Clobbering r3 (the loaded map) is fine here: we jump away
      // unconditionally right after.
      __ mov(r3, Operand(transitioned_maps->at(i)));
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, al);
      __ bind(&next_map);
    }
  }

  // No map matched (or the receiver was a smi): defer to the miss builtin.
  __ bind(&miss);
  Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET, al);

  // Return the generated code.
  return GetCode(Code::NORMAL, factory()->empty_string(), MEGAMORPHIC);
}
3422 3422
3423 3423
Handle<Code> ConstructStubCompiler::CompileConstructStub(
    Handle<JSFunction> function) {
  // ----------- S t a t e -------------
  //  -- r0    : argc
  //  -- r1    : constructor
  //  -- lr    : return address
  //  -- [sp]  : last argument
  // -----------------------------------
  // Specialized construct stub for |function|: allocates the JSObject inline
  // and initializes its in-object properties from the this.x = ... pattern
  // recorded on the SharedFunctionInfo, bailing out to the generic construct
  // stub whenever any precondition does not hold.
  Label generic_stub_call;

  // Use r7 for holding undefined which is used in several places below.
  __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Check to see whether there are any break points in the function code. If
  // there are jump to the generic constructor stub which calls the actual
  // code for the function thereby hitting the break points.
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
  __ cmp(r2, r7);
  __ b(ne, &generic_stub_call);
#endif

  // Load the initial map and verify that it is in fact a map.
  // r1: constructor function
  // r7: undefined
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
  __ JumpIfSmi(r2, &generic_stub_call);
  __ CompareObjectType(r2, r3, r4, MAP_TYPE);
  __ b(ne, &generic_stub_call);

#ifdef DEBUG
  // Cannot construct functions this way.
  // r0: argc
  // r1: constructor function
  // r2: initial map
  // r7: undefined
  __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
  __ Check(ne, "Function constructed by construct stub.");
#endif

  // Now allocate the JSObject in new space.
  // r0: argc
  // r1: constructor function
  // r2: initial map
  // r7: undefined
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
  __ AllocateInNewSpace(r3, r4, r5, r6, &generic_stub_call, SIZE_IN_WORDS);

  // Allocated the JSObject, now initialize the fields. Map is set to initial
  // map and properties and elements are set to empty fixed array.
  // r0: argc
  // r1: constructor function
  // r2: initial map
  // r3: object size (in words)
  // r4: JSObject (not tagged)
  // r7: undefined
  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
  __ mov(r5, r4);
  // The three header stores below rely on the exact field layout, asserted
  // here; r5 walks forward one word per store via post-indexed addressing.
  ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
  __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
  ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
  ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

  // Calculate the location of the first argument. The stack contains only the
  // argc arguments.
  __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));

  // Fill all the in-object properties with undefined.
  // r0: argc
  // r1: first argument
  // r3: object size (in words)
  // r4: JSObject (not tagged)
  // r5: First in-object property of JSObject (not tagged)
  // r7: undefined
  // Fill the initialized properties with a constant value or a passed argument
  // depending on the this.x = ...; assignment in the function.
  Handle<SharedFunctionInfo> shared(function->shared());
  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
    if (shared->IsThisPropertyAssignmentArgument(i)) {
      Label not_passed, next;
      // Check if the argument assigned to the property is actually passed.
      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
      __ cmp(r0, Operand(arg_number));
      __ b(le, &not_passed);
      // Argument passed - find it on the stack.
      // r1 points one past the last argument, so argument |arg_number| lives
      // at a negative offset from it.
      __ ldr(r2, MemOperand(r1, (arg_number + 1) * -kPointerSize));
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
      __ b(&next);
      __ bind(&not_passed);
      // Set the property to undefined.
      __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
      __ bind(&next);
    } else {
      // Set the property to the constant value.
      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
      __ mov(r2, Operand(constant));
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
    }
  }

  // Fill the unused in-object property fields with undefined.
  ASSERT(function->has_initial_map());
  for (int i = shared->this_property_assignments_count();
       i < function->initial_map()->inobject_properties();
       i++) {
    __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
  }

  // r0: argc
  // r4: JSObject (not tagged)
  // Move argc to r1 and the JSObject to return to r0 and tag it.
  __ mov(r1, r0);
  __ mov(r0, r4);
  __ orr(r0, r0, Operand(kHeapObjectTag));

  // r0: JSObject
  // r1: argc
  // Remove caller arguments and receiver from the stack and return.
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
  __ add(sp, sp, Operand(kPointerSize));
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1, r1, r2);
  __ IncrementCounter(counters->constructed_objects_stub(), 1, r1, r2);
  __ Jump(lr);

  // Jump to the generic stub in case the specialized code cannot handle the
  // construction.
  __ bind(&generic_stub_call);
  Handle<Code> code = masm()->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(code, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode();
}
3561 3561
3562 3562
3563 #undef __ 3563 #undef __
3564 #define __ ACCESS_MASM(masm) 3564 #define __ ACCESS_MASM(masm)
3565 3565
3566 3566
void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  // Keyed load from a receiver whose elements are a NumberDictionary
  // (slow-mode elements). Non-smi keys are punted to the generic IC;
  // dictionary probe failures go to the slow (runtime) builtin.
  Label slow, miss_force_generic;

  Register key = r0;
  Register receiver = r1;

  __ JumpIfNotSmi(key, &miss_force_generic);
  // r2 = untagged integer value of the smi key.
  __ mov(r2, Operand(key, ASR, kSmiTagSize));
  __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
  // On success the element is left in r0 and we return; on probe failure
  // control transfers to &slow.
  __ LoadFromNumberDictionary(&slow, r4, key, r0, r2, r3, r5);
  __ Ret();

  __ bind(&slow);
  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),
      1, r2, r3);

  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Handle<Code> slow_ic =
      masm->isolate()->builtins()->KeyedLoadIC_Slow();
  __ Jump(slow_ic, RelocInfo::CODE_TARGET);

  // Miss case, call the runtime.
  __ bind(&miss_force_generic);

  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------

  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
}
3612 3612
3613 3613
3614 static bool IsElementTypeSigned(ElementsKind elements_kind) { 3614 static bool IsElementTypeSigned(ElementsKind elements_kind) {
3615 switch (elements_kind) { 3615 switch (elements_kind) {
3616 case EXTERNAL_BYTE_ELEMENTS: 3616 case EXTERNAL_BYTE_ELEMENTS:
3617 case EXTERNAL_SHORT_ELEMENTS: 3617 case EXTERNAL_SHORT_ELEMENTS:
3618 case EXTERNAL_INT_ELEMENTS: 3618 case EXTERNAL_INT_ELEMENTS:
3619 return true; 3619 return true;
3620 3620
3621 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: 3621 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3622 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: 3622 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3623 case EXTERNAL_UNSIGNED_INT_ELEMENTS: 3623 case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3624 case EXTERNAL_PIXEL_ELEMENTS: 3624 case EXTERNAL_PIXEL_ELEMENTS:
3625 return false; 3625 return false;
3626 3626
3627 case EXTERNAL_FLOAT_ELEMENTS: 3627 case EXTERNAL_FLOAT_ELEMENTS:
3628 case EXTERNAL_DOUBLE_ELEMENTS: 3628 case EXTERNAL_DOUBLE_ELEMENTS:
3629 case FAST_ELEMENTS: 3629 case FAST_ELEMENTS:
3630 case FAST_SMI_ELEMENTS: 3630 case FAST_SMI_ELEMENTS:
3631 case FAST_DOUBLE_ELEMENTS: 3631 case FAST_DOUBLE_ELEMENTS:
3632 case FAST_HOLEY_ELEMENTS: 3632 case FAST_HOLEY_ELEMENTS:
3633 case FAST_HOLEY_SMI_ELEMENTS: 3633 case FAST_HOLEY_SMI_ELEMENTS:
3634 case FAST_HOLEY_DOUBLE_ELEMENTS: 3634 case FAST_HOLEY_DOUBLE_ELEMENTS:
3635 case DICTIONARY_ELEMENTS: 3635 case DICTIONARY_ELEMENTS:
3636 case NON_STRICT_ARGUMENTS_ELEMENTS: 3636 case NON_STRICT_ARGUMENTS_ELEMENTS:
3637 UNREACHABLE(); 3637 UNREACHABLE();
3638 return false; 3638 return false;
3639 } 3639 }
3640 return false; 3640 return false;
3641 } 3641 }
3642 3642
3643 3643
// Ensures |key| holds a smi on exit, jumping to |fail| otherwise. With VFP2
// available, a heap number whose value is an exact, smi-range integer is
// converted in place to a smi; without VFP2 only actual smis are accepted.
// |scratch0|, |scratch1| and |double_scratch0| are clobbered in the VFP2 path.
static void GenerateSmiKeyCheck(MacroAssembler* masm,
                                Register key,
                                Register scratch0,
                                Register scratch1,
                                DwVfpRegister double_scratch0,
                                Label* fail) {
  if (CpuFeatures::IsSupported(VFP2)) {
    CpuFeatures::Scope scope(VFP2);
    Label key_ok;
    // Check for smi or a smi inside a heap number.  We convert the heap
    // number and check if the conversion is exact and fits into the smi
    // range.
    __ JumpIfSmi(key, &key_ok);
    __ CheckMap(key,
                scratch0,
                Heap::kHeapNumberMapRootIndex,
                fail,
                DONT_DO_SMI_CHECK);
    // Untag the pointer so vldr can address the double payload directly.
    __ sub(ip, key, Operand(kHeapObjectTag));
    __ vldr(double_scratch0, ip, HeapNumber::kValueOffset);
    // Truncate toward zero; ne is set if the conversion was inexact.
    __ EmitVFPTruncate(kRoundToZero,
                       double_scratch0.low(),
                       double_scratch0,
                       scratch0,
                       scratch1,
                       kCheckForInexactConversion);
    __ b(ne, fail);
    __ vmov(scratch0, double_scratch0.low());
    // Fails if the integer does not fit in the smi range.
    __ TrySmiTag(scratch0, fail, scratch1);
    __ mov(key, scratch0);
    __ bind(&key_ok);
  } else {
    // Check that the key is a smi.
    __ JumpIfNotSmi(key, fail);
  }
}
3680 3680
3681 3681
3682 void KeyedLoadStubCompiler::GenerateLoadExternalArray( 3682 void KeyedLoadStubCompiler::GenerateLoadExternalArray(
3683 MacroAssembler* masm, 3683 MacroAssembler* masm,
3684 ElementsKind elements_kind) { 3684 ElementsKind elements_kind) {
3685 // ---------- S t a t e -------------- 3685 // ---------- S t a t e --------------
3686 // -- lr : return address 3686 // -- lr : return address
3687 // -- r0 : key 3687 // -- r0 : key
3688 // -- r1 : receiver 3688 // -- r1 : receiver
3689 // ----------------------------------- 3689 // -----------------------------------
3690 Label miss_force_generic, slow, failed_allocation; 3690 Label miss_force_generic, slow, failed_allocation;
3691 3691
3692 Register key = r0; 3692 Register key = r0;
3693 Register receiver = r1; 3693 Register receiver = r1;
3694 3694
3695 // This stub is meant to be tail-jumped to, the receiver must already 3695 // This stub is meant to be tail-jumped to, the receiver must already
3696 // have been verified by the caller to not be a smi. 3696 // have been verified by the caller to not be a smi.
3697 3697
3698 // Check that the key is a smi or a heap number convertible to a smi. 3698 // Check that the key is a smi or a heap number convertible to a smi.
3699 GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic); 3699 GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic);
3700 3700
3701 __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset)); 3701 __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3702 // r3: elements array 3702 // r3: elements array
3703 3703
3704 // Check that the index is in range. 3704 // Check that the index is in range.
3705 __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset)); 3705 __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
3706 __ cmp(key, ip); 3706 __ cmp(key, ip);
3707 // Unsigned comparison catches both negative and too-large values. 3707 // Unsigned comparison catches both negative and too-large values.
3708 __ b(hs, &miss_force_generic); 3708 __ b(hs, &miss_force_generic);
3709 3709
3710 __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset)); 3710 __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
3711 // r3: base pointer of external storage 3711 // r3: base pointer of external storage
3712 3712
3713 // We are not untagging smi key and instead work with it 3713 // We are not untagging smi key and instead work with it
3714 // as if it was premultiplied by 2. 3714 // as if it was premultiplied by 2.
3715 STATIC_ASSERT((kSmiTag == 0) && (kSmiTagSize == 1)); 3715 STATIC_ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
3716 3716
3717 Register value = r2; 3717 Register value = r2;
3718 switch (elements_kind) { 3718 switch (elements_kind) {
3719 case EXTERNAL_BYTE_ELEMENTS: 3719 case EXTERNAL_BYTE_ELEMENTS:
3720 __ ldrsb(value, MemOperand(r3, key, LSR, 1)); 3720 __ ldrsb(value, MemOperand(r3, key, LSR, 1));
3721 break; 3721 break;
3722 case EXTERNAL_PIXEL_ELEMENTS: 3722 case EXTERNAL_PIXEL_ELEMENTS:
3723 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: 3723 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3724 __ ldrb(value, MemOperand(r3, key, LSR, 1)); 3724 __ ldrb(value, MemOperand(r3, key, LSR, 1));
3725 break; 3725 break;
3726 case EXTERNAL_SHORT_ELEMENTS: 3726 case EXTERNAL_SHORT_ELEMENTS:
3727 __ ldrsh(value, MemOperand(r3, key, LSL, 0)); 3727 __ ldrsh(value, MemOperand(r3, key, LSL, 0));
3728 break; 3728 break;
3729 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: 3729 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3730 __ ldrh(value, MemOperand(r3, key, LSL, 0)); 3730 __ ldrh(value, MemOperand(r3, key, LSL, 0));
3731 break; 3731 break;
3732 case EXTERNAL_INT_ELEMENTS: 3732 case EXTERNAL_INT_ELEMENTS:
3733 case EXTERNAL_UNSIGNED_INT_ELEMENTS: 3733 case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3734 __ ldr(value, MemOperand(r3, key, LSL, 1)); 3734 __ ldr(value, MemOperand(r3, key, LSL, 1));
3735 break; 3735 break;
3736 case EXTERNAL_FLOAT_ELEMENTS: 3736 case EXTERNAL_FLOAT_ELEMENTS:
3737 if (CpuFeatures::IsSupported(VFP2)) { 3737 if (CpuFeatures::IsSupported(VFP2)) {
3738 CpuFeatures::Scope scope(VFP2); 3738 CpuFeatures::Scope scope(VFP2);
3739 __ add(r2, r3, Operand(key, LSL, 1)); 3739 __ add(r2, r3, Operand(key, LSL, 1));
3740 __ vldr(s0, r2, 0); 3740 __ vldr(s0, r2, 0);
3741 } else { 3741 } else {
3742 __ ldr(value, MemOperand(r3, key, LSL, 1)); 3742 __ ldr(value, MemOperand(r3, key, LSL, 1));
3743 } 3743 }
3744 break; 3744 break;
3745 case EXTERNAL_DOUBLE_ELEMENTS: 3745 case EXTERNAL_DOUBLE_ELEMENTS:
3746 if (CpuFeatures::IsSupported(VFP2)) { 3746 if (CpuFeatures::IsSupported(VFP2)) {
3747 CpuFeatures::Scope scope(VFP2); 3747 CpuFeatures::Scope scope(VFP2);
3748 __ add(r2, r3, Operand(key, LSL, 2)); 3748 __ add(r2, r3, Operand(key, LSL, 2));
3749 __ vldr(d0, r2, 0); 3749 __ vldr(d0, r2, 0);
3750 } else { 3750 } else {
3751 __ add(r4, r3, Operand(key, LSL, 2)); 3751 __ add(r4, r3, Operand(key, LSL, 2));
3752 // r4: pointer to the beginning of the double we want to load. 3752 // r4: pointer to the beginning of the double we want to load.
3753 __ ldr(r2, MemOperand(r4, 0)); 3753 __ ldr(r2, MemOperand(r4, 0));
3754 __ ldr(r3, MemOperand(r4, Register::kSizeInBytes)); 3754 __ ldr(r3, MemOperand(r4, Register::kSizeInBytes));
3755 } 3755 }
3756 break; 3756 break;
3757 case FAST_ELEMENTS: 3757 case FAST_ELEMENTS:
3758 case FAST_SMI_ELEMENTS: 3758 case FAST_SMI_ELEMENTS:
3759 case FAST_DOUBLE_ELEMENTS: 3759 case FAST_DOUBLE_ELEMENTS:
3760 case FAST_HOLEY_ELEMENTS: 3760 case FAST_HOLEY_ELEMENTS:
3761 case FAST_HOLEY_SMI_ELEMENTS: 3761 case FAST_HOLEY_SMI_ELEMENTS:
3762 case FAST_HOLEY_DOUBLE_ELEMENTS: 3762 case FAST_HOLEY_DOUBLE_ELEMENTS:
3763 case DICTIONARY_ELEMENTS: 3763 case DICTIONARY_ELEMENTS:
3764 case NON_STRICT_ARGUMENTS_ELEMENTS: 3764 case NON_STRICT_ARGUMENTS_ELEMENTS:
3765 UNREACHABLE(); 3765 UNREACHABLE();
3766 break; 3766 break;
3767 } 3767 }
3768 3768
3769 // For integer array types: 3769 // For integer array types:
3770 // r2: value 3770 // r2: value
3771 // For float array type: 3771 // For float array type:
3772 // s0: value (if VFP2 is supported) 3772 // s0: value (if VFP2 is supported)
3773 // r2: value (if VFP2 is not supported) 3773 // r2: value (if VFP2 is not supported)
3774 // For double array type: 3774 // For double array type:
3775 // d0: value (if VFP2 is supported) 3775 // d0: value (if VFP2 is supported)
3776 // r2/r3: value (if VFP2 is not supported) 3776 // r2/r3: value (if VFP2 is not supported)
3777 3777
3778 if (elements_kind == EXTERNAL_INT_ELEMENTS) { 3778 if (elements_kind == EXTERNAL_INT_ELEMENTS) {
3779 // For the Int and UnsignedInt array types, we need to see whether 3779 // For the Int and UnsignedInt array types, we need to see whether
3780 // the value can be represented in a Smi. If not, we need to convert 3780 // the value can be represented in a Smi. If not, we need to convert
3781 // it to a HeapNumber. 3781 // it to a HeapNumber.
3782 Label box_int; 3782 Label box_int;
3783 __ cmp(value, Operand(0xC0000000)); 3783 __ cmp(value, Operand(0xC0000000));
3784 __ b(mi, &box_int); 3784 __ b(mi, &box_int);
3785 // Tag integer as smi and return it. 3785 // Tag integer as smi and return it.
3786 __ mov(r0, Operand(value, LSL, kSmiTagSize)); 3786 __ mov(r0, Operand(value, LSL, kSmiTagSize));
3787 __ Ret(); 3787 __ Ret();
3788 3788
3789 __ bind(&box_int); 3789 __ bind(&box_int);
3790 // Allocate a HeapNumber for the result and perform int-to-double 3790 // Allocate a HeapNumber for the result and perform int-to-double
3791 // conversion. Don't touch r0 or r1 as they are needed if allocation 3791 // conversion. Don't touch r0 or r1 as they are needed if allocation
3792 // fails. 3792 // fails.
3793 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); 3793 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3794 __ AllocateHeapNumber(r5, r3, r4, r6, &slow); 3794 __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
3795 // Now we can use r0 for the result as key is not needed any more. 3795 // Now we can use r0 for the result as key is not needed any more.
3796 __ mov(r0, r5); 3796 __ mov(r0, r5);
3797 3797
3798 if (CpuFeatures::IsSupported(VFP2)) { 3798 if (CpuFeatures::IsSupported(VFP2)) {
3799 CpuFeatures::Scope scope(VFP2); 3799 CpuFeatures::Scope scope(VFP2);
3800 __ vmov(s0, value); 3800 __ vmov(s0, value);
3801 __ vcvt_f64_s32(d0, s0); 3801 __ vcvt_f64_s32(d0, s0);
3802 __ sub(r3, r0, Operand(kHeapObjectTag)); 3802 __ sub(r3, r0, Operand(kHeapObjectTag));
3803 __ vstr(d0, r3, HeapNumber::kValueOffset); 3803 __ vstr(d0, r3, HeapNumber::kValueOffset);
3804 __ Ret(); 3804 __ Ret();
3805 } else { 3805 } else {
3806 Register dst1 = r1; 3806 Register dst1 = r1;
3807 Register dst2 = r3; 3807 Register dst2 = r3;
3808 FloatingPointHelper::Destination dest = 3808 FloatingPointHelper::Destination dest =
3809 FloatingPointHelper::kCoreRegisters; 3809 FloatingPointHelper::kCoreRegisters;
3810 FloatingPointHelper::ConvertIntToDouble(masm, 3810 FloatingPointHelper::ConvertIntToDouble(masm,
3811 value, 3811 value,
3812 dest, 3812 dest,
3813 d0, 3813 d0,
3814 dst1, 3814 dst1,
3815 dst2, 3815 dst2,
3816 r9, 3816 r9,
3817 s0); 3817 s0);
3818 __ str(dst1, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); 3818 __ str(dst1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
3819 __ str(dst2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); 3819 __ str(dst2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3820 __ Ret(); 3820 __ Ret();
3821 } 3821 }
3822 } else if (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) { 3822 } else if (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
3823 // The test is different for unsigned int values. Since we need 3823 // The test is different for unsigned int values. Since we need
3824 // the value to be in the range of a positive smi, we can't 3824 // the value to be in the range of a positive smi, we can't
3825 // handle either of the top two bits being set in the value. 3825 // handle either of the top two bits being set in the value.
3826 if (CpuFeatures::IsSupported(VFP2)) { 3826 if (CpuFeatures::IsSupported(VFP2)) {
3827 CpuFeatures::Scope scope(VFP2); 3827 CpuFeatures::Scope scope(VFP2);
3828 Label box_int, done; 3828 Label box_int, done;
3829 __ tst(value, Operand(0xC0000000)); 3829 __ tst(value, Operand(0xC0000000));
3830 __ b(ne, &box_int); 3830 __ b(ne, &box_int);
3831 // Tag integer as smi and return it. 3831 // Tag integer as smi and return it.
3832 __ mov(r0, Operand(value, LSL, kSmiTagSize)); 3832 __ mov(r0, Operand(value, LSL, kSmiTagSize));
3833 __ Ret(); 3833 __ Ret();
3834 3834
3835 __ bind(&box_int); 3835 __ bind(&box_int);
3836 __ vmov(s0, value); 3836 __ vmov(s0, value);
3837 // Allocate a HeapNumber for the result and perform int-to-double 3837 // Allocate a HeapNumber for the result and perform int-to-double
3838 // conversion. Don't use r0 and r1 as AllocateHeapNumber clobbers all 3838 // conversion. Don't use r0 and r1 as AllocateHeapNumber clobbers all
3839 // registers - also when jumping due to exhausted young space. 3839 // registers - also when jumping due to exhausted young space.
3840 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); 3840 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3841 __ AllocateHeapNumber(r2, r3, r4, r6, &slow); 3841 __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
3842 3842
3843 __ vcvt_f64_u32(d0, s0); 3843 __ vcvt_f64_u32(d0, s0);
3844 __ sub(r1, r2, Operand(kHeapObjectTag)); 3844 __ sub(r1, r2, Operand(kHeapObjectTag));
3845 __ vstr(d0, r1, HeapNumber::kValueOffset); 3845 __ vstr(d0, r1, HeapNumber::kValueOffset);
3846 3846
3847 __ mov(r0, r2); 3847 __ mov(r0, r2);
3848 __ Ret(); 3848 __ Ret();
3849 } else { 3849 } else {
3850 // Check whether unsigned integer fits into smi. 3850 // Check whether unsigned integer fits into smi.
3851 Label box_int_0, box_int_1, done; 3851 Label box_int_0, box_int_1, done;
3852 __ tst(value, Operand(0x80000000)); 3852 __ tst(value, Operand(0x80000000));
3853 __ b(ne, &box_int_0); 3853 __ b(ne, &box_int_0);
3854 __ tst(value, Operand(0x40000000)); 3854 __ tst(value, Operand(0x40000000));
3855 __ b(ne, &box_int_1); 3855 __ b(ne, &box_int_1);
3856 // Tag integer as smi and return it. 3856 // Tag integer as smi and return it.
3857 __ mov(r0, Operand(value, LSL, kSmiTagSize)); 3857 __ mov(r0, Operand(value, LSL, kSmiTagSize));
3858 __ Ret(); 3858 __ Ret();
3859 3859
3860 Register hiword = value; // r2. 3860 Register hiword = value; // r2.
3861 Register loword = r3; 3861 Register loword = r3;
3862 3862
3863 __ bind(&box_int_0); 3863 __ bind(&box_int_0);
3864 // Integer does not have leading zeros. 3864 // Integer does not have leading zeros.
3865 GenerateUInt2Double(masm, hiword, loword, r4, 0); 3865 GenerateUInt2Double(masm, hiword, loword, r4, 0);
3866 __ b(&done); 3866 __ b(&done);
3867 3867
3868 __ bind(&box_int_1); 3868 __ bind(&box_int_1);
3869 // Integer has one leading zero. 3869 // Integer has one leading zero.
3870 GenerateUInt2Double(masm, hiword, loword, r4, 1); 3870 GenerateUInt2Double(masm, hiword, loword, r4, 1);
3871 3871
3872 3872
3873 __ bind(&done); 3873 __ bind(&done);
3874 // Integer was converted to double in registers hiword:loword. 3874 // Integer was converted to double in registers hiword:loword.
3875 // Wrap it into a HeapNumber. Don't use r0 and r1 as AllocateHeapNumber 3875 // Wrap it into a HeapNumber. Don't use r0 and r1 as AllocateHeapNumber
3876 // clobbers all registers - also when jumping due to exhausted young 3876 // clobbers all registers - also when jumping due to exhausted young
3877 // space. 3877 // space.
3878 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); 3878 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3879 __ AllocateHeapNumber(r4, r5, r7, r6, &slow); 3879 __ AllocateHeapNumber(r4, r5, r7, r6, &slow);
3880 3880
3881 __ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset)); 3881 __ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset));
3882 __ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset)); 3882 __ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset));
3883 3883
3884 __ mov(r0, r4); 3884 __ mov(r0, r4);
3885 __ Ret(); 3885 __ Ret();
3886 } 3886 }
3887 } else if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) { 3887 } else if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3888 // For the floating-point array type, we need to always allocate a 3888 // For the floating-point array type, we need to always allocate a
3889 // HeapNumber. 3889 // HeapNumber.
3890 if (CpuFeatures::IsSupported(VFP2)) { 3890 if (CpuFeatures::IsSupported(VFP2)) {
3891 CpuFeatures::Scope scope(VFP2); 3891 CpuFeatures::Scope scope(VFP2);
3892 // Allocate a HeapNumber for the result. Don't use r0 and r1 as 3892 // Allocate a HeapNumber for the result. Don't use r0 and r1 as
3893 // AllocateHeapNumber clobbers all registers - also when jumping due to 3893 // AllocateHeapNumber clobbers all registers - also when jumping due to
3894 // exhausted young space. 3894 // exhausted young space.
3895 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); 3895 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3896 __ AllocateHeapNumber(r2, r3, r4, r6, &slow); 3896 __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
3897 __ vcvt_f64_f32(d0, s0); 3897 __ vcvt_f64_f32(d0, s0);
3898 __ sub(r1, r2, Operand(kHeapObjectTag)); 3898 __ sub(r1, r2, Operand(kHeapObjectTag));
3899 __ vstr(d0, r1, HeapNumber::kValueOffset); 3899 __ vstr(d0, r1, HeapNumber::kValueOffset);
3900 3900
3901 __ mov(r0, r2); 3901 __ mov(r0, r2);
3902 __ Ret(); 3902 __ Ret();
3903 } else { 3903 } else {
3904 // Allocate a HeapNumber for the result. Don't use r0 and r1 as 3904 // Allocate a HeapNumber for the result. Don't use r0 and r1 as
3905 // AllocateHeapNumber clobbers all registers - also when jumping due to 3905 // AllocateHeapNumber clobbers all registers - also when jumping due to
3906 // exhausted young space. 3906 // exhausted young space.
3907 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); 3907 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3908 __ AllocateHeapNumber(r3, r4, r5, r6, &slow); 3908 __ AllocateHeapNumber(r3, r4, r5, r6, &slow);
3909 // VFP is not available, do manual single to double conversion. 3909 // VFP is not available, do manual single to double conversion.
3910 3910
3911 // r2: floating point value (binary32) 3911 // r2: floating point value (binary32)
3912 // r3: heap number for result 3912 // r3: heap number for result
3913 3913
3914 // Extract mantissa to r0. OK to clobber r0 now as there are no jumps to 3914 // Extract mantissa to r0. OK to clobber r0 now as there are no jumps to
3915 // the slow case from here. 3915 // the slow case from here.
3916 __ and_(r0, value, Operand(kBinary32MantissaMask)); 3916 __ and_(r0, value, Operand(kBinary32MantissaMask));
3917 3917
3918 // Extract exponent to r1. OK to clobber r1 now as there are no jumps to 3918 // Extract exponent to r1. OK to clobber r1 now as there are no jumps to
3919 // the slow case from here. 3919 // the slow case from here.
3920 __ mov(r1, Operand(value, LSR, kBinary32MantissaBits)); 3920 __ mov(r1, Operand(value, LSR, kBinary32MantissaBits));
3921 __ and_(r1, r1, Operand(kBinary32ExponentMask >> kBinary32MantissaBits)); 3921 __ and_(r1, r1, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
3922 3922
3923 Label exponent_rebiased; 3923 Label exponent_rebiased;
3924 __ teq(r1, Operand(0x00)); 3924 __ teq(r1, Operand(0x00));
3925 __ b(eq, &exponent_rebiased); 3925 __ b(eq, &exponent_rebiased);
3926 3926
3927 __ teq(r1, Operand(0xff)); 3927 __ teq(r1, Operand(0xff));
3928 __ mov(r1, Operand(0x7ff), LeaveCC, eq); 3928 __ mov(r1, Operand(0x7ff), LeaveCC, eq);
3929 __ b(eq, &exponent_rebiased); 3929 __ b(eq, &exponent_rebiased);
3930 3930
3931 // Rebias exponent. 3931 // Rebias exponent.
3932 __ add(r1, 3932 __ add(r1,
3933 r1, 3933 r1,
3934 Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias)); 3934 Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));
3935 3935
3936 __ bind(&exponent_rebiased); 3936 __ bind(&exponent_rebiased);
3937 __ and_(r2, value, Operand(kBinary32SignMask)); 3937 __ and_(r2, value, Operand(kBinary32SignMask));
3938 value = no_reg; 3938 value = no_reg;
3939 __ orr(r2, r2, Operand(r1, LSL, HeapNumber::kMantissaBitsInTopWord)); 3939 __ orr(r2, r2, Operand(r1, LSL, HeapNumber::kMantissaBitsInTopWord));
3940 3940
3941 // Shift mantissa. 3941 // Shift mantissa.
3942 static const int kMantissaShiftForHiWord = 3942 static const int kMantissaShiftForHiWord =
3943 kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord; 3943 kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3944 3944
3945 static const int kMantissaShiftForLoWord = 3945 static const int kMantissaShiftForLoWord =
3946 kBitsPerInt - kMantissaShiftForHiWord; 3946 kBitsPerInt - kMantissaShiftForHiWord;
3947 3947
3948 __ orr(r2, r2, Operand(r0, LSR, kMantissaShiftForHiWord)); 3948 __ orr(r2, r2, Operand(r0, LSR, kMantissaShiftForHiWord));
3949 __ mov(r0, Operand(r0, LSL, kMantissaShiftForLoWord)); 3949 __ mov(r0, Operand(r0, LSL, kMantissaShiftForLoWord));
3950 3950
3951 __ str(r2, FieldMemOperand(r3, HeapNumber::kExponentOffset)); 3951 __ str(r2, FieldMemOperand(r3, HeapNumber::kExponentOffset));
3952 __ str(r0, FieldMemOperand(r3, HeapNumber::kMantissaOffset)); 3952 __ str(r0, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
3953 3953
3954 __ mov(r0, r3); 3954 __ mov(r0, r3);
3955 __ Ret(); 3955 __ Ret();
3956 } 3956 }
3957 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) { 3957 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3958 if (CpuFeatures::IsSupported(VFP2)) { 3958 if (CpuFeatures::IsSupported(VFP2)) {
3959 CpuFeatures::Scope scope(VFP2); 3959 CpuFeatures::Scope scope(VFP2);
3960 // Allocate a HeapNumber for the result. Don't use r0 and r1 as 3960 // Allocate a HeapNumber for the result. Don't use r0 and r1 as
3961 // AllocateHeapNumber clobbers all registers - also when jumping due to 3961 // AllocateHeapNumber clobbers all registers - also when jumping due to
3962 // exhausted young space. 3962 // exhausted young space.
3963 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); 3963 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3964 __ AllocateHeapNumber(r2, r3, r4, r6, &slow); 3964 __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
3965 __ sub(r1, r2, Operand(kHeapObjectTag)); 3965 __ sub(r1, r2, Operand(kHeapObjectTag));
3966 __ vstr(d0, r1, HeapNumber::kValueOffset); 3966 __ vstr(d0, r1, HeapNumber::kValueOffset);
3967 3967
3968 __ mov(r0, r2); 3968 __ mov(r0, r2);
3969 __ Ret(); 3969 __ Ret();
3970 } else { 3970 } else {
3971 // Allocate a HeapNumber for the result. Don't use r0 and r1 as 3971 // Allocate a HeapNumber for the result. Don't use r0 and r1 as
3972 // AllocateHeapNumber clobbers all registers - also when jumping due to 3972 // AllocateHeapNumber clobbers all registers - also when jumping due to
3973 // exhausted young space. 3973 // exhausted young space.
3974 __ LoadRoot(r7, Heap::kHeapNumberMapRootIndex); 3974 __ LoadRoot(r7, Heap::kHeapNumberMapRootIndex);
3975 __ AllocateHeapNumber(r4, r5, r6, r7, &slow); 3975 __ AllocateHeapNumber(r4, r5, r6, r7, &slow);
3976 3976
3977 __ str(r2, FieldMemOperand(r4, HeapNumber::kMantissaOffset)); 3977 __ str(r2, FieldMemOperand(r4, HeapNumber::kMantissaOffset));
3978 __ str(r3, FieldMemOperand(r4, HeapNumber::kExponentOffset)); 3978 __ str(r3, FieldMemOperand(r4, HeapNumber::kExponentOffset));
3979 __ mov(r0, r4); 3979 __ mov(r0, r4);
3980 __ Ret(); 3980 __ Ret();
3981 } 3981 }
3982 3982
3983 } else { 3983 } else {
3984 // Tag integer as smi and return it. 3984 // Tag integer as smi and return it.
3985 __ mov(r0, Operand(value, LSL, kSmiTagSize)); 3985 __ mov(r0, Operand(value, LSL, kSmiTagSize));
3986 __ Ret(); 3986 __ Ret();
3987 } 3987 }
3988 3988
3989 // Slow case, key and receiver still in r0 and r1. 3989 // Slow case, key and receiver still in r0 and r1.
3990 __ bind(&slow); 3990 __ bind(&slow);
3991 __ IncrementCounter( 3991 __ IncrementCounter(
3992 masm->isolate()->counters()->keyed_load_external_array_slow(), 3992 masm->isolate()->counters()->keyed_load_external_array_slow(),
3993 1, r2, r3); 3993 1, r2, r3);
3994 3994
3995 // ---------- S t a t e -------------- 3995 // ---------- S t a t e --------------
3996 // -- lr : return address 3996 // -- lr : return address
3997 // -- r0 : key 3997 // -- r0 : key
3998 // -- r1 : receiver 3998 // -- r1 : receiver
3999 // ----------------------------------- 3999 // -----------------------------------
4000 4000
4001 __ Push(r1, r0); 4001 __ Push(r1, r0);
4002 4002
4003 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); 4003 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
4004 4004
4005 __ bind(&miss_force_generic); 4005 __ bind(&miss_force_generic);
4006 Handle<Code> stub = 4006 Handle<Code> stub =
4007 masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric(); 4007 masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
4008 __ Jump(stub, RelocInfo::CODE_TARGET); 4008 __ Jump(stub, RelocInfo::CODE_TARGET);
4009 } 4009 }
4010 4010
4011 4011
4012 void KeyedStoreStubCompiler::GenerateStoreExternalArray( 4012 void KeyedStoreStubCompiler::GenerateStoreExternalArray(
4013 MacroAssembler* masm, 4013 MacroAssembler* masm,
4014 ElementsKind elements_kind) { 4014 ElementsKind elements_kind) {
4015 // ---------- S t a t e -------------- 4015 // ---------- S t a t e --------------
4016 // -- r0 : value 4016 // -- r0 : value
4017 // -- r1 : key 4017 // -- r1 : key
4018 // -- r2 : receiver 4018 // -- r2 : receiver
4019 // -- lr : return address 4019 // -- lr : return address
4020 // ----------------------------------- 4020 // -----------------------------------
4021 Label slow, check_heap_number, miss_force_generic; 4021 Label slow, check_heap_number, miss_force_generic;
4022 4022
4023 // Register usage. 4023 // Register usage.
4024 Register value = r0; 4024 Register value = r0;
4025 Register key = r1; 4025 Register key = r1;
4026 Register receiver = r2; 4026 Register receiver = r2;
4027 // r3 mostly holds the elements array or the destination external array. 4027 // r3 mostly holds the elements array or the destination external array.
4028 4028
4029 // This stub is meant to be tail-jumped to, the receiver must already 4029 // This stub is meant to be tail-jumped to, the receiver must already
4030 // have been verified by the caller to not be a smi. 4030 // have been verified by the caller to not be a smi.
4031 4031
4032 // Check that the key is a smi or a heap number convertible to a smi. 4032 // Check that the key is a smi or a heap number convertible to a smi.
4033 GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic); 4033 GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic);
4034 4034
4035 __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset)); 4035 __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
4036 4036
4037 // Check that the index is in range 4037 // Check that the index is in range
4038 __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset)); 4038 __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
4039 __ cmp(key, ip); 4039 __ cmp(key, ip);
4040 // Unsigned comparison catches both negative and too-large values. 4040 // Unsigned comparison catches both negative and too-large values.
4041 __ b(hs, &miss_force_generic); 4041 __ b(hs, &miss_force_generic);
4042 4042
4043 // Handle both smis and HeapNumbers in the fast path. Go to the 4043 // Handle both smis and HeapNumbers in the fast path. Go to the
4044 // runtime for all other kinds of values. 4044 // runtime for all other kinds of values.
4045 // r3: external array. 4045 // r3: external array.
4046 if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) { 4046 if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
4047 // Double to pixel conversion is only implemented in the runtime for now. 4047 // Double to pixel conversion is only implemented in the runtime for now.
4048 __ JumpIfNotSmi(value, &slow); 4048 __ JumpIfNotSmi(value, &slow);
4049 } else { 4049 } else {
4050 __ JumpIfNotSmi(value, &check_heap_number); 4050 __ JumpIfNotSmi(value, &check_heap_number);
4051 } 4051 }
4052 __ SmiUntag(r5, value); 4052 __ SmiUntag(r5, value);
4053 __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset)); 4053 __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
4054 4054
4055 // r3: base pointer of external storage. 4055 // r3: base pointer of external storage.
4056 // r5: value (integer). 4056 // r5: value (integer).
4057 switch (elements_kind) { 4057 switch (elements_kind) {
4058 case EXTERNAL_PIXEL_ELEMENTS: 4058 case EXTERNAL_PIXEL_ELEMENTS:
4059 // Clamp the value to [0..255]. 4059 // Clamp the value to [0..255].
4060 __ Usat(r5, 8, Operand(r5)); 4060 __ Usat(r5, 8, Operand(r5));
4061 __ strb(r5, MemOperand(r3, key, LSR, 1)); 4061 __ strb(r5, MemOperand(r3, key, LSR, 1));
4062 break; 4062 break;
4063 case EXTERNAL_BYTE_ELEMENTS: 4063 case EXTERNAL_BYTE_ELEMENTS:
4064 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: 4064 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
4065 __ strb(r5, MemOperand(r3, key, LSR, 1)); 4065 __ strb(r5, MemOperand(r3, key, LSR, 1));
4066 break; 4066 break;
4067 case EXTERNAL_SHORT_ELEMENTS: 4067 case EXTERNAL_SHORT_ELEMENTS:
4068 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: 4068 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
4069 __ strh(r5, MemOperand(r3, key, LSL, 0)); 4069 __ strh(r5, MemOperand(r3, key, LSL, 0));
4070 break; 4070 break;
4071 case EXTERNAL_INT_ELEMENTS: 4071 case EXTERNAL_INT_ELEMENTS:
4072 case EXTERNAL_UNSIGNED_INT_ELEMENTS: 4072 case EXTERNAL_UNSIGNED_INT_ELEMENTS:
4073 __ str(r5, MemOperand(r3, key, LSL, 1)); 4073 __ str(r5, MemOperand(r3, key, LSL, 1));
4074 break; 4074 break;
4075 case EXTERNAL_FLOAT_ELEMENTS: 4075 case EXTERNAL_FLOAT_ELEMENTS:
4076 // Perform int-to-float conversion and store to memory. 4076 // Perform int-to-float conversion and store to memory.
4077 __ SmiUntag(r4, key); 4077 __ SmiUntag(r4, key);
4078 StoreIntAsFloat(masm, r3, r4, r5, r6, r7, r9); 4078 StoreIntAsFloat(masm, r3, r4, r5, r6, r7, r9);
4079 break; 4079 break;
4080 case EXTERNAL_DOUBLE_ELEMENTS: 4080 case EXTERNAL_DOUBLE_ELEMENTS:
4081 __ add(r3, r3, Operand(key, LSL, 2)); 4081 __ add(r3, r3, Operand(key, LSL, 2));
4082 // r3: effective address of the double element 4082 // r3: effective address of the double element
4083 FloatingPointHelper::Destination destination; 4083 FloatingPointHelper::Destination destination;
4084 if (CpuFeatures::IsSupported(VFP2)) { 4084 if (CpuFeatures::IsSupported(VFP2)) {
4085 destination = FloatingPointHelper::kVFPRegisters; 4085 destination = FloatingPointHelper::kVFPRegisters;
4086 } else { 4086 } else {
4087 destination = FloatingPointHelper::kCoreRegisters; 4087 destination = FloatingPointHelper::kCoreRegisters;
4088 } 4088 }
4089 FloatingPointHelper::ConvertIntToDouble( 4089 FloatingPointHelper::ConvertIntToDouble(
4090 masm, r5, destination, 4090 masm, r5, destination,
4091 d0, r6, r7, // These are: double_dst, dst1, dst2. 4091 d0, r6, r7, // These are: double_dst, dst1, dst2.
4092 r4, s2); // These are: scratch2, single_scratch. 4092 r4, s2); // These are: scratch2, single_scratch.
4093 if (destination == FloatingPointHelper::kVFPRegisters) { 4093 if (destination == FloatingPointHelper::kVFPRegisters) {
4094 CpuFeatures::Scope scope(VFP2); 4094 CpuFeatures::Scope scope(VFP2);
4095 __ vstr(d0, r3, 0); 4095 __ vstr(d0, r3, 0);
4096 } else { 4096 } else {
4097 __ str(r6, MemOperand(r3, 0)); 4097 __ str(r6, MemOperand(r3, 0));
4098 __ str(r7, MemOperand(r3, Register::kSizeInBytes)); 4098 __ str(r7, MemOperand(r3, Register::kSizeInBytes));
4099 } 4099 }
4100 break; 4100 break;
4101 case FAST_ELEMENTS: 4101 case FAST_ELEMENTS:
4102 case FAST_SMI_ELEMENTS: 4102 case FAST_SMI_ELEMENTS:
4103 case FAST_DOUBLE_ELEMENTS: 4103 case FAST_DOUBLE_ELEMENTS:
4104 case FAST_HOLEY_ELEMENTS: 4104 case FAST_HOLEY_ELEMENTS:
4105 case FAST_HOLEY_SMI_ELEMENTS: 4105 case FAST_HOLEY_SMI_ELEMENTS:
4106 case FAST_HOLEY_DOUBLE_ELEMENTS: 4106 case FAST_HOLEY_DOUBLE_ELEMENTS:
4107 case DICTIONARY_ELEMENTS: 4107 case DICTIONARY_ELEMENTS:
4108 case NON_STRICT_ARGUMENTS_ELEMENTS: 4108 case NON_STRICT_ARGUMENTS_ELEMENTS:
4109 UNREACHABLE(); 4109 UNREACHABLE();
4110 break; 4110 break;
4111 } 4111 }
4112 4112
4113 // Entry registers are intact, r0 holds the value which is the return value. 4113 // Entry registers are intact, r0 holds the value which is the return value.
4114 __ Ret(); 4114 __ Ret();
4115 4115
4116 if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) { 4116 if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) {
4117 // r3: external array. 4117 // r3: external array.
4118 __ bind(&check_heap_number); 4118 __ bind(&check_heap_number);
4119 __ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE); 4119 __ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE);
4120 __ b(ne, &slow); 4120 __ b(ne, &slow);
4121 4121
4122 __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset)); 4122 __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
4123 4123
4124 // r3: base pointer of external storage. 4124 // r3: base pointer of external storage.
4125 4125
4126 // The WebGL specification leaves the behavior of storing NaN and 4126 // The WebGL specification leaves the behavior of storing NaN and
4127 // +/-Infinity into integer arrays basically undefined. For more 4127 // +/-Infinity into integer arrays basically undefined. For more
4128 // reproducible behavior, convert these to zero. 4128 // reproducible behavior, convert these to zero.
4129 if (CpuFeatures::IsSupported(VFP2)) { 4129 if (CpuFeatures::IsSupported(VFP2)) {
4130 CpuFeatures::Scope scope(VFP2); 4130 CpuFeatures::Scope scope(VFP2);
4131 4131
4132 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) { 4132 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
4133 // vldr requires offset to be a multiple of 4 so we can not 4133 // vldr requires offset to be a multiple of 4 so we can not
4134 // include -kHeapObjectTag into it. 4134 // include -kHeapObjectTag into it.
4135 __ sub(r5, r0, Operand(kHeapObjectTag)); 4135 __ sub(r5, r0, Operand(kHeapObjectTag));
4136 __ vldr(d0, r5, HeapNumber::kValueOffset); 4136 __ vldr(d0, r5, HeapNumber::kValueOffset);
4137 __ add(r5, r3, Operand(key, LSL, 1)); 4137 __ add(r5, r3, Operand(key, LSL, 1));
4138 __ vcvt_f32_f64(s0, d0); 4138 __ vcvt_f32_f64(s0, d0);
4139 __ vstr(s0, r5, 0); 4139 __ vstr(s0, r5, 0);
4140 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) { 4140 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
4141 __ sub(r5, r0, Operand(kHeapObjectTag)); 4141 __ sub(r5, r0, Operand(kHeapObjectTag));
4142 __ vldr(d0, r5, HeapNumber::kValueOffset); 4142 __ vldr(d0, r5, HeapNumber::kValueOffset);
4143 __ add(r5, r3, Operand(key, LSL, 2)); 4143 __ add(r5, r3, Operand(key, LSL, 2));
4144 __ vstr(d0, r5, 0); 4144 __ vstr(d0, r5, 0);
4145 } else { 4145 } else {
4146 // Hoisted load. vldr requires offset to be a multiple of 4 so we can 4146 // Hoisted load. vldr requires offset to be a multiple of 4 so we can
4147 // not include -kHeapObjectTag into it. 4147 // not include -kHeapObjectTag into it.
4148 __ sub(r5, value, Operand(kHeapObjectTag)); 4148 __ sub(r5, value, Operand(kHeapObjectTag));
4149 __ vldr(d0, r5, HeapNumber::kValueOffset); 4149 __ vldr(d0, r5, HeapNumber::kValueOffset);
4150 __ EmitECMATruncate(r5, d0, s2, r6, r7, r9); 4150 __ EmitECMATruncate(r5, d0, s2, r6, r7, r9);
4151 4151
4152 switch (elements_kind) { 4152 switch (elements_kind) {
4153 case EXTERNAL_BYTE_ELEMENTS: 4153 case EXTERNAL_BYTE_ELEMENTS:
4154 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: 4154 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
4155 __ strb(r5, MemOperand(r3, key, LSR, 1)); 4155 __ strb(r5, MemOperand(r3, key, LSR, 1));
4156 break; 4156 break;
4157 case EXTERNAL_SHORT_ELEMENTS: 4157 case EXTERNAL_SHORT_ELEMENTS:
4158 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: 4158 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
4159 __ strh(r5, MemOperand(r3, key, LSL, 0)); 4159 __ strh(r5, MemOperand(r3, key, LSL, 0));
4160 break; 4160 break;
4161 case EXTERNAL_INT_ELEMENTS: 4161 case EXTERNAL_INT_ELEMENTS:
4162 case EXTERNAL_UNSIGNED_INT_ELEMENTS: 4162 case EXTERNAL_UNSIGNED_INT_ELEMENTS:
4163 __ str(r5, MemOperand(r3, key, LSL, 1)); 4163 __ str(r5, MemOperand(r3, key, LSL, 1));
4164 break; 4164 break;
4165 case EXTERNAL_PIXEL_ELEMENTS: 4165 case EXTERNAL_PIXEL_ELEMENTS:
4166 case EXTERNAL_FLOAT_ELEMENTS: 4166 case EXTERNAL_FLOAT_ELEMENTS:
4167 case EXTERNAL_DOUBLE_ELEMENTS: 4167 case EXTERNAL_DOUBLE_ELEMENTS:
4168 case FAST_ELEMENTS: 4168 case FAST_ELEMENTS:
4169 case FAST_SMI_ELEMENTS: 4169 case FAST_SMI_ELEMENTS:
4170 case FAST_DOUBLE_ELEMENTS: 4170 case FAST_DOUBLE_ELEMENTS:
4171 case FAST_HOLEY_ELEMENTS: 4171 case FAST_HOLEY_ELEMENTS:
4172 case FAST_HOLEY_SMI_ELEMENTS: 4172 case FAST_HOLEY_SMI_ELEMENTS:
4173 case FAST_HOLEY_DOUBLE_ELEMENTS: 4173 case FAST_HOLEY_DOUBLE_ELEMENTS:
4174 case DICTIONARY_ELEMENTS: 4174 case DICTIONARY_ELEMENTS:
4175 case NON_STRICT_ARGUMENTS_ELEMENTS: 4175 case NON_STRICT_ARGUMENTS_ELEMENTS:
4176 UNREACHABLE(); 4176 UNREACHABLE();
4177 break; 4177 break;
4178 } 4178 }
4179 } 4179 }
4180 4180
4181 // Entry registers are intact, r0 holds the value which is the return 4181 // Entry registers are intact, r0 holds the value which is the return
4182 // value. 4182 // value.
4183 __ Ret(); 4183 __ Ret();
4184 } else { 4184 } else {
4185 // VFP2 is not available, do manual conversions. 4185 // VFP2 is not available, do manual conversions.
4186 __ ldr(r5, FieldMemOperand(value, HeapNumber::kExponentOffset)); 4186 __ ldr(r5, FieldMemOperand(value, HeapNumber::kExponentOffset));
4187 __ ldr(r6, FieldMemOperand(value, HeapNumber::kMantissaOffset)); 4187 __ ldr(r6, FieldMemOperand(value, HeapNumber::kMantissaOffset));
4188 4188
4189 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) { 4189 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
4190 Label done, nan_or_infinity_or_zero; 4190 Label done, nan_or_infinity_or_zero;
4191 static const int kMantissaInHiWordShift = 4191 static const int kMantissaInHiWordShift =
4192 kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord; 4192 kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
4193 4193
4194 static const int kMantissaInLoWordShift = 4194 static const int kMantissaInLoWordShift =
4195 kBitsPerInt - kMantissaInHiWordShift; 4195 kBitsPerInt - kMantissaInHiWordShift;
4196 4196
4197 // Test for all special exponent values: zeros, subnormal numbers, NaNs 4197 // Test for all special exponent values: zeros, subnormal numbers, NaNs
4198 // and infinities. All these should be converted to 0. 4198 // and infinities. All these should be converted to 0.
4199 __ mov(r7, Operand(HeapNumber::kExponentMask)); 4199 __ mov(r7, Operand(HeapNumber::kExponentMask));
4200 __ and_(r9, r5, Operand(r7), SetCC); 4200 __ and_(r9, r5, Operand(r7), SetCC);
4201 __ b(eq, &nan_or_infinity_or_zero); 4201 __ b(eq, &nan_or_infinity_or_zero);
4202 4202
4203 __ teq(r9, Operand(r7)); 4203 __ teq(r9, Operand(r7));
4204 __ mov(r9, Operand(kBinary32ExponentMask), LeaveCC, eq); 4204 __ mov(r9, Operand(kBinary32ExponentMask), LeaveCC, eq);
4205 __ b(eq, &nan_or_infinity_or_zero); 4205 __ b(eq, &nan_or_infinity_or_zero);
4206 4206
4207 // Rebias exponent. 4207 // Rebias exponent.
4208 __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift)); 4208 __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
4209 __ add(r9, 4209 __ add(r9,
4210 r9, 4210 r9,
4211 Operand(kBinary32ExponentBias - HeapNumber::kExponentBias)); 4211 Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));
4212 4212
4213 __ cmp(r9, Operand(kBinary32MaxExponent)); 4213 __ cmp(r9, Operand(kBinary32MaxExponent));
4214 __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, gt); 4214 __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, gt);
4215 __ orr(r5, r5, Operand(kBinary32ExponentMask), LeaveCC, gt); 4215 __ orr(r5, r5, Operand(kBinary32ExponentMask), LeaveCC, gt);
4216 __ b(gt, &done); 4216 __ b(gt, &done);
4217 4217
4218 __ cmp(r9, Operand(kBinary32MinExponent)); 4218 __ cmp(r9, Operand(kBinary32MinExponent));
4219 __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, lt); 4219 __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, lt);
4220 __ b(lt, &done); 4220 __ b(lt, &done);
4221 4221
4222 __ and_(r7, r5, Operand(HeapNumber::kSignMask)); 4222 __ and_(r7, r5, Operand(HeapNumber::kSignMask));
4223 __ and_(r5, r5, Operand(HeapNumber::kMantissaMask)); 4223 __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
4224 __ orr(r7, r7, Operand(r5, LSL, kMantissaInHiWordShift)); 4224 __ orr(r7, r7, Operand(r5, LSL, kMantissaInHiWordShift));
4225 __ orr(r7, r7, Operand(r6, LSR, kMantissaInLoWordShift)); 4225 __ orr(r7, r7, Operand(r6, LSR, kMantissaInLoWordShift));
4226 __ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift)); 4226 __ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift));
4227 4227
4228 __ bind(&done); 4228 __ bind(&done);
4229 __ str(r5, MemOperand(r3, key, LSL, 1)); 4229 __ str(r5, MemOperand(r3, key, LSL, 1));
4230 // Entry registers are intact, r0 holds the value which is the return 4230 // Entry registers are intact, r0 holds the value which is the return
4231 // value. 4231 // value.
4232 __ Ret(); 4232 __ Ret();
4233 4233
4234 __ bind(&nan_or_infinity_or_zero); 4234 __ bind(&nan_or_infinity_or_zero);
4235 __ and_(r7, r5, Operand(HeapNumber::kSignMask)); 4235 __ and_(r7, r5, Operand(HeapNumber::kSignMask));
4236 __ and_(r5, r5, Operand(HeapNumber::kMantissaMask)); 4236 __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
4237 __ orr(r9, r9, r7); 4237 __ orr(r9, r9, r7);
4238 __ orr(r9, r9, Operand(r5, LSL, kMantissaInHiWordShift)); 4238 __ orr(r9, r9, Operand(r5, LSL, kMantissaInHiWordShift));
4239 __ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift)); 4239 __ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift));
4240 __ b(&done); 4240 __ b(&done);
4241 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) { 4241 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
4242 __ add(r7, r3, Operand(key, LSL, 2)); 4242 __ add(r7, r3, Operand(key, LSL, 2));
4243 // r7: effective address of destination element. 4243 // r7: effective address of destination element.
4244 __ str(r6, MemOperand(r7, 0)); 4244 __ str(r6, MemOperand(r7, 0));
4245 __ str(r5, MemOperand(r7, Register::kSizeInBytes)); 4245 __ str(r5, MemOperand(r7, Register::kSizeInBytes));
4246 __ Ret(); 4246 __ Ret();
4247 } else { 4247 } else {
4248 bool is_signed_type = IsElementTypeSigned(elements_kind); 4248 bool is_signed_type = IsElementTypeSigned(elements_kind);
4249 int meaningfull_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt; 4249 int meaningfull_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
4250 int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000; 4250 int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;
4251 4251
4252 Label done, sign; 4252 Label done, sign;
4253 4253
4254 // Test for all special exponent values: zeros, subnormal numbers, NaNs 4254 // Test for all special exponent values: zeros, subnormal numbers, NaNs
4255 // and infinities. All these should be converted to 0. 4255 // and infinities. All these should be converted to 0.
4256 __ mov(r7, Operand(HeapNumber::kExponentMask)); 4256 __ mov(r7, Operand(HeapNumber::kExponentMask));
4257 __ and_(r9, r5, Operand(r7), SetCC); 4257 __ and_(r9, r5, Operand(r7), SetCC);
4258 __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq); 4258 __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
4259 __ b(eq, &done); 4259 __ b(eq, &done);
4260 4260
4261 __ teq(r9, Operand(r7)); 4261 __ teq(r9, Operand(r7));
4262 __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq); 4262 __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
4263 __ b(eq, &done); 4263 __ b(eq, &done);
4264 4264
4265 // Unbias exponent. 4265 // Unbias exponent.
4266 __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift)); 4266 __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
4267 __ sub(r9, r9, Operand(HeapNumber::kExponentBias), SetCC); 4267 __ sub(r9, r9, Operand(HeapNumber::kExponentBias), SetCC);
4268 // If exponent is negative then result is 0. 4268 // If exponent is negative then result is 0.
4269 __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, mi); 4269 __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, mi);
4270 __ b(mi, &done); 4270 __ b(mi, &done);
4271 4271
4272 // If exponent is too big then result is minimal value. 4272 // If exponent is too big then result is minimal value.
4273 __ cmp(r9, Operand(meaningfull_bits - 1)); 4273 __ cmp(r9, Operand(meaningfull_bits - 1));
4274 __ mov(r5, Operand(min_value), LeaveCC, ge); 4274 __ mov(r5, Operand(min_value), LeaveCC, ge);
4275 __ b(ge, &done); 4275 __ b(ge, &done);
4276 4276
4277 __ and_(r7, r5, Operand(HeapNumber::kSignMask), SetCC); 4277 __ and_(r7, r5, Operand(HeapNumber::kSignMask), SetCC);
4278 __ and_(r5, r5, Operand(HeapNumber::kMantissaMask)); 4278 __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
4279 __ orr(r5, r5, Operand(1u << HeapNumber::kMantissaBitsInTopWord)); 4279 __ orr(r5, r5, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
4280 4280
4281 __ rsb(r9, r9, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC); 4281 __ rsb(r9, r9, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
4282 __ mov(r5, Operand(r5, LSR, r9), LeaveCC, pl); 4282 __ mov(r5, Operand(r5, LSR, r9), LeaveCC, pl);
4283 __ b(pl, &sign); 4283 __ b(pl, &sign);
4284 4284
4285 __ rsb(r9, r9, Operand(0, RelocInfo::NONE)); 4285 __ rsb(r9, r9, Operand(0, RelocInfo::NONE));
4286 __ mov(r5, Operand(r5, LSL, r9)); 4286 __ mov(r5, Operand(r5, LSL, r9));
4287 __ rsb(r9, r9, Operand(meaningfull_bits)); 4287 __ rsb(r9, r9, Operand(meaningfull_bits));
4288 __ orr(r5, r5, Operand(r6, LSR, r9)); 4288 __ orr(r5, r5, Operand(r6, LSR, r9));
4289 4289
4290 __ bind(&sign); 4290 __ bind(&sign);
4291 __ teq(r7, Operand(0, RelocInfo::NONE)); 4291 __ teq(r7, Operand(0, RelocInfo::NONE));
4292 __ rsb(r5, r5, Operand(0, RelocInfo::NONE), LeaveCC, ne); 4292 __ rsb(r5, r5, Operand(0, RelocInfo::NONE), LeaveCC, ne);
4293 4293
4294 __ bind(&done); 4294 __ bind(&done);
4295 switch (elements_kind) { 4295 switch (elements_kind) {
4296 case EXTERNAL_BYTE_ELEMENTS: 4296 case EXTERNAL_BYTE_ELEMENTS:
4297 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: 4297 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
4298 __ strb(r5, MemOperand(r3, key, LSR, 1)); 4298 __ strb(r5, MemOperand(r3, key, LSR, 1));
4299 break; 4299 break;
4300 case EXTERNAL_SHORT_ELEMENTS: 4300 case EXTERNAL_SHORT_ELEMENTS:
4301 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: 4301 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
4302 __ strh(r5, MemOperand(r3, key, LSL, 0)); 4302 __ strh(r5, MemOperand(r3, key, LSL, 0));
4303 break; 4303 break;
4304 case EXTERNAL_INT_ELEMENTS: 4304 case EXTERNAL_INT_ELEMENTS:
4305 case EXTERNAL_UNSIGNED_INT_ELEMENTS: 4305 case EXTERNAL_UNSIGNED_INT_ELEMENTS:
4306 __ str(r5, MemOperand(r3, key, LSL, 1)); 4306 __ str(r5, MemOperand(r3, key, LSL, 1));
4307 break; 4307 break;
4308 case EXTERNAL_PIXEL_ELEMENTS: 4308 case EXTERNAL_PIXEL_ELEMENTS:
4309 case EXTERNAL_FLOAT_ELEMENTS: 4309 case EXTERNAL_FLOAT_ELEMENTS:
4310 case EXTERNAL_DOUBLE_ELEMENTS: 4310 case EXTERNAL_DOUBLE_ELEMENTS:
4311 case FAST_ELEMENTS: 4311 case FAST_ELEMENTS:
4312 case FAST_SMI_ELEMENTS: 4312 case FAST_SMI_ELEMENTS:
4313 case FAST_DOUBLE_ELEMENTS: 4313 case FAST_DOUBLE_ELEMENTS:
4314 case FAST_HOLEY_ELEMENTS: 4314 case FAST_HOLEY_ELEMENTS:
4315 case FAST_HOLEY_SMI_ELEMENTS: 4315 case FAST_HOLEY_SMI_ELEMENTS:
4316 case FAST_HOLEY_DOUBLE_ELEMENTS: 4316 case FAST_HOLEY_DOUBLE_ELEMENTS: