My favorites | Sign in
v8
Project Home Downloads Wiki Issues Source Code Search
Checkout   Browse   Changes  
Changes to /trunk/src/arm/full-codegen-arm.cc
r12669 vs. r12683 Compare: vs.  Format:
Revision r12683
Go to: 
Project members, sign in to write a code review
/trunk/src/arm/full-codegen-arm.cc   r12669 /trunk/src/arm/full-codegen-arm.cc   r12683
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its 12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived 13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission. 14 // from this software without specific prior written permission.
15 // 15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 #include "v8.h" 28 #include "v8.h"
29 29
30 #if defined(V8_TARGET_ARCH_ARM) 30 #if defined(V8_TARGET_ARCH_ARM)
31 31
32 #include "code-stubs.h" 32 #include "code-stubs.h"
33 #include "codegen.h" 33 #include "codegen.h"
34 #include "compiler.h" 34 #include "compiler.h"
35 #include "debug.h" 35 #include "debug.h"
36 #include "full-codegen.h" 36 #include "full-codegen.h"
37 #include "isolate-inl.h" 37 #include "isolate-inl.h"
38 #include "parser.h" 38 #include "parser.h"
39 #include "scopes.h" 39 #include "scopes.h"
40 #include "stub-cache.h" 40 #include "stub-cache.h"
41 41
42 #include "arm/code-stubs-arm.h" 42 #include "arm/code-stubs-arm.h"
43 #include "arm/macro-assembler-arm.h" 43 #include "arm/macro-assembler-arm.h"
44 44
45 namespace v8 { 45 namespace v8 {
46 namespace internal { 46 namespace internal {
47 47
48 #define __ ACCESS_MASM(masm_) 48 #define __ ACCESS_MASM(masm_)
49 49
50 50
// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
// immediate value is used) is the delta from the pc to the first instruction of
// the patchable code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    // A bound patch site must have had its marker recorded (EmitPatchInfo),
    // and an unbound one must not — otherwise the patcher would misfire.
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to skip
  // the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    // The patcher relies on the cmp/branch pair being adjacent; keep the
    // constant pool from being dumped between them.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    // cmp reg, reg always sets the Z flag, so the following b(eq) is
    // unconditionally taken until the site is patched.
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this ensure that a jump is never generated to skip
  // the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      // Encode the delta into a cmp rx, #yyy marker: the register number
      // carries the high part and the raw 12-bit immediate the low part
      // (see the class comment above for the decoding formula).
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;  // Whether EmitPatchInfo recorded a marker.
#endif
};
112 112
113 113
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  // Counter cell used for count-based interrupt/optimization checks; seeded
  // with the interrupt budget and decremented on back edges and returns.
  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  // Emit a breakpoint when entering the function named by --stop-at.
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). r5 is zero for method calls and non-zero for
  // function calls.
  if (!info->is_classic_mode() || info->is_native()) {
    Label ok;
    __ cmp(r5, Operand(0));
    __ b(eq, &ok);
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ str(r2, MemOperand(sp, receiver_offset));
    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  int locals_count = info->scope()->num_stack_slots();

  __ Push(lr, fp, cp, r1);
  if (locals_count > 0) {
    // Load undefined value here, so the value is ready for the loop
    // below.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  }
  // Adjust fp to point to caller's fp.
  __ add(fp, sp, Operand(2 * kPointerSize));

  { Comment cmnt(masm_, "[ Allocate locals");
    // Initialize every stack slot to undefined (loaded into ip above).
    for (int i = 0; i < locals_count; i++) {
      __ push(ip);
    }
  }

  // Tracks whether r1 still holds the function; stub/runtime calls below
  // clobber it.
  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    __ push(r1);
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in both r0 and cp.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in cp.
    __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        // Parameters are pushed left to right, so parameter i lives at
        // caller-SP offset for slot (num_parameters - 1 - i).
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        __ RecordWriteContextSlot(
            cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(r3, r1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ add(r2, fp,
           Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r1, Operand(Smi::FromInt(num_parameters)));
    __ Push(r3, r2, r1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, r0, r1, r2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_HARMONY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      // Call the StackCheckStub only when sp is below the stack limit.
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, &ok);
      StackCheckStub stub;
      __ CallStub(&stub);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emit the constant pool, so it doesn't get emitted in the middle
  // of the stack check table.
  masm()->CheckConstPool(true, false);
}
313 313
314 314
// Reset the accumulator register (r0) to a harmless smi zero.
void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}
318 318
319 319
// Subtract |delta| (as a smi) from the profiling counter cell, setting the
// condition flags so the caller can branch on the sign of the new value
// (pl = budget not yet exhausted).  Clobbers r2 and r3.
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
  // SetCC: leave the flags reflecting the subtraction result.
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
}
326 326
327 327
// Refill the profiling counter cell after it has been exhausted.  The reset
// value depends on why/whether we expect to trigger again.  Clobbers r2, r3.
void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
    // Self-optimization is a one-off thing: if it fails, don't try again.
    reset_value = Smi::kMaxValue;
  }
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ mov(r2, Operand(profiling_counter_));
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
}
342 342
343 343
// Emit the back-edge check for a loop: either a count-based interrupt check
// (profiling counter decremented by a weight proportional to the back-edge
// distance) or a classic stack-limit check.  Also records the PC-to-OSR-id
// mapping used for on-stack replacement.
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
                                       Label* back_edge_target) {
  Comment cmnt(masm_, "[ Stack check");
  // Block literal pools whilst emitting stack check code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  if (FLAG_count_based_interrupts) {
    int weight = 1;
    if (FLAG_weighted_back_edges) {
      ASSERT(back_edge_target->is_bound());
      // Longer loop bodies decrement the budget faster (capped at
      // kMaxBackEdgeWeight) so hot, large loops trigger sooner.
      int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kBackEdgeDistanceUnit));
    }
    EmitProfilingCounterDecrement(weight);
    __ b(pl, &ok);  // Budget not exhausted yet.
    InterruptStub stub;
    __ CallStub(&stub);
  } else {
    // Classic check: call the stub only when sp is below the stack limit.
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmp(sp, Operand(ip));
    __ b(hs, &ok);
    StackCheckStub stub;
    __ CallStub(&stub);
  }

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordStackCheck(stmt->OsrEntryId());

  if (FLAG_count_based_interrupts) {
    EmitProfilingCounterReset();
  }

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
387 387
388 388
// Emit the function's (single, shared) return sequence.  The return value is
// expected in r0.  The first caller binds return_label_ and emits the actual
// sequence; subsequent callers just branch to it.  The frame-teardown
// instructions at the end must have a fixed size (checked against
// Assembler::kJSReturnSequenceInstructions) so the debugger can patch them.
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      // Pretend that the exit is a backwards jump to the entry.
      int weight = 1;
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        int distance = masm_->pc_offset();
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kBackEdgeDistanceUnit));
      }
      EmitProfilingCounterDecrement(weight);
      Label ok;
      __ b(pl, &ok);  // Budget not exhausted yet.
      // Preserve the return value (r0) across the stub/runtime call.
      __ push(r0);
      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
        __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
        __ push(r2);
        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      } else {
        InterruptStub stub;
        __ CallStub(&stub);
      }
      __ pop(r0);
      EmitProfilingCounterReset();
      __ bind(&ok);
    }

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      // Here we use masm_-> instead of the __ macro to avoid the code coverage
      // tool from instrumenting as we rely on the code size here.
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      // Tear down the frame: restore sp/fp/lr, drop receiver + parameters,
      // and jump to the caller.
      masm_->mov(sp, fp);
      masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
      masm_->add(sp, sp, Operand(sp_delta));
      masm_->Jump(lr);
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}
455 455
456 456
// In an effect context the variable's value is unused, so nothing is
// materialized; only slot-allocated (stack or context) variables are
// expected to reach this path.
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}
460 460
461 461
// Loads the variable's current value into the accumulator (result) register.
void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}
466 466
467 467
// Loads the variable's value (using the accumulator register as a
// temporary) and pushes it onto the expression stack.
void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}
473 473
474 474
// Loads the variable into the accumulator and emits the boolean test that
// splits control flow to this context's true/false targets.
void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  // should_normalize is false here: the loaded value is tested directly.
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
482 482
483 483
// A root-list constant has no side effects; in an effect context there is
// nothing to emit.
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}
486 486
487 487
// Loads the heap root at |index| (e.g. true/false/undefined) into the
// accumulator register.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}
492 492
493 493
// Loads the heap root at |index| into the accumulator and pushes it onto
// the expression stack.
void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}
499 499
500 500
// Tests a root-list constant.  Roots whose truthiness is known statically
// (undefined/null/false are falsy, true is truthy) branch directly; any
// other root is loaded into the accumulator and tested dynamically.
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    // Statically false: jump only if the false target is not fall-through.
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    // Statically true.
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    // Truthiness not known at compile time; test at run time.
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}
517 517
518 518
// A literal has no side effects; in an effect context there is nothing to
// emit.
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}
521 521
522 522
// Moves the literal into the accumulator register.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}
527 527
528 528
// Materializes the literal and pushes it onto the expression stack.
void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}
534 534
535 535
// Tests a literal.  Most literals have statically known truthiness
// (undefined/null/false falsy; true and JS objects truthy; strings and
// smis by emptiness/zero), so a direct branch is emitted; anything else
// falls back to a dynamic test of the accumulator.
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    // Only the empty string is falsy.
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    // Only smi zero is falsy.
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}
564 564
565 565
// Pops |count| values from the expression stack; the value in |reg| is
// discarded since an effect context needs no result.
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}
571 571
572 572
// Pops |count| values from the expression stack and leaves |reg|'s value
// in the accumulator register.
void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}
580 580
581 581
// Replaces the top |count| stack values with the value in |reg|: drop all
// but one slot, then overwrite the remaining top-of-stack slot in place.
void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}
588 588
589 589
// Pops |count| stack values, moves |reg| into the accumulator, and emits
// the boolean test that splits to this context's targets.
void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
599 599
600 600
// In an effect context both materialization targets must be the same
// label; simply bind it — no value is produced.
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}
606 606
607 607
// Materializes a boolean result in the accumulator: the true path loads
// the true object and jumps over the false path, which loads false.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}
619 619
620 620
// Materializes a boolean result on the expression stack, pushing the true
// or false object via the ip scratch register on the respective path.
void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ push(ip);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ push(ip);
  __ bind(&done);
}
634 634
635 635
// In a test context the materialization labels are already this context's
// own branch targets, so there is nothing to emit.
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}
641 641
642 642
// A compile-time boolean has no side effects; nothing to emit in an
// effect context.
void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}
645 645
646 646
647 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { 647 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
648 Heap::RootListIndex value_root_index = 648 Heap::RootListIndex value_root_index =
649 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; 649 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
650 __ LoadRoot(result_register(), value_root_index); 650 __ LoadRoot(result_register(), value_root_index);
651 } 651 }
652 652
653 653
654 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { 654 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
655 Heap::RootListIndex value_root_index = 655 Heap::RootListIndex value_root_index =
656 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; 656 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
657 __ LoadRoot(ip, value_root_index); 657 __ LoadRoot(ip, value_root_index);
658 __ push(ip); 658 __ push(ip);
659 } 659 }
660 660
661 661
// A compile-time-known boolean: branch straight to the matching target,
// emitting no code at all when that target is the fall-through.
void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}
673 673
674 674
// Converts the value in the accumulator to a boolean via ToBooleanStub and
// splits control flow on the outcome; a non-zero stub result is treated as
// true (hence the ne condition after the self-test).
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  ToBooleanStub stub(result_register());
  __ CallStub(&stub);
  // Set the flags from the stub's result: zero means false.
  __ tst(result_register(), result_register());
  Split(ne, if_true, if_false, fall_through);
}
684 684
685 685
// Emits a conditional split to |if_true|/|if_false|, omitting the jump to
// whichever target equals |fall_through| (that target is reached by
// falling through); when neither is the fall-through, both branches are
// emitted.
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    // Branch on the negated condition so the true case falls through.
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}
699 699
700 700
701 MemOperand FullCodeGenerator::StackOperand(Variable* var) { 701 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
702 ASSERT(var->IsStackAllocated()); 702 ASSERT(var->IsStackAllocated());
703 // Offset is negative because higher indexes are at lower addresses. 703 // Offset is negative because higher indexes are at lower addresses.
704 int offset = -var->index() * kPointerSize; 704 int offset = -var->index() * kPointerSize;
705 // Adjust by a (parameter or local) base offset. 705 // Adjust by a (parameter or local) base offset.
706 if (var->IsParameter()) { 706 if (var->IsParameter()) {
707 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; 707 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
708 } else { 708 } else {
709 offset += JavaScriptFrameConstants::kLocal0Offset; 709 offset += JavaScriptFrameConstants::kLocal0Offset;
710 } 710 }
711 return MemOperand(fp, offset); 711 return MemOperand(fp, offset);
712 } 712 }
713 713
714 714
715 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { 715 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
716 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); 716 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
717 if (var->IsContextSlot()) { 717 if (var->IsContextSlot()) {
718 int context_chain_length = scope()->ContextChainLength(var->scope()); 718 int context_chain_length = scope()->ContextChainLength(var->scope());
719 __ LoadContext(scratch, context_chain_length); 719 __ LoadContext(scratch, context_chain_length);
720 return ContextOperand(scratch, var->index()); 720 return ContextOperand(scratch, var->index());
721 } else { 721 } else {
722 return StackOperand(var); 722 return StackOperand(var);
723 } 723 }
724 } 724 }
725 725
726 726
// Loads the variable's current value into |dest|.  |dest| doubles as the
// scratch register for the address computation, so it may briefly hold an
// intermediate context pointer before the final load.
void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}
732 732
733 733
// Stores |src| into |var|'s slot.  The scratch registers must be distinct
// from |src| and from each other: |scratch0| is clobbered by the address
// computation and |scratch1| by the write barrier.
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}
755 755
756 756
// Records a bailout point ahead of a control-flow split.  The bailout
// point is jumped over on the normal path when normalization is requested;
// the normalization code then converts the bailed-out TOS value into a
// branch by comparing it against the true root.
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    // r0 holds the value at the bailout point; split on equality with true.
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}
776 776
777 777
// Debug-mode sanity check that a declaration is being emitted in the
// function's own context (chain length 0) and not inside a with or catch
// context; emits nothing in release builds without debug code.
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, "Declaration in with context.");
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, "Declaration in catch context.");
  }
}
791 791
792 792
// Emits code (or global-declaration table entries) for a variable
// declaration, dispatching on where the variable was allocated.
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  // const/let bindings are initialized with the hole to enable
  // temporal-dead-zone style checks on access.
  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // Globals: record a name/initial-value pair for DeclareGlobals.
      // NOTE(review): this case keys the hole-initialization off
      // variable->binding_needs_init() while the cases below use the
      // locally computed hole_init — presumably equivalent; confirm.
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      // Stack slot: only hole-initialization needs code; otherwise the
      // slot's existing contents are left alone.
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      // Dynamically looked-up variable: declare it at runtime with
      // context, name, attributes, and (optionally) an initial value.
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ mov(r1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, r2, r1, r0);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
        __ Push(cp, r2, r1, r0);
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}
855 855
856 856
// Emits code (or global-declaration table entries) for a function
// declaration; unlike plain variables, a function value is always
// materialized and stored immediately.
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      // Globals: record a name/SharedFunctionInfo pair for DeclareGlobals.
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      // Evaluate the closure into the accumulator and store to the slot.
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      // Dynamically looked-up: declare at runtime, pushing the closure as
      // the initial value.
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ mov(r1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, r2, r1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}
911 911
912 912
// Emits code for a module declaration: binds the module's instance object
// to the declared name, then visits the module body.  Modules can only be
// global or context-allocated.
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  Handle<JSModule> instance = declaration->module()->interface()->Instance();
  ASSERT(!instance.is_null());

  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      // Globals: record a name/instance pair for DeclareGlobals.
      Comment cmnt(masm_, "[ ModuleDeclaration");
      globals_->Add(variable->name(), zone());
      globals_->Add(instance, zone());
      Visit(declaration->module());
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ModuleDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // NOTE(review): no write barrier after this context store —
      // presumably safe because the instance is old-space/immovable here;
      // confirm against the GC's remembered-set rules.
      __ mov(r1, Operand(instance));
      __ str(r1, ContextOperand(cp, variable->index()));
      Visit(declaration->module());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}
943 943
944 944
// Placeholder for import declarations: both reachable cases are still
// unimplemented (TODO below); only the debug context check is emitted.
void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}
966 966
967 967
void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // Exports are not implemented yet; no code is generated for them.
  // TODO(rossberg)
}
971 971
972 972
// Emits a call to Runtime::kDeclareGlobals with the (name, value) pairs
// collected in |pairs| while visiting the declarations of this scope.
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, r1, r0);  // Arguments: context, pairs, flags (as smi).
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}
982 982
983 983
// Compiles a switch statement in two passes: first a chain of comparisons
// (one per non-default clause, each via '===' against the tag kept on the
// stack), then all the clause bodies in source order so fall-through works.
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      // Fast path: if both operands are smis, compare them directly and
      // skip the compare IC; otherwise fall through to the patchable stub.
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();

    // The IC leaves 0 in r0 on equality; any other value means "no match".
    __ cmp(r0, Operand(0));
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
1065 1065
1066 1066
// Compiles a for-in loop. While the loop runs, five values live on the
// stack (top down, see the sp offsets used below and the final Drop(5)):
//   sp[0] current index (smi), sp[1] length (smi), sp[2] enum cache or
//   fixed array of keys, sp[3] expected map (or smi 0/1 slow-case marker),
//   sp[4] the enumerable object itself.
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. Both SpiderMonkey and JSC
  // ignore null and undefined in contrast to the specification; see
  // ECMA-262 section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(r0);

  // Check for proxies. Proxies always take the runtime (slow) path.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
  __ b(le, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2, r4);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  // Nothing to enumerate: discard the enumerable and exit.
  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  // Record (in the type feedback cell) that this for-in took the slow,
  // fixed-array path.
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(
          Handle<Object>(
              Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
  __ LoadHeapObject(r1, cell);
  __ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
  __ str(r2, FieldMemOperand(r1, JSGlobalPropertyCell::kValueOffset));

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
  __ ldr(r2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
  __ b(gt, &non_proxy);
  __ mov(r1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ Push(r1, r0);  // Smi and array
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  // Note: r0 holds a smi index, hence the LSL by (log2(ptr) - smi tag).
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ cmp(r2, Operand(Smi::FromInt(0)));
  __ b(eq, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(r3, Operand(r0), SetCC);  // SetCC so eq tests for smi 0 (filtered).
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for the going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitStackCheck(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
1250 1250
1251 1251
// Allocates a closure for |info| and leaves it in r0 (plugged into the
// current expression context). Chooses between a fast allocation stub and
// the generic runtime call.
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    // Fast case: stub allocation in new space.
    FastNewClosureStub stub(info->language_mode());
    __ mov(r0, Operand(info));
    __ push(r0);
    __ CallStub(&stub);
  } else {
    // Slow case: full runtime call; pretenure flag passed as true/false.
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, r0, r1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(r0);
}
1278 1278
1279 1279
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  // All the work is delegated to EmitVariableLoad.
  EmitVariableLoad(expr);
}
1284 1284
1285 1285
// Loads |var| as a global via the load IC, but first verifies in generated
// code that no context on the chain has acquired an extension object (which
// could shadow the global); branches to |slow| if one has. |typeof_state|
// selects the reloc mode so 'typeof' loads don't throw for undeclared names.
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  // Walk the scope chain, checking the extension slot of every context
  // belonging to a scope that calls non-strict eval.
  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    // The remaining contexts are only known at runtime: emit a loop that
    // checks every context up to the native context.
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ tst(temp, temp);
    __ b(ne, slow);
    // Load next context in chain.
    __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  // All extension checks passed: do the actual global load through the IC.
  // The IC expects the receiver (global object) in r0 and the name in r2.
  __ ldr(r0, GlobalObjectOperand());
  __ mov(r2, Operand(var->name()));
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, mode);
}
1342 1342
1343 1343
// Returns a MemOperand addressing |var|'s context slot, emitting code that
// walks the context chain from the current scope to the scope that declared
// |var| and branches to |slow| whenever a context that calls non-strict eval
// (or the final context) carries a non-NULL extension object.
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(temp, temp);
  __ b(ne, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}
1374 1374
1375 1375
// Emits the fast path for loading a dynamically-resolved variable into r0,
// jumping to |done| on success and to |slow| whenever an eval-introduced
// binding might shadow it. Emits nothing for plain DYNAMIC variables.
void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == CONST ||
        local->mode() == CONST_HARMONY ||
        local->mode() == LET) {
      // The binding may still hold the hole (uninitialized); what to do
      // then depends on the binding mode.
      __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
      if (local->mode() == CONST) {
        // Legacy const: reading an uninitialized binding yields undefined.
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
      } else {  // LET || CONST_HARMONY
        // let / harmony const: reading before initialization throws a
        // ReferenceError (temporal dead zone).
        __ b(ne, done);
        __ mov(r0, Operand(var->name()));
        __ push(r0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}
1407 1407
1408 1408
// Emit code that loads the value of the variable referenced by |proxy|
// and plugs the result into the current expression context.  Dispatches
// on the variable's location: unallocated (global, via a load IC),
// parameter/local/context slot (direct load, with a read barrier for
// let/const bindings that may be uninitialized), or LOOKUP (dynamic
// lookup with a runtime-call slow path).
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in r2 and the global
      // object (receiver) in r0.
      __ ldr(r0, GlobalObjectOperand());
      __ mov(r2, Operand(var->name()));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(r0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier: the slot holds the hole value
          // until the binding is initialized.
          GetVar(r0, var);
          __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
          if (var->mode() == LET || var->mode() == CONST_HARMONY) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ b(ne, &done);
            __ mov(r0, Operand(var->name()));
            __ push(r0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are
            // unholed: reading them yields undefined.
            ASSERT(var->mode() == CONST);
            __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
          }
          context()->Plug(r0);
          break;
        }
      }
      // No read barrier needed; plug the slot directly.
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ mov(r1, Operand(var->name()));
      __ Push(cp, r1);  // Context and name.
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(r0);
    }
  }
}
1511 1511
1512 1512
// Emit code for a regexp literal: fetch (or materialize via the runtime)
// the boilerplate regexp from the function's literals array, then make a
// shallow copy of it (allocating in new space, falling back to a runtime
// allocation) and plug the copy into the expression context.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // r5 = materialized value (RegExp literal)
  // r4 = JS function, literals array
  // r3 = literal index
  // r2 = RegExp pattern
  // r1 = RegExp flags
  // r0 = RegExp literal clone
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ ldr(r5, FieldMemOperand(r4, literal_offset));
  // An undefined slot means the boilerplate has not been created yet.
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r5, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in r0.
  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r2, Operand(expr->pattern()));
  __ mov(r1, Operand(expr->flags()));
  __ Push(r4, r3, r2, r1);
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(r5, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  // Preserve the materialized regexp (r5) across the runtime call.
  __ push(r5);
  __ mov(r0, Operand(Smi::FromInt(size)));
  __ push(r0);
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(r5);

  __ bind(&allocated);
  // After this, registers are used as follows:
  // r0: Newly allocated regexp.
  // r5: Materialized regexp.
  // r2: temp.
  __ CopyFields(r0, r5, r2.bit(), size / kPointerSize);
  context()->Plug(r0);
}
1562 1562
1563 1563
1564 void FullCodeGenerator::EmitAccessor(Expression* expression) { 1564 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1565 if (expression == NULL) { 1565 if (expression == NULL) {
1566 __ LoadRoot(r1, Heap::kNullValueRootIndex); 1566 __ LoadRoot(r1, Heap::kNullValueRootIndex);
1567 __ push(r1); 1567 __ push(r1);
1568 } else { 1568 } else {
1569 VisitForStackValue(expression); 1569 VisitForStackValue(expression);
1570 } 1570 }
1571 } 1571 }
1572 1572
1573 1573
// Emit code for an object literal: create (or shallow-clone) the literal
// boilerplate, then emit a store for each non-compile-time property.
// Accessors are collected into a table so each getter/setter pair costs a
// single runtime call.  The result is kept on top of the stack while
// property stores are emitted (result_saved) and finally plugged into the
// expression context.
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  // Arguments for the create/clone call:
  // r3 = literals array, r2 = literal index, r1 = constant properties,
  // r0 = flags.
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ mov(r0, Operand(Smi::FromInt(flags)));
  __ Push(r3, r2, r1, r0);
  // constant_properties stores key/value pairs, hence the division by 2.
  int properties_count = constant_properties->length() / 2;
  if (expr->depth() > 1) {
    // Nested literals need the full runtime path.
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else if (flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  } else {
    // Small, flat, fast-elements literal: use the fast-clone stub.
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in r0.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    // Compile-time values were already handled by the boilerplate.
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(r0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->handle()->IsSymbol()) {
          // Named property: store through a StoreIC (name in r2,
          // receiver in r1, value in r0).
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(r2, Operand(key->handle()));
            __ ldr(r1, MemOperand(sp));
            Handle<Code> ic = is_classic_mode()
                ? isolate()->builtins()->StoreIC_Initialize()
                : isolate()->builtins()->StoreIC_Initialize_Strict();
            CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            // Store is shadowed by a later duplicate key: evaluate for
            // side effects only.
            VisitForEffect(value);
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ mov(r0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
          __ push(r0);
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          // Discard receiver, key and value pushed above.
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
    __ push(r0);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ mov(r0, Operand(Smi::FromInt(NONE)));
    __ push(r0);
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    // Literal carries a function: switch the object to fast properties.
    ASSERT(result_saved);
    __ ldr(r0, MemOperand(sp));
    __ push(r0);
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
}
1694 1694
1695 1695
// Emit code for an array literal: clone the constant-elements boilerplate
// (choosing between the COW fast stub, the generic runtime path for nested
// or oversized literals, and the shallow-clone stub), then evaluate and
// store every non-constant subexpression into the fresh clone, emitting
// write barriers or the element store stub as the elements kind requires.
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  // constant_elements is a pair: [0] = elements kind (Smi),
  // [1] = the constant elements backing store.
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  // Push literals array (r3), literal index (r2) and constant elements (r1).
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(constant_elements));
  __ Push(r3, r2, r1);
  if (has_fast_elements && constant_elements_values->map() ==
      isolate()->heap()->fixed_cow_array_map()) {
    // Copy-on-write elements: the clone can share the backing store.
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
    __ CallStub(&stub);
    __ IncrementCounter(
        isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
  } else if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub::Mode mode = has_fast_elements
      ? FastCloneShallowArrayStub::CLONE_ELEMENTS
      : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (subexpr->AsLiteral() != NULL ||
        CompileTimeValue::IsCompileTimeValue(subexpr)) {
      continue;
    }

    if (!result_saved) {
      __ push(r0);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      // Fast object elements: store directly into the elements backing
      // store and update the write barrier.
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ ldr(r6, MemOperand(sp));  // Copy of array literal.
      __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
      __ str(result_register(), FieldMemOperand(r1, offset));
      // Update the write barrier for the array store.
      __ RecordWriteField(r1, offset, result_register(), r2,
                          kLRHasBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
    } else {
      // Other elements kinds (e.g. smi-only/double): go through the
      // element store stub, which handles kind transitions.
      __ ldr(r1, MemOperand(sp));  // Copy of array literal.
      __ ldr(r2, FieldMemOperand(r1, JSObject::kMapOffset));
      __ mov(r3, Operand(Smi::FromInt(i)));
      __ mov(r4, Operand(Smi::FromInt(expr->literal_index())));
      StoreArrayLiteralElementStub stub;
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(r0);
  }
}
1781 1781
1782 1782
1783 void FullCodeGenerator::VisitAssignment(Assignment* expr) { 1783 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1784 Comment cmnt(masm_, "[ Assignment"); 1784 Comment cmnt(masm_, "[ Assignment");
1785 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError' 1785 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1786 // on the left-hand side. 1786 // on the left-hand side.
1787 if (!expr->target()->IsValidLeftHandSide()) { 1787 if (!expr->target()->IsValidLeftHandSide()) {
1788 VisitForEffect(expr->target()); 1788 VisitForEffect(expr->target());
1789 return; 1789 return;
1790 } 1790 }
1791 1791
1792 // Left-hand side can only be a property, a global or a (parameter or local) 1792 // Left-hand side can only be a property, a global or a (parameter or local)
1793 // slot. 1793 // slot.
1794 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 1794 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1795 LhsKind assign_type = VARIABLE; 1795 LhsKind assign_type = VARIABLE;
1796 Property* property = expr->target()->AsProperty(); 1796 Property* property = expr->target()->AsProperty();
1797 if (property != NULL) { 1797 if (property != NULL) {
1798 assign_type = (property->key()->IsPropertyName()) 1798 assign_type = (property->key()->IsPropertyName())
1799 ? NAMED_PROPERTY 1799 ? NAMED_PROPERTY
1800 : KEYED_PROPERTY; 1800 : KEYED_PROPERTY;
1801 } 1801 }
1802 1802
1803 // Evaluate LHS expression. 1803 // Evaluate LHS expression.
1804 switch (assign_type) { 1804 switch (assign_type) {
1805 case VARIABLE: 1805 case VARIABLE:
1806 // Nothing to do here. 1806 // Nothing to do here.
1807 break; 1807 break;
1808 case NAMED_PROPERTY: 1808 case NAMED_PROPERTY:
1809 if (expr->is_compound()) { 1809 if (expr->is_compound()) {
1810 // We need the receiver both on the stack and in the accumulator. 1810 // We need the receiver both on the stack and in the accumulator.
1811 VisitForAccumulatorValue(property->obj()); 1811 VisitForAccumulatorValue(property->obj());
1812 __ push(result_register()); 1812 __ push(result_register());
1813 } else { 1813 } else {
1814 VisitForStackValue(property->obj()); 1814 VisitForStackValue(property->obj());
1815 } 1815 }
1816 break; 1816 break;
1817 case KEYED_PROPERTY: 1817 case KEYED_PROPERTY:
1818 if (expr->is_compound()) { 1818 if (expr->is_compound()) {
1819 VisitForStackValue(property->obj()); 1819 VisitForStackValue(property->obj());
1820 VisitForAccumulatorValue(property->key()); 1820 VisitForAccumulatorValue(property->key());
1821 __ ldr(r1, MemOperand(sp, 0)); 1821 __ ldr(r1, MemOperand(sp, 0));
1822 __ push(r0); 1822 __ push(r0);
1823 } else { 1823 } else {
1824 VisitForStackValue(property->obj()); 1824 VisitForStackValue(property->obj());
1825 VisitForStackValue(property->key()); 1825 VisitForStackValue(property->key());
1826 } 1826 }
1827 break; 1827 break;
1828 } 1828 }
1829 1829
1830 // For compound assignments we need another deoptimization point after the 1830 // For compound assignments we need another deoptimization point after the
1831 // variable/property load. 1831 // variable/property load.
1832 if (expr->is_compound()) { 1832 if (expr->is_compound()) {
1833 { AccumulatorValueContext context(this); 1833 { AccumulatorValueContext context(this);
1834 switch (assign_type) { 1834 switch (assign_type) {
1835 case VARIABLE: 1835 case VARIABLE:
1836 EmitVariableLoad(expr->target()->AsVariableProxy()); 1836 EmitVariableLoad(expr->target()->AsVariableProxy());
1837 PrepareForBailout(expr->target(), TOS_REG); 1837 PrepareForBailout(expr->target(), TOS_REG);
1838 break; 1838 break;
1839 case NAMED_PROPERTY: 1839 case NAMED_PROPERTY:
1840 EmitNamedPropertyLoad(property); 1840 EmitNamedPropertyLoad(property);
1841 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1841 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1842 break; 1842 break;
1843 case KEYED_PROPERTY: 1843 case KEYED_PROPERTY:
1844 EmitKeyedPropertyLoad(property); 1844 EmitKeyedPropertyLoad(property);
1845 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1845 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1846 break; 1846 break;
1847 } 1847 }
1848 } 1848 }
1849 1849
1850 Token::Value op = expr->binary_op(); 1850 Token::Value op = expr->binary_op();
1851 __ push(r0); // Left operand goes on the stack. 1851 __ push(r0); // Left operand goes on the stack.
1852 VisitForAccumulatorValue(expr->value()); 1852 VisitForAccumulatorValue(expr->value());
1853 1853
1854 OverwriteMode mode = expr->value()->ResultOverwriteAllowed() 1854 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1855 ? OVERWRITE_RIGHT 1855 ? OVERWRITE_RIGHT
1856 : NO_OVERWRITE; 1856 : NO_OVERWRITE;
1857 SetSourcePosition(expr->position() + 1); 1857 SetSourcePosition(expr->position() + 1);
1858 AccumulatorValueContext context(this); 1858 AccumulatorValueContext context(this);
1859 if (ShouldInlineSmiCase(op)) { 1859 if (ShouldInlineSmiCase(op)) {
1860 EmitInlineSmiBinaryOp(expr->binary_operation(), 1860 EmitInlineSmiBinaryOp(expr->binary_operation(),
1861 op, 1861 op,
1862 mode, 1862 mode,
1863 expr->target(), 1863 expr->target(),
1864 expr->value()); 1864 expr->value());
1865 } else { 1865 } else {
1866 EmitBinaryOp(expr->binary_operation(), op, mode); 1866 EmitBinaryOp(expr->binary_operation(), op, mode);
1867 } 1867 }
1868 1868
1869 // Deoptimization point in case the binary operation may have side effects. 1869 // Deoptimization point in case the binary operation may have side effects.
1870 PrepareForBailout(expr->binary_operation(), TOS_REG); 1870 PrepareForBailout(expr->binary_operation(), TOS_REG);
1871 } else { 1871 } else {
1872 VisitForAccumulatorValue(expr->value()); 1872 VisitForAccumulatorValue(expr->value());
1873 } 1873 }
1874 1874
1875 // Record source position before possible IC call. 1875 // Record source position before possible IC call.
1876 SetSourcePosition(expr->position()); 1876 SetSourcePosition(expr->position());
1877 1877
1878 // Store the value. 1878 // Store the value.
1879 switch (assign_type) { 1879 switch (assign_type) {
1880 case VARIABLE: 1880 case VARIABLE:
1881 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), 1881 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1882 expr->op()); 1882 expr->op());
1883 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 1883 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1884 context()->Plug(r0); 1884 context()->Plug(r0);
1885 break; 1885 break;
1886 case NAMED_PROPERTY: 1886 case NAMED_PROPERTY:
1887 EmitNamedPropertyAssignment(expr); 1887 EmitNamedPropertyAssignment(expr);
1888 break; 1888 break;
1889 case KEYED_PROPERTY: 1889 case KEYED_PROPERTY:
1890 EmitKeyedPropertyAssignment(expr); 1890 EmitKeyedPropertyAssignment(expr);
1891 break; 1891 break;
1892 } 1892 }
1893 } 1893 }
1894 1894
1895 1895
// Emits a named-property load through the load IC.
// On entry the receiver is in r0; the property name (always a literal for
// a named load) is moved into r2.  The IC leaves the result in r0.
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ mov(r2, Operand(key->handle()));
  // Call load IC. It has arguments receiver and property name r0 and r2.
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}
1904 1904
1905 1905
// Emits a keyed-property load through the keyed load IC.
// Per the calling convention noted below, the key is in r0 and the
// receiver in r1; the IC leaves the result in r0.
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  // Call keyed load IC. It has arguments key and receiver in r0 and r1.
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
}
1912 1912
1913 1913
// Emits an inline fast path for a binary operation on two smi operands,
// with a patchable fallback (JumpPatchSite) into the generic BinaryOpStub.
// On entry the left operand is on top of the stack and the right operand
// is in r0; the result is left in r0 and plugged into the context.
// |left_expr| and |right_expr| are unused in this implementation.
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, smi_case, stub_call;

  Register scratch1 = r2;
  Register scratch2 = r3;

  // Get the arguments.
  Register left = r1;
  Register right = r0;
  __ pop(left);

  // Perform combined smi check on both operands: or-ing the values leaves
  // the tag bit clear only if both operands are smis (kSmiTag == 0).
  __ orr(scratch1, left, Operand(right));
  STATIC_ASSERT(kSmiTag == 0);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);

  // Slow path: call the generic stub and record the patch site so the IC
  // system can later repatch the smi check.
  __ bind(&stub_call);
  BinaryOpStub stub(op, mode);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done);

  __ bind(&smi_case);
  // Smi case. This code works the same way as the smi-smi case in the type
  // recording binary operation stub, see
  // BinaryOpStub::GenerateSmiSmiOperation for comments.
  switch (op) {
    case Token::SAR:
      // NOTE(review): this unconditional branch makes the inline SAR code
      // below unreachable, so SAR always falls back to the stub.  It looks
      // deliberate (inline smi shifts disabled), but confirm before
      // removing either the branch or the dead code.
      __ b(&stub_call);
      __ GetLeastBitsFromSmi(scratch1, right, 5);
      __ mov(right, Operand(left, ASR, scratch1));
      __ bic(right, right, Operand(kSmiTagMask));
      break;
    case Token::SHL: {
      // NOTE(review): same as SAR above — the unconditional branch makes
      // the inline SHL code below dead; confirm intent.
      __ b(&stub_call);
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSL, scratch2));
      // Overflow check: result must fit in a smi after re-tagging.
      __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
      __ b(mi, &stub_call);
      __ SmiTag(right, scratch1);
      break;
    }
    case Token::SHR: {
      // NOTE(review): same as SAR above — the unconditional branch makes
      // the inline SHR code below dead; confirm intent.
      __ b(&stub_call);
      __ SmiUntag(scratch1, left);
      __ GetLeastBitsFromSmi(scratch2, right, 5);
      __ mov(scratch1, Operand(scratch1, LSR, scratch2));
      // The unsigned result does not fit in a smi if either of the top two
      // bits is set; fall back to the stub in that case.
      __ tst(scratch1, Operand(0xc0000000));
      __ b(ne, &stub_call);
      __ SmiTag(right, scratch1);
      break;
    }
    case Token::ADD:
      // Add with overflow check (V flag); overflow goes to the stub.
      __ add(scratch1, left, Operand(right), SetCC);
      __ b(vs, &stub_call);
      __ mov(right, scratch1);
      break;
    case Token::SUB:
      // Subtract with overflow check (V flag); overflow goes to the stub.
      __ sub(scratch1, left, Operand(right), SetCC);
      __ b(vs, &stub_call);
      __ mov(right, scratch1);
      break;
    case Token::MUL: {
      // Untag one operand so the 32x32->64 multiply yields a tagged result.
      __ SmiUntag(ip, right);
      __ smull(scratch1, scratch2, left, ip);
      // Overflow check: the high word must equal the sign extension of the
      // low word, otherwise the product does not fit in 32 bits.
      __ mov(ip, Operand(scratch1, ASR, 31));
      __ cmp(ip, Operand(scratch2));
      __ b(ne, &stub_call);
      // Non-zero result: done.  A zero result needs the sign of the
      // operands checked (left + right) to distinguish +0 (smi zero) from
      // -0 (not a smi; handled by the stub).
      __ cmp(scratch1, Operand(0));
      __ mov(right, Operand(scratch1), LeaveCC, ne);
      __ b(ne, &done);
      __ add(scratch2, right, Operand(left), SetCC);
      __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
      __ b(mi, &stub_call);
      break;
    }
    case Token::BIT_OR:
      // Bitwise ops on tagged smis are tag-preserving; no checks needed.
      __ orr(right, left, Operand(right));
      break;
    case Token::BIT_AND:
      __ and_(right, left, Operand(right));
      break;
    case Token::BIT_XOR:
      __ eor(right, left, Operand(right));
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(r0);
}
2013 2013
2014 2014
// Emits the generic (non-inlined) code for a binary operation.  The left
// operand is popped into r1, the right operand is already in r0, and the
// BinaryOpStub leaves the result in r0.
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ pop(r1);
  BinaryOpStub stub(op, mode);
  JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(r0);
}
2026 2026
2027 2027
// Assigns the value currently in r0 to the left-hand-side expression
// |expr| (a variable, named property, or keyed property).  The value is
// preserved across the evaluation of the target sub-expressions and is
// left in r0 as the result.
void FullCodeGenerator::EmitAssignment(Expression* expr) {
  // Invalid left-hand sides are rewritten to have a 'throw
  // ReferenceError' on the left-hand side.
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      // The variable store is emitted for effect; r0 is plugged below.
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(r0);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(r1, r0);   // Receiver into r1 for the store IC.
      __ pop(r0);  // Restore value.
      __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
    case KEYED_PROPERTY: {
      __ push(r0);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(r1, r0);   // Key into r1 for the keyed store IC.
      __ pop(r2);       // Receiver.
      __ pop(r0);  // Restore value.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(r0);
}
2082 2082
2083 2083
// Stores the value in r0 (the result register) into variable |var|.
// |op| distinguishes ordinary assignment (Token::ASSIGN) from the
// initializing stores (INIT_CONST, INIT_LET, INIT_CONST_HARMONY), which
// have different hole-value checks.  r0 is preserved as the value of the
// assignment; non-initializing assignments to consts fall through and are
// silently ignored.
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(r2, Operand(var->name()));
    __ ldr(r1, GlobalObjectOperand());
    Handle<Code> ic = is_classic_mode()
        ? isolate()->builtins()->StoreIC_Initialize()
        : isolate()->builtins()->StoreIC_Initialize_Strict();
    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);

  } else if (op == Token::INIT_CONST) {
    // Const initializers need a write barrier.
    ASSERT(!var->IsParameter());  // No const parameters.
    if (var->IsStackLocal()) {
      // Only initialize the slot if it still holds the hole, i.e. the
      // const has not been initialized yet.
      Label skip;
      __ ldr(r1, StackOperand(var));
      __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
      __ b(ne, &skip);
      __ str(result_register(), StackOperand(var));
      __ bind(&skip);
    } else {
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
      // Like var declarations, const declarations are hoisted to function
      // scope. However, unlike var initializers, const initializers are
      // able to drill a hole to that function context, even from inside a
      // 'with' context. We thus bypass the normal static scope lookup for
      // var->IsContextSlot().
      __ push(r0);
      __ mov(r0, Operand(var->name()));
      __ Push(cp, r0);  // Context and name (value was pushed above).
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    if (var->IsLookupSlot()) {
      __ push(r0);  // Value.
      __ mov(r1, Operand(var->name()));
      __ mov(r0, Operand(Smi::FromInt(language_mode())));
      __ Push(cp, r1, r0);  // Context, name, strict mode.
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      // Assigning to a let binding that still holds the hole (i.e. has not
      // been initialized yet) throws a ReferenceError.
      Label assign;
      MemOperand location = VarOperand(var, r1);
      __ ldr(r3, location);
      __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
      __ b(ne, &assign);
      __ mov(r3, Operand(var->name()));
      __ push(r3);
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
      // Perform the assignment.
      __ bind(&assign);
      __ str(result_register(), location);
      if (var->IsContextSlot()) {
        // RecordWrite may destroy all its register arguments.
        __ mov(r3, result_register());
        int offset = Context::SlotOffset(var->index());
        __ RecordWriteContextSlot(
            r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
      }
    }

  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
    // Assignment to var or initializing assignment to let/const
    // in harmony mode.
    if (var->IsStackAllocated() || var->IsContextSlot()) {
      MemOperand location = VarOperand(var, r1);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ ldr(r2, location);
        __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
        __ Check(eq, "Let binding re-initialization.");
      }
      // Perform the assignment.
      __ str(r0, location);
      if (var->IsContextSlot()) {
        // Context stores need a write barrier for the GC.
        __ mov(r3, r0);
        int offset = Context::SlotOffset(var->index());
        __ RecordWriteContextSlot(
            r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
      }
    } else {
      ASSERT(var->IsLookupSlot());
      __ push(r0);  // Value.
      __ mov(r1, Operand(var->name()));
      __ mov(r0, Operand(Smi::FromInt(language_mode())));
      __ Push(cp, r1, r0);  // Context, name, strict mode.
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    }
  }
  // Non-initializing assignments to consts are ignored.
}
2178 2178
2179 2179
// Emits a named-property store for assignment |expr| through a store IC.
// On entry the value is in the result register (r0) and the receiver is
// on the stack; the value is left in r0 as the assignment's result.
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  ASSERT(prop->key()->AsLiteral() != NULL);

  // If the assignment starts a block of assignments to the same object,
  // change to slow case to avoid the quadratic behavior of repeatedly
  // adding fast properties.
  if (expr->starts_initialization_block()) {
    __ push(result_register());
    __ ldr(ip, MemOperand(sp, kPointerSize));  // Receiver is now under value.
    __ push(ip);
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());
  }

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
  // Load receiver to r1. Leave a copy in the stack if needed for turning the
  // receiver into fast case.
  if (expr->ends_initialization_block()) {
    __ ldr(r1, MemOperand(sp));
  } else {
    __ pop(r1);
  }

  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->StoreIC_Initialize()
      : isolate()->builtins()->StoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  // If the assignment ends an initialization block, revert to fast case.
  if (expr->ends_initialization_block()) {
    __ push(r0);  // Result of assignment, saved even if not needed.
    // Receiver is under the result value.
    __ ldr(ip, MemOperand(sp, kPointerSize));
    __ push(ip);
    __ CallRuntime(Runtime::kToFastProperties, 1);
    __ pop(r0);
    __ Drop(1);  // Drop the receiver copy left on the stack.
  }
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(r0);
}
2226 2226
2227 2227
// Emits a keyed-property store for assignment |expr| through a keyed
// store IC.  On entry the value is in the result register (r0) with the
// key on top of the stack and the receiver beneath it; the value is left
// in r0 as the assignment's result.
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.

  // If the assignment starts a block of assignments to the same object,
  // change to slow case to avoid the quadratic behavior of repeatedly
  // adding fast properties.
  if (expr->starts_initialization_block()) {
    __ push(result_register());
    // Receiver is now under the key and value.
    __ ldr(ip, MemOperand(sp, 2 * kPointerSize));
    __ push(ip);
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());
  }

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ pop(r1);  // Key.
  // Load receiver to r2. Leave a copy in the stack if needed for turning the
  // receiver into fast case.
  if (expr->ends_initialization_block()) {
    __ ldr(r2, MemOperand(sp));
  } else {
    __ pop(r2);
  }

  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());

  // If the assignment ends an initialization block, revert to fast case.
  if (expr->ends_initialization_block()) {
    __ push(r0);  // Result of assignment, saved even if not needed.
    // Receiver is under the result value.
    __ ldr(ip, MemOperand(sp, kPointerSize));
    __ push(ip);
    __ CallRuntime(Runtime::kToFastProperties, 1);
    __ pop(r0);
    __ Drop(1);  // Drop the receiver copy left on the stack.
  }
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(r0);
}
2272 2272
2273 2273
// Emits code for a property access expression, dispatching on whether the
// key is a property name (named load) or a general expression (keyed
// load).  The loaded value ends up in r0.
void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());  // Receiver in r0.
    EmitNamedPropertyLoad(expr);
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(r0);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());  // Key in r0.
    __ pop(r1);  // Receiver.
    EmitKeyedPropertyLoad(expr);
    context()->Plug(r0);
  }
}
2291 2291
2292 2292
2293 void FullCodeGenerator::CallIC(Handle<Code> code, 2293 void FullCodeGenerator::CallIC(Handle<Code> code,
2294 RelocInfo::Mode rmode, 2294 RelocInfo::Mode rmode,
2295 TypeFeedbackId ast_id) { 2295 TypeFeedbackId ast_id) {
2296 ic_total_count_++; 2296 ic_total_count_++;
2297 __ Call(code, rmode, ast_id); 2297 __ Call(code, rmode, ast_id);
2298 } 2298 }
2299 2299
// Emits a function call through a call IC: evaluates the arguments onto
// the stack, puts the callee |name| in r2, and calls the computed call-IC
// stub.  The call result is left in r0 and the context register (cp) is
// restored from the frame afterwards.
void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                       Handle<Object> name,
                                       RelocInfo::Mode mode) {
  // Code common for calls using the IC.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  // Argument evaluation must not disturb the statement position recorded
  // for the call itself, hence the PreservePositionScope.
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
    __ mov(r2, Operand(name));
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  // Call the IC initialization code.
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
  CallIC(ic, mode, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->Plug(r0);
}
2323 2323
2324 2324
// Emit a keyed-property call (obj[key](...)) through a keyed call IC.
// On entry the receiver is on top of the stack; the key expression is
// evaluated into r0 and then swapped below the receiver to match the
// call IC calling convention.  The result is left in r0.
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  // Swap the name of the function and the receiver on the stack to follow
  // the calling convention for call ICs.
  __ pop(r1);
  __ push(r0);
  __ push(r1);

  // Code common for calls using the IC.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  // Call the IC initialization code.
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
  // Reload the key (pushed below the arguments above) into r2 for the IC.
  __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize));  // Key.
  CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, r0);  // Drop the key still on the stack.
}
2356 2356
2357 2357
// Emit a function call through CallFunctionStub.  The callee and receiver
// are already on the stack; this evaluates the arguments, allocates a
// type-feedback cell for the call site (RECORD_CALL_TARGET is always
// added to |flags|), and invokes the stub.  The result is left in r0.
void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
  // Code common for calls using the call stub.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());

  // Record call targets in unoptimized code.
  flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
  __ mov(r2, Operand(cell));  // The stub expects the feedback cell in r2.

  CallFunctionStub stub(arg_count, flags);
  // Load the function (below the arguments and the receiver) into r1.
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);
  RecordJSReturnSite(expr);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, r0);
}
2387 2387
2388 2388
// Emit the runtime call that resolves a possibly-direct eval.  The caller
// has already pushed a copy of the function; this pushes the remaining
// four arguments (first eval argument or undefined, enclosing receiver,
// language mode, scope start position) and calls
// %ResolvePossiblyDirectEval with 5 arguments total.
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
  }
  __ push(r1);

  // Push the receiver of the enclosing function.
  // Skips the saved fp and return address on the caller's frame.
  int receiver_offset = 2 + info_->scope()->num_parameters();
  __ ldr(r1, MemOperand(fp, receiver_offset * kPointerSize));
  __ push(r1);
  // Push the language mode.
  __ mov(r1, Operand(Smi::FromInt(language_mode())));
  __ push(r1);

  // Push the start position of the scope the call resides in.
  __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
  __ push(r1);

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
}
2413 2413
2414 2414
// Generate code for a call expression, dispatching on the shape of the
// callee: possibly-direct eval, unallocated (global) variable, lookup
// slot, named/keyed property, or an arbitrary expression.  Every path
// records the JS return site; the call result ends up in r0.
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  VariableProxy* proxy = callee->AsVariableProxy();
  Property* property = callee->AsProperty();

  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();

    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
      __ push(r2);  // Reserved receiver slot.

      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ push(r1);
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in r0 (function) and
      // r1 (receiver). Touch up the stack with the right values.
      __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
      __ str(r1, MemOperand(sp, arg_count * kPointerSize));
    }

    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
    __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, r0);
  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
    // Push global object as receiver for the call IC.
    __ ldr(r0, GlobalObjectOperand());
    __ push(r0);
    EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    // Call to a lookup slot (dynamically introduced variable).
    Label slow, done;

    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
    }

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in r0)
    // and the object holding it (returned in r1).
    __ push(context_register());
    __ mov(r2, Operand(proxy->name()));
    __ push(r2);
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    __ Push(r0, r1);  // Function, receiver.

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ b(&call);
      __ bind(&done);
      // Push function.
      __ push(r0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing the hole to the call function stub.
      __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
      __ push(r1);
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found
    // by LoadContextSlot. That object could be the hole if the
    // receiver is implicitly the global object.
    EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
  } else if (property != NULL) {
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      // Named property: obj.name(...) goes through the named call IC.
      EmitCallWithIC(expr,
                     property->key()->AsLiteral()->handle(),
                     RelocInfo::CODE_TARGET);
    } else {
      // Keyed property: obj[key](...) goes through the keyed call IC.
      EmitKeyedCallWithIC(expr, property->key());
    }
  } else {
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    // Load global receiver object.
    __ ldr(r1, GlobalObjectOperand());
    __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
    __ push(r1);
    // Emit function call.
    EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  ASSERT(expr->return_is_recorded_);
#endif
}
2539 2539
2540 2540
// Generate code for a 'new' expression: evaluate the constructor and the
// arguments, allocate a type-feedback cell for the site, and invoke the
// construct stub.  The new object is left in r0.
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into r1 and r0.
  __ mov(r0, Operand(arg_count));
  __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  Handle<Object> uninitialized =
      TypeFeedbackCells::UninitializedSentinel(isolate());
  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
  __ mov(r2, Operand(cell));  // The construct stub expects the cell in r2.

  CallConstructStub stub(RECORD_CALL_TARGET);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(r0);
}
2580 2580
2581 2581
// Inlined runtime call %_IsSmi: tests whether the single argument is a
// small integer (smi) and produces a boolean in the current test context.
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  // Smis have a zero tag bit, so the tst sets 'eq' exactly for smis.
  __ tst(r0, Operand(kSmiTagMask));
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2601 2601
2602 2602
// Inlined runtime call %_IsNonNegativeSmi: tests whether the single
// argument is a smi with a clear sign bit (i.e. a non-negative smi).
void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  // Test the smi tag bit and the sign bit in one instruction; both must
  // be zero for a non-negative smi.
  __ tst(r0, Operand(kSmiTagMask | 0x80000000));
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2622 2622
2623 2623
// Inlined runtime call %_IsObject: true for null and for non-undetectable
// heap objects whose instance type is in the non-callable spec-object
// range; false for smis, undetectable objects, and everything else.
void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  // null counts as an object.
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, if_true);
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ tst(r1, Operand(1 << Map::kIsUndetectable));
  __ b(ne, if_false);
  // Check that the instance type is within
  // [FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, LAST_NONCALLABLE_SPEC_OBJECT_TYPE].
  __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  __ b(lt, if_false);
  __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(le, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2655 2655
2656 2656
// Inlined runtime call %_IsSpecObject: tests whether the single argument
// is a heap object with instance type >= FIRST_SPEC_OBJECT_TYPE.
void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  // Sets the condition flags from comparing the instance type of r0's map
  // against FIRST_SPEC_OBJECT_TYPE (map and type clobber r1).
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2677 2677
2678 2678
// Inlined runtime call %_IsUndetectableObject: tests whether the single
// argument is a heap object whose map has the undetectable bit set.
void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(r0, if_false);
  // Load the map's bit field and test the undetectable bit.
  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
  __ tst(r1, Operand(1 << Map::kIsUndetectable));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ne, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
2701 2701
2702 2702
2703 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( 2703 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2704 CallRuntime* expr) { 2704 CallRuntime* expr) {
2705 ZoneList<Expression*>* args = expr->arguments(); 2705 ZoneList<Expression*>* args = expr->arguments();
2706 ASSERT(args->length() == 1); 2706 ASSERT(args->length() == 1);
2707 2707
2708 VisitForAccumulatorValue(args->at(0)); 2708 VisitForAccumulatorValue(args->at(0));
2709 2709
2710 Label materialize_true, materialize_false; 2710 Label materialize_true, materialize_false;
2711 Label* if_true = NULL; 2711 Label* if_true = NULL;
2712 Label* if_false = NULL; 2712 Label* if_false = NULL;
2713 Label* fall_through = NULL; 2713 Label* fall_through = NULL;
2714 context()->PrepareTest(&materialize_true, &materialize_false, 2714 context()->PrepareTest(&materialize_true, &materialize_false,
2715 &if_true, &if_false, &fall_through); 2715 &if_true, &if_false, &fall_through);
2716 2716
2717 if (generate_debug_code_) __ AbortIfSmi(r0); 2717 if (generate_debug_code_) __ AbortIfSmi(r0);
2718 2718
2719 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 2719 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2720 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset)); 2720 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
2721 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); 2721 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
2722 __ b(ne, if_true); 2722 __ b(ne, if_true);
2723 2723
2724 // Check for fast case object. Generate false result for slow case object. 2724 // Check for fast case object. Generate false result for slow case object.
2725 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset)); 2725 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2726 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); 2726 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2727 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); 2727 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
2728 __ cmp(r2, ip); 2728 __ cmp(r2, ip);
2729 __ b(eq, if_false); 2729 __ b(eq, if_false);
2730 2730
2731 // Look for valueOf symbol in the descriptor array, and indicate false if 2731 // Look for valueOf symbol in the descriptor array, and indicate false if
2732 // found. The type is not checked, so if it is a transition it is a false 2732 // found. Since we omit an enumeration index check, if it is added via a
2733 // negative. 2733 // transition that shares its descriptor array, this is a false positive.
2734 __ LoadInstanceDescriptors(r1, r4, r3); 2734 Label entry, loop, done;
2735 __ ldr(r3, FieldMemOperand(r4, FixedArray::kLengthOffset)); 2735
2736 // r4: descriptor array 2736 // Skip loop if no descriptors are valid.
2737 // r3: length of descriptor array 2737 __ NumberOfOwnDescriptors(r3, r1);
2738 // Calculate the end of the descriptor array. 2738 __ cmp(r3, Operand(0));
2739 __ b(eq, &done);
2740
2741 __ LoadInstanceDescriptors(r1, r4, r2);
2742 // r4: descriptor array.
2743 // r3: valid entries in the descriptor array.
2739 STATIC_ASSERT(kSmiTag == 0); 2744 STATIC_ASSERT(kSmiTag == 0);
2740 STATIC_ASSERT(kSmiTagSize == 1); 2745 STATIC_ASSERT(kSmiTagSize == 1);
2741 STATIC_ASSERT(kPointerSize == 4); 2746 STATIC_ASSERT(kPointerSize == 4);
2742 __ add(r2, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 2747 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
2748 __ mul(r3, r3, ip);
2749 // Calculate location of the first key name.
2750 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
2751 // Calculate the end of the descriptor array.
2752 __ mov(r2, r4);
2743 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize)); 2753 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
2744 2754
2745 // Calculate location of the first key name.
2746 __ add(r4,
2747 r4,
2748 Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
2749 // Loop through all the keys in the descriptor array. If one of these is the 2755 // Loop through all the keys in the descriptor array. If one of these is the
2750 // symbol valueOf the result is false. 2756 // symbol valueOf the result is false.
2751 Label entry, loop;
2752 // The use of ip to store the valueOf symbol assumes that it is not otherwise 2757 // The use of ip to store the valueOf symbol assumes that it is not otherwise
2753 // used in the loop below. 2758 // used in the loop below.
2754 __ mov(ip, Operand(FACTORY->value_of_symbol())); 2759 __ mov(ip, Operand(FACTORY->value_of_symbol()));
2755 __ jmp(&entry); 2760 __ jmp(&entry);
2756 __ bind(&loop); 2761 __ bind(&loop);
2757 __ ldr(r3, MemOperand(r4, 0)); 2762 __ ldr(r3, MemOperand(r4, 0));
2758 __ cmp(r3, ip); 2763 __ cmp(r3, ip);
2759 __ b(eq, if_false); 2764 __ b(eq, if_false);
2760 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize)); 2765 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
2761 __ bind(&entry); 2766 __ bind(&entry);
2762 __ cmp(r4, Operand(r2)); 2767 __ cmp(r4, Operand(r2));
2763 __ b(ne, &loop); 2768 __ b(ne, &loop);
2764 2769
2765 // If a valueOf property is not found on the object check that it's 2770 __ bind(&done);
2771 // If a valueOf property is not found on the object check that its
2766 // prototype is the un-modified String prototype. If not result is false. 2772 // prototype is the un-modified String prototype. If not result is false.
2767 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset)); 2773 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
2768 __ JumpIfSmi(r2, if_false); 2774 __ JumpIfSmi(r2, if_false);
2769 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); 2775 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2770 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 2776 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2771 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset)); 2777 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
2772 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); 2778 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2773 __ cmp(r2, r3); 2779 __ cmp(r2, r3);
2774 __ b(ne, if_false); 2780 __ b(ne, if_false);
2775 2781
2776 // Set the bit in the map to indicate that it has been checked safe for 2782 // Set the bit in the map to indicate that it has been checked safe for
2777 // default valueOf and set true result. 2783 // default valueOf and set true result.
2778 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); 2784 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
2779 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); 2785 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
2780 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); 2786 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
2781 __ jmp(if_true); 2787 __ jmp(if_true);
2782 2788
2783 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2789 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2784 context()->Plug(if_true, if_false); 2790 context()->Plug(if_true, if_false);
2785 } 2791 }
2786 2792
2787 2793
2788 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { 2794 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2789 ZoneList<Expression*>* args = expr->arguments(); 2795 ZoneList<Expression*>* args = expr->arguments();
2790 ASSERT(args->length() == 1); 2796 ASSERT(args->length() == 1);
2791 2797
2792 VisitForAccumulatorValue(args->at(0)); 2798 VisitForAccumulatorValue(args->at(0));
2793 2799
2794 Label materialize_true, materialize_false; 2800 Label materialize_true, materialize_false;
2795 Label* if_true = NULL; 2801 Label* if_true = NULL;
2796 Label* if_false = NULL; 2802 Label* if_false = NULL;
2797 Label* fall_through = NULL; 2803 Label* fall_through = NULL;
2798 context()->PrepareTest(&materialize_true, &materialize_false, 2804 context()->PrepareTest(&materialize_true, &materialize_false,
2799 &if_true, &if_false, &fall_through); 2805 &if_true, &if_false, &fall_through);
2800 2806
2801 __ JumpIfSmi(r0, if_false); 2807 __ JumpIfSmi(r0, if_false);
2802 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); 2808 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
2803 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2809 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2804 Split(eq, if_true, if_false, fall_through); 2810 Split(eq, if_true, if_false, fall_through);
2805 2811
2806 context()->Plug(if_true, if_false); 2812 context()->Plug(if_true, if_false);
2807 } 2813 }
2808 2814
2809 2815
2810 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { 2816 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2811 ZoneList<Expression*>* args = expr->arguments(); 2817 ZoneList<Expression*>* args = expr->arguments();
2812 ASSERT(args->length() == 1); 2818 ASSERT(args->length() == 1);
2813 2819
2814 VisitForAccumulatorValue(args->at(0)); 2820 VisitForAccumulatorValue(args->at(0));
2815 2821
2816 Label materialize_true, materialize_false; 2822 Label materialize_true, materialize_false;
2817 Label* if_true = NULL; 2823 Label* if_true = NULL;
2818 Label* if_false = NULL; 2824 Label* if_false = NULL;
2819 Label* fall_through = NULL; 2825 Label* fall_through = NULL;
2820 context()->PrepareTest(&materialize_true, &materialize_false, 2826 context()->PrepareTest(&materialize_true, &materialize_false,
2821 &if_true, &if_false, &fall_through); 2827 &if_true, &if_false, &fall_through);
2822 2828
2823 __ JumpIfSmi(r0, if_false); 2829 __ JumpIfSmi(r0, if_false);
2824 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE); 2830 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
2825 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2831 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2826 Split(eq, if_true, if_false, fall_through); 2832 Split(eq, if_true, if_false, fall_through);
2827 2833
2828 context()->Plug(if_true, if_false); 2834 context()->Plug(if_true, if_false);
2829 } 2835 }
2830 2836
2831 2837
2832 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { 2838 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2833 ZoneList<Expression*>* args = expr->arguments(); 2839 ZoneList<Expression*>* args = expr->arguments();
2834 ASSERT(args->length() == 1); 2840 ASSERT(args->length() == 1);
2835 2841
2836 VisitForAccumulatorValue(args->at(0)); 2842 VisitForAccumulatorValue(args->at(0));
2837 2843
2838 Label materialize_true, materialize_false; 2844 Label materialize_true, materialize_false;
2839 Label* if_true = NULL; 2845 Label* if_true = NULL;
2840 Label* if_false = NULL; 2846 Label* if_false = NULL;
2841 Label* fall_through = NULL; 2847 Label* fall_through = NULL;
2842 context()->PrepareTest(&materialize_true, &materialize_false, 2848 context()->PrepareTest(&materialize_true, &materialize_false,
2843 &if_true, &if_false, &fall_through); 2849 &if_true, &if_false, &fall_through);
2844 2850
2845 __ JumpIfSmi(r0, if_false); 2851 __ JumpIfSmi(r0, if_false);
2846 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); 2852 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
2847 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2853 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2848 Split(eq, if_true, if_false, fall_through); 2854 Split(eq, if_true, if_false, fall_through);
2849 2855
2850 context()->Plug(if_true, if_false); 2856 context()->Plug(if_true, if_false);
2851 } 2857 }
2852 2858
2853 2859
2854 2860
2855 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { 2861 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2856 ASSERT(expr->arguments()->length() == 0); 2862 ASSERT(expr->arguments()->length() == 0);
2857 2863
2858 Label materialize_true, materialize_false; 2864 Label materialize_true, materialize_false;
2859 Label* if_true = NULL; 2865 Label* if_true = NULL;
2860 Label* if_false = NULL; 2866 Label* if_false = NULL;
2861 Label* fall_through = NULL; 2867 Label* fall_through = NULL;
2862 context()->PrepareTest(&materialize_true, &materialize_false, 2868 context()->PrepareTest(&materialize_true, &materialize_false,
2863 &if_true, &if_false, &fall_through); 2869 &if_true, &if_false, &fall_through);
2864 2870
2865 // Get the frame pointer for the calling frame. 2871 // Get the frame pointer for the calling frame.
2866 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 2872 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2867 2873
2868 // Skip the arguments adaptor frame if it exists. 2874 // Skip the arguments adaptor frame if it exists.
2869 Label check_frame_marker; 2875 Label check_frame_marker;
2870 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset)); 2876 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
2871 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 2877 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2872 __ b(ne, &check_frame_marker); 2878 __ b(ne, &check_frame_marker);
2873 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset)); 2879 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
2874 2880
2875 // Check the marker in the calling frame. 2881 // Check the marker in the calling frame.
2876 __ bind(&check_frame_marker); 2882 __ bind(&check_frame_marker);
2877 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset)); 2883 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
2878 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); 2884 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
2879 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2885 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2880 Split(eq, if_true, if_false, fall_through); 2886 Split(eq, if_true, if_false, fall_through);
2881 2887
2882 context()->Plug(if_true, if_false); 2888 context()->Plug(if_true, if_false);
2883 } 2889 }
2884 2890
2885 2891
2886 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { 2892 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2887 ZoneList<Expression*>* args = expr->arguments(); 2893 ZoneList<Expression*>* args = expr->arguments();
2888 ASSERT(args->length() == 2); 2894 ASSERT(args->length() == 2);
2889 2895
2890 // Load the two objects into registers and perform the comparison. 2896 // Load the two objects into registers and perform the comparison.
2891 VisitForStackValue(args->at(0)); 2897 VisitForStackValue(args->at(0));
2892 VisitForAccumulatorValue(args->at(1)); 2898 VisitForAccumulatorValue(args->at(1));
2893 2899
2894 Label materialize_true, materialize_false; 2900 Label materialize_true, materialize_false;
2895 Label* if_true = NULL; 2901 Label* if_true = NULL;
2896 Label* if_false = NULL; 2902 Label* if_false = NULL;
2897 Label* fall_through = NULL; 2903 Label* fall_through = NULL;
2898 context()->PrepareTest(&materialize_true, &materialize_false, 2904 context()->PrepareTest(&materialize_true, &materialize_false,
2899 &if_true, &if_false, &fall_through); 2905 &if_true, &if_false, &fall_through);
2900 2906
2901 __ pop(r1); 2907 __ pop(r1);
2902 __ cmp(r0, r1); 2908 __ cmp(r0, r1);
2903 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2909 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2904 Split(eq, if_true, if_false, fall_through); 2910 Split(eq, if_true, if_false, fall_through);
2905 2911
2906 context()->Plug(if_true, if_false); 2912 context()->Plug(if_true, if_false);
2907 } 2913 }
2908 2914
2909 2915
2910 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { 2916 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2911 ZoneList<Expression*>* args = expr->arguments(); 2917 ZoneList<Expression*>* args = expr->arguments();
2912 ASSERT(args->length() == 1); 2918 ASSERT(args->length() == 1);
2913 2919
2914 // ArgumentsAccessStub expects the key in r1 and the formal 2920 // ArgumentsAccessStub expects the key in r1 and the formal
2915 // parameter count in r0. 2921 // parameter count in r0.
2916 VisitForAccumulatorValue(args->at(0)); 2922 VisitForAccumulatorValue(args->at(0));
2917 __ mov(r1, r0); 2923 __ mov(r1, r0);
2918 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 2924 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2919 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); 2925 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2920 __ CallStub(&stub); 2926 __ CallStub(&stub);
2921 context()->Plug(r0); 2927 context()->Plug(r0);
2922 } 2928 }
2923 2929
2924 2930
2925 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { 2931 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2926 ASSERT(expr->arguments()->length() == 0); 2932 ASSERT(expr->arguments()->length() == 0);
2927 Label exit; 2933 Label exit;
2928 // Get the number of formal parameters. 2934 // Get the number of formal parameters.
2929 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 2935 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2930 2936
2931 // Check if the calling frame is an arguments adaptor frame. 2937 // Check if the calling frame is an arguments adaptor frame.
2932 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 2938 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2933 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); 2939 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
2934 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 2940 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2935 __ b(ne, &exit); 2941 __ b(ne, &exit);
2936 2942
2937 // Arguments adaptor case: Read the arguments length from the 2943 // Arguments adaptor case: Read the arguments length from the
2938 // adaptor frame. 2944 // adaptor frame.
2939 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2945 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
2940 2946
2941 __ bind(&exit); 2947 __ bind(&exit);
2942 context()->Plug(r0); 2948 context()->Plug(r0);
2943 } 2949 }
2944 2950
2945 2951
2946 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { 2952 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2947 ZoneList<Expression*>* args = expr->arguments(); 2953 ZoneList<Expression*>* args = expr->arguments();
2948 ASSERT(args->length() == 1); 2954 ASSERT(args->length() == 1);
2949 Label done, null, function, non_function_constructor; 2955 Label done, null, function, non_function_constructor;
2950 2956
2951 VisitForAccumulatorValue(args->at(0)); 2957 VisitForAccumulatorValue(args->at(0));
2952 2958
2953 // If the object is a smi, we return null. 2959 // If the object is a smi, we return null.
2954 __ JumpIfSmi(r0, &null); 2960 __ JumpIfSmi(r0, &null);
2955 2961
2956 // Check that the object is a JS object but take special care of JS 2962 // Check that the object is a JS object but take special care of JS
2957 // functions to make sure they have 'Function' as their class. 2963 // functions to make sure they have 'Function' as their class.
2958 // Assume that there are only two callable types, and one of them is at 2964 // Assume that there are only two callable types, and one of them is at
2959 // either end of the type range for JS object types. Saves extra comparisons. 2965 // either end of the type range for JS object types. Saves extra comparisons.
2960 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); 2966 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
2961 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE); 2967 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
2962 // Map is now in r0. 2968 // Map is now in r0.
2963 __ b(lt, &null); 2969 __ b(lt, &null);
2964 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == 2970 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2965 FIRST_SPEC_OBJECT_TYPE + 1); 2971 FIRST_SPEC_OBJECT_TYPE + 1);
2966 __ b(eq, &function); 2972 __ b(eq, &function);
2967 2973
2968 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE)); 2974 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
2969 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == 2975 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2970 LAST_SPEC_OBJECT_TYPE - 1); 2976 LAST_SPEC_OBJECT_TYPE - 1);
2971 __ b(eq, &function); 2977 __ b(eq, &function);
2972 // Assume that there is no larger type. 2978 // Assume that there is no larger type.
2973 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1); 2979 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
2974 2980
2975 // Check if the constructor in the map is a JS function. 2981 // Check if the constructor in the map is a JS function.
2976 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset)); 2982 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
2977 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); 2983 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
2978 __ b(ne, &non_function_constructor); 2984 __ b(ne, &non_function_constructor);
2979 2985
2980 // r0 now contains the constructor function. Grab the 2986 // r0 now contains the constructor function. Grab the
2981 // instance class name from there. 2987 // instance class name from there.
2982 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); 2988 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
2983 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset)); 2989 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
2984 __ b(&done); 2990 __ b(&done);
2985 2991
2986 // Functions have class 'Function'. 2992 // Functions have class 'Function'.
2987 __ bind(&function); 2993 __ bind(&function);
2988 __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex); 2994 __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex);
2989 __ jmp(&done); 2995 __ jmp(&done);
2990 2996
2991 // Objects with a non-function constructor have class 'Object'. 2997 // Objects with a non-function constructor have class 'Object'.
2992 __ bind(&non_function_constructor); 2998 __ bind(&non_function_constructor);
2993 __ LoadRoot(r0, Heap::kObject_symbolRootIndex); 2999 __ LoadRoot(r0, Heap::kObject_symbolRootIndex);
2994 __ jmp(&done); 3000 __ jmp(&done);
2995 3001
2996 // Non-JS objects have class null. 3002 // Non-JS objects have class null.
2997 __ bind(&null); 3003 __ bind(&null);
2998 __ LoadRoot(r0, Heap::kNullValueRootIndex); 3004 __ LoadRoot(r0, Heap::kNullValueRootIndex);
2999 3005
3000 // All done. 3006 // All done.
3001 __ bind(&done); 3007 __ bind(&done);
3002 3008
3003 context()->Plug(r0); 3009 context()->Plug(r0);
3004 } 3010 }
3005 3011
3006 3012
3007 void FullCodeGenerator::EmitLog(CallRuntime* expr) { 3013 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3008 // Conditionally generate a log call. 3014 // Conditionally generate a log call.
3009 // Args: 3015 // Args:
3010 // 0 (literal string): The type of logging (corresponds to the flags). 3016 // 0 (literal string): The type of logging (corresponds to the flags).
3011 // This is used to determine whether or not to generate the log call. 3017 // This is used to determine whether or not to generate the log call.
3012 // 1 (string): Format string. Access the string at argument index 2 3018 // 1 (string): Format string. Access the string at argument index 2
3013 // with '%2s' (see Logger::LogRuntime for all the formats). 3019 // with '%2s' (see Logger::LogRuntime for all the formats).
3014 // 2 (array): Arguments to the format string. 3020 // 2 (array): Arguments to the format string.
3015 ZoneList<Expression*>* args = expr->arguments(); 3021 ZoneList<Expression*>* args = expr->arguments();
3016 ASSERT_EQ(args->length(), 3); 3022 ASSERT_EQ(args->length(), 3);
3017 if (CodeGenerator::ShouldGenerateLog(args->at(0))) { 3023 if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
3018 VisitForStackValue(args->at(1)); 3024 VisitForStackValue(args->at(1));
3019 VisitForStackValue(args->at(2)); 3025 VisitForStackValue(args->at(2));
3020 __ CallRuntime(Runtime::kLog, 2); 3026 __ CallRuntime(Runtime::kLog, 2);
3021 } 3027 }
3022 3028
3023 // Finally, we're expected to leave a value on the top of the stack. 3029 // Finally, we're expected to leave a value on the top of the stack.
3024 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 3030 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3025 context()->Plug(r0); 3031 context()->Plug(r0);
3026 } 3032 }
3027 3033
3028 3034
3029 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) { 3035 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
3030 ASSERT(expr->arguments()->length() == 0); 3036 ASSERT(expr->arguments()->length() == 0);
3031 Label slow_allocate_heapnumber; 3037 Label slow_allocate_heapnumber;
3032 Label heapnumber_allocated; 3038 Label heapnumber_allocated;
3033 3039
3034 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); 3040 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3035 __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber); 3041 __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
3036 __ jmp(&heapnumber_allocated); 3042 __ jmp(&heapnumber_allocated);
3037 3043
3038 __ bind(&slow_allocate_heapnumber); 3044 __ bind(&slow_allocate_heapnumber);
3039 // Allocate a heap number. 3045 // Allocate a heap number.
3040 __ CallRuntime(Runtime::kNumberAlloc, 0); 3046 __ CallRuntime(Runtime::kNumberAlloc, 0);
3041 __ mov(r4, Operand(r0)); 3047 __ mov(r4, Operand(r0));
3042 3048
3043 __ bind(&heapnumber_allocated); 3049 __ bind(&heapnumber_allocated);
3044 3050
3045 // Convert 32 random bits in r0 to 0.(32 random bits) in a double 3051 // Convert 32 random bits in r0 to 0.(32 random bits) in a double
3046 // by computing: 3052 // by computing:
3047 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). 3053 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
3048 if (CpuFeatures::IsSupported(VFP2)) { 3054 if (CpuFeatures::IsSupported(VFP2)) {
3049 __ PrepareCallCFunction(1, r0); 3055 __ PrepareCallCFunction(1, r0);
3050 __ ldr(r0, 3056 __ ldr(r0,
3051 ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX)); 3057 ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
3052 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset)); 3058 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
3053 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1); 3059 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
3054 3060
3055 CpuFeatures::Scope scope(VFP2); 3061 CpuFeatures::Scope scope(VFP2);
3056 // 0x41300000 is the top half of 1.0 x 2^20 as a double. 3062 // 0x41300000 is the top half of 1.0 x 2^20 as a double.
3057 // Create this constant using mov/orr to avoid PC relative load. 3063 // Create this constant using mov/orr to avoid PC relative load.
3058 __ mov(r1, Operand(0x41000000)); 3064 __ mov(r1, Operand(0x41000000));
3059 __ orr(r1, r1, Operand(0x300000)); 3065 __ orr(r1, r1, Operand(0x300000));
3060 // Move 0x41300000xxxxxxxx (x = random bits) to VFP. 3066 // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
3061 __ vmov(d7, r0, r1); 3067 __ vmov(d7, r0, r1);
3062 // Move 0x4130000000000000 to VFP. 3068 // Move 0x4130000000000000 to VFP.
3063 __ mov(r0, Operand(0, RelocInfo::NONE)); 3069 __ mov(r0, Operand(0, RelocInfo::NONE));
3064 __ vmov(d8, r0, r1); 3070 __ vmov(d8, r0, r1);
3065 // Subtract and store the result in the heap number. 3071 // Subtract and store the result in the heap number.
3066 __ vsub(d7, d7, d8); 3072 __ vsub(d7, d7, d8);
3067 __ sub(r0, r4, Operand(kHeapObjectTag)); 3073 __ sub(r0, r4, Operand(kHeapObjectTag));
3068 __ vstr(d7, r0, HeapNumber::kValueOffset); 3074 __ vstr(d7, r0, HeapNumber::kValueOffset);
3069 __ mov(r0, r4); 3075 __ mov(r0, r4);
3070 } else { 3076 } else {
3071 __ PrepareCallCFunction(2, r0); 3077 __ PrepareCallCFunction(2, r0);
3072 __ ldr(r1, 3078 __ ldr(r1,
3073 ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX)); 3079 ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
3074 __ mov(r0, Operand(r4)); 3080 __ mov(r0, Operand(r4));
3075 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset)); 3081 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset));
3076 __ CallCFunction( 3082 __ CallCFunction(
3077 ExternalReference::fill_heap_number_with_random_function(isolate()), 2); 3083 ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
3078 } 3084 }
3079 3085
3080 context()->Plug(r0); 3086 context()->Plug(r0);
3081 } 3087 }
3082 3088
3083 3089
3084 void FullCodeGenerator::EmitSubString(CallRuntime* expr) { 3090 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3085 // Load the arguments on the stack and call the stub. 3091 // Load the arguments on the stack and call the stub.
3086 SubStringStub stub; 3092 SubStringStub stub;
3087 ZoneList<Expression*>* args = expr->arguments(); 3093 ZoneList<Expression*>* args = expr->arguments();
3088 ASSERT(args->length() == 3); 3094 ASSERT(args->length() == 3);
3089 VisitForStackValue(args->at(0)); 3095 VisitForStackValue(args->at(0));
3090 VisitForStackValue(args->at(1)); 3096 VisitForStackValue(args->at(1));
3091 VisitForStackValue(args->at(2)); 3097 VisitForStackValue(args->at(2));
3092 __ CallStub(&stub); 3098 __ CallStub(&stub);
3093 context()->Plug(r0); 3099 context()->Plug(r0);
3094 } 3100 }
3095 3101
3096 3102
3097 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { 3103 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3098 // Load the arguments on the stack and call the stub. 3104 // Load the arguments on the stack and call the stub.
3099 RegExpExecStub stub; 3105 RegExpExecStub stub;
3100 ZoneList<Expression*>* args = expr->arguments(); 3106 ZoneList<Expression*>* args = expr->arguments();
3101 ASSERT(args->length() == 4); 3107 ASSERT(args->length() == 4);
3102 VisitForStackValue(args->at(0)); 3108 VisitForStackValue(args->at(0));
3103 VisitForStackValue(args->at(1)); 3109 VisitForStackValue(args->at(1));
3104 VisitForStackValue(args->at(2)); 3110 VisitForStackValue(args->at(2));
3105 VisitForStackValue(args->at(3)); 3111 VisitForStackValue(args->at(3));
3106 __ CallStub(&stub); 3112 __ CallStub(&stub);
3107 context()->Plug(r0); 3113 context()->Plug(r0);
3108 } 3114 }
3109 3115
3110 3116
3111 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { 3117 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3112 ZoneList<Expression*>* args = expr->arguments(); 3118 ZoneList<Expression*>* args = expr->arguments();
3113 ASSERT(args->length() == 1); 3119 ASSERT(args->length() == 1);
3114 VisitForAccumulatorValue(args->at(0)); // Load the object. 3120 VisitForAccumulatorValue(args->at(0)); // Load the object.
3115 3121
3116 Label done; 3122 Label done;
3117 // If the object is a smi return the object. 3123 // If the object is a smi return the object.
3118 __ JumpIfSmi(r0, &done); 3124 __ JumpIfSmi(r0, &done);
3119 // If the object is not a value type, return the object. 3125 // If the object is not a value type, return the object.
3120 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE); 3126 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3121 __ b(ne, &done); 3127 __ b(ne, &done);
3122 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset)); 3128 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
3123 3129
3124 __ bind(&done); 3130 __ bind(&done);
3125 context()->Plug(r0); 3131 context()->Plug(r0);
3126 } 3132 }
3127 3133
3128 3134
3129 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { 3135 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3130 ZoneList<Expression*>* args = expr->arguments(); 3136 ZoneList<Expression*>* args = expr->arguments();
3131 ASSERT(args->length() == 2); 3137 ASSERT(args->length() == 2);
3132 ASSERT_NE(NULL, args->at(1)->AsLiteral()); 3138 ASSERT_NE(NULL, args->at(1)->AsLiteral());
3133 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle())); 3139 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
3134 3140
3135 VisitForAccumulatorValue(args->at(0)); // Load the object. 3141 VisitForAccumulatorValue(args->at(0)); // Load the object.
3136 3142
3137 Label runtime, done, not_date_object; 3143 Label runtime, done, not_date_object;
3138 Register object = r0; 3144 Register object = r0;
3139 Register result = r0; 3145 Register result = r0;
3140 Register scratch0 = r9; 3146 Register scratch0 = r9;
3141 Register scratch1 = r1; 3147 Register scratch1 = r1;
3142 3148
3143 __ JumpIfSmi(object, &not_date_object); 3149 __ JumpIfSmi(object, &not_date_object);
3144 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE); 3150 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
3145 __ b(ne, &not_date_object); 3151 __ b(ne, &not_date_object);
3146 3152
3147 if (index->value() == 0) { 3153 if (index->value() == 0) {
3148 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset)); 3154 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3149 __ jmp(&done); 3155 __ jmp(&done);
3150 } else { 3156 } else {
3151 if (index->value() < JSDate::kFirstUncachedField) { 3157 if (index->value() < JSDate::kFirstUncachedField) {
3152 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); 3158 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3153 __ mov(scratch1, Operand(stamp)); 3159 __ mov(scratch1, Operand(stamp));
3154 __ ldr(scratch1, MemOperand(scratch1)); 3160 __ ldr(scratch1, MemOperand(scratch1));
3155 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset)); 3161 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3156 __ cmp(scratch1, scratch0); 3162 __ cmp(scratch1, scratch0);
3157 __ b(ne, &runtime); 3163 __ b(ne, &runtime);
3158 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset + 3164 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3159 kPointerSize * index->value())); 3165 kPointerSize * index->value()));
3160 __ jmp(&done); 3166 __ jmp(&done);
3161 } 3167 }
3162 __ bind(&runtime); 3168 __ bind(&runtime);
3163 __ PrepareCallCFunction(2, scratch1); 3169 __ PrepareCallCFunction(2, scratch1);
3164 __ mov(r1, Operand(index)); 3170 __ mov(r1, Operand(index));
3165 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); 3171 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3166 __ jmp(&done); 3172 __ jmp(&done);
3167 } 3173 }
3168 3174
3169 __ bind(&not_date_object); 3175 __ bind(&not_date_object);
3170 __ CallRuntime(Runtime::kThrowNotDateError, 0); 3176 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3171 __ bind(&done); 3177 __ bind(&done);
3172 context()->Plug(r0); 3178 context()->Plug(r0);
3173 } 3179 }
3174 3180
3175 3181
3176 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { 3182 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3177 // Load the arguments on the stack and call the runtime function. 3183 // Load the arguments on the stack and call the runtime function.
3178 ZoneList<Expression*>* args = expr->arguments(); 3184 ZoneList<Expression*>* args = expr->arguments();
3179 ASSERT(args->length() == 2); 3185 ASSERT(args->length() == 2);
3180 VisitForStackValue(args->at(0)); 3186 VisitForStackValue(args->at(0));
3181 VisitForStackValue(args->at(1)); 3187 VisitForStackValue(args->at(1));
3182 if (CpuFeatures::IsSupported(VFP2)) { 3188 if (CpuFeatures::IsSupported(VFP2)) {
3183 MathPowStub stub(MathPowStub::ON_STACK); 3189 MathPowStub stub(MathPowStub::ON_STACK);
3184 __ CallStub(&stub); 3190 __ CallStub(&stub);
3185 } else { 3191 } else {
3186 __ CallRuntime(Runtime::kMath_pow, 2); 3192 __ CallRuntime(Runtime::kMath_pow, 2);
3187 } 3193 }
3188 context()->Plug(r0); 3194 context()->Plug(r0);
3189 } 3195 }
3190 3196
3191 3197
void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  // %_SetValueOf(object, value): if |object| is a JSValue wrapper, store
  // |value| into its value slot (with a write barrier); for smis and
  // non-JSValue objects the store is skipped. Always plugs |value| (r0).
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(r1);  // r0 = value. r1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(r1, &done);

  // If the object is not a value type, return the value.
  __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
  __ b(ne, &done);

  // Store the value.
  __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(r2, r0);
  __ RecordWriteField(
      r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(r0);
}
3218 3224
3219 3225
3220 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { 3226 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3221 ZoneList<Expression*>* args = expr->arguments(); 3227 ZoneList<Expression*>* args = expr->arguments();
3222 ASSERT_EQ(args->length(), 1); 3228 ASSERT_EQ(args->length(), 1);
3223 // Load the argument on the stack and call the stub. 3229 // Load the argument on the stack and call the stub.
3224 VisitForStackValue(args->at(0)); 3230 VisitForStackValue(args->at(0));
3225 3231
3226 NumberToStringStub stub; 3232 NumberToStringStub stub;
3227 __ CallStub(&stub); 3233 __ CallStub(&stub);
3228 context()->Plug(r0); 3234 context()->Plug(r0);
3229 } 3235 }
3230 3236
3231 3237
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  // %_StringCharFromCode(code): turn a character code (accumulator, r0)
  // into a one-character string in r1. The generator's fast path is
  // emitted inline; its slow path follows with a NopRuntimeCallHelper
  // since no extra registers need saving around the call.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(r0, r1);  // code in r0, result in r1.
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(r1);
}
3248 3254
3249 3255
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  // %_StringCharCodeAt(string, index): load the character code at |index|
  // of |string| into |result|. Out-of-range indices produce NaN; a
  // non-smi index loads undefined into |result| so the generator's slow
  // path performs the conversion.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));        // String is pushed.
  VisitForAccumulatorValue(args->at(1));  // Index ends up in r0.

  Register object = r1;
  Register index = r0;
  Register result = r3;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3293 3299
3294 3300
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  // %_StringCharAt(string, index): load the one-character string at
  // |index| of |string| into |result|. Out-of-range indices yield the
  // empty string; a non-smi index loads smi zero into |result| so the
  // generator's slow path performs the conversion.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));        // String is pushed.
  VisitForAccumulatorValue(args->at(1));  // Index ends up in r0.

  Register object = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;  // Aliases |index|; index is consumed first.

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kEmptyStringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ mov(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3340 3346
3341 3347
3342 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { 3348 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3343 ZoneList<Expression*>* args = expr->arguments(); 3349 ZoneList<Expression*>* args = expr->arguments();
3344 ASSERT_EQ(2, args->length()); 3350 ASSERT_EQ(2, args->length());
3345 VisitForStackValue(args->at(0)); 3351 VisitForStackValue(args->at(0));
3346 VisitForStackValue(args->at(1)); 3352 VisitForStackValue(args->at(1));
3347 3353
3348 StringAddStub stub(NO_STRING_ADD_FLAGS); 3354 StringAddStub stub(NO_STRING_ADD_FLAGS);
3349 __ CallStub(&stub); 3355 __ CallStub(&stub);
3350 context()->Plug(r0); 3356 context()->Plug(r0);
3351 } 3357 }
3352 3358
3353 3359
3354 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { 3360 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3355 ZoneList<Expression*>* args = expr->arguments(); 3361 ZoneList<Expression*>* args = expr->arguments();
3356 ASSERT_EQ(2, args->length()); 3362 ASSERT_EQ(2, args->length());
3357 VisitForStackValue(args->at(0)); 3363 VisitForStackValue(args->at(0));
3358 VisitForStackValue(args->at(1)); 3364 VisitForStackValue(args->at(1));
3359 3365
3360 StringCompareStub stub; 3366 StringCompareStub stub;
3361 __ CallStub(&stub); 3367 __ CallStub(&stub);
3362 context()->Plug(r0); 3368 context()->Plug(r0);
3363 } 3369 }
3364 3370
3365 3371
3366 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) { 3372 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3367 // Load the argument on the stack and call the stub. 3373 // Load the argument on the stack and call the stub.
3368 TranscendentalCacheStub stub(TranscendentalCache::SIN, 3374 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3369 TranscendentalCacheStub::TAGGED); 3375 TranscendentalCacheStub::TAGGED);
3370 ZoneList<Expression*>* args = expr->arguments(); 3376 ZoneList<Expression*>* args = expr->arguments();
3371 ASSERT(args->length() == 1); 3377 ASSERT(args->length() == 1);
3372 VisitForStackValue(args->at(0)); 3378 VisitForStackValue(args->at(0));
3373 __ CallStub(&stub); 3379 __ CallStub(&stub);
3374 context()->Plug(r0); 3380 context()->Plug(r0);
3375 } 3381 }
3376 3382
3377 3383
3378 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) { 3384 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3379 // Load the argument on the stack and call the stub. 3385 // Load the argument on the stack and call the stub.
3380 TranscendentalCacheStub stub(TranscendentalCache::COS, 3386 TranscendentalCacheStub stub(TranscendentalCache::COS,
3381 TranscendentalCacheStub::TAGGED); 3387 TranscendentalCacheStub::TAGGED);
3382 ZoneList<Expression*>* args = expr->arguments(); 3388 ZoneList<Expression*>* args = expr->arguments();
3383 ASSERT(args->length() == 1); 3389 ASSERT(args->length() == 1);
3384 VisitForStackValue(args->at(0)); 3390 VisitForStackValue(args->at(0));
3385 __ CallStub(&stub); 3391 __ CallStub(&stub);
3386 context()->Plug(r0); 3392 context()->Plug(r0);
3387 } 3393 }
3388 3394
3389 3395
3390 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) { 3396 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3391 // Load the argument on the stack and call the stub. 3397 // Load the argument on the stack and call the stub.
3392 TranscendentalCacheStub stub(TranscendentalCache::TAN, 3398 TranscendentalCacheStub stub(TranscendentalCache::TAN,
3393 TranscendentalCacheStub::TAGGED); 3399 TranscendentalCacheStub::TAGGED);
3394 ZoneList<Expression*>* args = expr->arguments(); 3400 ZoneList<Expression*>* args = expr->arguments();
3395 ASSERT(args->length() == 1); 3401 ASSERT(args->length() == 1);
3396 VisitForStackValue(args->at(0)); 3402 VisitForStackValue(args->at(0));
3397 __ CallStub(&stub); 3403 __ CallStub(&stub);
3398 context()->Plug(r0); 3404 context()->Plug(r0);
3399 } 3405 }
3400 3406
3401 3407
3402 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) { 3408 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3403 // Load the argument on the stack and call the stub. 3409 // Load the argument on the stack and call the stub.
3404 TranscendentalCacheStub stub(TranscendentalCache::LOG, 3410 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3405 TranscendentalCacheStub::TAGGED); 3411 TranscendentalCacheStub::TAGGED);
3406 ZoneList<Expression*>* args = expr->arguments(); 3412 ZoneList<Expression*>* args = expr->arguments();
3407 ASSERT(args->length() == 1); 3413 ASSERT(args->length() == 1);
3408 VisitForStackValue(args->at(0)); 3414 VisitForStackValue(args->at(0));
3409 __ CallStub(&stub); 3415 __ CallStub(&stub);
3410 context()->Plug(r0); 3416 context()->Plug(r0);
3411 } 3417 }
3412 3418
3413 3419
3414 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) { 3420 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3415 // Load the argument on the stack and call the runtime function. 3421 // Load the argument on the stack and call the runtime function.
3416 ZoneList<Expression*>* args = expr->arguments(); 3422 ZoneList<Expression*>* args = expr->arguments();
3417 ASSERT(args->length() == 1); 3423 ASSERT(args->length() == 1);
3418 VisitForStackValue(args->at(0)); 3424 VisitForStackValue(args->at(0));
3419 __ CallRuntime(Runtime::kMath_sqrt, 1); 3425 __ CallRuntime(Runtime::kMath_sqrt, 1);
3420 context()->Plug(r0); 3426 context()->Plug(r0);
3421 } 3427 }
3422 3428
3423 3429
void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  // %_CallFunction(receiver, arg1, ..., argN, function): fast path
  // invokes |function| directly when it is a genuine JSFunction; anything
  // else (smis, proxies, other objects) falls back to Runtime::kCall.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  // Push the receiver and the call arguments.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(r0, &runtime);
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
  __ b(ne, &runtime);

  // InvokeFunction requires the function in r1. Move it in there.
  __ mov(r1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(r1, count, CALL_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
  // Restore this function's context after the invoke.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  // Slow path: push the callee back and let the runtime do the call.
  __ push(r0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(r0);
}
3455 3461
3456 3462
3457 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { 3463 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3458 RegExpConstructResultStub stub; 3464 RegExpConstructResultStub stub;
3459 ZoneList<Expression*>* args = expr->arguments(); 3465 ZoneList<Expression*>* args = expr->arguments();
3460 ASSERT(args->length() == 3); 3466 ASSERT(args->length() == 3);
3461 VisitForStackValue(args->at(0)); 3467 VisitForStackValue(args->at(0));
3462 VisitForStackValue(args->at(1)); 3468 VisitForStackValue(args->at(1));
3463 VisitForStackValue(args->at(2)); 3469 VisitForStackValue(args->at(2));
3464 __ CallStub(&stub); 3470 __ CallStub(&stub);
3465 context()->Plug(r0); 3471 context()->Plug(r0);
3466 } 3472 }
3467 3473
3468 3474
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  // %_GetFromCache(cache_id, key): look |key| up in the JSFunction result
  // cache selected by the literal |cache_id|. A hit at the cache's finger
  // position is read directly; otherwise Runtime::kGetFromCache is called.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    // Compile-time check: an out-of-range cache id means broken natives.
    __ Abort("Attempt to use undefined cache.");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    context()->Plug(r0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = r0;
  Register cache = r1;
  // Load the selected cache FixedArray out of the native context.
  __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ ldr(cache,
         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));


  Label done, not_found;
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  // r2 now holds finger offset as a smi.
  __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // r3 now points to the start of fixed array elements.
  __ ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
  // Note side effect of PreIndex: r3 now points to the key of the pair.
  __ cmp(key, r2);
  __ b(ne, &not_found);

  // Hit: the cached value sits one pointer past the key.
  __ ldr(r0, MemOperand(r3, kPointerSize));
  __ b(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(r0);
}
3518 3524
3519 3525
void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
  // %_IsRegExpEquivalent(left, right): true when both operands are the
  // same object, or are two JSRegExp objects with an identical map that
  // share the same data array. Result (true/false root) plugs r0.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  Register right = r0;
  Register left = r1;
  Register tmp = r2;
  Register tmp2 = r3;

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ pop(left);

  Label done, fail, ok;
  // Identical objects are trivially equivalent.
  __ cmp(left, Operand(right));
  __ b(eq, &ok);
  // Fail if either is a non-HeapObject.
  __ and_(tmp, left, Operand(right));
  __ JumpIfSmi(tmp, &fail);
  // Left must be a JSRegExp ...
  __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
  __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
  __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
  __ b(ne, &fail);
  // ... and right must have the identical map.
  __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
  __ cmp(tmp, Operand(tmp2));
  __ b(ne, &fail);
  // Equivalent when both regexps reference the same data array.
  __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
  __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
  __ cmp(tmp, tmp2);
  __ b(eq, &ok);
  __ bind(&fail);
  __ LoadRoot(r0, Heap::kFalseValueRootIndex);
  __ jmp(&done);
  __ bind(&ok);
  __ LoadRoot(r0, Heap::kTrueValueRootIndex);
  __ bind(&done);

  context()->Plug(r0);
}
3559 3565
3560 3566
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  // %_HasCachedArrayIndex(string): test whether the string's hash field
  // contains a cached array index, splitting control flow through the
  // test context's true/false targets.
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // The index is cached iff the "contains cached index" mask bits are clear.
  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3579 3585
3580 3586
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  // %_GetCachedArrayIndex(string): extract the array index cached in the
  // string's hash field. Debug builds abort if the operand is not a string.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AbortIfNotString(r0);


  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ IndexFromHash(r0, r0);

  context()->Plug(r0);
}
3594 3600
3595 3601
3596 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { 3602 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3597 Label bailout, done, one_char_separator, long_separator, 3603 Label bailout, done, one_char_separator, long_separator,
3598 non_trivial_array, not_size_one_array, loop, 3604 non_trivial_array, not_size_one_array, loop,
3599 empty_separator_loop, one_char_separator_loop, 3605 empty_separator_loop, one_char_separator_loop,
3600 one_char_separator_loop_entry, long_separator_loop; 3606 one_char_separator_loop_entry, long_separator_loop;
3601 ZoneList<Expression*>* args = expr->arguments(); 3607 ZoneList<Expression*>* args = expr->arguments();
3602 ASSERT(args->length() == 2); 3608 ASSERT(args->length() == 2);
3603 VisitForStackValue(args->at(1)); 3609 VisitForStackValue(args->at(1));
3604 VisitForAccumulatorValue(args->at(0)); 3610 VisitForAccumulatorValue(args->at(0));
3605 3611
3606 // All aliases of the same register have disjoint lifetimes. 3612 // All aliases of the same register have disjoint lifetimes.
3607 Register array = r0; 3613 Register array = r0;
3608 Register elements = no_reg; // Will be r0. 3614 Register elements = no_reg; // Will be r0.
3609 Register result = no_reg; // Will be r0. 3615 Register result = no_reg; // Will be r0.
3610 Register separator = r1; 3616 Register separator = r1;
3611 Register array_length = r2; 3617 Register array_length = r2;
3612 Register result_pos = no_reg; // Will be r2 3618 Register result_pos = no_reg; // Will be r2
3613 Register string_length = r3; 3619 Register string_length = r3;
3614 Register string = r4; 3620 Register string = r4;
3615 Register element = r5; 3621 Register element = r5;
3616 Register elements_end = r6; 3622 Register elements_end = r6;
3617 Register scratch1 = r7; 3623 Register scratch1 = r7;
3618 Register scratch2 = r9; 3624 Register scratch2 = r9;
3619 3625
3620 // Separator operand is on the stack. 3626 // Separator operand is on the stack.
3621 __ pop(separator); 3627 __ pop(separator);
3622 3628
3623 // Check that the array is a JSArray. 3629 // Check that the array is a JSArray.
3624 __ JumpIfSmi(array, &bailout); 3630 __ JumpIfSmi(array, &bailout);
3625 __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE); 3631 __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
3626 __ b(ne, &bailout); 3632 __ b(ne, &bailout);
3627 3633
3628 // Check that the array has fast elements. 3634 // Check that the array has fast elements.
3629 __ CheckFastElements(scratch1, scratch2, &bailout); 3635 __ CheckFastElements(scratch1, scratch2, &bailout);
3630 3636
3631 // If the array has length zero, return the empty string. 3637 // If the array has length zero, return the empty string.
3632 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset)); 3638 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3633 __ SmiUntag(array_length, SetCC); 3639 __ SmiUntag(array_length, SetCC);
3634 __ b(ne, &non_trivial_array); 3640 __ b(ne, &non_trivial_array);
3635 __ LoadRoot(r0, Heap::kEmptyStringRootIndex); 3641 __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
3636 __ b(&done); 3642 __ b(&done);
3637 3643
3638 __ bind(&non_trivial_array); 3644 __ bind(&non_trivial_array);
3639 3645
3640 // Get the FixedArray containing array's elements. 3646 // Get the FixedArray containing array's elements.
3641 elements = array; 3647 elements = array;
3642 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset)); 3648 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3643 array = no_reg; // End of array's live range. 3649 array = no_reg; // End of array's live range.
3644 3650
3645 // Check that all array elements are sequential ASCII strings, and 3651 // Check that all array elements are sequential ASCII strings, and
3646 // accumulate the sum of their lengths, as a smi-encoded value. 3652 // accumulate the sum of their lengths, as a smi-encoded value.
3647 __ mov(string_length, Operand(0)); 3653 __ mov(string_length, Operand(0));
3648 __ add(element, 3654 __ add(element,
3649 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 3655 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3650 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); 3656 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3651 // Loop condition: while (element < elements_end). 3657 // Loop condition: while (element < elements_end).
3652 // Live values in registers: 3658 // Live values in registers:
3653 // elements: Fixed array of strings. 3659 // elements: Fixed array of strings.
3654 // array_length: Length of the fixed array of strings (not smi) 3660 // array_length: Length of the fixed array of strings (not smi)
3655 // separator: Separator string 3661 // separator: Separator string
3656 // string_length: Accumulated sum of string lengths (smi). 3662 // string_length: Accumulated sum of string lengths (smi).
3657 // element: Current array element. 3663 // element: Current array element.
3658 // elements_end: Array end. 3664 // elements_end: Array end.
3659 if (generate_debug_code_) { 3665 if (generate_debug_code_) {
3660 __ cmp(array_length, Operand(0)); 3666 __ cmp(array_length, Operand(0));
3661 __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin"); 3667 __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
3662 } 3668 }
3663 __ bind(&loop); 3669 __ bind(&loop);
3664 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 3670 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3665 __ JumpIfSmi(string, &bailout); 3671 __ JumpIfSmi(string, &bailout);
3666 __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset)); 3672 __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3667 __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); 3673 __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3668 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); 3674 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3669 __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset)); 3675 __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
3670 __ add(string_length, string_length, Operand(scratch1), SetCC); 3676 __ add(string_length, string_length, Operand(scratch1), SetCC);
3671 __ b(vs, &bailout); 3677 __ b(vs, &bailout);
3672 __ cmp(element, elements_end); 3678 __ cmp(element, elements_end);
3673 __ b(lt, &loop); 3679 __ b(lt, &loop);
3674 3680
3675 // If array_length is 1, return elements[0], a string. 3681 // If array_length is 1, return elements[0], a string.
3676 __ cmp(array_length, Operand(1)); 3682 __ cmp(array_length, Operand(1));
3677 __ b(ne, &not_size_one_array); 3683 __ b(ne, &not_size_one_array);
3678 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize)); 3684 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
3679 __ b(&done); 3685 __ b(&done);
3680 3686
3681 __ bind(&not_size_one_array); 3687 __ bind(&not_size_one_array);
3682 3688
3683 // Live values in registers: 3689 // Live values in registers:
3684 // separator: Separator string 3690 // separator: Separator string
3685 // array_length: Length of the array. 3691 // array_length: Length of the array.
3686 // string_length: Sum of string lengths (smi). 3692 // string_length: Sum of string lengths (smi).
3687 // elements: FixedArray of strings. 3693 // elements: FixedArray of strings.
3688 3694
3689 // Check that the separator is a flat ASCII string. 3695 // Check that the separator is a flat ASCII string.
3690 __ JumpIfSmi(separator, &bailout); 3696 __ JumpIfSmi(separator, &bailout);
3691 __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset)); 3697 __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3692 __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); 3698 __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3693 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); 3699 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3694 3700
3695 // Add (separator length times array_length) - separator length to the 3701 // Add (separator length times array_length) - separator length to the
3696 // string_length to get the length of the result string. array_length is not 3702 // string_length to get the length of the result string. array_length is not
3697 // smi but the other values are, so the result is a smi 3703 // smi but the other values are, so the result is a smi
3698 __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset)); 3704 __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3699 __ sub(string_length, string_length, Operand(scratch1)); 3705 __ sub(string_length, string_length, Operand(scratch1));
3700 __ smull(scratch2, ip, array_length, scratch1); 3706 __ smull(scratch2, ip, array_length, scratch1);
3701 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are 3707 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
3702 // zero. 3708 // zero.
3703 __ cmp(ip, Operand(0)); 3709 __ cmp(ip, Operand(0));
3704 __ b(ne, &bailout); 3710 __ b(ne, &bailout);
3705 __ tst(scratch2, Operand(0x80000000)); 3711 __ tst(scratch2, Operand(0x80000000));
3706 __ b(ne, &bailout); 3712 __ b(ne, &bailout);
3707 __ add(string_length, string_length, Operand(scratch2), SetCC); 3713 __ add(string_length, string_length, Operand(scratch2), SetCC);
3708 __ b(vs, &bailout); 3714 __ b(vs, &bailout);
3709 __ SmiUntag(string_length); 3715 __ SmiUntag(string_length);
3710 3716
3711 // Get first element in the array to free up the elements register to be used 3717 // Get first element in the array to free up the elements register to be used
3712 // for the result. 3718 // for the result.
3713 __ add(element, 3719 __ add(element,
3714 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 3720 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3715 result = elements; // End of live range for elements. 3721 result = elements; // End of live range for elements.
3716 elements = no_reg; 3722 elements = no_reg;
3717 // Live values in registers: 3723 // Live values in registers:
3718 // element: First array element 3724 // element: First array element
3719 // separator: Separator string 3725 // separator: Separator string
3720 // string_length: Length of result string (not smi) 3726 // string_length: Length of result string (not smi)
3721 // array_length: Length of the array. 3727 // array_length: Length of the array.
3722 __ AllocateAsciiString(result, 3728 __ AllocateAsciiString(result,
3723 string_length, 3729 string_length,
3724 scratch1, 3730 scratch1,
3725 scratch2, 3731 scratch2,
3726 elements_end, 3732 elements_end,
3727 &bailout); 3733 &bailout);
3728 // Prepare for looping. Set up elements_end to end of the array. Set 3734 // Prepare for looping. Set up elements_end to end of the array. Set
3729 // result_pos to the position of the result where to write the first 3735 // result_pos to the position of the result where to write the first
3730 // character. 3736 // character.
3731 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); 3737 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3732 result_pos = array_length; // End of live range for array_length. 3738 result_pos = array_length; // End of live range for array_length.
3733 array_length = no_reg; 3739 array_length = no_reg;
3734 __ add(result_pos, 3740 __ add(result_pos,
3735 result, 3741 result,
3736 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); 3742 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3737 3743
3738 // Check the length of the separator. 3744 // Check the length of the separator.
3739 __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset)); 3745 __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3740 __ cmp(scratch1, Operand(Smi::FromInt(1))); 3746 __ cmp(scratch1, Operand(Smi::FromInt(1)));
3741 __ b(eq, &one_char_separator); 3747 __ b(eq, &one_char_separator);
3742 __ b(gt, &long_separator); 3748 __ b(gt, &long_separator);
3743 3749
3744 // Empty separator case 3750 // Empty separator case
3745 __ bind(&empty_separator_loop); 3751 __ bind(&empty_separator_loop);
3746 // Live values in registers: 3752 // Live values in registers:
3747 // result_pos: the position to which we are currently copying characters. 3753 // result_pos: the position to which we are currently copying characters.
3748 // element: Current array element. 3754 // element: Current array element.
3749 // elements_end: Array end. 3755 // elements_end: Array end.
3750 3756
3751 // Copy next array element to the result. 3757 // Copy next array element to the result.
3752 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 3758 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3753 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 3759 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3754 __ SmiUntag(string_length); 3760 __ SmiUntag(string_length);
3755 __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); 3761 __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3756 __ CopyBytes(string, result_pos, string_length, scratch1); 3762 __ CopyBytes(string, result_pos, string_length, scratch1);
3757 __ cmp(element, elements_end); 3763 __ cmp(element, elements_end);
3758 __ b(lt, &empty_separator_loop); // End while (element < elements_end). 3764 __ b(lt, &empty_separator_loop); // End while (element < elements_end).
3759 ASSERT(result.is(r0)); 3765 ASSERT(result.is(r0));
3760 __ b(&done); 3766 __ b(&done);
3761 3767
3762 // One-character separator case 3768 // One-character separator case
3763 __ bind(&one_char_separator); 3769 __ bind(&one_char_separator);
3764 // Replace separator with its ASCII character value. 3770 // Replace separator with its ASCII character value.
3765 __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize)); 3771 __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
3766 // Jump into the loop after the code that copies the separator, so the first 3772 // Jump into the loop after the code that copies the separator, so the first
3767 // element is not preceded by a separator 3773 // element is not preceded by a separator
3768 __ jmp(&one_char_separator_loop_entry); 3774 __ jmp(&one_char_separator_loop_entry);
3769 3775
3770 __ bind(&one_char_separator_loop); 3776 __ bind(&one_char_separator_loop);
3771 // Live values in registers: 3777 // Live values in registers:
3772 // result_pos: the position to which we are currently copying characters. 3778 // result_pos: the position to which we are currently copying characters.
3773 // element: Current array element. 3779 // element: Current array element.
3774 // elements_end: Array end. 3780 // elements_end: Array end.
3775 // separator: Single separator ASCII char (in lower byte). 3781 // separator: Single separator ASCII char (in lower byte).
3776 3782
3777 // Copy the separator character to the result. 3783 // Copy the separator character to the result.
3778 __ strb(separator, MemOperand(result_pos, 1, PostIndex)); 3784 __ strb(separator, MemOperand(result_pos, 1, PostIndex));
3779 3785
3780 // Copy next array element to the result. 3786 // Copy next array element to the result.
3781 __ bind(&one_char_separator_loop_entry); 3787 __ bind(&one_char_separator_loop_entry);
3782 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 3788 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3783 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 3789 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3784 __ SmiUntag(string_length); 3790 __ SmiUntag(string_length);
3785 __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); 3791 __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3786 __ CopyBytes(string, result_pos, string_length, scratch1); 3792 __ CopyBytes(string, result_pos, string_length, scratch1);
3787 __ cmp(element, elements_end); 3793 __ cmp(element, elements_end);
3788 __ b(lt, &one_char_separator_loop); // End while (element < elements_end). 3794 __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
3789 ASSERT(result.is(r0)); 3795 ASSERT(result.is(r0));
3790 __ b(&done); 3796 __ b(&done);
3791 3797
3792 // Long separator case (separator is more than one character). Entry is at the 3798 // Long separator case (separator is more than one character). Entry is at the
3793 // label long_separator below. 3799 // label long_separator below.
3794 __ bind(&long_separator_loop); 3800 __ bind(&long_separator_loop);
3795 // Live values in registers: 3801 // Live values in registers:
3796 // result_pos: the position to which we are currently copying characters. 3802 // result_pos: the position to which we are currently copying characters.
3797 // element: Current array element. 3803 // element: Current array element.
3798 // elements_end: Array end. 3804 // elements_end: Array end.
3799 // separator: Separator string. 3805 // separator: Separator string.
3800 3806
3801 // Copy the separator to the result. 3807 // Copy the separator to the result.
3802 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset)); 3808 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
3803 __ SmiUntag(string_length); 3809 __ SmiUntag(string_length);
3804 __ add(string, 3810 __ add(string,
3805 separator, 3811 separator,
3806 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); 3812 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3807 __ CopyBytes(string, result_pos, string_length, scratch1); 3813 __ CopyBytes(string, result_pos, string_length, scratch1);
3808 3814
3809 __ bind(&long_separator); 3815 __ bind(&long_separator);
3810 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 3816 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3811 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 3817 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3812 __ SmiUntag(string_length); 3818 __ SmiUntag(string_length);
3813 __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); 3819 __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3814 __ CopyBytes(string, result_pos, string_length, scratch1); 3820 __ CopyBytes(string, result_pos, string_length, scratch1);
3815 __ cmp(element, elements_end); 3821 __ cmp(element, elements_end);
3816 __ b(lt, &long_separator_loop); // End while (element < elements_end). 3822 __ b(lt, &long_separator_loop); // End while (element < elements_end).
3817 ASSERT(result.is(r0)); 3823 ASSERT(result.is(r0));
3818 __ b(&done); 3824 __ b(&done);
3819 3825
3820 __ bind(&bailout); 3826 __ bind(&bailout);
3821 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 3827 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3822 __ bind(&done); 3828 __ bind(&done);
3823 context()->Plug(r0); 3829 context()->Plug(r0);
3824 } 3830 }
3825 3831
3826 3832
3827 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 3833 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3828 Handle<String> name = expr->name(); 3834 Handle<String> name = expr->name();
3829 if (name->length() > 0 && name->Get(0) == '_') { 3835 if (name->length() > 0 && name->Get(0) == '_') {
3830 Comment cmnt(masm_, "[ InlineRuntimeCall"); 3836 Comment cmnt(masm_, "[ InlineRuntimeCall");
3831 EmitInlineRuntimeCall(expr); 3837 EmitInlineRuntimeCall(expr);
3832 return; 3838 return;
3833 } 3839 }
3834 3840
3835 Comment cmnt(masm_, "[ CallRuntime"); 3841 Comment cmnt(masm_, "[ CallRuntime");
3836 ZoneList<Expression*>* args = expr->arguments(); 3842 ZoneList<Expression*>* args = expr->arguments();
3837 3843
3838 if (expr->is_jsruntime()) { 3844 if (expr->is_jsruntime()) {
3839 // Prepare for calling JS runtime function. 3845 // Prepare for calling JS runtime function.
3840 __ ldr(r0, GlobalObjectOperand()); 3846 __ ldr(r0, GlobalObjectOperand());
3841 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset)); 3847 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
3842 __ push(r0); 3848 __ push(r0);
3843 } 3849 }
3844 3850
3845 // Push the arguments ("left-to-right"). 3851 // Push the arguments ("left-to-right").
3846 int arg_count = args->length(); 3852 int arg_count = args->length();
3847 for (int i = 0; i < arg_count; i++) { 3853 for (int i = 0; i < arg_count; i++) {
3848 VisitForStackValue(args->at(i)); 3854 VisitForStackValue(args->at(i));
3849 } 3855 }
3850 3856
3851 if (expr->is_jsruntime()) { 3857 if (expr->is_jsruntime()) {
3852 // Call the JS runtime function. 3858 // Call the JS runtime function.
3853 __ mov(r2, Operand(expr->name())); 3859 __ mov(r2, Operand(expr->name()));
3854 RelocInfo::Mode mode = RelocInfo::CODE_TARGET; 3860 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3855 Handle<Code> ic = 3861 Handle<Code> ic =
3856 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode); 3862 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3857 CallIC(ic, mode, expr->CallRuntimeFeedbackId()); 3863 CallIC(ic, mode, expr->CallRuntimeFeedbackId());
3858 // Restore context register. 3864 // Restore context register.
3859 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3865 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3860 } else { 3866 } else {
3861 // Call the C runtime function. 3867 // Call the C runtime function.
3862 __ CallRuntime(expr->function(), arg_count); 3868 __ CallRuntime(expr->function(), arg_count);
3863 } 3869 }
3864 context()->Plug(r0); 3870 context()->Plug(r0);
3865 } 3871 }
3866 3872
3867 3873
// Emits code for a unary operation (delete, void, !, typeof, unary +, -, ~).
// Dispatches on the operator token; the result is left in the expression
// context (typically the accumulator register r0).
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        // delete obj.name / delete obj[key]: push receiver and key, then
        // invoke the DELETE builtin with the current strictness flag.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
            ? kNonStrictMode : kStrictMode;
        __ mov(r1, Operand(Smi::FromInt(strict_mode_flag)));
        __ push(r1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(r0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
        if (var->IsUnallocated()) {
          // Global variable: call the DELETE builtin with the global
          // object, the variable name, and a non-strict flag (guaranteed
          // by the assert above).
          __ ldr(r2, GlobalObjectOperand());
          __ mov(r1, Operand(var->name()));
          __ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
          __ Push(r2, r1, r0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(r0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ push(context_register());
          __ mov(r2, Operand(var->name()));
          __ push(r2);
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(r0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      // void expr: evaluate for side effects, result is always undefined.
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        // Labels are swapped below, implementing the negation: the
        // subexpression's true edge materializes false and vice versa.
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      // typeof expr: evaluate the operand without throwing on unresolved
      // references, then compute the type string in the runtime.
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(r0);
      break;
    }

    case Token::ADD: {
      // Unary plus: just ToNumber on the operand.  The conversion stub is
      // skipped when the value is already a smi.
      Comment cmt(masm_, "[ UnaryOperation (ADD)");
      VisitForAccumulatorValue(expr->expression());
      Label no_conversion;
      __ JumpIfSmi(result_register(), &no_conversion);
      ToNumberStub convert_stub;
      __ CallStub(&convert_stub);
      __ bind(&no_conversion);
      context()->Plug(result_register());
      break;
    }

    case Token::SUB:
      EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
      break;

    case Token::BIT_NOT:
      EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
      break;

    default:
      UNREACHABLE();
  }
}
3998 4004
3999 4005
4000 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr, 4006 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
4001 const char* comment) { 4007 const char* comment) {
4002 // TODO(svenpanne): Allowing format strings in Comment would be nice here... 4008 // TODO(svenpanne): Allowing format strings in Comment would be nice here...
4003 Comment cmt(masm_, comment); 4009 Comment cmt(masm_, comment);
4004 bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); 4010 bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
4005 UnaryOverwriteMode overwrite = 4011 UnaryOverwriteMode overwrite =
4006 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE; 4012 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
4007 UnaryOpStub stub(expr->op(), overwrite); 4013 UnaryOpStub stub(expr->op(), overwrite);
4008 // UnaryOpStub expects the argument to be in the 4014 // UnaryOpStub expects the argument to be in the
4009 // accumulator register r0. 4015 // accumulator register r0.
4010 VisitForAccumulatorValue(expr->expression()); 4016 VisitForAccumulatorValue(expr->expression());
4011 SetSourcePosition(expr->position()); 4017 SetSourcePosition(expr->position());
4012 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, 4018 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
4013 expr->UnaryOperationFeedbackId()); 4019 expr->UnaryOperationFeedbackId());
4014 context()->Plug(r0); 4020 context()->Plug(r0);
4015 } 4021 }
4016 4022
4017 4023
4018 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { 4024 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4019 Comment cmnt(masm_, "[ CountOperation"); 4025 Comment cmnt(masm_, "[ CountOperation");
4020 SetSourcePosition(expr->position()); 4026 SetSourcePosition(expr->position());
4021 4027
4022 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError' 4028 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
4023 // as the left-hand side. 4029 // as the left-hand side.
4024 if (!expr->expression()->IsValidLeftHandSide()) { 4030 if (!expr->expression()->IsValidLeftHandSide()) {
4025 VisitForEffect(expr->expression()); 4031 VisitForEffect(expr->expression());
4026 return; 4032 return;
4027 } 4033 }
4028 4034
4029 // Expression can only be a property, a global or a (parameter or local) 4035 // Expression can only be a property, a global or a (parameter or local)
4030 // slot. 4036 // slot.
4031 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 4037 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4032 LhsKind assign_type = VARIABLE; 4038 LhsKind assign_type = VARIABLE;
4033 Property* prop = expr->expression()->AsProperty(); 4039 Property* prop = expr->expression()->AsProperty();
4034 // In case of a property we use the uninitialized expression context 4040 // In case of a property we use the uninitialized expression context
4035 // of the key to detect a named property. 4041 // of the key to detect a named property.
4036 if (prop != NULL) { 4042 if (prop != NULL) {
4037 assign_type = 4043 assign_type =
4038 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY; 4044 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4039 } 4045 }
4040 4046
4041 // Evaluate expression and get value. 4047 // Evaluate expression and get value.
4042 if (assign_type == VARIABLE) { 4048 if (assign_type == VARIABLE) {
4043 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL); 4049 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4044 AccumulatorValueContext context(this); 4050 AccumulatorValueContext context(this);
4045 EmitVariableLoad(expr->expression()->AsVariableProxy()); 4051 EmitVariableLoad(expr->expression()->AsVariableProxy());
4046 } else { 4052 } else {
4047 // Reserve space for result of postfix operation. 4053 // Reserve space for result of postfix operation.
4048 if (expr->is_postfix() && !context()->IsEffect()) { 4054 if (expr->is_postfix() && !context()->IsEffect()) {
4049 __ mov(ip, Operand(Smi::FromInt(0))); 4055 __ mov(ip, Operand(Smi::FromInt(0)));
4050 __ push(ip); 4056 __ push(ip);
4051 } 4057 }
4052 if (assign_type == NAMED_PROPERTY) { 4058 if (assign_type == NAMED_PROPERTY) {
4053 // Put the object both on the stack and in the accumulator. 4059 // Put the object both on the stack and in the accumulator.
4054 VisitForAccumulatorValue(prop->obj()); 4060 VisitForAccumulatorValue(prop->obj());
4055 __ push(r0); 4061 __ push(r0);
4056 EmitNamedPropertyLoad(prop); 4062 EmitNamedPropertyLoad(prop);
4057 } else { 4063 } else {
4058 VisitForStackValue(prop->obj()); 4064 VisitForStackValue(prop->obj());
4059 VisitForAccumulatorValue(prop->key()); 4065 VisitForAccumulatorValue(prop->key());
4060 __ ldr(r1, MemOperand(sp, 0)); 4066 __ ldr(r1, MemOperand(sp, 0));
4061 __ push(r0); 4067 __ push(r0);
4062 EmitKeyedPropertyLoad(prop); 4068 EmitKeyedPropertyLoad(prop);
4063 } 4069 }
4064 } 4070 }
4065 4071
4066 // We need a second deoptimization point after loading the value 4072 // We need a second deoptimization point after loading the value
4067 // in case evaluating the property load my have a side effect. 4073 // in case evaluating the property load my have a side effect.
4068 if (assign_type == VARIABLE) { 4074 if (assign_type == VARIABLE) {
4069 PrepareForBailout(expr->expression(), TOS_REG); 4075 PrepareForBailout(expr->expression(), TOS_REG);
4070 } else { 4076 } else {
4071 PrepareForBailoutForId(prop->LoadId(), TOS_REG); 4077 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4072 } 4078 }
4073 4079
4074 // Call ToNumber only if operand is not a smi. 4080 // Call ToNumber only if operand is not a smi.
4075 Label no_conversion; 4081 Label no_conversion;
4076 __ JumpIfSmi(r0, &no_conversion); 4082 __ JumpIfSmi(r0, &no_conversion);
4077 ToNumberStub convert_stub; 4083 ToNumberStub convert_stub;
4078 __ CallStub(&convert_stub); 4084 __ CallStub(&convert_stub);
4079 __ bind(&no_conversion); 4085 __ bind(&no_conversion);
4080 4086
4081 // Save result for postfix expressions. 4087 // Save result for postfix expressions.
4082 if (expr->is_postfix()) { 4088 if (expr->is_postfix()) {
4083 if (!context()->IsEffect()) { 4089 if (!context()->IsEffect()) {
4084 // Save the result on the stack. If we have a named or keyed property 4090 // Save the result on the stack. If we have a named or keyed property
4085 // we store the result under the receiver that is currently on top 4091 // we store the result under the receiver that is currently on top
4086 // of the stack. 4092 // of the stack.
4087 switch (assign_type) { 4093 switch (assign_type) {
4088 case VARIABLE: 4094 case VARIABLE:
4089 __ push(r0); 4095 __ push(r0);
4090 break; 4096 break;
4091 case NAMED_PROPERTY: 4097 case NAMED_PROPERTY:
4092 __ str(r0, MemOperand(sp, kPointerSize)); 4098 __ str(r0, MemOperand(sp, kPointerSize));
4093 break; 4099 break;
4094 case KEYED_PROPERTY: 4100 case KEYED_PROPERTY:
4095 __ str(r0, MemOperand(sp, 2 * kPointerSize)); 4101 __ str(r0, MemOperand(sp, 2 * kPointerSize));
4096 break; 4102 break;
4097 } 4103 }
4098 } 4104 }
4099 } 4105 }
4100 4106
4101 4107
4102 // Inline smi case if we are in a loop. 4108 // Inline smi case if we are in a loop.
4103 Label stub_call, done; 4109 Label stub_call, done;
4104 JumpPatchSite patch_site(masm_); 4110 JumpPatchSite patch_site(masm_);
4105 4111
4106 int count_value = expr->op() == Token::INC ? 1 : -1; 4112 int count_value = expr->op() == Token::INC ? 1 : -1;
4107 if (ShouldInlineSmiCase(expr->op())) { 4113 if (ShouldInlineSmiCase(expr->op())) {
4108 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC); 4114 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
4109 __ b(vs, &stub_call); 4115 __ b(vs, &stub_call);
4110 // We could eliminate this smi check if we split the code at 4116 // We could eliminate this smi check if we split the code at
4111 // the first smi check before calling ToNumber. 4117 // the first smi check before calling ToNumber.
4112 patch_site.EmitJumpIfSmi(r0, &done); 4118 patch_site.EmitJumpIfSmi(r0, &done);
4113 4119
4114 __ bind(&stub_call); 4120 __ bind(&stub_call);
4115 // Call stub. Undo operation first. 4121 // Call stub. Undo operation first.
4116 __ sub(r0, r0, Operand(Smi::FromInt(count_value))); 4122 __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
4117 } 4123 }
4118 __ mov(r1, Operand(Smi::FromInt(count_value))); 4124 __ mov(r1, Operand(Smi::FromInt(count_value)));
4119 4125
4120 // Record position before stub call. 4126 // Record position before stub call.
4121 SetSourcePosition(expr->position()); 4127 SetSourcePosition(expr->position());
4122 4128
4123 BinaryOpStub stub(Token::ADD, NO_OVERWRITE); 4129 BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
4124 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId()); 4130 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
4125 patch_site.EmitPatchInfo(); 4131 patch_site.EmitPatchInfo();
4126 __ bind(&done); 4132 __ bind(&done);
4127 4133
4128 // Store the value returned in r0. 4134 // Store the value returned in r0.
4129 switch (assign_type) { 4135 switch (assign_type) {
4130 case VARIABLE: 4136 case VARIABLE:
4131 if (expr->is_postfix()) { 4137 if (expr->is_postfix()) {
4132 { EffectContext context(this); 4138 { EffectContext context(this);
4133 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4139 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4134 Token::ASSIGN); 4140 Token::ASSIGN);
4135 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4141 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4136 context.Plug(r0); 4142 context.Plug(r0);
4137 } 4143 }
4138 // For all contexts except EffectConstant We have the result on 4144 // For all contexts except EffectConstant We have the result on
4139 // top of the stack. 4145 // top of the stack.
4140 if (!context()->IsEffect()) { 4146 if (!context()->IsEffect()) {
4141 context()->PlugTOS(); 4147 context()->PlugTOS();
4142 } 4148 }
4143 } else { 4149 } else {
4144 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4150 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4145 Token::ASSIGN); 4151 Token::ASSIGN);
4146 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4152 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4147 context()->Plug(r0); 4153 context()->Plug(r0);
4148 } 4154 }
4149 break; 4155 break;
4150 case NAMED_PROPERTY: { 4156 case NAMED_PROPERTY: {
4151 __ mov(r2, Operand(prop->key()->AsLiteral()->handle())); 4157 __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
4152 __ pop(r1); 4158 __ pop(r1);
4153 Handle<Code> ic = is_classic_mode() 4159 Handle<Code> ic = is_classic_mode()
4154 ? isolate()->builtins()->StoreIC_Initialize() 4160 ? isolate()->builtins()->StoreIC_Initialize()
4155 : isolate()->builtins()->StoreIC_Initialize_Strict(); 4161 : isolate()->builtins()->StoreIC_Initialize_Strict();
4156 CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId()); 4162 CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
4157 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4163 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4158 if (expr->is_postfix()) { 4164 if (expr->is_postfix()) {
4159 if (!context()->IsEffect()) { 4165 if (!context()->IsEffect()) {
4160 context()->PlugTOS(); 4166 context()->PlugTOS();
4161 } 4167 }
4162 } else { 4168 } else {
4163 context()->Plug(r0); 4169 context()->Plug(r0);
4164 } 4170 }
4165 break; 4171 break;
4166 } 4172 }
4167 case KEYED_PROPERTY: { 4173 case KEYED_PROPERTY: {
4168 __ pop(r1); // Key. 4174 __ pop(r1); // Key.
4169 __ pop(r2); // Receiver. 4175 __ pop(r2); // Receiver.
4170 Handle<Code> ic = is_classic_mode() 4176 Handle<Code> ic = is_classic_mode()
4171 ? isolate()->builtins()->KeyedStoreIC_Initialize() 4177 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4172 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 4178 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4173 CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId()); 4179 CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
4174 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4180 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4175 if (expr->is_postfix()) { 4181 if (expr->is_postfix()) {
4176 if (!context()->IsEffect()) { 4182 if (!context()->IsEffect()) {
4177 context()->PlugTOS(); 4183 context()->PlugTOS();
4178 } 4184 }
4179 } else { 4185 } else {
4180 context()->Plug(r0); 4186 context()->Plug(r0);
4181 } 4187 }
4182 break; 4188 break;
4183 } 4189 }
4184 } 4190 }
4185 } 4191 }
4186 4192
4187 4193
4188 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 4194 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4189 ASSERT(!context()->IsEffect()); 4195 ASSERT(!context()->IsEffect());
4190 ASSERT(!context()->IsTest()); 4196 ASSERT(!context()->IsTest());
4191 VariableProxy* proxy = expr->AsVariableProxy(); 4197 VariableProxy* proxy = expr->AsVariableProxy();
4192 if (proxy != NULL && proxy->var()->IsUnallocated()) { 4198 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4193 Comment cmnt(masm_, "Global variable"); 4199 Comment cmnt(masm_, "Global variable");
4194 __ ldr(r0, GlobalObjectOperand()); 4200 __ ldr(r0, GlobalObjectOperand());
4195 __ mov(r2, Operand(proxy->name())); 4201 __ mov(r2, Operand(proxy->name()));
4196 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 4202 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4197 // Use a regular load, not a contextual load, to avoid a reference 4203 // Use a regular load, not a contextual load, to avoid a reference
4198 // error. 4204 // error.
4199 CallIC(ic); 4205 CallIC(ic);
4200 PrepareForBailout(expr, TOS_REG); 4206 PrepareForBailout(expr, TOS_REG);
4201 context()->Plug(r0); 4207 context()->Plug(r0);
4202 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { 4208 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4203 Label done, slow; 4209 Label done, slow;
4204 4210
4205 // Generate code for loading from variables potentially shadowed 4211 // Generate code for loading from variables potentially shadowed
4206 // by eval-introduced variables. 4212 // by eval-introduced variables.
4207 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done); 4213 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4208 4214
4209 __ bind(&slow); 4215 __ bind(&slow);
4210 __ mov(r0, Operand(proxy->name())); 4216 __ mov(r0, Operand(proxy->name()));
4211 __ Push(cp, r0); 4217 __ Push(cp, r0);
4212 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); 4218 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4213 PrepareForBailout(expr, TOS_REG); 4219 PrepareForBailout(expr, TOS_REG);
4214 __ bind(&done); 4220 __ bind(&done);
4215 4221
4216 context()->Plug(r0); 4222 context()->Plug(r0);
4217 } else { 4223 } else {
4218 // This expression cannot throw a reference error at the top level. 4224 // This expression cannot throw a reference error at the top level.
4219 VisitInDuplicateContext(expr); 4225 VisitInDuplicateContext(expr);
4220 } 4226 }
4221 } 4227 }
4222 4228
4223 4229
4224 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, 4230 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4225 Expression* sub_expr, 4231 Expression* sub_expr,
4226 Handle<String> check) { 4232 Handle<String> check) {
4227 Label materialize_true, materialize_false; 4233 Label materialize_true, materialize_false;
4228 Label* if_true = NULL; 4234 Label* if_true = NULL;
4229 Label* if_false = NULL; 4235 Label* if_false = NULL;
4230 Label* fall_through = NULL; 4236 Label* fall_through = NULL;
4231 context()->PrepareTest(&materialize_true, &materialize_false, 4237 context()->PrepareTest(&materialize_true, &materialize_false,
4232 &if_true, &if_false, &fall_through); 4238 &if_true, &if_false, &fall_through);
4233 4239
4234 { AccumulatorValueContext context(this); 4240 { AccumulatorValueContext context(this);
4235 VisitForTypeofValue(sub_expr); 4241 VisitForTypeofValue(sub_expr);
4236 } 4242 }
4237 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4243 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4238 4244
4239 if (check->Equals(isolate()->heap()->number_symbol())) { 4245 if (check->Equals(isolate()->heap()->number_symbol())) {
4240 __ JumpIfSmi(r0, if_true); 4246 __ JumpIfSmi(r0, if_true);
4241 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 4247 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4242 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); 4248 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4243 __ cmp(r0, ip); 4249 __ cmp(r0, ip);
4244 Split(eq, if_true, if_false, fall_through); 4250 Split(eq, if_true, if_false, fall_through);
4245 } else if (check->Equals(isolate()->heap()->string_symbol())) { 4251 } else if (check->Equals(isolate()->heap()->string_symbol())) {
4246 __ JumpIfSmi(r0, if_false); 4252 __ JumpIfSmi(r0, if_false);
4247 // Check for undetectable objects => false. 4253 // Check for undetectable objects => false.
4248 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE); 4254 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
4249 __ b(ge, if_false); 4255 __ b(ge, if_false);
4250 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4256 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4251 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4257 __ tst(r1, Operand(1 << Map::kIsUndetectable));
4252 Split(eq, if_true, if_false, fall_through); 4258 Split(eq, if_true, if_false, fall_through);
4253 } else if (check->Equals(isolate()->heap()->boolean_symbol())) { 4259 } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4254 __ CompareRoot(r0, Heap::kTrueValueRootIndex); 4260 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
4255 __ b(eq, if_true); 4261 __ b(eq, if_true);
4256 __ CompareRoot(r0, Heap::kFalseValueRootIndex); 4262 __ CompareRoot(r0, Heap::kFalseValueRootIndex);
4257 Split(eq, if_true, if_false, fall_through); 4263 Split(eq, if_true, if_false, fall_through);
4258 } else if (FLAG_harmony_typeof && 4264 } else if (FLAG_harmony_typeof &&
4259 check->Equals(isolate()->heap()->null_symbol())) { 4265 check->Equals(isolate()->heap()->null_symbol())) {
4260 __ CompareRoot(r0, Heap::kNullValueRootIndex); 4266 __ CompareRoot(r0, Heap::kNullValueRootIndex);
4261 Split(eq, if_true, if_false, fall_through); 4267 Split(eq, if_true, if_false, fall_through);
4262 } else if (check->Equals(isolate()->heap()->undefined_symbol())) { 4268 } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4263 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); 4269 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
4264 __ b(eq, if_true); 4270 __ b(eq, if_true);
4265 __ JumpIfSmi(r0, if_false); 4271 __ JumpIfSmi(r0, if_false);
4266 // Check for undetectable objects => true. 4272 // Check for undetectable objects => true.
4267 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 4273 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4268 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4274 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4269 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4275 __ tst(r1, Operand(1 << Map::kIsUndetectable));
4270 Split(ne, if_true, if_false, fall_through); 4276 Split(ne, if_true, if_false, fall_through);
4271 4277
4272 } else if (check->Equals(isolate()->heap()->function_symbol())) { 4278 } else if (check->Equals(isolate()->heap()->function_symbol())) {
4273 __ JumpIfSmi(r0, if_false); 4279 __ JumpIfSmi(r0, if_false);
4274 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); 4280 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4275 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE); 4281 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
4276 __ b(eq, if_true); 4282 __ b(eq, if_true);
4277 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE)); 4283 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
4278 Split(eq, if_true, if_false, fall_through); 4284 Split(eq, if_true, if_false, fall_through);
4279 } else if (check->Equals(isolate()->heap()->object_symbol())) { 4285 } else if (check->Equals(isolate()->heap()->object_symbol())) {
4280 __ JumpIfSmi(r0, if_false); 4286 __ JumpIfSmi(r0, if_false);
4281 if (!FLAG_harmony_typeof) { 4287 if (!FLAG_harmony_typeof) {
4282 __ CompareRoot(r0, Heap::kNullValueRootIndex); 4288 __ CompareRoot(r0, Heap::kNullValueRootIndex);
4283 __ b(eq, if_true); 4289 __ b(eq, if_true);
4284 } 4290 }
4285 // Check for JS objects => true. 4291 // Check for JS objects => true.
4286 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); 4292 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
4287 __ b(lt, if_false); 4293 __ b(lt, if_false);
4288 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); 4294 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4289 __ b(gt, if_false); 4295 __ b(gt, if_false);
4290 // Check for undetectable objects => false. 4296 // Check for undetectable objects => false.
4291 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4297 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4292 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4298 __ tst(r1, Operand(1 << Map::kIsUndetectable));
4293 Split(eq, if_true, if_false, fall_through); 4299 Split(eq, if_true, if_false, fall_through);
4294 } else { 4300 } else {
4295 if (if_false != fall_through) __ jmp(if_false); 4301 if (if_false != fall_through) __ jmp(if_false);
4296 } 4302 }
4297 context()->Plug(if_true, if_false); 4303 context()->Plug(if_true, if_false);
4298 } 4304 }
4299 4305
4300 4306
4301 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { 4307 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4302 Comment cmnt(masm_, "[ CompareOperation"); 4308 Comment cmnt(masm_, "[ CompareOperation");
4303 SetSourcePosition(expr->position()); 4309 SetSourcePosition(expr->position());
4304 4310
4305 // First we try a fast inlined version of the compare when one of 4311 // First we try a fast inlined version of the compare when one of
4306 // the operands is a literal. 4312 // the operands is a literal.
4307 if (TryLiteralCompare(expr)) return; 4313 if (TryLiteralCompare(expr)) return;
4308 4314
4309 // Always perform the comparison for its control flow. Pack the result 4315 // Always perform the comparison for its control flow. Pack the result
4310 // into the expression's context after the comparison is performed. 4316 // into the expression's context after the comparison is performed.
4311 Label materialize_true, materialize_false; 4317 Label materialize_true, materialize_false;
4312 Label* if_true = NULL; 4318 Label* if_true = NULL;
4313 Label* if_false = NULL; 4319 Label* if_false = NULL;
4314 Label* fall_through = NULL; 4320 Label* fall_through = NULL;
4315 context()->PrepareTest(&materialize_true, &materialize_false, 4321 context()->PrepareTest(&materialize_true, &materialize_false,
4316 &if_true, &if_false, &fall_through); 4322 &if_true, &if_false, &fall_through);
4317 4323
4318 Token::Value op = expr->op(); 4324 Token::Value op = expr->op();
4319 VisitForStackValue(expr->left()); 4325 VisitForStackValue(expr->left());
4320 switch (op) { 4326 switch (op) {
4321 case Token::IN: 4327 case Token::IN:
4322 VisitForStackValue(expr->right()); 4328 VisitForStackValue(expr->right());
4323 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); 4329 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4324 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); 4330 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4325 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 4331 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
4326 __ cmp(r0, ip); 4332 __ cmp(r0, ip);
4327 Split(eq, if_true, if_false, fall_through); 4333 Split(eq, if_true, if_false, fall_through);
4328 break; 4334 break;
4329 4335
4330 case Token::INSTANCEOF: { 4336 case Token::INSTANCEOF: {
4331 VisitForStackValue(expr->right()); 4337 VisitForStackValue(expr->right());
4332 InstanceofStub stub(InstanceofStub::kNoFlags); 4338 InstanceofStub stub(InstanceofStub::kNoFlags);
4333 __ CallStub(&stub); 4339 __ CallStub(&stub);
4334 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4340 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4335 // The stub returns 0 for true. 4341 // The stub returns 0 for true.
4336 __ tst(r0, r0); 4342 __ tst(r0, r0);
4337 Split(eq, if_true, if_false, fall_through); 4343 Split(eq, if_true, if_false, fall_through);
4338 break; 4344 break;
4339 } 4345 }
4340 4346
4341 default: { 4347 default: {
4342 VisitForAccumulatorValue(expr->right()); 4348 VisitForAccumulatorValue(expr->right());
4343 Condition cond = eq; 4349 Condition cond = eq;
4344 switch (op) { 4350 switch (op) {
4345 case Token::EQ_STRICT: 4351 case Token::EQ_STRICT:
4346 case Token::EQ: 4352 case Token::EQ:
4347 cond = eq; 4353 cond = eq;
4348 break; 4354 break;
4349 case Token::LT: 4355 case Token::LT:
4350 cond = lt; 4356 cond = lt;
4351 break; 4357 break;
4352 case Token::GT: 4358 case Token::GT:
4353 cond = gt; 4359 cond = gt;
4354 break; 4360 break;
4355 case Token::LTE: 4361 case Token::LTE:
4356 cond = le; 4362 cond = le;
4357 break; 4363 break;
4358 case Token::GTE: 4364 case Token::GTE:
4359 cond = ge; 4365 cond = ge;
4360 break; 4366 break;
4361 case Token::IN: 4367 case Token::IN:
4362 case Token::INSTANCEOF: 4368 case Token::INSTANCEOF:
4363 default: 4369 default:
4364 UNREACHABLE(); 4370 UNREACHABLE();
4365 } 4371 }
4366 __ pop(r1); 4372 __ pop(r1);
4367 4373
4368 bool inline_smi_code = ShouldInlineSmiCase(op); 4374 bool inline_smi_code = ShouldInlineSmiCase(op);
4369 JumpPatchSite patch_site(masm_); 4375 JumpPatchSite patch_site(masm_);
4370 if (inline_smi_code) { 4376 if (inline_smi_code) {
4371 Label slow_case; 4377 Label slow_case;
4372 __ orr(r2, r0, Operand(r1)); 4378 __ orr(r2, r0, Operand(r1));
4373 patch_site.EmitJumpIfNotSmi(r2, &slow_case); 4379 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
4374 __ cmp(r1, r0); 4380 __ cmp(r1, r0);
4375 Split(cond, if_true, if_false, NULL); 4381 Split(cond, if_true, if_false, NULL);
4376 __ bind(&slow_case); 4382 __ bind(&slow_case);
4377 } 4383 }
4378 4384
4379 // Record position and call the compare IC. 4385 // Record position and call the compare IC.
4380 SetSourcePosition(expr->position()); 4386 SetSourcePosition(expr->position());
4381 Handle<Code> ic = CompareIC::GetUninitialized(op); 4387 Handle<Code> ic = CompareIC::GetUninitialized(op);
4382 CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId()); 4388 CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
4383 patch_site.EmitPatchInfo(); 4389 patch_site.EmitPatchInfo();
4384 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4390 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4385 __ cmp(r0, Operand(0)); 4391 __ cmp(r0, Operand(0));
4386 Split(cond, if_true, if_false, fall_through); 4392 Split(cond, if_true, if_false, fall_through);
4387 } 4393 }
4388 } 4394 }
4389 4395
4390 // Convert the result of the comparison into one expected for this 4396 // Convert the result of the comparison into one expected for this
4391 // expression's context. 4397 // expression's context.
4392 context()->Plug(if_true, if_false); 4398 context()->Plug(if_true, if_false);
4393 } 4399 }
4394 4400
4395 4401
4396 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, 4402 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4397 Expression* sub_expr, 4403 Expression* sub_expr,
4398 NilValue nil) { 4404 NilValue nil) {
4399 Label materialize_true, materialize_false; 4405 Label materialize_true, materialize_false;
4400 Label* if_true = NULL; 4406 Label* if_true = NULL;
4401 Label* if_false = NULL; 4407 Label* if_false = NULL;
4402 Label* fall_through = NULL; 4408 Label* fall_through = NULL;
4403 context()->PrepareTest(&materialize_true, &materialize_false, 4409 context()->PrepareTest(&materialize_true, &materialize_false,
4404 &if_true, &if_false, &fall_through); 4410 &if_true, &if_false, &fall_through);
4405 4411
4406 VisitForAccumulatorValue(sub_expr); 4412 VisitForAccumulatorValue(sub_expr);
4407 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4413 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4408 Heap::RootListIndex nil_value = nil == kNullValue ? 4414 Heap::RootListIndex nil_value = nil == kNullValue ?
4409 Heap::kNullValueRootIndex : 4415 Heap::kNullValueRootIndex :
4410 Heap::kUndefinedValueRootIndex; 4416 Heap::kUndefinedValueRootIndex;
4411 __ LoadRoot(r1, nil_value); 4417 __ LoadRoot(r1, nil_value);
4412 __ cmp(r0, r1); 4418 __ cmp(r0, r1);
4413 if (expr->op() == Token::EQ_STRICT) { 4419 if (expr->op() == Token::EQ_STRICT) {
4414 Split(eq, if_true, if_false, fall_through); 4420 Split(eq, if_true, if_false, fall_through);
4415 } else { 4421 } else {
4416 Heap::RootListIndex other_nil_value = nil == kNullValue ? 4422 Heap::RootListIndex other_nil_value = nil == kNullValue ?
4417 Heap::kUndefinedValueRootIndex : 4423 Heap::kUndefinedValueRootIndex :
4418 Heap::kNullValueRootIndex; 4424 Heap::kNullValueRootIndex;
4419 __ b(eq, if_true); 4425 __ b(eq, if_true);
4420 __ LoadRoot(r1, other_nil_value); 4426 __ LoadRoot(r1, other_nil_value);
4421 __ cmp(r0, r1); 4427 __ cmp(r0, r1);
4422 __ b(eq, if_true); 4428 __ b(eq, if_true);
4423 __ JumpIfSmi(r0, if_false); 4429 __ JumpIfSmi(r0, if_false);
4424 // It can be an undetectable object. 4430 // It can be an undetectable object.
4425 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 4431 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
4426 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset)); 4432 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
4427 __ and_(r1, r1, Operand(1 << Map::kIsUndetectable)); 4433 __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
4428 __ cmp(r1, Operand(1 << Map::kIsUndetectable)); 4434 __ cmp(r1, Operand(1 << Map::kIsUndetectable));
4429 Split(eq, if_true, if_false, fall_through); 4435 Split(eq, if_true, if_false, fall_through);
4430 } 4436 }
4431 context()->Plug(if_true, if_false); 4437 context()->Plug(if_true, if_false);
4432 } 4438 }
4433 4439
4434 4440
4435 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { 4441 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4436 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 4442 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4437 context()->Plug(r0); 4443 context()->Plug(r0);
4438 } 4444 }
4439 4445
4440 4446
4441 Register FullCodeGenerator::result_register() { 4447 Register FullCodeGenerator::result_register() {
4442 return r0; 4448 return r0;
4443 } 4449 }
4444 4450
4445 4451
4446 Register FullCodeGenerator::context_register() { 4452 Register FullCodeGenerator::context_register() {
4447 return cp; 4453 return cp;
4448 } 4454 }
4449 4455
4450 4456
4451 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { 4457 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4452 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); 4458 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4453 __ str(value, MemOperand(fp, frame_offset)); 4459 __ str(value, MemOperand(fp, frame_offset));
4454 } 4460 }
4455 4461
4456 4462
4457 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { 4463 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4458 __ ldr(dst, ContextOperand(cp, context_index)); 4464 __ ldr(dst, ContextOperand(cp, context_index));
4459 } 4465 }
4460 4466
4461 4467
4462 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { 4468 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4463 Scope* declaration_scope = scope()->DeclarationScope(); 4469 Scope* declaration_scope = scope()->DeclarationScope();
4464 if (declaration_scope->is_global_scope() || 4470 if (declaration_scope->is_global_scope() ||
4465 declaration_scope->is_module_scope()) { 4471 declaration_scope->is_module_scope()) {
4466 // Contexts nested in the native context have a canonical empty function 4472 // Contexts nested in the native context have a canonical empty function
4467 // as their closure, not the anonymous closure containing the global 4473 // as their closure, not the anonymous closure containing the global
4468 // code. Pass a smi sentinel and let the runtime look up the empty 4474 // code. Pass a smi sentinel and let the runtime look up the empty
4469 // function. 4475 // function.
4470 __ mov(ip, Operand(Smi::FromInt(0))); 4476 __ mov(ip, Operand(Smi::FromInt(0)));
4471 } else if (declaration_scope->is_eval_scope()) { 4477 } else if (declaration_scope->is_eval_scope()) {
4472 // Contexts created by a call to eval have the same closure as the 4478 // Contexts created by a call to eval have the same closure as the
4473 // context calling eval, not the anonymous closure containing the eval 4479 // context calling eval, not the anonymous closure containing the eval
4474 // code. Fetch it from the context. 4480 // code. Fetch it from the context.
4475 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX)); 4481 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
4476 } else { 4482 } else {
4477 ASSERT(declaration_scope->is_function_scope()); 4483 ASSERT(declaration_scope->is_function_scope());
4478 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 4484 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4479 } 4485 }
4480 __ push(ip); 4486 __ push(ip);
4481 } 4487 }
4482 4488
4483 4489
4484 // ---------------------------------------------------------------------------- 4490 // ----------------------------------------------------------------------------
4485 // Non-local control flow support. 4491 // Non-local control flow support.
4486 4492
4487 void FullCodeGenerator::EnterFinallyBlock() { 4493 void FullCodeGenerator::EnterFinallyBlock() {
4488 ASSERT(!result_register().is(r1)); 4494 ASSERT(!result_register().is(r1));
4489 // Store result register while executing finally block. 4495 // Store result register while executing finally block.
4490 __ push(result_register()); 4496 __ push(result_register());
4491 // Cook return address in link register to stack (smi encoded Code* delta) 4497 // Cook return address in link register to stack (smi encoded Code* delta)
4492 __ sub(r1, lr, Operand(masm_->CodeObject())); 4498 __ sub(r1, lr, Operand(masm_->CodeObject()));
4493 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); 4499 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4494 STATIC_ASSERT(kSmiTag == 0); 4500 STATIC_ASSERT(kSmiTag == 0);
4495 __ add(r1, r1, Operand(r1)); // Convert to smi. 4501 __ add(r1, r1, Operand(r1)); // Convert to smi.
4496 4502
4497 // Store result register while executing finally block. 4503 // Store result register while executing finally block.
4498 __ push(r1); 4504 __ push(r1);
4499 4505
4500 // Store pending message while executing finally block. 4506 // Store pending message while executing finally block.
4501 ExternalReference pending_message_obj = 4507 ExternalReference pending_message_obj =
4502 ExternalReference::address_of_pending_message_obj(isolate()); 4508 ExternalReference::address_of_pending_message_obj(isolate());
4503 __ mov(ip, Operand(pending_message_obj)); 4509 __ mov(ip, Operand(pending_message_obj));
4504 __ ldr(r1, MemOperand(ip)); 4510 __ ldr(r1, MemOperand(ip));
4505 __ push(r1); 4511 __ push(r1);
4506 4512
4507 ExternalReference has_pending_message = 4513 ExternalReference has_pending_message =
4508 ExternalReference::address_of_has_pending_message(isolate()); 4514 ExternalReference::address_of_has_pending_message(isolate());
4509 __ mov(ip, Operand(has_pending_message)); 4515 __ mov(ip, Operand(has_pending_message));
4510 __ ldr(r1, MemOperand(ip)); 4516 __ ldr(r1, MemOperand(ip));
4511 __ SmiTag(r1); 4517 __ SmiTag(r1);
4512 __ push(r1); 4518 __ push(r1);
4513 4519
4514 ExternalReference pending_message_script = 4520 ExternalReference pending_message_script =
4515 ExternalReference::address_of_pending_message_script(isolate()); 4521 ExternalReference::address_of_pending_message_script(isolate());
4516 __ mov(ip, Operand(pending_message_script)); 4522 __ mov(ip, Operand(pending_message_script));
4517 __ ldr(r1, MemOperand(ip)); 4523 __ ldr(r1, MemOperand(ip));
4518 __ push(r1); 4524 __ push(r1);
4519 } 4525 }
4520 4526
4521 4527
4522 void FullCodeGenerator::ExitFinallyBlock() { 4528 void FullCodeGenerator::ExitFinallyBlock() {
4523 ASSERT(!result_register().is(r1)); 4529 ASSERT(!result_register().is(r1));
4524 // Restore pending message from stack. 4530 // Restore pending message from stack.
4525 __ pop(r1); 4531 __ pop(r1);
4526 ExternalReference pending_message_script = 4532 ExternalReference pending_message_script =
4527 ExternalReference::address_of_pending_message_script(isolate()); 4533 ExternalReference::address_of_pending_message_script(isolate());
4528 __ mov(ip, Operand(pending_message_script)); 4534 __ mov(ip, Operand(pending_message_script));
4529 __ str(r1, MemOperand(ip)); 4535 __ str(r1, MemOperand(ip));
4530 4536
4531 __ pop(r1); 4537 __ pop(r1);
4532 __ SmiUntag(r1); 4538 __ SmiUntag(r1);
4533 ExternalReference has_pending_message = 4539 ExternalReference has_pending_message =
4534 ExternalReference::address_of_has_pending_message(isolate()); 4540 ExternalReference::address_of_has_pending_message(isolate());
4535 __ mov(ip, Operand(has_pending_message)); 4541 __ mov(ip, Operand(has_pending_message));
4536 __ str(r1, MemOperand(ip)); 4542 __ str(r1, MemOperand(ip));
4537 4543
4538 __ pop(r1); 4544 __ pop(r1);
4539 ExternalReference pending_message_obj = 4545 ExternalReference pending_message_obj =
4540 ExternalReference::address_of_pending_message_obj(isolate()); 4546 ExternalReference::address_of_pending_message_obj(isolate());
4541 __ mov(ip, Operand(pending_message_obj)); 4547 __ mov(ip, Operand(pending_message_obj));
4542 __ str(r1, MemOperand(ip)); 4548 __ str(r1, MemOperand(ip));
4543 4549
4544 // Restore result register from stack. 4550 // Restore result register from stack.
4545 __ pop(r1); 4551 __ pop(r1);
4546 4552
4547 // Uncook return address and return. 4553 // Uncook return address and return.