Changes to /trunk/src/x64/macro-assembler-x64.cc
r12566 vs. r12669 (the portion shown below is identical in both revisions)
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "codegen.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
#include "heap.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      has_frame_(false),
      root_array_available_(true) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


static const int kInvalidRootRegisterDelta = -1;


intptr_t MacroAssembler::RootRegisterDelta(ExternalReference other) {
  if (predictable_code_size() &&
      (other.address() < reinterpret_cast<Address>(isolate()) ||
       other.address() >= reinterpret_cast<Address>(isolate() + 1))) {
    return kInvalidRootRegisterDelta;
  }
  Address roots_register_value = kRootRegisterBias +
      reinterpret_cast<Address>(isolate()->heap()->roots_array_start());
  intptr_t delta = other.address() - roots_register_value;
  return delta;
}
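
// Illustrative note (not in the original source): kRootRegister (r13 in this
// vintage of V8 x64) holds roots_array_start + kRootRegisterBias, so any
// external reference within +/-2GB of that value can be addressed as a
// register-relative operand instead of via a 10-byte 64-bit immediate load.
// A rough sketch with made-up addresses:
//   roots_array_start = 0x100000, kRootRegisterBias = 128 => r13 = 0x100080
//   other.address()   = 0x100440 => delta = 0x3c0, is_int32(delta) holds,
//   so Operand(kRootRegister, 0x3c0) reaches it directly.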


Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                        Register scratch) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(target);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  movq(scratch, target);
  return Operand(scratch, 0);
}


void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination.is(rax)) {
    load_rax(source);
  } else {
    movq(kScratchRegister, source);
    movq(destination, Operand(kScratchRegister, 0));
  }
}


void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(destination);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source.is(rax)) {
    store_rax(destination);
  } else {
    movq(kScratchRegister, destination);
    movq(Operand(kScratchRegister, 0), source);
  }
}


void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    intptr_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  movq(destination, source);
}


int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !Serializer::enabled()) {
    // This calculation depends on the internals of LoadAddress.
    // Its correctness is ensured by the asserts in the Call
    // instruction below.
    intptr_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      Serializer::TooLateToEnableNow();
      // Operand is lea(scratch, Operand(kRootRegister, delta));
      // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7.
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need full four-byte displacement in lea.
      }
      return size;
    }
  }
  // Size of movq(destination, src);
  return 10;
}
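
// Worked size arithmetic (illustrative, not in the original source): the lea
// form encodes as REX.W prefix (1 byte) + opcode 8D (1) + ModRM (1) + 8-bit
// displacement (1) = 4 bytes; a 32-bit displacement replaces the disp8 and
// costs 3 extra bytes, giving 7. The fallback movq with a 64-bit immediate
// is REX.W (1) + opcode (1) + imm64 (8) = 10 bytes, matching the returns.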


void MacroAssembler::PushAddress(ExternalReference source) {
  int64_t address = reinterpret_cast<int64_t>(source.address());
  if (is_int32(address) && !Serializer::enabled()) {
    if (emit_debug_code()) {
      movq(kScratchRegister, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    }
    push(Immediate(static_cast<int32_t>(address)));
    return;
  }
  LoadAddress(kScratchRegister, source);
  push(kScratchRegister);
}


void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
                                     int fixed_offset) {
  ASSERT(root_array_available_);
  movq(destination,
       Operand(kRootRegister,
               variable_offset, times_pointer_size,
               (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
       source);
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  cmpq(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(root_array_available_);
  ASSERT(!with.AddressUsesRegister(kScratchRegister));
  LoadRoot(kScratchRegister, index);
  cmpq(with, kScratchRegister);
}


void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register addr,
                                         Register scratch,
                                         SaveFPRegsMode save_fp,
                                         RememberedSetFinalAction and_then) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  LoadRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Store pointer to buffer.
  movq(Operand(scratch, 0), addr);
  // Increment buffer top.
  addq(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  StoreRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Call stub on end of buffer.
  Label done;
  // Check for end of buffer.
  testq(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
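
// What the emitted sequence does, as C-like pseudocode (illustrative sketch,
// not part of the original file):
//   *store_buffer_top++ = addr;                  // log the written slot
//   if (store_buffer_top & kStoreBufferOverflowBit)
//     StoreBufferOverflowStub(save_fp);          // buffer full: let the stub
//                                                // flush/compact it
// The overflow test works because the buffer appears to be placed so that
// its one-past-the-end address flips exactly that bit.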


void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch,
                                Label::Distance distance) {
  if (Serializer::enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference
    // in case the size of the new space is different between the snapshot
    // maker and the running system.
    if (scratch.is(object)) {
      movq(kScratchRegister, ExternalReference::new_space_mask(isolate()));
      and_(scratch, kScratchRegister);
    } else {
      movq(scratch, ExternalReference::new_space_mask(isolate()));
      and_(scratch, object);
    }
    movq(kScratchRegister, ExternalReference::new_space_start(isolate()));
    cmpq(scratch, kScratchRegister);
    j(cc, branch, distance);
  } else {
    ASSERT(is_int32(static_cast<int64_t>(HEAP->NewSpaceMask())));
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(HEAP->NewSpaceStart());
    movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
    if (scratch.is(object)) {
      addq(scratch, kScratchRegister);
    } else {
      lea(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    and_(scratch, Immediate(static_cast<int32_t>(HEAP->NewSpaceMask())));
    j(cc, branch, distance);
  }
}
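
// Reading of the two paths above (illustrative, assuming new space is a
// power-of-two sized, alignment-restricted region):
//   serializer path:   in_new_space(p)  <=>  (p & mask) == new_space_start
//   direct path:       in_new_space(p)  <=>  ((p - new_space_start) & mask)
//                                            == 0
// Both express the same predicate; the second form folds the comparison into
// the flags set by and_, so the caller's condition code (equal / not_equal)
// picks which way to branch.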


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!value.is(rsi) && !dst.is(rsi));

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    testb(dst, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(dst, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}
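
// Hypothetical call site, for orientation (the field, registers, and use of
// default arguments are assumptions, not from the original file):
//   movq(FieldOperand(rbx, JSValue::kValueOffset), rax);    // do the store
//   RecordWriteField(rbx, JSValue::kValueOffset, rax, rcx,  // then barrier
//                    kDontSaveFPRegs);
// The store itself always happens; RecordWriteField only records it for the
// store buffer / incremental marker when the barrier is actually needed.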


void MacroAssembler::RecordWriteArray(Register object,
                                      Register value,
                                      Register index,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Array access: calculate the destination address. Index is not a smi.
  Register dst = index;
  lea(dst, Operand(object, index, times_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value,
                                 SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  // The compiled code assumes that record write doesn't change the
  // context register, so we check that none of the clobbered
  // registers are rsi.
  ASSERT(!value.is(rsi) && !address.is(rsi));

  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  if (emit_debug_code()) {
    AbortIfSmi(object);
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    Label ok;
    cmpq(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
  }
}
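
// The fast-path filter above, in plain terms (illustrative): the stub is only
// reached when BOTH page flags are set, i.e. roughly
//   if ((page_of(value)->flags  & kPointersToHereAreInterestingMask) &&
//       (page_of(object)->flags & kPointersFromHereAreInterestingMask))
//     RecordWriteStub(...);
// so a store of an old-space value into an old-space object falls through
// with just two test-and-branch sequences and no call.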


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Label ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedDoubleArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L, Label::kNear);
  Abort(msg);
  // Control will not return here.
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    testq(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);
  j(not_zero, &ok, Label::kNear);
  testl(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  // Note: p0 might not be a valid Smi _value_, but it has a valid Smi tag.
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif
  push(rax);
  movq(kScratchRegister, p0, RelocInfo::NONE);
  push(kScratchRegister);
  movq(kScratchRegister,
       reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
       RelocInfo::NONE);
  push(kScratchRegister);

  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 2);
  } else {
    CallRuntime(Runtime::kAbort, 2);
  }
  // Control will not return here.
  int3();
}
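
// Worked example of the smi-disguise arithmetic above (illustrative; assumes
// kSmiTag == 0 and kSmiTagMask == 1, the values used on this configuration):
//   p1 = 0x7fff0003     (arbitrary, possibly odd char* msg)
//   p0 = (p1 & ~1) + 0 = 0x7fff0002   // low tag bit clear: looks like a smi
//   p1 - p0 = 1                       // alignment slack, itself smi-encoded
// The runtime recovers msg by adding the decoded difference back to p0.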


void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  ASSERT(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    addq(rsp, Immediate(num_arguments * kPointerSize));
  }
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it do not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. Even if we subsequently go to
  // the slow case, converting the key to a smi is always valid.
  // key: string key
  // hash: key's hash field, including its array index value.
  and_(hash, Immediate(String::kArrayIndexValueMask));
  shr(hash, Immediate(String::kHashShift));
  // Here we actually clobber the key which will be used if calling into
  // runtime later. However, as the new key is the numeric value of a string
  // key, there is no difference in using either key.
  Integer32ToSmi(index, hash);
}
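
// The extraction spelled out (illustrative; hash-field layout per this V8
// vintage): the cached array index lives in the hash field's value bits, so
//   index = (hash_field & String::kArrayIndexValueMask) >> String::kHashShift;
// e.g. a string key "7" whose hash field caches the index 7 yields 7 here,
// which Integer32ToSmi then tags as the smi the runtime expects.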


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(rax, function->nargs);
  LoadAddress(rbx, ExternalReference(function, isolate()));
  CEntryStub ces(1, kSaveFPRegs);
  CallStub(&ces);
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(f->result_size);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                 : return address
  //  -- rsp[8]                 : argument num_arguments - 1
  //  ...
  //  -- rsp[8 * num_arguments] : argument 0 (receiver)
  // -----------------------------------

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  ASSERT(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
#if defined(_WIN64) && !defined(__MINGW64__)
  // We need to prepare a slot for the result handle on the stack and put
  // a pointer to it into the 1st arg register.
  EnterApiExitFrame(arg_stack_space + 1);

  // rcx must be used to pass the pointer to the return value slot.
  lea(rcx, StackSpaceOperand(arg_stack_space));
#else
  EnterApiExitFrame(arg_stack_space);
#endif
}


void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
                                              int stack_space) {
  Label empty_result;
  Label prologue;
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label write_back;

  Factory* factory = isolate()->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address();
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(),
      next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());

  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  movq(base_reg, next_address);
  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));
  // Call the api function!
  movq(rax, reinterpret_cast<int64_t>(function_address),
       RelocInfo::RUNTIME_ENTRY);
  call(rax);

#if defined(_WIN64) && !defined(__MINGW64__)
  // rax keeps a pointer to v8::Handle, unpack it.
  movq(rax, Operand(rax, 0));
#endif
  // Check if the result handle holds 0.
  testq(rax, rax);
  j(zero, &empty_result);
  // It was non-zero. Dereference to get the result value.
  movq(rax, Operand(rax, 0));
  bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  movq(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), factory->the_hole_value());
  j(not_equal, &promote_scheduled_exception);

#if ENABLE_EXTRA_CHECKS
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = rax;
  Register map = rcx;

  JumpIfSmi(return_value, &ok, Label::kNear);
  movq(map, FieldOperand(return_value, HeapObject::kMapOffset));

  CmpInstanceType(map, FIRST_NONSTRING_TYPE);
  j(below, &ok, Label::kNear);

  CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
  j(above_equal, &ok, Label::kNear);

  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kTrueValueRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kFalseValueRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kNullValueRootIndex);
  j(equal, &ok, Label::kNear);

  Abort("API call returned invalid object");

  bind(&ok);
#endif

  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&empty_result);
  // It was zero; the result is undefined.
  LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  jmp(&prologue);

  bind(&promote_scheduled_exception);
  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);

  // HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movq(prev_limit_reg, rax);
#ifdef _WIN64
  LoadAddress(rcx, ExternalReference::isolate_address());
#else
  LoadAddress(rdi, ExternalReference::isolate_address());
#endif
  LoadAddress(rax,
              ExternalReference::delete_handle_scope_extensions(isolate()));
  call(rax);
  movq(rax, prev_limit_reg);
  jmp(&leave_exit_frame);
}
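
// The HandleScope bookkeeping above, as C-like pseudocode (illustrative
// sketch, not part of the original file):
//   prev_next = scope.next;  prev_limit = scope.limit;  scope.level++;
//   result = api_function();
//   scope.level--;  scope.next = prev_next;
//   if (scope.limit != prev_limit) {      // extensions were allocated
//     scope.limit = prev_limit;
//     DeleteExtensions(isolate);
//   }
//   if (scheduled_exception != the_hole) PromoteScheduledException();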
815 815
816 816
817 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext, 817 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
818 int result_size) { 818 int result_size) {
819 // Set the entry point and jump to the C entry runtime stub. 819 // Set the entry point and jump to the C entry runtime stub.
820 LoadAddress(rbx, ext); 820 LoadAddress(rbx, ext);
821 CEntryStub ces(result_size); 821 CEntryStub ces(result_size);
822 jmp(ces.GetCode(), RelocInfo::CODE_TARGET); 822 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
823 } 823 }
824 824
825 825
826 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, 826 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
827 InvokeFlag flag, 827 InvokeFlag flag,
828 const CallWrapper& call_wrapper) { 828 const CallWrapper& call_wrapper) {
829 // You can't call a builtin without a valid frame. 829 // You can't call a builtin without a valid frame.
830 ASSERT(flag == JUMP_FUNCTION || has_frame()); 830 ASSERT(flag == JUMP_FUNCTION || has_frame());
831 831
832 // Rely on the assertion to check that the number of provided 832 // Rely on the assertion to check that the number of provided
833 // arguments match the expected number of arguments. Fake a 833 // arguments match the expected number of arguments. Fake a
834 // parameter count to avoid emitting code to do the check. 834 // parameter count to avoid emitting code to do the check.
835 ParameterCount expected(0); 835 ParameterCount expected(0);
836 GetBuiltinEntry(rdx, id); 836 GetBuiltinEntry(rdx, id);
837 InvokeCode(rdx, expected, expected, flag, call_wrapper, CALL_AS_METHOD); 837 InvokeCode(rdx, expected, expected, flag, call_wrapper, CALL_AS_METHOD);
838 } 838 }
839 839
840 840
841 void MacroAssembler::GetBuiltinFunction(Register target, 841 void MacroAssembler::GetBuiltinFunction(Register target,
842 Builtins::JavaScript id) { 842 Builtins::JavaScript id) {
843 // Load the builtins object into target register. 843 // Load the builtins object into target register.
844 movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 844 movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
845 movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset)); 845 movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
846 movq(target, FieldOperand(target, 846 movq(target, FieldOperand(target,
847 JSBuiltinsObject::OffsetOfFunctionWithId(id))); 847 JSBuiltinsObject::OffsetOfFunctionWithId(id)));
848 } 848 }
849 849
850 850
851 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) { 851 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
852 ASSERT(!target.is(rdi)); 852 ASSERT(!target.is(rdi));
853 // Load the JavaScript builtin function from the builtins object. 853 // Load the JavaScript builtin function from the builtins object.
854 GetBuiltinFunction(rdi, id); 854 GetBuiltinFunction(rdi, id);
855 movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 855 movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
856 } 856 }


#define REG(Name) { kRegister_ ## Name ## _Code }

static const Register saved_regs[] = {
  REG(rax), REG(rcx), REG(rdx), REG(rbx), REG(rbp), REG(rsi), REG(rdi), REG(r8),
  REG(r9), REG(r10), REG(r11)
};

#undef REG

static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);


void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
                                     Register exclusion1,
                                     Register exclusion2,
                                     Register exclusion3) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      push(reg);
    }
  }
  // r12 through r15 are callee-saved on all platforms, so they are not in
  // saved_regs and never need to be pushed here.
  if (fp_mode == kSaveFPRegs) {
    CpuFeatures::Scope scope(SSE2);
    subq(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(rsp, i * kDoubleSize), reg);
    }
  }
}


void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode,
                                    Register exclusion1,
                                    Register exclusion2,
                                    Register exclusion3) {
  if (fp_mode == kSaveFPRegs) {
    CpuFeatures::Scope scope(SSE2);
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(rsp, i * kDoubleSize));
    }
    addq(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
  }
  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      pop(reg);
    }
  }
}
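
// A rough usage sketch (illustrative only, not code from this file): callers
// bracket a C call with these two helpers so that arbitrary caller-saved
// state survives, e.g.
//
//   PushCallerSaved(kSaveFPRegs, scratch0, scratch1);
//   ... call into C++ that may clobber rax..r11 and the XMM registers ...
//   PopCallerSaved(kSaveFPRegs, scratch0, scratch1);
//
// The exclusion registers are deliberately left out of the save set,
// typically because they carry values into or out of the call.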


void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(dst, x, RelocInfo::NONE);
  }
}
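
// Why the cascade above: each branch picks a shorter encoding when one is
// available (byte counts below are for the rax case and approximate for
// other registers, which may need a REX prefix):
//
//   xorl dst, dst     ~2 bytes, also clears the upper 32 bits
//   movl dst, imm32   ~5 bytes, zero-extends into the upper half
//   movq dst, imm32   ~7 bytes, sign-extends the 32-bit immediate
//   movq dst, imm64   10 bytes, full 64-bit immediate as a last resort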

void MacroAssembler::Set(const Operand& dst, int64_t x) {
  if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    Set(kScratchRegister, x);
    movq(dst, kScratchRegister);
  }
}


bool MacroAssembler::IsUnsafeInt(const int x) {
  static const int kMaxBits = 17;
  return !is_intn(x, kMaxBits);
}


void MacroAssembler::SafeMove(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(kSmiValueSize == 32);  // JIT cookie can be converted to Smi.
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
    Move(kScratchRegister, Smi::FromInt(jit_cookie()));
    xor_(dst, kScratchRegister);
  } else {
    Move(dst, src);
  }
}


void MacroAssembler::SafePush(Smi* src) {
  ASSERT(kSmiValueSize == 32);  // JIT cookie can be converted to Smi.
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    Push(Smi::FromInt(src->value() ^ jit_cookie()));
    Move(kScratchRegister, Smi::FromInt(jit_cookie()));
    xor_(Operand(rsp, 0), kScratchRegister);
  } else {
    Push(src);
  }
}
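
// Sketch of the cookie scheme above: for a cookie c and an "unsafe" (wide)
// constant v, the instruction stream contains only v ^ c and c, never v
// itself, and the emitted xor recombines them at run time:
//
//   dst = v ^ c;   // first immediate
//   dst ^= c;      // (v ^ c) ^ c == v
//
// This is a mitigation against JIT spraying, where attacker-chosen constants
// are encoded so their bytes form useful gadgets inside generated code; the
// 17-bit cutoff in IsUnsafeInt treats small immediates as harmless.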


// ----------------------------------------------------------------------------
// Smi tagging, untagging and tag detection.

Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  if (value == 1) {
    return kSmiConstantRegister;
  }
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (emit_debug_code()) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
    cmpq(dst, kSmiConstantRegister);
    if (allow_stub_calls()) {
      Assert(equal, "Uninitialized kSmiConstantRegister");
    } else {
      Label ok;
      j(equal, &ok, Label::kNear);
      int3();
      bind(&ok);
    }
  }
  int value = source->value();
  if (value == 0) {
    xorl(dst, dst);
    return;
  }
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    case 9:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
      break;
    case 8:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
      break;
    case 4:
      xorl(dst, dst);
      lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
      break;
    case 5:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
      break;
    case 3:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
      break;
    case 2:
      lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
      break;
    case 1:
      movq(dst, kSmiConstantRegister);
      break;
    case 0:
      UNREACHABLE();
      return;
    default:
      movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
      return;
  }
  if (negative) {
    neg(dst);
  }
}
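
// For illustration: kSmiConstantRegister holds Smi::FromInt(1) (the debug
// check above verifies this), so the lea forms synthesize small smi
// constants without a 10-byte immediate load. For uvalue == 9, for example:
//
//   lea dst, [kSmiConstantRegister + kSmiConstantRegister*8]
//     == smi(1) + 8 * smi(1) == smi(9)
//
// and since smis are plain two's-complement words here, the trailing
// neg(dst) turns smi(9) into smi(-9) when the requested value was negative.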


void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shl(dst, Immediate(kSmiShift));
}
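
// Worked example of the tagging scheme (kSmiShift == 32, tag bit 0 clear):
// Integer32ToSmi places the int32 value in the upper half of the word, e.g.
//
//   5  ->  0x0000000500000000
//   -1 ->  0xffffffff00000000   (movl zero-extends; shl by 32 discards that)
//
// so the low 32 bits of every smi are zero and CheckSmi below is a single
// testb of bit 0.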


void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    testb(dst, Immediate(0x01));
    Label ok;
    j(zero, &ok, Label::kNear);
    if (allow_stub_calls()) {
      Abort("Integer32ToSmiField writing to non-smi location");
    } else {
      int3();
    }
    bind(&ok);
  }
  ASSERT(kSmiShift % kBitsPerByte == 0);
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addl(dst, Immediate(constant));
  } else {
    leal(dst, Operand(src, constant));
  }
  shl(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  shr(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  movl(dst, Operand(src, kSmiShift / kBitsPerByte));
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movq(dst, src);
  }
  sar(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
}
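
// The Operand variants above exploit the layout directly: on little-endian
// x64 the value half of a smi in memory starts at byte offset
// kSmiShift / kBitsPerByte == 4, so a 32-bit load from [addr + 4] (movl, or
// movsxlq for a sign-extended 64-bit result) untags in one instruction, and
// Integer32ToSmiField's 32-bit store rewrites just the value half, relying
// on the low half already being zero.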


void MacroAssembler::SmiTest(Register src) {
  testq(src, src);
}


void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
  if (emit_debug_code()) {
    AbortIfNotSmi(smi1);
    AbortIfNotSmi(smi2);
  }
  cmpq(smi1, smi2);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  Cmp(dst, src);
}


void MacroAssembler::Cmp(Register dst, Smi* src) {
  ASSERT(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testq(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpq(dst, constant_reg);
  }
}


void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
    AbortIfNotSmi(src);
  }
  cmpq(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  if (emit_debug_code()) {
    AbortIfNotSmi(dst);
  }
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}


void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  // The Operand cannot use the smi register.
  Register smi_reg = GetSmiConstant(src);
  ASSERT(!dst.AddressUsesRegister(smi_reg));
  cmpq(dst, smi_reg);
}


void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}


void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  ASSERT(power >= 0);
  ASSERT(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (power < kSmiShift) {
    sar(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shl(dst, Immediate(power - kSmiShift));
  }
}
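
// Why a single shift suffices: untagging is an arithmetic shift right by
// kSmiShift (32) and multiplying by 2^power is a shift left by power, so the
// net effect is one shift by |power - 32| (none at all when power == 32).
// For example, smi 6 (0x0000000600000000) with power == 3:
//
//   sar by 29  ==  (6 << 32) >> 29  ==  6 * 2^3  ==  48.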


void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  ASSERT((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shr(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}


void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                 Label* on_not_smis,
                                 Label::Distance near_jump) {
  if (dst.is(src1) || dst.is(src2)) {
    ASSERT(!src1.is(kScratchRegister));
    ASSERT(!src2.is(kScratchRegister));
    movq(kScratchRegister, src1);
    or_(kScratchRegister, src2);
    JumpIfNotSmi(kScratchRegister, on_not_smis, near_jump);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    or_(dst, src2);
    JumpIfNotSmi(dst, on_not_smis, near_jump);
  }
}


Condition MacroAssembler::CheckSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckSmi(const Operand& src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  // Test that both bits of the mask 0x8000000000000001 are zero.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}
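
// How the rotate trick works: rol by 1 moves the sign bit (bit 63) into
// bit 0 and the smi tag (bit 0) into bit 1, so one testb against 3 checks
// both at once. For example, the mask value itself:
//
//   0x8000000000000001  --rol 1-->  0x0000000000000003
//
// and "zero" therefore means: tagged as a smi and non-negative.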


Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}
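
// Why adding the two words tests both tags: under the asserted tag layout a
// smi ends in binary 00 and a heap pointer in 01, so for the low two bits of
// first + second:
//
//   smi  + smi   ->  00 + 00 == 00   (passes the testb)
//   smi  + heap  ->  00 + 01 == 01   (fails)
//   heap + heap  ->  01 + 01 == 10   (fails)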


Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movq(kScratchRegister, first);
  or_(kScratchRegister, second);
  rol(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}


Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckIsMinSmi(Register src) {
  ASSERT(!src.is(kScratchRegister));
  // If we overflow by subtracting one, it's the minimal smi value.
  cmpq(src, kSmiConstantRegister);
  return overflow;
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  // A 32-bit integer value can always be converted to a smi.
  return always;
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}


void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
  if (dst.is(src)) {
    andl(dst, Immediate(kSmiTagMask));
  } else {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  }
}


void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
  if (!(src.AddressUsesRegister(dst))) {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  } else {
    movl(dst, src);
    andl(dst, Immediate(kSmiTagMask));
  }
}


void MacroAssembler::JumpIfNotValidSmiValue(Register src,
                                            Label* on_invalid,
                                            Label::Distance near_jump) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}


void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label* on_invalid,
                                                Label::Distance near_jump) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}


void MacroAssembler::JumpIfSmi(Register src,
                               Label* on_smi,
                               Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(smi, on_smi, near_jump);
}


void MacroAssembler::JumpIfNotSmi(Register src,
                                  Label* on_not_smi,
                                  Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi, near_jump);
}


void MacroAssembler::JumpUnlessNonNegativeSmi(
    Register src, Label* on_not_smi_or_negative,
    Label::Distance near_jump) {
  Condition non_negative_smi = CheckNonNegativeSmi(src);
  j(NegateCondition(non_negative_smi), on_not_smi_or_negative, near_jump);
}


void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals,
                                             Label::Distance near_jump) {
  SmiCompare(src, constant);
  j(equal, on_equals, near_jump);
}


void MacroAssembler::JumpIfNotBothSmi(Register src1,
                                      Register src2,
                                      Label* on_not_both_smi,
                                      Label::Distance near_jump) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}


void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
                                                  Register src2,
                                                  Label* on_not_both_smi,
                                                  Label::Distance near_jump) {
  Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}


void MacroAssembler::SmiTryAddConstant(Register dst,
                                       Register src,
                                       Smi* constant,
                                       Label* on_not_smi_result,
                                       Label::Distance near_jump) {
  // Does not assume that src is a smi.
  ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
  STATIC_ASSERT(kSmiTag == 0);
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));

  JumpIfNotSmi(src, on_not_smi_result, near_jump);
  Register tmp = (dst.is(src) ? kScratchRegister : dst);
  LoadSmiConstant(tmp, constant);
  addq(tmp, src);
  j(overflow, on_not_smi_result, near_jump);
  if (dst.is(src)) {
    movq(dst, tmp);
  }
}


void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
    return;
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    switch (constant->value()) {
      case 1:
        addq(dst, kSmiConstantRegister);
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        Register constant_reg = GetSmiConstant(constant);
        addq(dst, constant_reg);
        return;
    }
  } else {
    switch (constant->value()) {
      case 1:
        lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
        return;
      case 2:
        lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
        return;
      case 4:
        lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
        return;
      case 8:
        lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
        return;
      default:
        LoadSmiConstant(dst, constant);
        addq(dst, src);
        return;
    }
  }
}
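
// Reading the lea forms above: with kSmiConstantRegister holding smi(1),
// scaled-index addressing adds a small smi constant and the source in one
// flag-preserving instruction, e.g.
//
//   lea dst, [src + kSmiConstantRegister*4]
//     == src + (1 << 32) * 4  ==  src + smi(4)
//
// which is why exactly the constants with a matching lea scale factor
// (1, 2, 4 and 8) get dedicated cases here.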


void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
  }
}


void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));

    LoadSmiConstant(kScratchRegister, constant);
    addq(kScratchRegister, src);
    j(overflow, on_not_smi_result, near_jump);
    movq(dst, kScratchRegister);
  } else {
    LoadSmiConstant(dst, constant);
    addq(dst, src);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subq(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result; they
      // differ only in the overflow flag, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addq(dst, src);
    }
  }
}


void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    Label* on_not_smi_result,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movq(dst, src);
    }
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result, near_jump);
      LoadSmiConstant(kScratchRegister, constant);
      subq(dst, kScratchRegister);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
      addq(kScratchRegister, dst);
      j(overflow, on_not_smi_result, near_jump);
      movq(dst, kScratchRegister);
    }
  } else {
    if (constant->value() == Smi::kMinValue) {
      // Subtracting min-value from any non-negative value will overflow.
      // We test the non-negativeness before doing the subtraction.
      testq(src, src);
      j(not_sign, on_not_smi_result, near_jump);
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value gives the same result; they
      // differ only in the overflow flag, which we don't check here.
      addq(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addq(dst, src);
      j(overflow, on_not_smi_result, near_jump);
    }
  }
}


void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            Label* on_smi_result,
                            Label::Distance near_jump) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    movq(kScratchRegister, src);
    neg(dst);  // Low 32 bits are retained as zero by negation.
    // Test if result is zero or Smi::kMinValue.
    cmpq(dst, kScratchRegister);
    j(not_equal, on_smi_result, near_jump);
    movq(src, kScratchRegister);
  } else {
    movq(dst, src);
    neg(dst);
    cmpq(dst, src);
    // If the result is zero or Smi::kMinValue, negation failed to create a smi.
    j(not_equal, on_smi_result, near_jump);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result, near_jump);
    movq(dst, kScratchRegister);
  } else {
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src1);
    addq(kScratchRegister, src2);
    j(overflow, on_not_smi_result, near_jump);
    movq(dst, kScratchRegister);
  } else {
    ASSERT(!src2.AddressUsesRegister(dst));
    movq(dst, src1);
    addq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible.
  if (!dst.is(src1)) {
    if (emit_debug_code()) {
      movq(kScratchRegister, src1);
      addq(kScratchRegister, src2);
      Check(no_overflow, "Smi addition overflow");
    }
    lea(dst, Operand(src1, src2, times_1, 0));
  } else {
    addq(dst, src2);
    Assert(no_overflow, "Smi addition overflow");
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  ASSERT(!dst.is(src2));
  if (dst.is(src1)) {
    cmpq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
    subq(dst, src2);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}
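
// Note on the dst.is(src1) path above: cmpq computes dst - src2 and sets the
// arithmetic flags exactly as subq would, so overflow can be tested before
// dst is clobbered; the real subq then runs only on the non-overflow path,
// leaving src1 intact for the slow case.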


void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT_NOT_NULL(on_not_smi_result);
  if (dst.is(src1)) {
    movq(kScratchRegister, src2);
    cmpq(src1, kScratchRegister);
    j(overflow, on_not_smi_result, near_jump);
    subq(src1, kScratchRegister);
  } else {
    movq(dst, src1);
    subq(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
  }
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  // No overflow checking. Use only when it's known that
  // overflowing is impossible (e.g., subtracting two positive smis).
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  subq(dst, src2);
  Assert(no_overflow, "Smi subtraction overflow");
}


void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!dst.is(src2));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movq(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, &failure, Label::kNear);

    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result, Label::kNear);

    movq(dst, kScratchRegister);
    xor_(dst, src2);
    // Result was positive zero.
    j(positive, &zero_correct_result, Label::kNear);

    bind(&failure);  // Reused failure exit, restores src1.
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);

    bind(&zero_correct_result);
    Set(dst, 0);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imul(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
    // Check for negative zero result. If product is zero, and one
    // argument is negative, go to slow case.
    Label correct_result;
    testq(dst, dst);
    j(not_zero, &correct_result, Label::kNear);
    // One of src1 and src2 is zero; check whether the other is
    // negative.
    movq(kScratchRegister, src1);
    xor_(kScratchRegister, src2);
    j(negative, on_not_smi_result, near_jump);
    bind(&correct_result);
  }
}
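
// Why the zero-product path is special: JavaScript distinguishes +0 from -0,
// and 0 * -3 must produce -0, which no smi can represent. When the product
// is zero, the xor of the operands is negative exactly when their signs
// differ, so that case (e.g. src1 == smi(0), src2 == smi(-3)) is routed to
// the slow path to return a heap number holding -0.0; same-sign zero
// products legitimately stay smi 0.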


void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  testq(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception.
  // We combine this with negative zero test (negative zero only happens
  // when dividing zero by a negative number).

  // We overshoot a little and go to slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(0x7fffffff));
  j(not_zero, &safe_div, Label::kNear);
  testq(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div, Label::kNear);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
  } else {
    j(negative, on_not_smi_result, near_jump);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend eax (holding src1's value) into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result, Label::kNear);
    movq(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result, near_jump);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}
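
// Two hazards drive the checks above. First, idivl raises a divide-error
// fault (not merely the overflow flag) when the quotient does not fit in
// int32, and -2^31 / -1 == 2^31 is exactly that case; testing the low 31
// bits of the dividend (zero only for 0 and -2^31) and then the divisor's
// sign conservatively sends both the min-value-over-negative case and the
// zero-over-negative case (whose JS result is -0) to the slow path. Second,
// a non-zero remainder in rdx means the JS result is fractional, which no
// smi can represent, so that bails out as well.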


void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!src2.is(rax));
  ASSERT(!src2.is(rdx));
  ASSERT(!src1.is(rdx));
  ASSERT(!src1.is(src2));

  testq(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    movq(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
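  // For illustration: although the mathematical remainder of
  // Smi::kMinValue % -1 is 0, idivl computes the quotient as well, and
  // that quotient overflows and raises #DE, so this input pair must be
  // retagged and sent to the slow case before dividing.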
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div, Label::kNear);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div, Label::kNear);
  // Retag the inputs and go to the slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  jmp(on_not_smi_result, near_jump);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on the inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movq(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero and the
  // dividend is negative, go to the slow case to return a floating point
  // negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result, Label::kNear);
  testq(src1, src1);
  j(negative, on_not_smi_result, near_jump);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}

void MacroAssembler::SmiNot(Register dst, Register src) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src.is(kScratchRegister));
  // Set tag and padding bits before negating, so that they are zero
  // afterwards.
  movl(kScratchRegister, Immediate(~0));
  if (dst.is(src)) {
    xor_(dst, kScratchRegister);
  } else {
    lea(dst, Operand(src, kScratchRegister, times_1, 0));
  }
  not_(dst);
}
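
// Worked example for SmiNot, assuming the x64 smi layout (value in the
// upper 32 bits, zero low bits): Smi(5) is 0x0000000500000000. Setting the
// low 32 bits to ones first (via xor_ or the lea with ~0) lets the final
// not_ clear them again while complementing the value bits, producing the
// correctly tagged Smi(~5).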


void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  ASSERT(!dst.is(src2));
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  and_(dst, src2);
}


void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    Set(dst, 0);
  } else if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    and_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    and_(dst, src);
  }
}


void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    ASSERT(!src1.is(src2));
    movq(dst, src1);
  }
  or_(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    or_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    or_(dst, src);
  }
}


void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    ASSERT(!src1.is(src2));
    movq(dst, src1);
  }
  xor_(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    ASSERT(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xor_(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xor_(dst, src);
  }
}


void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  ASSERT(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sar(dst, Immediate(shift_value + kSmiShift));
      shl(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}


void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift_value > 0) {
    shl(dst, Immediate(shift_value));
  }
}


void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value,
    Label* on_not_smi_result, Label::Distance near_jump) {
  // Logical right shift interprets its result as an *unsigned* number.
  if (dst.is(src)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    movq(dst, src);
    if (shift_value == 0) {
      testq(dst, dst);
      j(negative, on_not_smi_result, near_jump);
    }
    shr(dst, Immediate(shift_value + kSmiShift));
    shl(dst, Immediate(kSmiShift));
  }
}


void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2) {
  ASSERT(!dst.is(rcx));
  // Untag the shift amount.
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  // The shift amount is specified by the lower 5 bits, not six as for the
  // 64-bit shl opcode.
  and_(rcx, Immediate(0x1f));
  shl_cl(dst);
}


void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  // dst and src1 can be the same, because the one case that bails out
  // is a shift by 0, which leaves dst, and therefore src1, unchanged.
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  shr_cl(dst);  // The shift amount is (rcx & 0x1f) + 32.
  shl(dst, Immediate(kSmiShift));
  testq(dst, dst);
  if (src1.is(rcx) || src2.is(rcx)) {
    Label positive_result;
    j(positive, &positive_result, Label::kNear);
    if (src1.is(rcx)) {
      movq(src1, kScratchRegister);
    } else {
      movq(src2, kScratchRegister);
    }
    jmp(on_not_smi_result, near_jump);
    bind(&positive_result);
  } else {
    // src2 was zero and src1 negative.
    j(negative, on_not_smi_result, near_jump);
  }
}
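
// Worked example for SmiShiftLogicalRight: an untagged shift count is at
// most 31, so or'ing in kSmiShift (32) turns a count n into n + 32. One
// shr by n + 32 then both untags the operand and performs the 32-bit
// logical shift by n, and the following shl by kSmiShift re-tags the
// result.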


void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(rcx));
  if (src1.is(rcx)) {
    movq(kScratchRegister, src1);
  } else if (src2.is(rcx)) {
    movq(kScratchRegister, src2);
  }
  if (!dst.is(src1)) {
    movq(dst, src1);
  }
  SmiToInteger32(rcx, src2);
  orl(rcx, Immediate(kSmiShift));
  sar_cl(dst);  // The shift amount is 32 + (original rcx & 0x1f).
  shl(dst, Immediate(kSmiShift));
  if (src1.is(rcx)) {
    movq(src1, kScratchRegister);
  } else if (src2.is(rcx)) {
    movq(src2, kScratchRegister);
  }
}


void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis,
                                  Label::Distance near_jump) {
  ASSERT(!dst.is(kScratchRegister));
  ASSERT(!src1.is(kScratchRegister));
  ASSERT(!src2.is(kScratchRegister));
  ASSERT(!dst.is(src1));
  ASSERT(!dst.is(src2));
  // The operands must not both be smis.
#ifdef DEBUG
  if (allow_stub_calls()) {  // Check contains a stub call.
    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
    Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
  }
#endif
  STATIC_ASSERT(kSmiTag == 0);
  ASSERT_EQ(0, Smi::FromInt(0));
  movl(kScratchRegister, Immediate(kSmiTagMask));
  and_(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero, neither operand is a smi (both carry the heap object tag).
  j(not_zero, on_not_smis, near_jump);

  // Exactly one operand is a smi.
  ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
  // kScratchRegister still holds src1 & kSmiTagMask, which is zero or one.
  subq(kScratchRegister, Immediate(1));
  // If src1 is a smi, the scratch register is now all 1s, else it is all 0s.
  movq(dst, src1);
  xor_(dst, src2);
  and_(dst, kScratchRegister);
  // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
  xor_(dst, src1);
  // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
}
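
// Worked example for SelectNonSmi, using kSmiTagMask == 1 and kSmiTag == 0
// as asserted above: let mask = (src1 & 1) - 1, which is all 1s when src1
// is a smi and all 0s when it is a heap object. Then
// ((src1 ^ src2) & mask) ^ src1 evaluates to src2 when src1 is a smi and
// to src1 otherwise, selecting the non-smi without a branch.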


SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  ASSERT(is_uint6(shift));
  // There is a possible optimization if shift is in the range 60-63, but that
  // will (and must) never happen.
  if (!dst.is(src)) {
    movq(dst, src);
  }
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}
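
// Worked example for SmiToIndex: a tagged smi already holds
// value << kSmiShift, i.e. value << 32, so producing value << shift takes
// a single shift by the difference; for shift == 3 (pointer scaling) one
// sar by 29 replaces untagging followed by rescaling.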

SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  // Register src holds a positive smi.
  ASSERT(is_uint6(shift));
  if (!dst.is(src)) {
    movq(dst, src);
  }
  neg(dst);
  if (shift < kSmiShift) {
    sar(dst, Immediate(kSmiShift - shift));
  } else {
    shl(dst, Immediate(shift - kSmiShift));
  }
  return SmiIndex(dst, times_1);
}


void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
  ASSERT_EQ(0, kSmiShift % kBitsPerByte);
  addl(dst, Operand(src, kSmiShift / kBitsPerByte));
}
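
// Worked example for AddSmiField: since the smi value occupies the upper
// 32 bits of the field, the 32-bit word at byte offset
// kSmiShift / kBitsPerByte (i.e. src + 4) is exactly the untagged integer
// value, so the addl adds the field's value without a separate untagging
// step.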


void MacroAssembler::JumpIfNotString(Register object,
                                     Register object_map,
                                     Label* not_string,
                                     Label::Distance near_jump) {
  Condition is_smi = CheckSmi(object);
  j(is_smi, not_string, near_jump);
  CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
  j(above_equal, not_string, near_jump);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(
    Register first_object,
    Register second_object,
    Register scratch1,
    Register scratch2,
    Label* on_fail,
    Label::Distance near_jump) {
  // Check that neither object is a smi.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail, near_jump);

  // Load the instance type for both strings.
  movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}
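
// Worked example for the interleaved check above: the ASSERT_EQ verifies
// that kFlatAsciiStringMask and its copy shifted left by 3 share no bits,
// so the lea can pack scratch1 + scratch2 * 8 into one register without
// carries between the two fields; a single cmpl against
// kFlatAsciiStringTag + (kFlatAsciiStringTag << 3) then checks both
// strings at once.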


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure,
    Label::Distance near_jump) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatAsciiStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
  j(not_equal, failure, near_jump);
}


void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
    Register first_object_instance_type,
    Register second_object_instance_type,
    Register scratch1,
    Register scratch2,
    Label* on_fail,
    Label::Distance near_jump) {
  // Load the instance type for both strings.
  movq(scratch1, first_object_instance_type);
  movq(scratch2, second_object_instance_type);

  // Check that both are flat ASCII strings.
  ASSERT(kNotStringTag != 0);
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;

  andl(scratch1, Immediate(kFlatAsciiStringMask));
  andl(scratch2, Immediate(kFlatAsciiStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
}


void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}


void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    movq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    Move(kScratchRegister, source);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    cmpq(dst, kScratchRegister);
  }
}


void MacroAssembler::Push(Handle<Object> source) {
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    ASSERT(source->IsHeapObject());
    movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
    push(kScratchRegister);
  }
}


void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    movq(result, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    movq(result, Operand(result, 0));
  } else {
    Move(result, object);
  }
}


void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    movq(kScratchRegister, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    movq(kScratchRegister, Operand(kScratchRegister, 0));
    push(kScratchRegister);
  } else {
    Push(object);
  }
}


void MacroAssembler::LoadGlobalCell(Register dst,
                                    Handle<JSGlobalPropertyCell> cell) {
  if (dst.is(rax)) {
    load_rax(cell.location(), RelocInfo::GLOBAL_PROPERTY_CELL);
  } else {
    movq(dst, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
    movq(dst, Operand(dst, 0));
  }
}


void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    push(constant);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    addq(rsp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Test(const Operand& src, Smi* source) {
  testl(Operand(src, kIntSize), Immediate(source->value()));
}


void MacroAssembler::TestBit(const Operand& src, int bits) {
  int byte_offset = bits / kBitsPerByte;
  int bit_in_byte = bits & (kBitsPerByte - 1);
  testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte));
}
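
// Worked example for TestBit: TestBit(src, 11) computes byte_offset == 1
// and bit_in_byte == 3, emitting testb(Operand(src, 1), Immediate(0x08)),
// a one-byte probe of bit 11 of the field instead of a full-width load
// and mask.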


void MacroAssembler::Jump(ExternalReference ext) {
  LoadAddress(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  movq(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this
  jmp(code_object, rmode);
}


int MacroAssembler::CallSize(ExternalReference ext) {
  // Opcode for call kScratchRegister is: Rex.B FF D2 (three bytes).
  const int kCallInstructionSize = 3;
  return LoadAddressSize(ext) + kCallInstructionSize;
}


void MacroAssembler::Call(ExternalReference ext) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(ext);
#endif
  LoadAddress(kScratchRegister, ext);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(destination, rmode);
#endif
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(pc_offset(), end_position);
#endif
}


void MacroAssembler::Call(Handle<Code> code_object,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(code_object);
#endif
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode, ast_id);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}


void MacroAssembler::Pushad() {
  push(rax);
  push(rcx);
  push(rdx);
  push(rbx);
  // Not pushing rsp or rbp.
  push(rsi);
  push(rdi);
  push(r8);
  push(r9);
  // r10 is kScratchRegister.
  push(r11);
  // r12 is kSmiConstantRegister.
  // r13 is kRootRegister.
  push(r14);
  push(r15);
  STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  lea(rsp, Operand(rsp, -sp_delta));
}


void MacroAssembler::Popad() {
  // Popad must not change the flags, so use lea instead of addq.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  lea(rsp, Operand(rsp, sp_delta));
  pop(r15);
  pop(r14);
  pop(r11);
  pop(r9);
  pop(r8);
  pop(rdi);
  pop(rsi);
  pop(rbx);
  pop(rdx);
  pop(rcx);
  pop(rax);
}


void MacroAssembler::Dropad() {
  addq(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
}


// Order in which general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
const int
MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    0,
    1,
    2,
    3,
    -1,
    -1,
    4,
    5,
    6,
    7,
    -1,
    8,
    -1,
    -1,
    9,
    10
};
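
// For illustration: the array maps an x64 register code to its slot in the
// Pushad layout above; the -1 entries correspond to rsp, rbp, r10
// (kScratchRegister), r12 (kSmiConstantRegister) and r13 (kRootRegister),
// which Pushad deliberately does not save.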


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst,
                                                  const Immediate& imm) {
  movq(SafepointRegisterSlot(dst), imm);
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  movq(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  movq(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if the asserts below are not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame, so we save NULL for
    // rbp. We expect the code throwing an exception to check rbp before
    // dereferencing it to restore the context.
    push(Immediate(0));  // NULL frame pointer.
    Push(Smi::FromInt(0));  // No context.
  } else {
    push(rbp);
    push(rsi);
  }

  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(ExternalOperand(handler_address));
  // Set this new handler as the current one.
  movq(ExternalOperand(handler_address), rsp);
}


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(ExternalOperand(handler_address));
  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it. The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // rax = exception, rdi = code object, rdx = state.
  movq(rbx, FieldOperand(rdi, Code::kHandlerTableOffset));
  shr(rdx, Immediate(StackHandler::kKindWidth));
  movq(rdx, FieldOperand(rbx, rdx, times_8, FixedArray::kHeaderSize));
  SmiToInteger64(rdx, rdx);
  lea(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
  jmp(rdi);
}
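
// For illustration: on entry to JumpToHandlerEntry, rdx holds the packed
// state word, so shifting out the low kKindWidth bits leaves the handler
// index. That index selects a smi-tagged code offset from the handler
// table, and adding the untagged offset to the code object gives the
// handler's entry address.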
2616 2616
2617 2617
2618 void MacroAssembler::Throw(Register value) { 2618 void MacroAssembler::Throw(Register value) {
2619 // Adjust this code if not the case. 2619 // Adjust this code if not the case.
2620 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); 2620 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
2621 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); 2621 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
2622 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize); 2622 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
2623 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize); 2623 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
2624 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize); 2624 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
2625 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize); 2625 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
2626 2626
2627 // The exception is expected in rax. 2627 // The exception is expected in rax.
2628 if (!value.is(rax)) { 2628 if (!value.is(rax)) {
2629 movq(rax, value); 2629 movq(rax, value);
2630 } 2630 }
2631 // Drop the stack pointer to the top of the top handler. 2631 // Drop the stack pointer to the top of the top handler.
2632 ExternalReference handler_address(Isolate::kHandlerAddress, isolate()); 2632 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
2633 movq(rsp, ExternalOperand(handler_address)); 2633 movq(rsp, ExternalOperand(handler_address));
2634 // Restore the next handler. 2634 // Restore the next handler.
2635 pop(ExternalOperand(handler_address)); 2635 pop(ExternalOperand(handler_address));
2636 2636
2637 // Remove the code object and state, compute the handler address in rdi. 2637 // Remove the code object and state, compute the handler address in rdi.
2638 pop(rdi); // Code object. 2638 pop(rdi); // Code object.
2639 pop(rdx); // Offset and state. 2639 pop(rdx); // Offset and state.
2640 2640
2641 // Restore the context and frame pointer. 2641 // Restore the context and frame pointer.
2642 pop(rsi); // Context. 2642 pop(rsi); // Context.
2643 pop(rbp); // Frame pointer. 2643 pop(rbp); // Frame pointer.
2644 2644
2645 // If the handler is a JS frame, restore the context to the frame. 2645 // If the handler is a JS frame, restore the context to the frame.
2646 // (kind == ENTRY) == (rbp == 0) == (rsi == 0), so we could test either 2646 // (kind == ENTRY) == (rbp == 0) == (rsi == 0), so we could test either
2647 // rbp or rsi. 2647 // rbp or rsi.
2648 Label skip; 2648 Label skip;
2649 testq(rsi, rsi); 2649 testq(rsi, rsi);
2650 j(zero, &skip, Label::kNear); 2650 j(zero, &skip, Label::kNear);
2651 movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); 2651 movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
2652 bind(&skip); 2652 bind(&skip);
2653 2653
2654 JumpToHandlerEntry(); 2654 JumpToHandlerEntry();
2655 } 2655 }
2656 2656
2657 2657
2658 void MacroAssembler::ThrowUncatchable(Register value) { 2658 void MacroAssembler::ThrowUncatchable(Register value) {
2659 // Adjust this code if not the case. 2659 // Adjust this code if not the case.
2660 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize); 2660 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
2661 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); 2661 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
2662 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize); 2662 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
2663 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize); 2663 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
2664 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize); 2664 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
2665 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize); 2665 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
2666 2666
2667 // The exception is expected in rax. 2667 // The exception is expected in rax.
2668 if (!value.is(rax)) { 2668 if (!value.is(rax)) {
2669 movq(rax, value); 2669 movq(rax, value);
2670 } 2670 }
2671 // Drop the stack pointer to the top of the top stack handler. 2671 // Drop the stack pointer to the top of the top stack handler.
2672 ExternalReference handler_address(Isolate::kHandlerAddress, isolate()); 2672 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
2673 Load(rsp, handler_address); 2673 Load(rsp, handler_address);
2674 2674
2675 // Unwind the handlers until the top ENTRY handler is found. 2675 // Unwind the handlers until the top ENTRY handler is found.
2676 Label fetch_next, check_kind; 2676 Label fetch_next, check_kind;
2677 jmp(&check_kind, Label::kNear); 2677 jmp(&check_kind, Label::kNear);
2678 bind(&fetch_next); 2678 bind(&fetch_next);
2679 movq(rsp, Operand(rsp, StackHandlerConstants::kNextOffset)); 2679 movq(rsp, Operand(rsp, StackHandlerConstants::kNextOffset));
2680 2680
2681 bind(&check_kind); 2681 bind(&check_kind);
2682 STATIC_ASSERT(StackHandler::JS_ENTRY == 0); 2682 STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
2683 testl(Operand(rsp, StackHandlerConstants::kStateOffset), 2683 testl(Operand(rsp, StackHandlerConstants::kStateOffset),
2684 Immediate(StackHandler::KindField::kMask)); 2684 Immediate(StackHandler::KindField::kMask));
2685 j(not_zero, &fetch_next); 2685 j(not_zero, &fetch_next);
2686 2686
2687 // Set the top handler address to next handler past the top ENTRY handler. 2687 // Set the top handler address to next handler past the top ENTRY handler.
2688 pop(ExternalOperand(handler_address)); 2688 pop(ExternalOperand(handler_address));
2689 2689
2690 // Remove the code object and state, compute the handler address in rdi. 2690 // Remove the code object and state, compute the handler address in rdi.
2691 pop(rdi); // Code object. 2691 pop(rdi); // Code object.
2692 pop(rdx); // Offset and state. 2692 pop(rdx); // Offset and state.
2693 2693
2694 // Clear the context pointer and frame pointer (0 was saved in the handler). 2694 // Clear the context pointer and frame pointer (0 was saved in the handler).
2695 pop(rsi); 2695 pop(rsi);
2696 pop(rbp); 2696 pop(rbp);
2697 2697
2698 JumpToHandlerEntry(); 2698 JumpToHandlerEntry();
2699 } 2699 }
2700 2700
2701 2701
2702 void MacroAssembler::Ret() { 2702 void MacroAssembler::Ret() {
2703 ret(0); 2703 ret(0);
2704 } 2704 }
2705 2705
2706 2706
2707 void MacroAssembler::Ret(int bytes_dropped, Register scratch) { 2707 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2708 if (is_uint16(bytes_dropped)) { 2708 if (is_uint16(bytes_dropped)) {
2709 ret(bytes_dropped); 2709 ret(bytes_dropped);
2710 } else { 2710 } else {
2711 pop(scratch); 2711 pop(scratch);
2712 addq(rsp, Immediate(bytes_dropped)); 2712 addq(rsp, Immediate(bytes_dropped));
2713 push(scratch); 2713 push(scratch);
2714 ret(0); 2714 ret(0);
2715 } 2715 }
2716 } 2716 }
2717 2717
2718 2718
2719 void MacroAssembler::FCmp() { 2719 void MacroAssembler::FCmp() {
2720 fucomip(); 2720 fucomip();
2721 fstp(0); 2721 fstp(0);
2722 } 2722 }
2723 2723
2724 2724
2725 void MacroAssembler::CmpObjectType(Register heap_object, 2725 void MacroAssembler::CmpObjectType(Register heap_object,
2726 InstanceType type, 2726 InstanceType type,
2727 Register map) { 2727 Register map) {
2728 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset)); 2728 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
2729 CmpInstanceType(map, type); 2729 CmpInstanceType(map, type);
2730 } 2730 }
2731 2731
2732 2732
2733 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) { 2733 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
2734 cmpb(FieldOperand(map, Map::kInstanceTypeOffset), 2734 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
2735 Immediate(static_cast<int8_t>(type))); 2735 Immediate(static_cast<int8_t>(type)));
2736 } 2736 }
2737 2737
2738 2738
2739 void MacroAssembler::CheckFastElements(Register map, 2739 void MacroAssembler::CheckFastElements(Register map,
2740 Label* fail, 2740 Label* fail,
2741 Label::Distance distance) { 2741 Label::Distance distance) {
2742 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); 2742 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2743 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); 2743 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2744 STATIC_ASSERT(FAST_ELEMENTS == 2); 2744 STATIC_ASSERT(FAST_ELEMENTS == 2);
2745 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3); 2745 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
2746 cmpb(FieldOperand(map, Map::kBitField2Offset), 2746 cmpb(FieldOperand(map, Map::kBitField2Offset),
2747 Immediate(Map::kMaximumBitField2FastHoleyElementValue)); 2747 Immediate(Map::kMaximumBitField2FastHoleyElementValue));
2748 j(above, fail, distance); 2748 j(above, fail, distance);
2749 } 2749 }
2750 2750
2751 2751
2752 void MacroAssembler::CheckFastObjectElements(Register map, 2752 void MacroAssembler::CheckFastObjectElements(Register map,
2753 Label* fail, 2753 Label* fail,
2754 Label::Distance distance) { 2754 Label::Distance distance) {
2755 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); 2755 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2756 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); 2756 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2757 STATIC_ASSERT(FAST_ELEMENTS == 2); 2757 STATIC_ASSERT(FAST_ELEMENTS == 2);
2758 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3); 2758 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
2759 cmpb(FieldOperand(map, Map::kBitField2Offset), 2759 cmpb(FieldOperand(map, Map::kBitField2Offset),
2760 Immediate(Map::kMaximumBitField2FastHoleySmiElementValue)); 2760 Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
2761 j(below_equal, fail, distance); 2761 j(below_equal, fail, distance);
2762 cmpb(FieldOperand(map, Map::kBitField2Offset), 2762 cmpb(FieldOperand(map, Map::kBitField2Offset),
2763 Immediate(Map::kMaximumBitField2FastHoleyElementValue)); 2763 Immediate(Map::kMaximumBitField2FastHoleyElementValue));
2764 j(above, fail, distance); 2764 j(above, fail, distance);
2765 } 2765 }
2766 2766
2767 2767
2768 void MacroAssembler::CheckFastSmiElements(Register map, 2768 void MacroAssembler::CheckFastSmiElements(Register map,
2769 Label* fail, 2769 Label* fail,
2770 Label::Distance distance) { 2770 Label::Distance distance) {
2771 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); 2771 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2772 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); 2772 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2773 cmpb(FieldOperand(map, Map::kBitField2Offset), 2773 cmpb(FieldOperand(map, Map::kBitField2Offset),
2774 Immediate(Map::kMaximumBitField2FastHoleySmiElementValue)); 2774 Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
2775 j(above, fail, distance); 2775 j(above, fail, distance);
2776 } 2776 }
2777 2777
2778 2778
2779 void MacroAssembler::StoreNumberToDoubleElements( 2779 void MacroAssembler::StoreNumberToDoubleElements(
2780 Register maybe_number, 2780 Register maybe_number,
2781 Register elements, 2781 Register elements,
2782 Register index, 2782 Register index,
2783 XMMRegister xmm_scratch, 2783 XMMRegister xmm_scratch,
2784 Label* fail) { 2784 Label* fail) {
2785 Label smi_value, is_nan, maybe_nan, not_nan, have_double_value, done; 2785 Label smi_value, is_nan, maybe_nan, not_nan, have_double_value, done;
2786 2786
2787 JumpIfSmi(maybe_number, &smi_value, Label::kNear); 2787 JumpIfSmi(maybe_number, &smi_value, Label::kNear);
2788 2788
2789 CheckMap(maybe_number, 2789 CheckMap(maybe_number,
2790 isolate()->factory()->heap_number_map(), 2790 isolate()->factory()->heap_number_map(),
2791 fail, 2791 fail,
2792 DONT_DO_SMI_CHECK); 2792 DONT_DO_SMI_CHECK);
2793 2793
2794 // Double value, canonicalize NaN. 2794 // Double value, canonicalize NaN.
2795 uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32); 2795 uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
2796 cmpl(FieldOperand(maybe_number, offset), 2796 cmpl(FieldOperand(maybe_number, offset),
2797 Immediate(kNaNOrInfinityLowerBoundUpper32)); 2797 Immediate(kNaNOrInfinityLowerBoundUpper32));
2798 j(greater_equal, &maybe_nan, Label::kNear); 2798 j(greater_equal, &maybe_nan, Label::kNear);
2799 2799
2800 bind(&not_nan); 2800 bind(&not_nan);
2801 movsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset)); 2801 movsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset));
2802 bind(&have_double_value); 2802 bind(&have_double_value);
2803 movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize), 2803 movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize),
2804 xmm_scratch); 2804 xmm_scratch);
2805 jmp(&done); 2805 jmp(&done);
2806 2806
2807 bind(&maybe_nan); 2807 bind(&maybe_nan);
2808 // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise 2808 // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
2809 // it's an Infinity, and the non-NaN code path applies. 2809 // it's an Infinity, and the non-NaN code path applies.
2810 j(greater, &is_nan, Label::kNear); 2810 j(greater, &is_nan, Label::kNear);
2811 cmpl(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0)); 2811 cmpl(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
2812 j(zero, &not_nan); 2812 j(zero, &not_nan);
2813 bind(&is_nan); 2813 bind(&is_nan);
2814 // Convert all NaNs to the same canonical NaN value when they are stored in 2814 // Convert all NaNs to the same canonical NaN value when they are stored in
2815 // the double array. 2815 // the double array.
2816 Set(kScratchRegister, BitCast<uint64_t>( 2816 Set(kScratchRegister, BitCast<uint64_t>(
2817 FixedDoubleArray::canonical_not_the_hole_nan_as_double())); 2817 FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
2818 movq(xmm_scratch, kScratchRegister); 2818 movq(xmm_scratch, kScratchRegister);
2819 jmp(&have_double_value, Label::kNear); 2819 jmp(&have_double_value, Label::kNear);
2820 2820
2821 bind(&smi_value); 2821 bind(&smi_value);
2822 // Value is a smi. Convert it to a double and store. 2822 // Value is a smi. Convert it to a double and store.
2823 // Preserve original value. 2823 // Preserve original value.
2824 SmiToInteger32(kScratchRegister, maybe_number); 2824 SmiToInteger32(kScratchRegister, maybe_number);
2825 cvtlsi2sd(xmm_scratch, kScratchRegister); 2825 cvtlsi2sd(xmm_scratch, kScratchRegister);
2826 movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize), 2826 movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize),
2827 xmm_scratch); 2827 xmm_scratch);
2828 bind(&done); 2828 bind(&done);
2829 } 2829 }
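
Review note: the maybe_nan/is_nan paths above lean on the IEEE-754 layout: the upper 32 bits of +Infinity are 0x7FF00000 (the assumed value of kNaNOrInfinityLowerBoundUpper32), and any upper half that compares greater has fraction bits set, i.e. is a NaN. A hypothetical host-side helper doing the same classification, using the same signed compare as the cmpl/j(greater_equal) pair (so, as in the assembly, values with the sign bit set take the not_nan path):

#include <cassert>
#include <cstdint>
#include <cstring>
#include <limits>

const int32_t kNaNOrInfinityLowerBoundUpper32 = 0x7FF00000;  // assumed value

bool NeedsCanonicalization(double v) {
  uint64_t bits;
  std::memcpy(&bits, &v, sizeof bits);  // the FieldOperand read, host-side
  int32_t upper = static_cast<int32_t>(bits >> 32);  // kValueOffset + 4 word
  if (upper < kNaNOrInfinityLowerBoundUpper32) return false;  // ordinary double
  if (upper > kNaNOrInfinityLowerBoundUpper32) return true;   // fraction set: NaN
  // Upper halves equal: it is +Infinity iff the lower word is zero.
  return static_cast<uint32_t>(bits) != 0;
}

int main() {
  assert(!NeedsCanonicalization(1.5));
  assert(!NeedsCanonicalization(std::numeric_limits<double>::infinity()));
  assert(NeedsCanonicalization(std::numeric_limits<double>::quiet_NaN()));
  return 0;
}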
2830 2830
2831 2831
2832 void MacroAssembler::CompareMap(Register obj, 2832 void MacroAssembler::CompareMap(Register obj,
2833 Handle<Map> map, 2833 Handle<Map> map,
2834 Label* early_success, 2834 Label* early_success,
2835 CompareMapMode mode) { 2835 CompareMapMode mode) {
2836 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map); 2836 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
2837 if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) { 2837 if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
2838 ElementsKind kind = map->elements_kind(); 2838 ElementsKind kind = map->elements_kind();
2839 if (IsFastElementsKind(kind)) { 2839 if (IsFastElementsKind(kind)) {
2840 bool packed = IsFastPackedElementsKind(kind); 2840 bool packed = IsFastPackedElementsKind(kind);
2841 Map* current_map = *map; 2841 Map* current_map = *map;
2842 while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) { 2842 while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
2843 kind = GetNextMoreGeneralFastElementsKind(kind, packed); 2843 kind = GetNextMoreGeneralFastElementsKind(kind, packed);
2844 current_map = current_map->LookupElementsTransitionMap(kind); 2844 current_map = current_map->LookupElementsTransitionMap(kind);
2845 if (!current_map) break; 2845 if (!current_map) break;
2846 j(equal, early_success, Label::kNear); 2846 j(equal, early_success, Label::kNear);
2847 Cmp(FieldOperand(obj, HeapObject::kMapOffset), 2847 Cmp(FieldOperand(obj, HeapObject::kMapOffset),
2848 Handle<Map>(current_map)); 2848 Handle<Map>(current_map));
2849 } 2849 }
2850 } 2850 }
2851 } 2851 }
2852 } 2852 }
2853 2853
2854 2854
2855 void MacroAssembler::CheckMap(Register obj, 2855 void MacroAssembler::CheckMap(Register obj,
2856 Handle<Map> map, 2856 Handle<Map> map,
2857 Label* fail, 2857 Label* fail,
2858 SmiCheckType smi_check_type, 2858 SmiCheckType smi_check_type,
2859 CompareMapMode mode) { 2859 CompareMapMode mode) {
2860 if (smi_check_type == DO_SMI_CHECK) { 2860 if (smi_check_type == DO_SMI_CHECK) {
2861 JumpIfSmi(obj, fail); 2861 JumpIfSmi(obj, fail);
2862 } 2862 }
2863 2863
2864 Label success; 2864 Label success;
2865 CompareMap(obj, map, &success, mode); 2865 CompareMap(obj, map, &success, mode);
2866 j(not_equal, fail); 2866 j(not_equal, fail);
2867 bind(&success); 2867 bind(&success);
2868 } 2868 }
2869 2869
2870 2870
2871 void MacroAssembler::ClampUint8(Register reg) { 2871 void MacroAssembler::ClampUint8(Register reg) {
2872 Label done; 2872 Label done;
2873 testl(reg, Immediate(0xFFFFFF00)); 2873 testl(reg, Immediate(0xFFFFFF00));
2874 j(zero, &done, Label::kNear); 2874 j(zero, &done, Label::kNear);
2875 setcc(negative, reg); // 1 if negative, 0 if positive. 2875 setcc(negative, reg); // 1 if negative, 0 if positive.
2876 decb(reg); // 0 if negative, 255 if positive. 2876 decb(reg); // 0 if negative, 255 if positive.
2877 bind(&done); 2877 bind(&done);
2878 } 2878 }
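
Review note: ClampUint8 saturates without branches once the range test fails: setcc(negative) leaves 1 for negative inputs and 0 for positive out-of-range ones, and the byte-wide decrement maps those to the two saturation values. A sketch of the same arithmetic (illustrative helper, not V8 API):

#include <cassert>
#include <cstdint>

uint8_t ClampUint8(int32_t v) {
  if ((v & 0xFFFFFF00) == 0) {       // testl + j(zero): already in 0..255
    return static_cast<uint8_t>(v);
  }
  uint8_t sign = v < 0 ? 1 : 0;      // setcc(negative, reg)
  return static_cast<uint8_t>(sign - 1);  // decb: 1 -> 0, 0 -> 0xFF (255)
}

int main() {
  assert(ClampUint8(-7) == 0);
  assert(ClampUint8(999) == 255);
  assert(ClampUint8(200) == 200);
  return 0;
}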
2879 2879
2880 2880
2881 void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg, 2881 void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
2882 XMMRegister temp_xmm_reg, 2882 XMMRegister temp_xmm_reg,
2883 Register result_reg, 2883 Register result_reg,
2884 Register temp_reg) { 2884 Register temp_reg) {
2885 Label done; 2885 Label done;
2886 Set(result_reg, 0); 2886 Set(result_reg, 0);
2887 xorps(temp_xmm_reg, temp_xmm_reg); 2887 xorps(temp_xmm_reg, temp_xmm_reg);
2888 ucomisd(input_reg, temp_xmm_reg); 2888 ucomisd(input_reg, temp_xmm_reg);
2889 j(below, &done, Label::kNear); 2889 j(below, &done, Label::kNear);
2890 cvtsd2si(result_reg, input_reg); 2890 cvtsd2si(result_reg, input_reg);
2891 testl(result_reg, Immediate(0xFFFFFF00)); 2891 testl(result_reg, Immediate(0xFFFFFF00));
2892 j(zero, &done, Label::kNear); 2892 j(zero, &done, Label::kNear);
2893 Set(result_reg, 255); 2893 Set(result_reg, 255);
2894 bind(&done); 2894 bind(&done);
2895 } 2895 }
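
Review note: the double variant has three exits: ucomisd + j(below) routes negatives and NaN (an unordered compare also sets CF) to the zero result, the testl mask accepts in-range conversions, and everything else saturates to 255. A sketch under those semantics; std::lrint stands in for cvtsd2si's round-to-nearest default, and the min() guard stands in for its out-of-range result, which likewise ends in the 255 branch:

#include <algorithm>
#include <cassert>
#include <cmath>
#include <cstdint>

uint8_t ClampDoubleToUint8(double v) {
  if (!(v >= 0.0)) return 0;  // ucomisd + j(below): negatives and NaN land here
  int32_t n = static_cast<int32_t>(std::lrint(std::min(v, 256.0)));
  if ((n & 0xFFFFFF00) == 0) return static_cast<uint8_t>(n);  // testl mask
  return 255;                 // Set(result_reg, 255)
}

int main() {
  assert(ClampDoubleToUint8(-3.5) == 0);
  assert(ClampDoubleToUint8(std::nan("")) == 0);
  assert(ClampDoubleToUint8(127.2) == 127);
  assert(ClampDoubleToUint8(1e9) == 255);
  return 0;
}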
2896 2896
2897 2897
2898 static double kUint32Bias = 2898 static double kUint32Bias =
2899 static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1; 2899 static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;
2900 2900
2901 2901
2902 void MacroAssembler::LoadUint32(XMMRegister dst, 2902 void MacroAssembler::LoadUint32(XMMRegister dst,
2903 Register src, 2903 Register src,
2904 XMMRegister scratch) { 2904 XMMRegister scratch) {
2905 Label done; 2905 Label done;
2906 cmpl(src, Immediate(0)); 2906 cmpl(src, Immediate(0));
2907 movq(kScratchRegister, 2907 movq(kScratchRegister,
2908 reinterpret_cast<int64_t>(&kUint32Bias), 2908 reinterpret_cast<int64_t>(&kUint32Bias),
2909 RelocInfo::NONE); 2909 RelocInfo::NONE);
2910 movsd(scratch, Operand(kScratchRegister, 0)); 2910 movsd(scratch, Operand(kScratchRegister, 0));
2911 cvtlsi2sd(dst, src); 2911 cvtlsi2sd(dst, src);
2912 j(not_sign, &done, Label::kNear); 2912 j(not_sign, &done, Label::kNear);
2913 addsd(dst, scratch); 2913 addsd(dst, scratch);
2914 bind(&done); 2914 bind(&done);
2915 } 2915 }
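
Review note: SSE2 has no unsigned 32-bit-to-double conversion, so the code converts as signed and repairs the result: inputs with the top bit set come out exactly 2^32 too small, and the kUint32Bias addition (taken only when the cmpl left the sign flag set) corrects that. A sketch:

#include <cassert>
#include <cstdint>

// Same definition as the kUint32Bias above: 2^32 as a double.
static const double kUint32Bias =
    static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;

double LoadUint32(uint32_t src) {
  double d = static_cast<double>(static_cast<int32_t>(src));  // cvtlsi2sd
  if (static_cast<int32_t>(src) < 0) d += kUint32Bias;  // j(not_sign) not taken
  return d;
}

int main() {
  assert(LoadUint32(7u) == 7.0);
  assert(LoadUint32(0x80000000u) == 2147483648.0);
  assert(LoadUint32(0xFFFFFFFFu) == 4294967295.0);
  return 0;
}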
2916 2916
2917 2917
2918 void MacroAssembler::LoadInstanceDescriptors(Register map, 2918 void MacroAssembler::LoadInstanceDescriptors(Register map,
2919 Register descriptors) { 2919 Register descriptors) {
2920 Register temp = descriptors; 2920 Register temp = descriptors;
2921 movq(temp, FieldOperand(map, Map::kTransitionsOrBackPointerOffset)); 2921 movq(temp, FieldOperand(map, Map::kTransitionsOrBackPointerOffset));
2922 2922
2923 Label ok, fail, load_from_back_pointer; 2923 Label ok, fail;
2924 CheckMap(temp, 2924 CheckMap(temp,
2925 isolate()->factory()->fixed_array_map(), 2925 isolate()->factory()->fixed_array_map(),
2926 &fail, 2926 &fail,
2927 DONT_DO_SMI_CHECK); 2927 DONT_DO_SMI_CHECK);
2928 movq(temp, FieldOperand(temp, TransitionArray::kDescriptorsPointerOffset)); 2928 movq(descriptors, FieldOperand(temp, TransitionArray::kDescriptorsOffset));
2929 movq(descriptors, FieldOperand(temp, JSGlobalPropertyCell::kValueOffset));
2930 jmp(&ok); 2929 jmp(&ok);
2931
2932 bind(&fail); 2930 bind(&fail);
2933 CompareRoot(temp, Heap::kUndefinedValueRootIndex);
2934 j(not_equal, &load_from_back_pointer, Label::kNear);
2935 Move(descriptors, isolate()->factory()->empty_descriptor_array()); 2931 Move(descriptors, isolate()->factory()->empty_descriptor_array());
2936 jmp(&ok);
2937
2938 bind(&load_from_back_pointer);
2939 movq(temp, FieldOperand(temp, Map::kTransitionsOrBackPointerOffset));
2940 movq(temp, FieldOperand(temp, TransitionArray::kDescriptorsPointerOffset));
2941 movq(descriptors, FieldOperand(temp, JSGlobalPropertyCell::kValueOffset));
2942
2943 bind(&ok); 2932 bind(&ok);
2944 } 2933 }
2945
2946
2947 void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
2948 movq(dst, FieldOperand(map, Map::kBitField3Offset));
2949 DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
2950 }
2951 2934
2952 2935
2953 void MacroAssembler::EnumLength(Register dst, Register map) { 2936 void MacroAssembler::EnumLength(Register dst, Register map) {
2954 STATIC_ASSERT(Map::EnumLengthBits::kShift == 0); 2937 STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
2955 movq(dst, FieldOperand(map, Map::kBitField3Offset)); 2938 movq(dst, FieldOperand(map, Map::kBitField3Offset));
2956 Move(kScratchRegister, Smi::FromInt(Map::EnumLengthBits::kMask)); 2939 Move(kScratchRegister, Smi::FromInt(Map::EnumLengthBits::kMask));
2957 and_(dst, kScratchRegister); 2940 and_(dst, kScratchRegister);
2958 } 2941 }
2959 2942
2960 2943
2961 void MacroAssembler::DispatchMap(Register obj, 2944 void MacroAssembler::DispatchMap(Register obj,
2962 Handle<Map> map, 2945 Handle<Map> map,
2963 Handle<Code> success, 2946 Handle<Code> success,
2964 SmiCheckType smi_check_type) { 2947 SmiCheckType smi_check_type) {
2965 Label fail; 2948 Label fail;
2966 if (smi_check_type == DO_SMI_CHECK) { 2949 if (smi_check_type == DO_SMI_CHECK) {
2967 JumpIfSmi(obj, &fail); 2950 JumpIfSmi(obj, &fail);
2968 } 2951 }
2969 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map); 2952 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
2970 j(equal, success, RelocInfo::CODE_TARGET); 2953 j(equal, success, RelocInfo::CODE_TARGET);
2971 2954
2972 bind(&fail); 2955 bind(&fail);
2973 } 2956 }
2974 2957
2975 2958
2976 void MacroAssembler::AbortIfNotNumber(Register object) { 2959 void MacroAssembler::AbortIfNotNumber(Register object) {
2977 Label ok; 2960 Label ok;
2978 Condition is_smi = CheckSmi(object); 2961 Condition is_smi = CheckSmi(object);
2979 j(is_smi, &ok, Label::kNear); 2962 j(is_smi, &ok, Label::kNear);
2980 Cmp(FieldOperand(object, HeapObject::kMapOffset), 2963 Cmp(FieldOperand(object, HeapObject::kMapOffset),
2981 isolate()->factory()->heap_number_map()); 2964 isolate()->factory()->heap_number_map());
2982 Assert(equal, "Operand not a number"); 2965 Assert(equal, "Operand not a number");
2983 bind(&ok); 2966 bind(&ok);
2984 } 2967 }
2985 2968
2986 2969
2987 void MacroAssembler::AbortIfSmi(Register object) { 2970 void MacroAssembler::AbortIfSmi(Register object) {
2988 Condition is_smi = CheckSmi(object); 2971 Condition is_smi = CheckSmi(object);
2989 Assert(NegateCondition(is_smi), "Operand is a smi"); 2972 Assert(NegateCondition(is_smi), "Operand is a smi");
2990 } 2973 }
2991 2974
2992 2975
2993 void MacroAssembler::AbortIfNotSmi(Register object) { 2976 void MacroAssembler::AbortIfNotSmi(Register object) {
2994 Condition is_smi = CheckSmi(object); 2977 Condition is_smi = CheckSmi(object);
2995 Assert(is_smi, "Operand is not a smi"); 2978 Assert(is_smi, "Operand is not a smi");
2996 } 2979 }
2997 2980
2998 2981
2999 void MacroAssembler::AbortIfNotSmi(const Operand& object) { 2982 void MacroAssembler::AbortIfNotSmi(const Operand& object) {
3000 Condition is_smi = CheckSmi(object); 2983 Condition is_smi = CheckSmi(object);
3001 Assert(is_smi, "Operand is not a smi"); 2984 Assert(is_smi, "Operand is not a smi");
3002 } 2985 }
3003 2986
3004 2987
3005 void MacroAssembler::AbortIfNotZeroExtended(Register int32_register) { 2988 void MacroAssembler::AbortIfNotZeroExtended(Register int32_register) {
3006 ASSERT(!int32_register.is(kScratchRegister)); 2989 ASSERT(!int32_register.is(kScratchRegister));
3007 movq(kScratchRegister, 0x100000000l, RelocInfo::NONE); 2990 movq(kScratchRegister, 0x100000000l, RelocInfo::NONE);
3008 cmpq(kScratchRegister, int32_register); 2991 cmpq(kScratchRegister, int32_register);
3009 Assert(above_equal, "32 bit value in register is not zero-extended"); 2992 Assert(above_equal, "32 bit value in register is not zero-extended");
3010 } 2993 }
3011 2994
3012 2995
3013 void MacroAssembler::AbortIfNotString(Register object) { 2996 void MacroAssembler::AbortIfNotString(Register object) {
3014 testb(object, Immediate(kSmiTagMask)); 2997 testb(object, Immediate(kSmiTagMask));
3015 Assert(not_equal, "Operand is not a string"); 2998 Assert(not_equal, "Operand is not a string");
3016 push(object); 2999 push(object);
3017 movq(object, FieldOperand(object, HeapObject::kMapOffset)); 3000 movq(object, FieldOperand(object, HeapObject::kMapOffset));
3018 CmpInstanceType(object, FIRST_NONSTRING_TYPE); 3001 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
3019 pop(object); 3002 pop(object);
3020 Assert(below, "Operand is not a string"); 3003 Assert(below, "Operand is not a string");
3021 } 3004 }
3022 3005
3023 3006
3024 void MacroAssembler::AbortIfNotRootValue(Register src, 3007 void MacroAssembler::AbortIfNotRootValue(Register src,
3025 Heap::RootListIndex root_value_index, 3008 Heap::RootListIndex root_value_index,
3026 const char* message) { 3009 const char* message) {
3027 ASSERT(!src.is(kScratchRegister)); 3010 ASSERT(!src.is(kScratchRegister));
3028 LoadRoot(kScratchRegister, root_value_index); 3011 LoadRoot(kScratchRegister, root_value_index);
3029 cmpq(src, kScratchRegister); 3012 cmpq(src, kScratchRegister);
3030 Check(equal, message); 3013 Check(equal, message);
3031 } 3014 }
3032 3015
3033 3016
3034 3017
3035 Condition MacroAssembler::IsObjectStringType(Register heap_object, 3018 Condition MacroAssembler::IsObjectStringType(Register heap_object,
3036 Register map, 3019 Register map,
3037 Register instance_type) { 3020 Register instance_type) {
3038 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset)); 3021 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
3039 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset)); 3022 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
3040 STATIC_ASSERT(kNotStringTag != 0); 3023 STATIC_ASSERT(kNotStringTag != 0);
3041 testb(instance_type, Immediate(kIsNotStringMask)); 3024 testb(instance_type, Immediate(kIsNotStringMask));
3042 return zero; 3025 return zero;
3043 } 3026 }
3044 3027
3045 3028
3046 void MacroAssembler::TryGetFunctionPrototype(Register function, 3029 void MacroAssembler::TryGetFunctionPrototype(Register function,
3047 Register result, 3030 Register result,
3048 Label* miss, 3031 Label* miss,
3049 bool miss_on_bound_function) { 3032 bool miss_on_bound_function) {
3050 // Check that the receiver isn't a smi. 3033 // Check that the receiver isn't a smi.
3051 testl(function, Immediate(kSmiTagMask)); 3034 testl(function, Immediate(kSmiTagMask));
3052 j(zero, miss); 3035 j(zero, miss);
3053 3036
3054 // Check that the function really is a function. 3037 // Check that the function really is a function.
3055 CmpObjectType(function, JS_FUNCTION_TYPE, result); 3038 CmpObjectType(function, JS_FUNCTION_TYPE, result);
3056 j(not_equal, miss); 3039 j(not_equal, miss);
3057 3040
3058 if (miss_on_bound_function) { 3041 if (miss_on_bound_function) {
3059 movq(kScratchRegister, 3042 movq(kScratchRegister,
3060 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); 3043 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
3061 // It's not smi-tagged (stored in the top half of a smi-tagged 8-byte 3044 // It's not smi-tagged (stored in the top half of a smi-tagged 8-byte
3062 // field). 3045 // field).
3063 TestBit(FieldOperand(kScratchRegister, 3046 TestBit(FieldOperand(kScratchRegister,
3064 SharedFunctionInfo::kCompilerHintsOffset), 3047 SharedFunctionInfo::kCompilerHintsOffset),
3065 SharedFunctionInfo::kBoundFunction); 3048 SharedFunctionInfo::kBoundFunction);
3066 j(not_zero, miss); 3049 j(not_zero, miss);
3067 } 3050 }
3068 3051
3069 // Make sure that the function has an instance prototype. 3052 // Make sure that the function has an instance prototype.
3070 Label non_instance; 3053 Label non_instance;
3071 testb(FieldOperand(result, Map::kBitFieldOffset), 3054 testb(FieldOperand(result, Map::kBitFieldOffset),
3072 Immediate(1 << Map::kHasNonInstancePrototype)); 3055 Immediate(1 << Map::kHasNonInstancePrototype));
3073 j(not_zero, &non_instance, Label::kNear); 3056 j(not_zero, &non_instance, Label::kNear);
3074 3057
3075 // Get the prototype or initial map from the function. 3058 // Get the prototype or initial map from the function.
3076 movq(result, 3059 movq(result,
3077 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); 3060 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
3078 3061
3079 // If the prototype or initial map is the hole, don't return it and 3062 // If the prototype or initial map is the hole, don't return it and
3080 // simply miss the cache instead. This will allow us to allocate a 3063 // simply miss the cache instead. This will allow us to allocate a
3081 // prototype object on-demand in the runtime system. 3064 // prototype object on-demand in the runtime system.
3082 CompareRoot(result, Heap::kTheHoleValueRootIndex); 3065 CompareRoot(result, Heap::kTheHoleValueRootIndex);
3083 j(equal, miss); 3066 j(equal, miss);
3084 3067
3085 // If the function does not have an initial map, we're done. 3068 // If the function does not have an initial map, we're done.
3086 Label done; 3069 Label done;
3087 CmpObjectType(result, MAP_TYPE, kScratchRegister); 3070 CmpObjectType(result, MAP_TYPE, kScratchRegister);
3088 j(not_equal, &done, Label::kNear); 3071 j(not_equal, &done, Label::kNear);
3089 3072
3090 // Get the prototype from the initial map. 3073 // Get the prototype from the initial map.
3091 movq(result, FieldOperand(result, Map::kPrototypeOffset)); 3074 movq(result, FieldOperand(result, Map::kPrototypeOffset));
3092 jmp(&done, Label::kNear); 3075 jmp(&done, Label::kNear);
3093 3076
3094 // Non-instance prototype: Fetch prototype from constructor field 3077 // Non-instance prototype: Fetch prototype from constructor field
3095 // in initial map. 3078 // in initial map.
3096 bind(&non_instance); 3079 bind(&non_instance);
3097 movq(result, FieldOperand(result, Map::kConstructorOffset)); 3080 movq(result, FieldOperand(result, Map::kConstructorOffset));
3098 3081
3099 // All done. 3082 // All done.
3100 bind(&done); 3083 bind(&done);
3101 } 3084 }
3102 3085
3103 3086
3104 void MacroAssembler::SetCounter(StatsCounter* counter, int value) { 3087 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
3105 if (FLAG_native_code_counters && counter->Enabled()) { 3088 if (FLAG_native_code_counters && counter->Enabled()) {
3106 Operand counter_operand = ExternalOperand(ExternalReference(counter)); 3089 Operand counter_operand = ExternalOperand(ExternalReference(counter));
3107 movl(counter_operand, Immediate(value)); 3090 movl(counter_operand, Immediate(value));
3108 } 3091 }
3109 } 3092 }
3110 3093
3111 3094
3112 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) { 3095 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
3113 ASSERT(value > 0); 3096 ASSERT(value > 0);
3114 if (FLAG_native_code_counters && counter->Enabled()) { 3097 if (FLAG_native_code_counters && counter->Enabled()) {
3115 Operand counter_operand = ExternalOperand(ExternalReference(counter)); 3098 Operand counter_operand = ExternalOperand(ExternalReference(counter));
3116 if (value == 1) { 3099 if (value == 1) {
3117 incl(counter_operand); 3100 incl(counter_operand);
3118 } else { 3101 } else {
3119 addl(counter_operand, Immediate(value)); 3102 addl(counter_operand, Immediate(value));
3120 } 3103 }
3121 } 3104 }
3122 } 3105 }
3123 3106
3124 3107
3125 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) { 3108 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
3126 ASSERT(value > 0); 3109 ASSERT(value > 0);
3127 if (FLAG_native_code_counters && counter->Enabled()) { 3110 if (FLAG_native_code_counters && counter->Enabled()) {
3128 Operand counter_operand = ExternalOperand(ExternalReference(counter)); 3111 Operand counter_operand = ExternalOperand(ExternalReference(counter));
3129 if (value == 1) { 3112 if (value == 1) {
3130 decl(counter_operand); 3113 decl(counter_operand);
3131 } else { 3114 } else {
3132 subl(counter_operand, Immediate(value)); 3115 subl(counter_operand, Immediate(value));
3133 } 3116 }
3134 } 3117 }
3135 } 3118 }
3136 3119
3137 3120
3138 #ifdef ENABLE_DEBUGGER_SUPPORT 3121 #ifdef ENABLE_DEBUGGER_SUPPORT
3139 void MacroAssembler::DebugBreak() { 3122 void MacroAssembler::DebugBreak() {
3140 Set(rax, 0); // No arguments. 3123 Set(rax, 0); // No arguments.
3141 LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate())); 3124 LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
3142 CEntryStub ces(1); 3125 CEntryStub ces(1);
3143 ASSERT(AllowThisStubCall(&ces)); 3126 ASSERT(AllowThisStubCall(&ces));
3144 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); 3127 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
3145 } 3128 }
3146 #endif // ENABLE_DEBUGGER_SUPPORT 3129 #endif // ENABLE_DEBUGGER_SUPPORT
3147 3130
3148 3131
3149 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) { 3132 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
3150 // This macro takes the dst register to make the code more readable 3133 // This macro takes the dst register to make the code more readable
3151 // at the call sites. However, the dst register has to be rcx to 3134 // at the call sites. However, the dst register has to be rcx to
3152 // follow the calling convention which requires the call type to be 3135 // follow the calling convention which requires the call type to be
3153 // in rcx. 3136 // in rcx.
3154 ASSERT(dst.is(rcx)); 3137 ASSERT(dst.is(rcx));
3155 if (call_kind == CALL_AS_FUNCTION) { 3138 if (call_kind == CALL_AS_FUNCTION) {
3156 LoadSmiConstant(dst, Smi::FromInt(1)); 3139 LoadSmiConstant(dst, Smi::FromInt(1));
3157 } else { 3140 } else {
3158 LoadSmiConstant(dst, Smi::FromInt(0)); 3141 LoadSmiConstant(dst, Smi::FromInt(0));
3159 } 3142 }
3160 } 3143 }
3161 3144
3162 3145
3163 void MacroAssembler::InvokeCode(Register code, 3146 void MacroAssembler::InvokeCode(Register code,
3164 const ParameterCount& expected, 3147 const ParameterCount& expected,
3165 const ParameterCount& actual, 3148 const ParameterCount& actual,
3166 InvokeFlag flag, 3149 InvokeFlag flag,
3167 const CallWrapper& call_wrapper, 3150 const CallWrapper& call_wrapper,
3168 CallKind call_kind) { 3151 CallKind call_kind) {
3169 // You can't call a function without a valid frame. 3152 // You can't call a function without a valid frame.
3170 ASSERT(flag == JUMP_FUNCTION || has_frame()); 3153 ASSERT(flag == JUMP_FUNCTION || has_frame());
3171 3154
3172 Label done; 3155 Label done;
3173 bool definitely_mismatches = false; 3156 bool definitely_mismatches = false;
3174 InvokePrologue(expected, 3157 InvokePrologue(expected,
3175 actual, 3158 actual,
3176 Handle<Code>::null(), 3159 Handle<Code>::null(),
3177 code, 3160 code,
3178 &done, 3161 &done,
3179 &definitely_mismatches, 3162 &definitely_mismatches,
3180 flag, 3163 flag,
3181 Label::kNear, 3164 Label::kNear,
3182 call_wrapper, 3165 call_wrapper,
3183 call_kind); 3166 call_kind);
3184 if (!definitely_mismatches) { 3167 if (!definitely_mismatches) {
3185 if (flag == CALL_FUNCTION) { 3168 if (flag == CALL_FUNCTION) {
3186 call_wrapper.BeforeCall(CallSize(code)); 3169 call_wrapper.BeforeCall(CallSize(code));
3187 SetCallKind(rcx, call_kind); 3170 SetCallKind(rcx, call_kind);
3188 call(code); 3171 call(code);
3189 call_wrapper.AfterCall(); 3172 call_wrapper.AfterCall();
3190 } else { 3173 } else {
3191 ASSERT(flag == JUMP_FUNCTION); 3174 ASSERT(flag == JUMP_FUNCTION);
3192 SetCallKind(rcx, call_kind); 3175 SetCallKind(rcx, call_kind);
3193 jmp(code); 3176 jmp(code);
3194 } 3177 }
3195 bind(&done); 3178 bind(&done);
3196 } 3179 }
3197 } 3180 }
3198 3181
3199 3182
3200 void MacroAssembler::InvokeCode(Handle<Code> code, 3183 void MacroAssembler::InvokeCode(Handle<Code> code,
3201 const ParameterCount& expected, 3184 const ParameterCount& expected,
3202 const ParameterCount& actual, 3185 const ParameterCount& actual,
3203 RelocInfo::Mode rmode, 3186 RelocInfo::Mode rmode,
3204 InvokeFlag flag, 3187 InvokeFlag flag,
3205 const CallWrapper& call_wrapper, 3188 const CallWrapper& call_wrapper,
3206 CallKind call_kind) { 3189 CallKind call_kind) {
3207 // You can't call a function without a valid frame. 3190 // You can't call a function without a valid frame.
3208 ASSERT(flag == JUMP_FUNCTION || has_frame()); 3191 ASSERT(flag == JUMP_FUNCTION || has_frame());
3209 3192
3210 Label done; 3193 Label done;
3211 bool definitely_mismatches = false; 3194 bool definitely_mismatches = false;
3212 Register dummy = rax; 3195 Register dummy = rax;
3213 InvokePrologue(expected, 3196 InvokePrologue(expected,
3214 actual, 3197 actual,
3215 code, 3198 code,
3216 dummy, 3199 dummy,
3217 &done, 3200 &done,
3218 &definitely_mismatches, 3201 &definitely_mismatches,
3219 flag, 3202 flag,
3220 Label::kNear, 3203 Label::kNear,
3221 call_wrapper, 3204 call_wrapper,
3222 call_kind); 3205 call_kind);
3223 if (!definitely_mismatches) { 3206 if (!definitely_mismatches) {
3224 if (flag == CALL_FUNCTION) { 3207 if (flag == CALL_FUNCTION) {
3225 call_wrapper.BeforeCall(CallSize(code)); 3208 call_wrapper.BeforeCall(CallSize(code));
3226 SetCallKind(rcx, call_kind); 3209 SetCallKind(rcx, call_kind);
3227 Call(code, rmode); 3210 Call(code, rmode);
3228 call_wrapper.AfterCall(); 3211 call_wrapper.AfterCall();
3229 } else { 3212 } else {
3230 ASSERT(flag == JUMP_FUNCTION); 3213 ASSERT(flag == JUMP_FUNCTION);
3231 SetCallKind(rcx, call_kind); 3214 SetCallKind(rcx, call_kind);
3232 Jump(code, rmode); 3215 Jump(code, rmode);
3233 } 3216 }
3234 bind(&done); 3217 bind(&done);
3235 } 3218 }
3236 } 3219 }
3237 3220
3238 3221
3239 void MacroAssembler::InvokeFunction(Register function, 3222 void MacroAssembler::InvokeFunction(Register function,
3240 const ParameterCount& actual, 3223 const ParameterCount& actual,
3241 InvokeFlag flag, 3224 InvokeFlag flag,
3242 const CallWrapper& call_wrapper, 3225 const CallWrapper& call_wrapper,
3243 CallKind call_kind) { 3226 CallKind call_kind) {
3244 // You can't call a function without a valid frame. 3227 // You can't call a function without a valid frame.
3245 ASSERT(flag == JUMP_FUNCTION || has_frame()); 3228 ASSERT(flag == JUMP_FUNCTION || has_frame());
3246 3229
3247 ASSERT(function.is(rdi)); 3230 ASSERT(function.is(rdi));
3248 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); 3231 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
3249 movq(rsi, FieldOperand(function, JSFunction::kContextOffset)); 3232 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
3250 movsxlq(rbx, 3233 movsxlq(rbx,
3251 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset)); 3234 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
3252 // Advances rdx to the end of the Code object header, to the start of 3235 // Advances rdx to the end of the Code object header, to the start of
3253 // the executable code. 3236 // the executable code.
3254 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 3237 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
3255 3238
3256 ParameterCount expected(rbx); 3239 ParameterCount expected(rbx);
3257 InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind); 3240 InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
3258 } 3241 }
3259 3242
3260 3243
3261 void MacroAssembler::InvokeFunction(Handle<JSFunction> function, 3244 void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
3262 const ParameterCount& actual, 3245 const ParameterCount& actual,
3263 InvokeFlag flag, 3246 InvokeFlag flag,
3264 const CallWrapper& call_wrapper, 3247 const CallWrapper& call_wrapper,
3265 CallKind call_kind) { 3248 CallKind call_kind) {
3266 // You can't call a function without a valid frame. 3249 // You can't call a function without a valid frame.
3267 ASSERT(flag == JUMP_FUNCTION || has_frame()); 3250 ASSERT(flag == JUMP_FUNCTION || has_frame());
3268 3251
3269 // Get the function and setup the context. 3252 // Get the function and setup the context.
3270 LoadHeapObject(rdi, function); 3253 LoadHeapObject(rdi, function);
3271 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 3254 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
3272 3255
3273 // We call indirectly through the code field in the function to 3256 // We call indirectly through the code field in the function to
3274 // allow recompilation to take effect without changing any of the 3257 // allow recompilation to take effect without changing any of the
3275 // call sites. 3258 // call sites.
3276 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 3259 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
3277 ParameterCount expected(function->shared()->formal_parameter_count()); 3260 ParameterCount expected(function->shared()->formal_parameter_count());
3278 InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind); 3261 InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
3279 } 3262 }
3280 3263
3281 3264
3282 void MacroAssembler::InvokePrologue(const ParameterCount& expected, 3265 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
3283 const ParameterCount& actual, 3266 const ParameterCount& actual,
3284 Handle<Code> code_constant, 3267 Handle<Code> code_constant,
3285 Register code_register, 3268 Register code_register,
3286 Label* done, 3269 Label* done,
3287 bool* definitely_mismatches, 3270 bool* definitely_mismatches,
3288 InvokeFlag flag, 3271 InvokeFlag flag,
3289 Label::Distance near_jump, 3272 Label::Distance near_jump,
3290 const CallWrapper& call_wrapper, 3273 const CallWrapper& call_wrapper,
3291 CallKind call_kind) { 3274 CallKind call_kind) {
3292 bool definitely_matches = false; 3275 bool definitely_matches = false;
3293 *definitely_mismatches = false; 3276 *definitely_mismatches = false;
3294 Label invoke; 3277 Label invoke;
3295 if (expected.is_immediate()) { 3278 if (expected.is_immediate()) {
3296 ASSERT(actual.is_immediate()); 3279 ASSERT(actual.is_immediate());
3297 if (expected.immediate() == actual.immediate()) { 3280 if (expected.immediate() == actual.immediate()) {
3298 definitely_matches = true; 3281 definitely_matches = true;
3299 } else { 3282 } else {
3300 Set(rax, actual.immediate()); 3283 Set(rax, actual.immediate());
3301 if (expected.immediate() == 3284 if (expected.immediate() ==
3302 SharedFunctionInfo::kDontAdaptArgumentsSentinel) { 3285 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
3303 // Don't worry about adapting arguments for built-ins that 3286 // Don't worry about adapting arguments for built-ins that
3304 // don't want that done. Skip the adaptation code by making it look 3287 // don't want that done. Skip the adaptation code by making it look
3305 // like we have a match between expected and actual number of 3288 // like we have a match between expected and actual number of
3306 // arguments. 3289 // arguments.
3307 definitely_matches = true; 3290 definitely_matches = true;
3308 } else { 3291 } else {
3309 *definitely_mismatches = true; 3292 *definitely_mismatches = true;
3310 Set(rbx, expected.immediate()); 3293 Set(rbx, expected.immediate());
3311 } 3294 }
3312 } 3295 }
3313 } else { 3296 } else {
3314 if (actual.is_immediate()) { 3297 if (actual.is_immediate()) {
3315 // Expected is in register, actual is immediate. This is the 3298 // Expected is in register, actual is immediate. This is the
3316 // case when we invoke function values without going through the 3299 // case when we invoke function values without going through the
3317 // IC mechanism. 3300 // IC mechanism.
3318 cmpq(expected.reg(), Immediate(actual.immediate())); 3301 cmpq(expected.reg(), Immediate(actual.immediate()));
3319 j(equal, &invoke, Label::kNear); 3302 j(equal, &invoke, Label::kNear);
3320 ASSERT(expected.reg().is(rbx)); 3303 ASSERT(expected.reg().is(rbx));
3321 Set(rax, actual.immediate()); 3304 Set(rax, actual.immediate());
3322 } else if (!expected.reg().is(actual.reg())) { 3305 } else if (!expected.reg().is(actual.reg())) {
3323 // Both expected and actual are in (different) registers. This 3306 // Both expected and actual are in (different) registers. This
3324 // is the case when we invoke functions using call and apply. 3307 // is the case when we invoke functions using call and apply.
3325 cmpq(expected.reg(), actual.reg()); 3308 cmpq(expected.reg(), actual.reg());
3326 j(equal, &invoke, Label::kNear); 3309 j(equal, &invoke, Label::kNear);
3327 ASSERT(actual.reg().is(rax)); 3310 ASSERT(actual.reg().is(rax));
3328 ASSERT(expected.reg().is(rbx)); 3311 ASSERT(expected.reg().is(rbx));
3329 } 3312 }
3330 } 3313 }
3331 3314
3332 if (!definitely_matches) { 3315 if (!definitely_matches) {
3333 Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline(); 3316 Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
3334 if (!code_constant.is_null()) { 3317 if (!code_constant.is_null()) {
3335 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT); 3318 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
3336 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag)); 3319 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
3337 } else if (!code_register.is(rdx)) { 3320 } else if (!code_register.is(rdx)) {
3338 movq(rdx, code_register); 3321 movq(rdx, code_register);
3339 } 3322 }
3340 3323
3341 if (flag == CALL_FUNCTION) { 3324 if (flag == CALL_FUNCTION) {
3342 call_wrapper.BeforeCall(CallSize(adaptor)); 3325 call_wrapper.BeforeCall(CallSize(adaptor));
3343 SetCallKind(rcx, call_kind); 3326 SetCallKind(rcx, call_kind);
3344 Call(adaptor, RelocInfo::CODE_TARGET); 3327 Call(adaptor, RelocInfo::CODE_TARGET);
3345 call_wrapper.AfterCall(); 3328 call_wrapper.AfterCall();
3346 if (!*definitely_mismatches) { 3329 if (!*definitely_mismatches) {
3347 jmp(done, near_jump); 3330 jmp(done, near_jump);
3348 } 3331 }
3349 } else { 3332 } else {
3350 SetCallKind(rcx, call_kind); 3333 SetCallKind(rcx, call_kind);
3351 Jump(adaptor, RelocInfo::CODE_TARGET); 3334 Jump(adaptor, RelocInfo::CODE_TARGET);
3352 } 3335 }
3353 bind(&invoke); 3336 bind(&invoke);
3354 } 3337 }
3355 } 3338 }
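
Review note: when both parameter counts are immediates, the match/mismatch decision above is settled entirely at code-generation time; the register cases defer to a cmpq at run time. A sketch of that compile-time table (hypothetical helper; kDontAdaptArgumentsSentinel is the SharedFunctionInfo marker named in the code, and no concrete value is assumed for it):

#include <cstdio>

enum Outcome { kDefinitelyMatches, kDefinitelyMismatches };

Outcome DecideImmediates(int expected, int actual, int dont_adapt_sentinel) {
  if (expected == actual) return kDefinitelyMatches;  // no adaptor needed
  if (expected == dont_adapt_sentinel) {
    return kDefinitelyMatches;  // built-in opted out: pretend the counts match
  }
  return kDefinitelyMismatches;  // ArgumentsAdaptorTrampoline handles the call
}

int main() {
  std::printf("%d %d %d\n",
              DecideImmediates(2, 2, -1),    // 0: matches
              DecideImmediates(2, 3, -1),    // 1: mismatch -> adaptor
              DecideImmediates(-1, 3, -1));  // 0: sentinel -> treated as match
  return 0;
}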
3356 3339
3357 3340
3358 void MacroAssembler::EnterFrame(StackFrame::Type type) { 3341 void MacroAssembler::EnterFrame(StackFrame::Type type) {
3359 push(rbp); 3342 push(rbp);
3360 movq(rbp, rsp); 3343 movq(rbp, rsp);
3361 push(rsi); // Context. 3344 push(rsi); // Context.
3362 Push(Smi::FromInt(type)); 3345 Push(Smi::FromInt(type));
3363 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); 3346 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
3364 push(kScratchRegister); 3347 push(kScratchRegister);
3365 if (emit_debug_code()) { 3348 if (emit_debug_code()) {
3366 movq(kScratchRegister, 3349 movq(kScratchRegister,
3367 isolate()->factory()->undefined_value(), 3350 isolate()->factory()->undefined_value(),
3368 RelocInfo::EMBEDDED_OBJECT); 3351 RelocInfo::EMBEDDED_OBJECT);
3369 cmpq(Operand(rsp, 0), kScratchRegister); 3352 cmpq(Operand(rsp, 0), kScratchRegister);
3370 Check(not_equal, "code object not properly patched"); 3353 Check(not_equal, "code object not properly patched");
3371 } 3354 }
3372 } 3355 }
3373 3356
3374 3357
3375 void MacroAssembler::LeaveFrame(StackFrame::Type type) { 3358 void MacroAssembler::LeaveFrame(StackFrame::Type type) {
3376 if (emit_debug_code()) { 3359 if (emit_debug_code()) {
3377 Move(kScratchRegister, Smi::FromInt(type)); 3360 Move(kScratchRegister, Smi::FromInt(type));
3378 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister); 3361 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
3379 Check(equal, "stack frame types must match"); 3362 Check(equal, "stack frame types must match");
3380 } 3363 }
3381 movq(rsp, rbp); 3364 movq(rsp, rbp);
3382 pop(rbp); 3365 pop(rbp);
3383 } 3366 }
3384 3367
3385 3368
3386 void MacroAssembler::EnterExitFramePrologue(bool save_rax) { 3369 void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
3387 // Set up the frame structure on the stack. 3370 // Set up the frame structure on the stack.
3388 // All constants are relative to the frame pointer of the exit frame. 3371 // All constants are relative to the frame pointer of the exit frame.
3389 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize); 3372 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
3390 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize); 3373 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
3391 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize); 3374 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
3392 push(rbp); 3375 push(rbp);
3393 movq(rbp, rsp); 3376 movq(rbp, rsp);
3394 3377
3395 // Reserve room for entry stack pointer and push the code object. 3378 // Reserve room for entry stack pointer and push the code object.
3396 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); 3379 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
3397 push(Immediate(0)); // Saved entry sp, patched before call. 3380 push(Immediate(0)); // Saved entry sp, patched before call.
3398 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); 3381 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
3399 push(kScratchRegister); // Accessed from ExitFrame::code_slot. 3382 push(kScratchRegister); // Accessed from ExitFrame::code_slot.
3400 3383
3401 // Save the frame pointer and the context in top. 3384 // Save the frame pointer and the context in top.
3402 if (save_rax) { 3385 if (save_rax) {
3403 movq(r14, rax); // Backup rax in callee-save register. 3386 movq(r14, rax); // Backup rax in callee-save register.
3404 } 3387 }
3405 3388
3406 Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp); 3389 Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp);
3407 Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi); 3390 Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi);
3408 } 3391 }
3409 3392
3410 3393
3411 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space, 3394 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
3412 bool save_doubles) { 3395 bool save_doubles) {
3413 #ifdef _WIN64 3396 #ifdef _WIN64
3414 const int kShadowSpace = 4; 3397 const int kShadowSpace = 4;
3415 arg_stack_space += kShadowSpace; 3398 arg_stack_space += kShadowSpace;
3416 #endif 3399 #endif
3417 // Optionally save all XMM registers. 3400 // Optionally save all XMM registers.
3418 if (save_doubles) { 3401 if (save_doubles) {
3419 int space = XMMRegister::kNumRegisters * kDoubleSize + 3402 int space = XMMRegister::kNumRegisters * kDoubleSize +
3420 arg_stack_space * kPointerSize; 3403 arg_stack_space * kPointerSize;
3421 subq(rsp, Immediate(space)); 3404 subq(rsp, Immediate(space));
3422 int offset = -2 * kPointerSize; 3405 int offset = -2 * kPointerSize;
3423 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) { 3406 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
3424 XMMRegister reg = XMMRegister::FromAllocationIndex(i); 3407 XMMRegister reg = XMMRegister::FromAllocationIndex(i);
3425 movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg); 3408 movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
3426 } 3409 }
3427 } else if (arg_stack_space > 0) { 3410 } else if (arg_stack_space > 0) {
3428 subq(rsp, Immediate(arg_stack_space * kPointerSize)); 3411 subq(rsp, Immediate(arg_stack_space * kPointerSize));
3429 } 3412 }
3430 3413
3431 // Get the required frame alignment for the OS. 3414 // Get the required frame alignment for the OS.
3432 const int kFrameAlignment = OS::ActivationFrameAlignment(); 3415 const int kFrameAlignment = OS::ActivationFrameAlignment();
3433 if (kFrameAlignment > 0) { 3416 if (kFrameAlignment > 0) {
3434 ASSERT(IsPowerOf2(kFrameAlignment)); 3417 ASSERT(IsPowerOf2(kFrameAlignment));
3435 ASSERT(is_int8(kFrameAlignment)); 3418 ASSERT(is_int8(kFrameAlignment));
3436 and_(rsp, Immediate(-kFrameAlignment)); 3419 and_(rsp, Immediate(-kFrameAlignment));
3437 } 3420 }
3438 3421
3439 // Patch the saved entry sp. 3422 // Patch the saved entry sp.
3440 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp); 3423 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
3441 } 3424 }
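
Review note: the closing and_(rsp, Immediate(-kFrameAlignment)) relies on the two ASSERTs above it: for a power-of-two alignment, -kFrameAlignment is a mask of the high bits, so the AND rounds the stack pointer down to the nearest aligned address. A sketch:

#include <cassert>
#include <cstdint>

uint64_t AlignDown(uint64_t sp, uint64_t alignment) {
  // -alignment == ~(alignment - 1) for powers of two: a high-bit mask.
  return sp & ~(alignment - 1);
}

int main() {
  assert(AlignDown(0x7FFF58, 16) == 0x7FFF50);
  assert(AlignDown(0x7FFF50, 16) == 0x7FFF50);  // already aligned: unchanged
  return 0;
}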
3442 3425
3443 3426
3444 void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) { 3427 void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
3445 EnterExitFramePrologue(true); 3428 EnterExitFramePrologue(true);
3446 3429
3447 // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame, 3430 // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
3448 // so it must be retained across the C-call. 3431 // so it must be retained across the C-call.
3449 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; 3432 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
3450 lea(r15, Operand(rbp, r14, times_pointer_size, offset)); 3433 lea(r15, Operand(rbp, r14, times_pointer_size, offset));
3451 3434
3452 EnterExitFrameEpilogue(arg_stack_space, save_doubles); 3435 EnterExitFrameEpilogue(arg_stack_space, save_doubles);
3453 } 3436 }
3454 3437
3455 3438
3456 void MacroAssembler::EnterApiExitFrame(int arg_stack_space) { 3439 void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
3457 EnterExitFramePrologue(false); 3440 EnterExitFramePrologue(false);
3458 EnterExitFrameEpilogue(arg_stack_space, false); 3441 EnterExitFrameEpilogue(arg_stack_space, false);
3459 } 3442 }
3460 3443
3461 3444
3462 void MacroAssembler::LeaveExitFrame(bool save_doubles) { 3445 void MacroAssembler::LeaveExitFrame(bool save_doubles) {
3463 // Registers: 3446 // Registers:
3464 // r15 : argv 3447 // r15 : argv
3465 if (save_doubles) { 3448 if (save_doubles) {
3466 int offset = -2 * kPointerSize; 3449 int offset = -2 * kPointerSize;
3467 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) { 3450 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
3468 XMMRegister reg = XMMRegister::FromAllocationIndex(i); 3451 XMMRegister reg = XMMRegister::FromAllocationIndex(i);
3469 movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize))); 3452 movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
3470 } 3453 }
3471 } 3454 }
3472 // Get the return address from the stack and restore the frame pointer. 3455 // Get the return address from the stack and restore the frame pointer.
3473 movq(rcx, Operand(rbp, 1 * kPointerSize)); 3456 movq(rcx, Operand(rbp, 1 * kPointerSize));
3474 movq(rbp, Operand(rbp, 0 * kPointerSize)); 3457 movq(rbp, Operand(rbp, 0 * kPointerSize));
3475 3458
3476 // Drop everything up to and including the arguments and the receiver 3459 // Drop everything up to and including the arguments and the receiver
3477 // from the caller stack. 3460 // from the caller stack.
3478 lea(rsp, Operand(r15, 1 * kPointerSize)); 3461 lea(rsp, Operand(r15, 1 * kPointerSize));
3479 3462
3480 // Push the return address to get ready to return. 3463 // Push the return address to get ready to return.
3481 push(rcx); 3464 push(rcx);
3482 3465
3483 LeaveExitFrameEpilogue(); 3466 LeaveExitFrameEpilogue();
3484 } 3467 }
3485 3468
3486 3469
3487 void MacroAssembler::LeaveApiExitFrame() { 3470 void MacroAssembler::LeaveApiExitFrame() {
3488 movq(rsp, rbp); 3471 movq(rsp, rbp);
3489 pop(rbp); 3472 pop(rbp);
3490 3473
3491 LeaveExitFrameEpilogue(); 3474 LeaveExitFrameEpilogue();
3492 } 3475 }
3493 3476
3494 3477
3495 void MacroAssembler::LeaveExitFrameEpilogue() { 3478 void MacroAssembler::LeaveExitFrameEpilogue() {
3496 // Restore current context from top and clear it in debug mode. 3479 // Restore current context from top and clear it in debug mode.
3497 ExternalReference context_address(Isolate::kContextAddress, isolate()); 3480 ExternalReference context_address(Isolate::kContextAddress, isolate());
3498 Operand context_operand = ExternalOperand(context_address); 3481 Operand context_operand = ExternalOperand(context_address);
3499 movq(rsi, context_operand); 3482 movq(rsi, context_operand);
3500 #ifdef DEBUG 3483 #ifdef DEBUG
3501 movq(context_operand, Immediate(0)); 3484 movq(context_operand, Immediate(0));
3502 #endif 3485 #endif
3503 3486
3504 // Clear the top frame. 3487 // Clear the top frame.
3505 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, 3488 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
3506 isolate()); 3489 isolate());
3507 Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address); 3490 Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
3508 movq(c_entry_fp_operand, Immediate(0)); 3491 movq(c_entry_fp_operand, Immediate(0));
3509 } 3492 }
3510 3493
3511 3494
3512 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, 3495 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
3513 Register scratch, 3496 Register scratch,
3514 Label* miss) { 3497 Label* miss) {
3515 Label same_contexts; 3498 Label same_contexts;
3516 3499
3517 ASSERT(!holder_reg.is(scratch)); 3500 ASSERT(!holder_reg.is(scratch));
3518 ASSERT(!scratch.is(kScratchRegister)); 3501 ASSERT(!scratch.is(kScratchRegister));
3519 // Load current lexical context from the stack frame. 3502 // Load current lexical context from the stack frame.
3520 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset)); 3503 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
3521 3504
3522 // When generating debug code, make sure the lexical context is set. 3505 // When generating debug code, make sure the lexical context is set.
3523 if (emit_debug_code()) { 3506 if (emit_debug_code()) {
3524 cmpq(scratch, Immediate(0)); 3507 cmpq(scratch, Immediate(0));
3525 Check(not_equal, "we should not have an empty lexical context"); 3508 Check(not_equal, "we should not have an empty lexical context");
3526 } 3509 }
3527 // Load the native context of the current context. 3510 // Load the native context of the current context.
3528 int offset = 3511 int offset =
3529 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; 3512 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
3530 movq(scratch, FieldOperand(scratch, offset)); 3513 movq(scratch, FieldOperand(scratch, offset));
3531 movq(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset)); 3514 movq(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
3532 3515
3533 // Check the context is a native context. 3516 // Check the context is a native context.
3534 if (emit_debug_code()) { 3517 if (emit_debug_code()) {
3535 Cmp(FieldOperand(scratch, HeapObject::kMapOffset), 3518 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
3536 isolate()->factory()->native_context_map()); 3519 isolate()->factory()->native_context_map());
3537 Check(equal, "JSGlobalObject::native_context should be a native context."); 3520 Check(equal, "JSGlobalObject::native_context should be a native context.");
3538 } 3521 }
3539 3522
3540 // Check if both contexts are the same. 3523 // Check if both contexts are the same.
3541 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); 3524 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3542 j(equal, &same_contexts); 3525 j(equal, &same_contexts);
3543 3526
3544 // Compare security tokens. 3527 // Compare security tokens.
3545 // Check that the security token in the calling global object is 3528 // Check that the security token in the calling global object is
3546 // compatible with the security token in the receiving global 3529 // compatible with the security token in the receiving global
3547 // object. 3530 // object.
3548 3531
3549 // Check the context is a native context. 3532 // Check the context is a native context.
3550 if (emit_debug_code()) { 3533 if (emit_debug_code()) {
3551 // Preserve original value of holder_reg. 3534 // Preserve original value of holder_reg.
3552 push(holder_reg); 3535 push(holder_reg);
3553 movq(holder_reg, 3536 movq(holder_reg,
3554 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); 3537 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3555 CompareRoot(holder_reg, Heap::kNullValueRootIndex); 3538 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
3556 Check(not_equal, "JSGlobalProxy::context() should not be null."); 3539 Check(not_equal, "JSGlobalProxy::context() should not be null.");
3557 3540
3558 // Read the first word and compare to native_context_map(). 3541 // Read the first word and compare to native_context_map().
3559 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset)); 3542 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
3560 CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex); 3543 CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
3561 Check(equal, "JSGlobalObject::native_context should be a native context."); 3544 Check(equal, "JSGlobalObject::native_context should be a native context.");
3562 pop(holder_reg); 3545 pop(holder_reg);
3563 } 3546 }
3564 3547
3565 movq(kScratchRegister, 3548 movq(kScratchRegister,
3566 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); 3549 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3567 int token_offset = 3550 int token_offset =
3568 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize; 3551 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
3569 movq(scratch, FieldOperand(scratch, token_offset)); 3552 movq(scratch, FieldOperand(scratch, token_offset));
3570 cmpq(scratch, FieldOperand(kScratchRegister, token_offset)); 3553 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
3571 j(not_equal, miss); 3554 j(not_equal, miss);
3572 3555
3573 bind(&same_contexts); 3556 bind(&same_contexts);
3574 } 3557 }
3575 3558
3576 3559
3577 void MacroAssembler::GetNumberHash(Register r0, Register scratch) { 3560 void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
3578 // First, load the hash seed into scratch. 3561 // First, load the hash seed into scratch.
3579 LoadRoot(scratch, Heap::kHashSeedRootIndex); 3562 LoadRoot(scratch, Heap::kHashSeedRootIndex);
3580 SmiToInteger32(scratch, scratch); 3563 SmiToInteger32(scratch, scratch);
3581 3564
3582 // Xor the original key with the seed. 3565 // Xor the original key with the seed.
3583 xorl(r0, scratch); 3566 xorl(r0, scratch);
3584 3567
3585 // Compute the hash code from the untagged key. This must be kept in sync 3568 // Compute the hash code from the untagged key. This must be kept in sync
3586 // with ComputeIntegerHash in utils.h. 3569 // with ComputeIntegerHash in utils.h.
3587 // 3570 //
3588 // hash = ~hash + (hash << 15); 3571 // hash = ~hash + (hash << 15);
3589 movl(scratch, r0); 3572 movl(scratch, r0);
3590 notl(r0); 3573 notl(r0);
3591 shll(scratch, Immediate(15)); 3574 shll(scratch, Immediate(15));
3592 addl(r0, scratch); 3575 addl(r0, scratch);
3593 // hash = hash ^ (hash >> 12); 3576 // hash = hash ^ (hash >> 12);
3594 movl(scratch, r0); 3577 movl(scratch, r0);
3595 shrl(scratch, Immediate(12)); 3578 shrl(scratch, Immediate(12));
3596 xorl(r0, scratch); 3579 xorl(r0, scratch);
3597 // hash = hash + (hash << 2); 3580 // hash = hash + (hash << 2);
3598 leal(r0, Operand(r0, r0, times_4, 0)); 3581 leal(r0, Operand(r0, r0, times_4, 0));
3599 // hash = hash ^ (hash >> 4); 3582 // hash = hash ^ (hash >> 4);
3600 movl(scratch, r0); 3583 movl(scratch, r0);
3601 shrl(scratch, Immediate(4)); 3584 shrl(scratch, Immediate(4));
3602 xorl(r0, scratch); 3585 xorl(r0, scratch);
3603 // hash = hash * 2057; 3586 // hash = hash * 2057;
3604 imull(r0, r0, Immediate(2057)); 3587 imull(r0, r0, Immediate(2057));
3605 // hash = hash ^ (hash >> 16); 3588 // hash = hash ^ (hash >> 16);
3606 movl(scratch, r0); 3589 movl(scratch, r0);
3607 shrl(scratch, Immediate(16)); 3590 shrl(scratch, Immediate(16));
3608 xorl(r0, scratch); 3591 xorl(r0, scratch);
3609 } 3592 }
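
Review note: a standalone transcription of the hash the instructions above compute, handy for eyeballing the required sync with ComputeIntegerHash in utils.h. The seed would come from the Heap::kHashSeedRootIndex smi; the value passed in main() is arbitrary:

#include <cstdint>
#include <cstdio>

uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed) {
  uint32_t hash = key ^ seed;   // xorl(r0, scratch)
  hash = ~hash + (hash << 15);  // notl / shll(15) / addl
  hash = hash ^ (hash >> 12);
  hash = hash + (hash << 2);    // leal(r0, Operand(r0, r0, times_4, 0)): 5*hash
  hash = hash ^ (hash >> 4);
  hash = hash * 2057;           // imull
  hash = hash ^ (hash >> 16);
  return hash;
}

int main() {
  std::printf("%u\n", ComputeIntegerHash(42, 0x2b891d4u));  // arbitrary seed
  return 0;
}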
3610 3593
3611 3594
3612 3595
3613 void MacroAssembler::LoadFromNumberDictionary(Label* miss, 3596 void MacroAssembler::LoadFromNumberDictionary(Label* miss,
3614 Register elements, 3597 Register elements,
3615 Register key, 3598 Register key,
3616 Register r0, 3599 Register r0,
3617 Register r1, 3600 Register r1,
3618 Register r2, 3601 Register r2,
3619 Register result) { 3602 Register result) {
3620 // Register use: 3603 // Register use:
3621 // 3604 //
3622 // elements - holds the slow-case elements of the receiver on entry. 3605 // elements - holds the slow-case elements of the receiver on entry.
3623 // Unchanged unless 'result' is the same register. 3606 // Unchanged unless 'result' is the same register.
3624 // 3607 //
3625 // key - holds the smi key on entry. 3608 // key - holds the smi key on entry.
3626 // Unchanged unless 'result' is the same register. 3609 // Unchanged unless 'result' is the same register.
3627 // 3610 //
3628 // Scratch registers: 3611 // Scratch registers:
3629 // 3612 //
3630 // r0 - holds the untagged key on entry and holds the hash once computed. 3613 // r0 - holds the untagged key on entry and holds the hash once computed.
3631 // 3614 //
3632 // r1 - used to hold the capacity mask of the dictionary 3615 // r1 - used to hold the capacity mask of the dictionary
3633 // 3616 //
3634 // r2 - used for the index into the dictionary. 3617 // r2 - used for the index into the dictionary.
3635 // 3618 //
3636 // result - holds the result on exit if the load succeeded. 3619 // result - holds the result on exit if the load succeeded.
3637 // Allowed to be the same as 'key' or 'result'. 3620 // Allowed to be the same as 'key' or 'result'.
3638 // Unchanged on bailout so 'key' or 'result' can be used 3621 // Unchanged on bailout so 'key' or 'result' can be used
3639 // in further computation. 3622 // in further computation.
3640 3623
3641 Label done; 3624 Label done;
3642 3625
3643 GetNumberHash(r0, r1); 3626 GetNumberHash(r0, r1);
3644 3627
3645 // Compute capacity mask. 3628 // Compute capacity mask.
3646 SmiToInteger32(r1, FieldOperand(elements, 3629 SmiToInteger32(r1, FieldOperand(elements,
3647 SeededNumberDictionary::kCapacityOffset)); 3630 SeededNumberDictionary::kCapacityOffset));
3648 decl(r1); 3631 decl(r1);
3649 3632
3650 // Generate an unrolled loop that performs a few probes before giving up. 3633 // Generate an unrolled loop that performs a few probes before giving up.
3651 const int kProbes = 4; 3634 const int kProbes = 4;
3652 for (int i = 0; i < kProbes; i++) { 3635 for (int i = 0; i < kProbes; i++) {
3653 // Use r2 for index calculations and keep the hash intact in r0. 3636 // Use r2 for index calculations and keep the hash intact in r0.
3654 movq(r2, r0); 3637 movq(r2, r0);
3655 // Compute the masked index: (hash + i + i * i) & mask. 3638 // Compute the masked index: (hash + i + i * i) & mask.
3656 if (i > 0) { 3639 if (i > 0) {
3657 addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i))); 3640 addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
3658 } 3641 }
3659 and_(r2, r1); 3642 and_(r2, r1);
3660 3643
3661 // Scale the index by multiplying by the entry size. 3644 // Scale the index by multiplying by the entry size.
3662 ASSERT(SeededNumberDictionary::kEntrySize == 3); 3645 ASSERT(SeededNumberDictionary::kEntrySize == 3);
3663 lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3 3646 lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
3664 3647
3665 // Check if the key matches. 3648 // Check if the key matches.
3666 cmpq(key, FieldOperand(elements, 3649 cmpq(key, FieldOperand(elements,
3667 r2, 3650 r2,
3668 times_pointer_size, 3651 times_pointer_size,
3669 SeededNumberDictionary::kElementsStartOffset)); 3652 SeededNumberDictionary::kElementsStartOffset));
3670 if (i != (kProbes - 1)) { 3653 if (i != (kProbes - 1)) {
3671 j(equal, &done); 3654 j(equal, &done);
3672 } else { 3655 } else {
3673 j(not_equal, miss); 3656 j(not_equal, miss);
3674 } 3657 }
3675 } 3658 }
3676 3659
3677 bind(&done); 3660 bind(&done);
3678 // Check that the value is a normal property. 3661 // Check that the value is a normal property.
3679 const int kDetailsOffset = 3662 const int kDetailsOffset =
3680 SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize; 3663 SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
3681 ASSERT_EQ(NORMAL, 0); 3664 ASSERT_EQ(NORMAL, 0);
3682 Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset), 3665 Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
3683 Smi::FromInt(PropertyDetails::TypeField::kMask)); 3666 Smi::FromInt(PropertyDetails::TypeField::kMask));
3684 j(not_zero, miss); 3667 j(not_zero, miss);
3685 3668
3686 // Get the value at the masked, scaled index. 3669 // Get the value at the masked, scaled index.
3687 const int kValueOffset = 3670 const int kValueOffset =
3688 SeededNumberDictionary::kElementsStartOffset + kPointerSize; 3671 SeededNumberDictionary::kElementsStartOffset + kPointerSize;
3689 movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset)); 3672 movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
3690 } 3673 }
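For reference, the probing scheme the unrolled loop implements can be restated in plain C++. This is an illustrative model, not V8 code: the entry layout (key, value, details) follows the kEntrySize/kValueOffset/kDetailsOffset constants above, and the probe offsets are assumed to match the "(hash + i + i * i) & mask" comment:

#include <cstddef>
#include <cstdint>

struct Entry { intptr_t key, value, details; };  // kEntrySize == 3

// Hypothetical model of the four unrolled probes above.
const Entry* Probe(const Entry* elements, size_t capacity,
                   uint32_t hash, intptr_t key) {
  const uint32_t mask = static_cast<uint32_t>(capacity) - 1;
  for (uint32_t i = 0; i < 4; i++) {              // kProbes == 4
    uint32_t index = (hash + i + i * i) & mask;
    if (elements[index].key == key) return &elements[index];  // hit
  }
  return nullptr;                                 // miss
}

On a hit, the generated code additionally checks that the details word marks a normal property before loading the value.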
3691 3674
3692 3675
3693 void MacroAssembler::LoadAllocationTopHelper(Register result, 3676 void MacroAssembler::LoadAllocationTopHelper(Register result,
3694 Register scratch, 3677 Register scratch,
3695 AllocationFlags flags) { 3678 AllocationFlags flags) {
3696 ExternalReference new_space_allocation_top = 3679 ExternalReference new_space_allocation_top =
3697 ExternalReference::new_space_allocation_top_address(isolate()); 3680 ExternalReference::new_space_allocation_top_address(isolate());
3698 3681
3699 // Just return if allocation top is already known. 3682 // Just return if allocation top is already known.
3700 if ((flags & RESULT_CONTAINS_TOP) != 0) { 3683 if ((flags & RESULT_CONTAINS_TOP) != 0) {
3701 // No use of scratch if allocation top is provided. 3684 // No use of scratch if allocation top is provided.
3702 ASSERT(!scratch.is_valid()); 3685 ASSERT(!scratch.is_valid());
3703 #ifdef DEBUG 3686 #ifdef DEBUG
3704 // Assert that result actually contains top on entry. 3687 // Assert that result actually contains top on entry.
3705 Operand top_operand = ExternalOperand(new_space_allocation_top); 3688 Operand top_operand = ExternalOperand(new_space_allocation_top);
3706 cmpq(result, top_operand); 3689 cmpq(result, top_operand);
3707 Check(equal, "Unexpected allocation top"); 3690 Check(equal, "Unexpected allocation top");
3708 #endif 3691 #endif
3709 return; 3692 return;
3710 } 3693 }
3711 3694
3712 // Move address of new object to result. Use scratch register if available, 3695 // Move address of new object to result. Use scratch register if available,
3713 // and keep address in scratch until call to UpdateAllocationTopHelper. 3696 // and keep address in scratch until call to UpdateAllocationTopHelper.
3714 if (scratch.is_valid()) { 3697 if (scratch.is_valid()) {
3715 LoadAddress(scratch, new_space_allocation_top); 3698 LoadAddress(scratch, new_space_allocation_top);
3716 movq(result, Operand(scratch, 0)); 3699 movq(result, Operand(scratch, 0));
3717 } else { 3700 } else {
3718 Load(result, new_space_allocation_top); 3701 Load(result, new_space_allocation_top);
3719 } 3702 }
3720 } 3703 }
3721 3704
3722 3705
3723 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, 3706 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
3724 Register scratch) { 3707 Register scratch) {
3725 if (emit_debug_code()) { 3708 if (emit_debug_code()) {
3726 testq(result_end, Immediate(kObjectAlignmentMask)); 3709 testq(result_end, Immediate(kObjectAlignmentMask));
3727 Check(zero, "Unaligned allocation in new space"); 3710 Check(zero, "Unaligned allocation in new space");
3728 } 3711 }
3729 3712
3730 ExternalReference new_space_allocation_top = 3713 ExternalReference new_space_allocation_top =
3731 ExternalReference::new_space_allocation_top_address(isolate()); 3714 ExternalReference::new_space_allocation_top_address(isolate());
3732 3715
3733 // Update new top. 3716 // Update new top.
3734 if (scratch.is_valid()) { 3717 if (scratch.is_valid()) {
3735 // Scratch already contains address of allocation top. 3718 // Scratch already contains address of allocation top.
3736 movq(Operand(scratch, 0), result_end); 3719 movq(Operand(scratch, 0), result_end);
3737 } else { 3720 } else {
3738 Store(new_space_allocation_top, result_end); 3721 Store(new_space_allocation_top, result_end);
3739 } 3722 }
3740 } 3723 }
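Together these two helpers amount to a load and a store of a single word that lives outside the heap, with the scratch register caching its address between the two. A minimal sketch, with a plain pointer standing in for the ExternalReference:

#include <cstdint>

uintptr_t LoadAllocationTop(uintptr_t* top_address) {
  return *top_address;               // next free address in new space
}

void UpdateAllocationTop(uintptr_t* top_address, uintptr_t new_top) {
  // Debug code above also checks (new_top & kObjectAlignmentMask) == 0.
  *top_address = new_top;            // publish the allocation
}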
3741 3724
3742 3725
3743 void MacroAssembler::AllocateInNewSpace(int object_size, 3726 void MacroAssembler::AllocateInNewSpace(int object_size,
3744 Register result, 3727 Register result,
3745 Register result_end, 3728 Register result_end,
3746 Register scratch, 3729 Register scratch,
3747 Label* gc_required, 3730 Label* gc_required,
3748 AllocationFlags flags) { 3731 AllocationFlags flags) {
3749 if (!FLAG_inline_new) { 3732 if (!FLAG_inline_new) {
3750 if (emit_debug_code()) { 3733 if (emit_debug_code()) {
3751 // Trash the registers to simulate an allocation failure. 3734 // Trash the registers to simulate an allocation failure.
3752 movl(result, Immediate(0x7091)); 3735 movl(result, Immediate(0x7091));
3753 if (result_end.is_valid()) { 3736 if (result_end.is_valid()) {
3754 movl(result_end, Immediate(0x7191)); 3737 movl(result_end, Immediate(0x7191));
3755 } 3738 }
3756 if (scratch.is_valid()) { 3739 if (scratch.is_valid()) {
3757 movl(scratch, Immediate(0x7291)); 3740 movl(scratch, Immediate(0x7291));
3758 } 3741 }
3759 } 3742 }
3760 jmp(gc_required); 3743 jmp(gc_required);
3761 return; 3744 return;
3762 } 3745 }
3763 ASSERT(!result.is(result_end)); 3746 ASSERT(!result.is(result_end));
3764 3747
3765 // Load address of new object into result. 3748 // Load address of new object into result.
3766 LoadAllocationTopHelper(result, scratch, flags); 3749 LoadAllocationTopHelper(result, scratch, flags);
3767 3750
3768 // Calculate new top and bail out if new space is exhausted. 3751 // Calculate new top and bail out if new space is exhausted.
3769 ExternalReference new_space_allocation_limit = 3752 ExternalReference new_space_allocation_limit =
3770 ExternalReference::new_space_allocation_limit_address(isolate()); 3753 ExternalReference::new_space_allocation_limit_address(isolate());
3771 3754
3772 Register top_reg = result_end.is_valid() ? result_end : result; 3755 Register top_reg = result_end.is_valid() ? result_end : result;
3773 3756
3774 if (!top_reg.is(result)) { 3757 if (!top_reg.is(result)) {
3775 movq(top_reg, result); 3758 movq(top_reg, result);
3776 } 3759 }
3777 addq(top_reg, Immediate(object_size)); 3760 addq(top_reg, Immediate(object_size));
3778 j(carry, gc_required); 3761 j(carry, gc_required);
3779 Operand limit_operand = ExternalOperand(new_space_allocation_limit); 3762 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
3780 cmpq(top_reg, limit_operand); 3763 cmpq(top_reg, limit_operand);
3781 j(above, gc_required); 3764 j(above, gc_required);
3782 3765
3783 // Update allocation top. 3766 // Update allocation top.
3784 UpdateAllocationTopHelper(top_reg, scratch); 3767 UpdateAllocationTopHelper(top_reg, scratch);
3785 3768
3786 if (top_reg.is(result)) { 3769 if (top_reg.is(result)) {
3787 if ((flags & TAG_OBJECT) != 0) { 3770 if ((flags & TAG_OBJECT) != 0) {
3788 subq(result, Immediate(object_size - kHeapObjectTag)); 3771 subq(result, Immediate(object_size - kHeapObjectTag));
3789 } else { 3772 } else {
3790 subq(result, Immediate(object_size)); 3773 subq(result, Immediate(object_size));
3791 } 3774 }
3792 } else if ((flags & TAG_OBJECT) != 0) { 3775 } else if ((flags & TAG_OBJECT) != 0) {
3793 // Tag the result if requested. 3776 // Tag the result if requested.
3794 addq(result, Immediate(kHeapObjectTag)); 3777 addq(result, Immediate(kHeapObjectTag));
3795 } 3778 }
3796 } 3779 }
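The fast path this emits is a bump-pointer allocation. A minimal C++ sketch, assuming kHeapObjectTag == 1 and with unsigned wraparound standing in for the carry check:

#include <cstdint>

// Illustrative model of the generated fast path, not V8 API.
bool AllocateRaw(uintptr_t* top, uintptr_t limit, unsigned object_size,
                 uintptr_t* result) {
  uintptr_t start = *top;
  uintptr_t new_top = start + object_size;
  if (new_top < start) return false;  // j(carry, gc_required)
  if (new_top > limit) return false;  // j(above, gc_required)
  *top = new_top;                     // UpdateAllocationTopHelper
  *result = start + 1;                // TAG_OBJECT: kHeapObjectTag == 1
  return true;                        // false means jump to gc_required
}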
3797 3780
3798 3781
3799 void MacroAssembler::AllocateInNewSpace(int header_size, 3782 void MacroAssembler::AllocateInNewSpace(int header_size,
3800 ScaleFactor element_size, 3783 ScaleFactor element_size,
3801 Register element_count, 3784 Register element_count,
3802 Register result, 3785 Register result,
3803 Register result_end, 3786 Register result_end,
3804 Register scratch, 3787 Register scratch,
3805 Label* gc_required, 3788 Label* gc_required,
3806 AllocationFlags flags) { 3789 AllocationFlags flags) {
3807 if (!FLAG_inline_new) { 3790 if (!FLAG_inline_new) {
3808 if (emit_debug_code()) { 3791 if (emit_debug_code()) {
3809 // Trash the registers to simulate an allocation failure. 3792 // Trash the registers to simulate an allocation failure.
3810 movl(result, Immediate(0x7091)); 3793 movl(result, Immediate(0x7091));
3811 movl(result_end, Immediate(0x7191)); 3794 movl(result_end, Immediate(0x7191));
3812 if (scratch.is_valid()) { 3795 if (scratch.is_valid()) {
3813 movl(scratch, Immediate(0x7291)); 3796 movl(scratch, Immediate(0x7291));
3814 } 3797 }
3815 // Register element_count is not modified by the function. 3798 // Register element_count is not modified by the function.
3816 } 3799 }
3817 jmp(gc_required); 3800 jmp(gc_required);
3818 return; 3801 return;
3819 } 3802 }
3820 ASSERT(!result.is(result_end)); 3803 ASSERT(!result.is(result_end));
3821 3804
3822 // Load address of new object into result. 3805 // Load address of new object into result.
3823 LoadAllocationTopHelper(result, scratch, flags); 3806 LoadAllocationTopHelper(result, scratch, flags);
3824 3807
3825 // Calculate new top and bail out if new space is exhausted. 3808 // Calculate new top and bail out if new space is exhausted.
3826 ExternalReference new_space_allocation_limit = 3809 ExternalReference new_space_allocation_limit =
3827 ExternalReference::new_space_allocation_limit_address(isolate()); 3810 ExternalReference::new_space_allocation_limit_address(isolate());
3828 3811
3829 // We assume that element_count*element_size + header_size does not 3812 // We assume that element_count*element_size + header_size does not
3830 // overflow. 3813 // overflow.
3831 lea(result_end, Operand(element_count, element_size, header_size)); 3814 lea(result_end, Operand(element_count, element_size, header_size));
3832 addq(result_end, result); 3815 addq(result_end, result);
3833 j(carry, gc_required); 3816 j(carry, gc_required);
3834 Operand limit_operand = ExternalOperand(new_space_allocation_limit); 3817 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
3835 cmpq(result_end, limit_operand); 3818 cmpq(result_end, limit_operand);
3836 j(above, gc_required); 3819 j(above, gc_required);
3837 3820
3838 // Update allocation top. 3821 // Update allocation top.
3839 UpdateAllocationTopHelper(result_end, scratch); 3822 UpdateAllocationTopHelper(result_end, scratch);
3840 3823
3841 // Tag the result if requested. 3824 // Tag the result if requested.
3842 if ((flags & TAG_OBJECT) != 0) { 3825 if ((flags & TAG_OBJECT) != 0) {
3843 addq(result, Immediate(kHeapObjectTag)); 3826 addq(result, Immediate(kHeapObjectTag));
3844 } 3827 }
3845 } 3828 }
3846 3829
3847 3830
3848 void MacroAssembler::AllocateInNewSpace(Register object_size, 3831 void MacroAssembler::AllocateInNewSpace(Register object_size,
3849 Register result, 3832 Register result,
3850 Register result_end, 3833 Register result_end,
3851 Register scratch, 3834 Register scratch,
3852 Label* gc_required, 3835 Label* gc_required,
3853 AllocationFlags flags) { 3836 AllocationFlags flags) {
3854 if (!FLAG_inline_new) { 3837 if (!FLAG_inline_new) {
3855 if (emit_debug_code()) { 3838 if (emit_debug_code()) {
3856 // Trash the registers to simulate an allocation failure. 3839 // Trash the registers to simulate an allocation failure.
3857 movl(result, Immediate(0x7091)); 3840 movl(result, Immediate(0x7091));
3858 movl(result_end, Immediate(0x7191)); 3841 movl(result_end, Immediate(0x7191));
3859 if (scratch.is_valid()) { 3842 if (scratch.is_valid()) {
3860 movl(scratch, Immediate(0x7291)); 3843 movl(scratch, Immediate(0x7291));
3861 } 3844 }
3862 // object_size is left unchanged by this function. 3845 // object_size is left unchanged by this function.
3863 } 3846 }
3864 jmp(gc_required); 3847 jmp(gc_required);
3865 return; 3848 return;
3866 } 3849 }
3867 ASSERT(!result.is(result_end)); 3850 ASSERT(!result.is(result_end));
3868 3851
3869 // Load address of new object into result. 3852 // Load address of new object into result.
3870 LoadAllocationTopHelper(result, scratch, flags); 3853 LoadAllocationTopHelper(result, scratch, flags);
3871 3854
3872 // Calculate new top and bail out if new space is exhausted. 3855 // Calculate new top and bail out if new space is exhausted.
3873 ExternalReference new_space_allocation_limit = 3856 ExternalReference new_space_allocation_limit =
3874 ExternalReference::new_space_allocation_limit_address(isolate()); 3857 ExternalReference::new_space_allocation_limit_address(isolate());
3875 if (!object_size.is(result_end)) { 3858 if (!object_size.is(result_end)) {
3876 movq(result_end, object_size); 3859 movq(result_end, object_size);
3877 } 3860 }
3878 addq(result_end, result); 3861 addq(result_end, result);
3879 j(carry, gc_required); 3862 j(carry, gc_required);
3880 Operand limit_operand = ExternalOperand(new_space_allocation_limit); 3863 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
3881 cmpq(result_end, limit_operand); 3864 cmpq(result_end, limit_operand);
3882 j(above, gc_required); 3865 j(above, gc_required);
3883 3866
3884 // Update allocation top. 3867 // Update allocation top.
3885 UpdateAllocationTopHelper(result_end, scratch); 3868 UpdateAllocationTopHelper(result_end, scratch);
3886 3869
3887 // Tag the result if requested. 3870 // Tag the result if requested.
3888 if ((flags & TAG_OBJECT) != 0) { 3871 if ((flags & TAG_OBJECT) != 0) {
3889 addq(result, Immediate(kHeapObjectTag)); 3872 addq(result, Immediate(kHeapObjectTag));
3890 } 3873 }
3891 } 3874 }
3892 3875
3893 3876
3894 void MacroAssembler::UndoAllocationInNewSpace(Register object) { 3877 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
3895 ExternalReference new_space_allocation_top = 3878 ExternalReference new_space_allocation_top =
3896 ExternalReference::new_space_allocation_top_address(isolate()); 3879 ExternalReference::new_space_allocation_top_address(isolate());
3897 3880
3898 // Make sure the object has no tag before resetting top. 3881 // Make sure the object has no tag before resetting top.
3899 and_(object, Immediate(~kHeapObjectTagMask)); 3882 and_(object, Immediate(~kHeapObjectTagMask));
3900 Operand top_operand = ExternalOperand(new_space_allocation_top); 3883 Operand top_operand = ExternalOperand(new_space_allocation_top);
3901 #ifdef DEBUG 3884 #ifdef DEBUG
3902 cmpq(object, top_operand); 3885 cmpq(object, top_operand);
3903 Check(below, "Undo allocation of non-allocated memory"); 3886 Check(below, "Undo allocation of non-allocated memory");
3904 #endif 3887 #endif
3905 movq(top_operand, object); 3888 movq(top_operand, object);
3906 } 3889 }
3907 3890
3908 3891
3909 void MacroAssembler::AllocateHeapNumber(Register result, 3892 void MacroAssembler::AllocateHeapNumber(Register result,
3910 Register scratch, 3893 Register scratch,
3911 Label* gc_required) { 3894 Label* gc_required) {
3912 // Allocate heap number in new space. 3895 // Allocate heap number in new space.
3913 AllocateInNewSpace(HeapNumber::kSize, 3896 AllocateInNewSpace(HeapNumber::kSize,
3914 result, 3897 result,
3915 scratch, 3898 scratch,
3916 no_reg, 3899 no_reg,
3917 gc_required, 3900 gc_required,
3918 TAG_OBJECT); 3901 TAG_OBJECT);
3919 3902
3920 // Set the map. 3903 // Set the map.
3921 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex); 3904 LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
3922 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); 3905 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
3923 } 3906 }
3924 3907
3925 3908
3926 void MacroAssembler::AllocateTwoByteString(Register result, 3909 void MacroAssembler::AllocateTwoByteString(Register result,
3927 Register length, 3910 Register length,
3928 Register scratch1, 3911 Register scratch1,
3929 Register scratch2, 3912 Register scratch2,
3930 Register scratch3, 3913 Register scratch3,
3931 Label* gc_required) { 3914 Label* gc_required) {
3932 // Calculate the number of bytes needed for the characters in the string while 3915 // Calculate the number of bytes needed for the characters in the string while
3933 // observing object alignment. 3916 // observing object alignment.
3934 const int kHeaderAlignment = SeqTwoByteString::kHeaderSize & 3917 const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
3935 kObjectAlignmentMask; 3918 kObjectAlignmentMask;
3936 ASSERT(kShortSize == 2); 3919 ASSERT(kShortSize == 2);
3937 // scratch1 = length * 2 + kObjectAlignmentMask. 3920 // scratch1 = length * 2 + kObjectAlignmentMask.
3938 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask + 3921 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
3939 kHeaderAlignment)); 3922 kHeaderAlignment));
3940 and_(scratch1, Immediate(~kObjectAlignmentMask)); 3923 and_(scratch1, Immediate(~kObjectAlignmentMask));
3941 if (kHeaderAlignment > 0) { 3924 if (kHeaderAlignment > 0) {
3942 subq(scratch1, Immediate(kHeaderAlignment)); 3925 subq(scratch1, Immediate(kHeaderAlignment));
3943 } 3926 }
3944 3927
3945 // Allocate two byte string in new space. 3928 // Allocate two byte string in new space.
3946 AllocateInNewSpace(SeqTwoByteString::kHeaderSize, 3929 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
3947 times_1, 3930 times_1,
3948 scratch1, 3931 scratch1,
3949 result, 3932 result,
3950 scratch2, 3933 scratch2,
3951 scratch3, 3934 scratch3,
3952 gc_required, 3935 gc_required,
3953 TAG_OBJECT); 3936 TAG_OBJECT);
3954 3937
3955 // Set the map, length and hash field. 3938 // Set the map, length and hash field.
3956 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex); 3939 LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
3957 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); 3940 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
3958 Integer32ToSmi(scratch1, length); 3941 Integer32ToSmi(scratch1, length);
3959 movq(FieldOperand(result, String::kLengthOffset), scratch1); 3942 movq(FieldOperand(result, String::kLengthOffset), scratch1);
3960 movq(FieldOperand(result, String::kHashFieldOffset), 3943 movq(FieldOperand(result, String::kHashFieldOffset),
3961 Immediate(String::kEmptyHashField)); 3944 Immediate(String::kEmptyHashField));
3962 } 3945 }
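The size computation above rounds the variable part up so that header plus characters lands on an object-aligned boundary. A sketch of the arithmetic, assuming 8-byte object alignment on x64 and with header_size standing in for SeqTwoByteString::kHeaderSize:

#include <cstddef>

// Returns the variable-size part passed to AllocateInNewSpace, chosen so
// that header_size + result is a multiple of the object alignment.
size_t TwoByteVariablePart(size_t length, size_t header_size) {
  const size_t kAlignMask = 8 - 1;                   // kObjectAlignmentMask
  size_t header_alignment = header_size & kAlignMask;
  size_t size = length * 2 + kAlignMask + header_alignment;  // lea above
  size &= ~kAlignMask;                               // and_ with ~mask
  return size - header_alignment;                    // subq kHeaderAlignment
}

For instance, length 3 with a hypothetical 20-byte header gives 6 + 7 + 4 = 17, rounded down to 16, minus 4 is 12; the total 20 + 12 = 32 is a multiple of 8 and holds the 26 bytes needed.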
3963 3946
3964 3947
3965 void MacroAssembler::AllocateAsciiString(Register result, 3948 void MacroAssembler::AllocateAsciiString(Register result,
3966 Register length, 3949 Register length,
3967 Register scratch1, 3950 Register scratch1,
3968 Register scratch2, 3951 Register scratch2,
3969 Register scratch3, 3952 Register scratch3,
3970 Label* gc_required) { 3953 Label* gc_required) {
3971 // Calculate the number of bytes needed for the characters in the string while 3954 // Calculate the number of bytes needed for the characters in the string while
3972 // observing object alignment. 3955 // observing object alignment.
3973 const int kHeaderAlignment = SeqAsciiString::kHeaderSize & 3956 const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
3974 kObjectAlignmentMask; 3957 kObjectAlignmentMask;
3975 movl(scratch1, length); 3958 movl(scratch1, length);
3976 ASSERT(kCharSize == 1); 3959 ASSERT(kCharSize == 1);
3977 addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment)); 3960 addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
3978 and_(scratch1, Immediate(~kObjectAlignmentMask)); 3961 and_(scratch1, Immediate(~kObjectAlignmentMask));
3979 if (kHeaderAlignment > 0) { 3962 if (kHeaderAlignment > 0) {
3980 subq(scratch1, Immediate(kHeaderAlignment)); 3963 subq(scratch1, Immediate(kHeaderAlignment));
3981 } 3964 }
3982 3965
3983 // Allocate ASCII string in new space. 3966 // Allocate ASCII string in new space.
3984 AllocateInNewSpace(SeqAsciiString::kHeaderSize, 3967 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
3985 times_1, 3968 times_1,
3986 scratch1, 3969 scratch1,
3987 result, 3970 result,
3988 scratch2, 3971 scratch2,
3989 scratch3, 3972 scratch3,
3990 gc_required, 3973 gc_required,
3991 TAG_OBJECT); 3974 TAG_OBJECT);
3992 3975
3993 // Set the map, length and hash field. 3976 // Set the map, length and hash field.
3994 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex); 3977 LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
3995 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); 3978 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
3996 Integer32ToSmi(scratch1, length); 3979 Integer32ToSmi(scratch1, length);
3997 movq(FieldOperand(result, String::kLengthOffset), scratch1); 3980 movq(FieldOperand(result, String::kLengthOffset), scratch1);
3998 movq(FieldOperand(result, String::kHashFieldOffset), 3981 movq(FieldOperand(result, String::kHashFieldOffset),
3999 Immediate(String::kEmptyHashField)); 3982 Immediate(String::kEmptyHashField));
4000 } 3983 }
4001 3984
4002 3985
4003 void MacroAssembler::AllocateTwoByteConsString(Register result, 3986 void MacroAssembler::AllocateTwoByteConsString(Register result,
4004 Register scratch1, 3987 Register scratch1,
4005 Register scratch2, 3988 Register scratch2,
4006 Label* gc_required) { 3989 Label* gc_required) {
4007 // Allocate cons string in new space. 3990 // Allocate cons string in new space.
4008 AllocateInNewSpace(ConsString::kSize, 3991 AllocateInNewSpace(ConsString::kSize,
4009 result, 3992 result,
4010 scratch1, 3993 scratch1,
4011 scratch2, 3994 scratch2,
4012 gc_required, 3995 gc_required,
4013 TAG_OBJECT); 3996 TAG_OBJECT);
4014 3997
4015 // Set the map. The other fields are left uninitialized. 3998 // Set the map. The other fields are left uninitialized.
4016 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex); 3999 LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
4017 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); 4000 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
4018 } 4001 }
4019 4002
4020 4003
4021 void MacroAssembler::AllocateAsciiConsString(Register result, 4004 void MacroAssembler::AllocateAsciiConsString(Register result,
4022 Register scratch1, 4005 Register scratch1,
4023 Register scratch2, 4006 Register scratch2,
4024 Label* gc_required) { 4007 Label* gc_required) {
4025 // Allocate ASCII cons string in new space. 4008 // Allocate ASCII cons string in new space.
4026 AllocateInNewSpace(ConsString::kSize, 4009 AllocateInNewSpace(ConsString::kSize,
4027 result, 4010 result,
4028 scratch1, 4011 scratch1,
4029 scratch2, 4012 scratch2,
4030 gc_required, 4013 gc_required,
4031 TAG_OBJECT); 4014 TAG_OBJECT);
4032 4015
4033 // Set the map. The other fields are left uninitialized. 4016 // Set the map. The other fields are left uninitialized.
4034 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex); 4017 LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
4035 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); 4018 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
4036 } 4019 }
4037 4020
4038 4021
4039 void MacroAssembler::AllocateTwoByteSlicedString(Register result, 4022 void MacroAssembler::AllocateTwoByteSlicedString(Register result,
4040 Register scratch1, 4023 Register scratch1,
4041 Register scratch2, 4024 Register scratch2,
4042 Label* gc_required) { 4025 Label* gc_required) {
4043 // Allocate sliced string in new space. 4026 // Allocate sliced string in new space.
4044 AllocateInNewSpace(SlicedString::kSize, 4027 AllocateInNewSpace(SlicedString::kSize,
4045 result, 4028 result,
4046 scratch1, 4029 scratch1,
4047 scratch2, 4030 scratch2,
4048 gc_required, 4031 gc_required,
4049 TAG_OBJECT); 4032 TAG_OBJECT);
4050 4033
4051 // Set the map. The other fields are left uninitialized. 4034 // Set the map. The other fields are left uninitialized.
4052 LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex); 4035 LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex);
4053 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); 4036 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
4054 } 4037 }
4055 4038
4056 4039
4057 void MacroAssembler::AllocateAsciiSlicedString(Register result, 4040 void MacroAssembler::AllocateAsciiSlicedString(Register result,
4058 Register scratch1, 4041 Register scratch1,
4059 Register scratch2, 4042 Register scratch2,
4060 Label* gc_required) { 4043 Label* gc_required) {
4061 // Allocate ASCII sliced string in new space. 4044 // Allocate ASCII sliced string in new space.
4062 AllocateInNewSpace(SlicedString::kSize, 4045 AllocateInNewSpace(SlicedString::kSize,
4063 result, 4046 result,
4064 scratch1, 4047 scratch1,
4065 scratch2, 4048 scratch2,
4066 gc_required, 4049 gc_required,
4067 TAG_OBJECT); 4050 TAG_OBJECT);
4068 4051
4069 // Set the map. The other fields are left uninitialized. 4052 // Set the map. The other fields are left uninitialized.
4070 LoadRoot(kScratchRegister, Heap::kSlicedAsciiStringMapRootIndex); 4053 LoadRoot(kScratchRegister, Heap::kSlicedAsciiStringMapRootIndex);
4071 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); 4054 movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
4072 } 4055 }
4073 4056
4074 4057
4075 // Copy memory, byte-by-byte, from source to destination. Not optimized for 4058 // Copy memory, byte-by-byte, from source to destination. Not optimized for
4076 // long or aligned copies. The contents of scratch and length are destroyed. 4059 // long or aligned copies. The contents of scratch and length are destroyed.
4077 // Destination is incremented by length; source, length, and scratch are 4060 // Destination is incremented by length; source, length, and scratch are
4078 // clobbered. 4061 // clobbered.
4079 // A simpler loop is faster on small copies, but slower on large ones. 4062 // A simpler loop is faster on small copies, but slower on large ones.
4080 // The cld() instruction must have been emitted, to clear the direction flag, 4063 // The cld() instruction must have been emitted, to clear the direction flag,
4081 // before calling this function. 4064 // before calling this function.
4082 void MacroAssembler::CopyBytes(Register destination, 4065 void MacroAssembler::CopyBytes(Register destination,
4083 Register source, 4066 Register source,
4084 Register length, 4067 Register length,
4085 int min_length, 4068 int min_length,
4086 Register scratch) { 4069 Register scratch) {
4087 ASSERT(min_length >= 0); 4070 ASSERT(min_length >= 0);
4088 if (emit_debug_code()) { 4071 if (emit_debug_code()) {
4089 cmpl(length, Immediate(min_length)); 4072 cmpl(length, Immediate(min_length));
4090 Assert(greater_equal, "Invalid min_length"); 4073 Assert(greater_equal, "Invalid min_length");
4091 } 4074 }
4092 Label loop, done, short_string, short_loop; 4075 Label loop, done, short_string, short_loop;
4093 4076
4094 const int kLongStringLimit = 20; 4077 const int kLongStringLimit = 20;
4095 if (min_length <= kLongStringLimit) { 4078 if (min_length <= kLongStringLimit) {
4096 cmpl(length, Immediate(kLongStringLimit)); 4079 cmpl(length, Immediate(kLongStringLimit));
4097 j(less_equal, &short_string); 4080 j(less_equal, &short_string);
4098 } 4081 }
4099 4082
4100 ASSERT(source.is(rsi)); 4083 ASSERT(source.is(rsi));
4101 ASSERT(destination.is(rdi)); 4084 ASSERT(destination.is(rdi));
4102 ASSERT(length.is(rcx)); 4085 ASSERT(length.is(rcx));
4103 4086
4104 // Because source is 8-byte aligned in our uses of this function, 4087 // Because source is 8-byte aligned in our uses of this function,
4105 // we keep source aligned for the rep movs operation by copying the odd bytes 4088 // we keep source aligned for the rep movs operation by copying the odd bytes
4106 // at the end of the ranges. 4089 // at the end of the ranges.
4107 movq(scratch, length); 4090 movq(scratch, length);
4108 shrl(length, Immediate(3)); 4091 shrl(length, Immediate(3));
4109 repmovsq(); 4092 repmovsq();
4110 // Move remaining bytes of length. 4093 // Move remaining bytes of length.
4111 andl(scratch, Immediate(0x7)); 4094 andl(scratch, Immediate(0x7));
4112 movq(length, Operand(source, scratch, times_1, -8)); 4095 movq(length, Operand(source, scratch, times_1, -8));
4113 movq(Operand(destination, scratch, times_1, -8), length); 4096 movq(Operand(destination, scratch, times_1, -8), length);
4114 addq(destination, scratch); 4097 addq(destination, scratch);
4115 4098
4116 if (min_length <= kLongStringLimit) { 4099 if (min_length <= kLongStringLimit) {
4117 jmp(&done); 4100 jmp(&done);
4118 4101
4119 bind(&short_string); 4102 bind(&short_string);
4120 if (min_length == 0) { 4103 if (min_length == 0) {
4121 testl(length, length); 4104 testl(length, length);
4122 j(zero, &done); 4105 j(zero, &done);
4123 } 4106 }
4124 lea(scratch, Operand(destination, length, times_1, 0)); 4107 lea(scratch, Operand(destination, length, times_1, 0));
4125 4108
4126 bind(&short_loop); 4109 bind(&short_loop);
4127 movb(length, Operand(source, 0)); 4110 movb(length, Operand(source, 0));
4128 movb(Operand(destination, 0), length); 4111 movb(Operand(destination, 0), length);
4129 incq(source); 4112 incq(source);
4130 incq(destination); 4113 incq(destination);
4131 cmpq(destination, scratch); 4114 cmpq(destination, scratch);
4132 j(not_equal, &short_loop); 4115 j(not_equal, &short_loop);
4133 4116
4134 bind(&done); 4117 bind(&done);
4135 } 4118 }
4136 } 4119 }
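On the long path, the trick is to copy whole 8-byte words with rep movsq and then finish by re-copying the final eight bytes of the range, which overlaps data already copied but places the 0-7 tail bytes correctly. A sketch of the same strategy, assuming length is at least 8 as it is whenever this path is taken:

#include <cstdint>
#include <cstring>

// Illustrative model of the long-copy path above.
void CopyBytesLong(uint8_t* dst, const uint8_t* src, size_t length) {
  size_t words = length >> 3;            // shrl(length, Immediate(3))
  std::memcpy(dst, src, words * 8);      // repmovsq()
  uint64_t last;                         // one unaligned 8-byte load/store
  std::memcpy(&last, src + length - 8, 8);
  std::memcpy(dst + length - 8, &last, 8);
}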
4137 4120
4138 4121
4139 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset, 4122 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
4140 Register end_offset, 4123 Register end_offset,
4141 Register filler) { 4124 Register filler) {
4142 Label loop, entry; 4125 Label loop, entry;
4143 jmp(&entry); 4126 jmp(&entry);
4144 bind(&loop); 4127 bind(&loop);
4145 movq(Operand(start_offset, 0), filler); 4128 movq(Operand(start_offset, 0), filler);
4146 addq(start_offset, Immediate(kPointerSize)); 4129 addq(start_offset, Immediate(kPointerSize));
4147 bind(&entry); 4130 bind(&entry);
4148 cmpq(start_offset, end_offset); 4131 cmpq(start_offset, end_offset);
4149 j(less, &loop); 4132 j(less, &loop);
4150 } 4133 }
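Expressed as C++, the loop above is a simple pointer-sized fill; jumping to the comparison first makes an empty range write nothing:

#include <cstdint>

// Illustrative model of the filler loop above.
void FillWithFiller(intptr_t* start, intptr_t* end, intptr_t filler) {
  for (intptr_t* p = start; p < end; ++p) {  // cmpq + j(less, &loop)
    *p = filler;                             // movq(Operand(start, 0), filler)
  }
}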
4151 4134
4152 4135
4153 void MacroAssembler::LoadContext(Register dst, int context_chain_length) { 4136 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
4154 if (context_chain_length > 0) { 4137 if (context_chain_length > 0) {
4155 // Move up the chain of contexts to the context containing the slot. 4138 // Move up the chain of contexts to the context containing the slot.
4156 movq(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX))); 4139 movq(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
4157 for (int i = 1; i < context_chain_length; i++) { 4140 for (int i = 1; i < context_chain_length; i++) {
4158 movq(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX))); 4141 movq(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
4159 } 4142 }
4160 } else { 4143 } else {
4161 // Slot is in the current function context. Move it into the 4144 // Slot is in the current function context. Move it into the
4162 // destination register in case we store into it (the write barrier 4145 // destination register in case we store into it (the write barrier
4163 // cannot be allowed to destroy the context in rsi). 4146 // cannot be allowed to destroy the context in rsi).
4164 movq(dst, rsi); 4147 movq(dst, rsi);
4165 } 4148 }
4166 4149
4167 // We should not have found a with context by walking the context 4150 // We should not have found a with context by walking the context
4168 // chain (i.e., the static scope chain and runtime context chain do 4151 // chain (i.e., the static scope chain and runtime context chain do
4169 // not agree). A variable occurring in such a scope should have 4152 // not agree). A variable occurring in such a scope should have
4170 // slot type LOOKUP and not CONTEXT. 4153 // slot type LOOKUP and not CONTEXT.
4171 if (emit_debug_code()) { 4154 if (emit_debug_code()) {
4172 CompareRoot(FieldOperand(dst, HeapObject::kMapOffset), 4155 CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
4173 Heap::kWithContextMapRootIndex); 4156 Heap::kWithContextMapRootIndex);
4174 Check(not_equal, "Variable resolved to with context."); 4157 Check(not_equal, "Variable resolved to with context.");
4175 } 4158 }
4176 } 4159 }
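The walk itself is a linked-list traversal through the PREVIOUS slot. A sketch that ignores tagging and the exact slot offset, with contexts modeled as plain slot arrays (kPreviousIndex stands in for Context::PREVIOUS_INDEX):

#include <cstdint>

// Hypothetical model of the chain walk above.
const intptr_t* WalkContextChain(const intptr_t* context, int length,
                                 int kPreviousIndex) {
  for (int i = 0; i < length; i++) {
    context = reinterpret_cast<const intptr_t*>(context[kPreviousIndex]);
  }
  return context;  // length == 0 returns the current context unchanged
}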
4177 4160
4178 4161
4179 void MacroAssembler::LoadTransitionedArrayMapConditional( 4162 void MacroAssembler::LoadTransitionedArrayMapConditional(
4180 ElementsKind expected_kind, 4163 ElementsKind expected_kind,
4181 ElementsKind transitioned_kind, 4164 ElementsKind transitioned_kind,
4182 Register map_in_out, 4165 Register map_in_out,
4183 Register scratch, 4166 Register scratch,
4184 Label* no_map_match) { 4167 Label* no_map_match) {
4185 // Load the global or builtins object from the current context. 4168 // Load the global or builtins object from the current context.
4186 movq(scratch, 4169 movq(scratch,
4187 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 4170 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
4188 movq(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset)); 4171 movq(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
4189 4172
4190 // Check that the function's map is the same as the expected cached map. 4173 // Check that the function's map is the same as the expected cached map.
4191 movq(scratch, Operand(scratch, 4174 movq(scratch, Operand(scratch,
4192 Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX))); 4175 Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
4193 4176
4194 int offset = expected_kind * kPointerSize + 4177 int offset = expected_kind * kPointerSize +
4195 FixedArrayBase::kHeaderSize; 4178 FixedArrayBase::kHeaderSize;
4196 cmpq(map_in_out, FieldOperand(scratch, offset)); 4179 cmpq(map_in_out, FieldOperand(scratch, offset));
4197 j(not_equal, no_map_match); 4180 j(not_equal, no_map_match);
4198 4181
4199 // Use the transitioned cached map. 4182 // Use the transitioned cached map.
4200 offset = transitioned_kind * kPointerSize + 4183 offset = transitioned_kind * kPointerSize +
4201 FixedArrayBase::kHeaderSize; 4184 FixedArrayBase::kHeaderSize;
4202 movq(map_in_out, FieldOperand(scratch, offset)); 4185 movq(map_in_out, FieldOperand(scratch, offset));
4203 } 4186 }
4204 4187
4205 4188
4206 void MacroAssembler::LoadInitialArrayMap( 4189 void MacroAssembler::LoadInitialArrayMap(
4207 Register function_in, Register scratch, 4190 Register function_in, Register scratch,
4208 Register map_out, bool can_have_holes) { 4191 Register map_out, bool can_have_holes) {
4209 ASSERT(!function_in.is(map_out)); 4192 ASSERT(!function_in.is(map_out));
4210 Label done; 4193 Label done;
4211 movq(map_out, FieldOperand(function_in, 4194 movq(map_out, FieldOperand(function_in,
4212 JSFunction::kPrototypeOrInitialMapOffset)); 4195 JSFunction::kPrototypeOrInitialMapOffset));
4213 if (!FLAG_smi_only_arrays) { 4196 if (!FLAG_smi_only_arrays) {
4214 ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS; 4197 ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
4215 LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, 4198 LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
4216 kind, 4199 kind,
4217 map_out, 4200 map_out,
4218 scratch, 4201 scratch,
4219 &done); 4202 &done);
4220 } else if (can_have_holes) { 4203 } else if (can_have_holes) {
4221 LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, 4204 LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
4222 FAST_HOLEY_SMI_ELEMENTS, 4205 FAST_HOLEY_SMI_ELEMENTS,
4223 map_out, 4206 map_out,
4224 scratch, 4207 scratch,
4225 &done); 4208 &done);
4226 } 4209 }
4227 bind(&done); 4210 bind(&done);
4228 } 4211 }
4229 4212
4230 #ifdef _WIN64 4213 #ifdef _WIN64
4231 static const int kRegisterPassedArguments = 4; 4214 static const int kRegisterPassedArguments = 4;
4232 #else 4215 #else
4233 static const int kRegisterPassedArguments = 6; 4216 static const int kRegisterPassedArguments = 6;
4234 #endif 4217 #endif
4235 4218
4236 void MacroAssembler::LoadGlobalFunction(int index, Register function) { 4219 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
4237 // Load the global or builtins object from the current context. 4220 // Load the global or builtins object from the current context.
4238 movq(function, 4221 movq(function,
4239 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 4222 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
4240 // Load the native context from the global or builtins object. 4223 // Load the native context from the global or builtins object.
4241 movq(function, FieldOperand(function, GlobalObject::kNativeContextOffset)); 4224 movq(function, FieldOperand(function, GlobalObject::kNativeContextOffset));
4242 // Load the function from the native context. 4225 // Load the function from the native context.
4243 movq(function, Operand(function, Context::SlotOffset(index))); 4226 movq(function, Operand(function, Context::SlotOffset(index)));
4244 } 4227 }
4245 4228
4246 4229
4247 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, 4230 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
4248 Register map) { 4231 Register map) {
4249 // Load the initial map. The global functions all have initial maps. 4232 // Load the initial map. The global functions all have initial maps.
4250 movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); 4233 movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
4251 if (emit_debug_code()) { 4234 if (emit_debug_code()) {
4252 Label ok, fail; 4235 Label ok, fail;
4253 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK); 4236 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
4254 jmp(&ok); 4237 jmp(&ok);
4255 bind(&fail); 4238 bind(&fail);
4256 Abort("Global functions must have initial map"); 4239 Abort("Global functions must have initial map");
4257 bind(&ok); 4240 bind(&ok);
4258 } 4241 }
4259 } 4242 }
4260 4243
4261 4244
4262 int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) { 4245 int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
4263 // On Windows 64 stack slots are reserved by the caller for all arguments 4246 // On Windows 64 stack slots are reserved by the caller for all arguments
4264 // including the ones passed in registers, and space is always allocated for 4247 // including the ones passed in registers, and space is always allocated for
4265 // the four register arguments even if the function takes fewer than four 4248 // the four register arguments even if the function takes fewer than four
4266 // arguments. 4249 // arguments.
4267 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers 4250 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
4268 // and the caller does not reserve stack slots for them. 4251 // and the caller does not reserve stack slots for them.
4269 ASSERT(num_arguments >= 0); 4252 ASSERT(num_arguments >= 0);
4270 #ifdef _WIN64 4253 #ifdef _WIN64
4271 const int kMinimumStackSlots = kRegisterPassedArguments; 4254 const int kMinimumStackSlots = kRegisterPassedArguments;
4272 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots; 4255 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
4273 return num_arguments; 4256 return num_arguments;
4274 #else 4257 #else
4275 if (num_arguments < kRegisterPassedArguments) return 0; 4258 if (num_arguments < kRegisterPassedArguments) return 0;
4276 return num_arguments - kRegisterPassedArguments; 4259 return num_arguments - kRegisterPassedArguments;
4277 #endif 4260 #endif
4278 } 4261 }
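For example, eight arguments need eight stack slots on Windows (the shadow space for the four register arguments plus four more) but only two on the AMD64 ABI (the two arguments beyond the six passed in registers); three arguments need four slots on Windows and none on the AMD64 ABI.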
4279 4262
4280 4263
4281 void MacroAssembler::PrepareCallCFunction(int num_arguments) { 4264 void MacroAssembler::PrepareCallCFunction(int num_arguments) {
4282 int frame_alignment = OS::ActivationFrameAlignment(); 4265 int frame_alignment = OS::ActivationFrameAlignment();
4283 ASSERT(frame_alignment != 0); 4266 ASSERT(frame_alignment != 0);
4284 ASSERT(num_arguments >= 0); 4267 ASSERT(num_arguments >= 0);
4285 4268
4286 // Make stack end at alignment and allocate space for arguments and old rsp. 4269 // Make stack end at alignment and allocate space for arguments and old rsp.
4287 movq(kScratchRegister, rsp); 4270 movq(kScratchRegister, rsp);
4288 ASSERT(IsPowerOf2(frame_alignment)); 4271 ASSERT(IsPowerOf2(frame_alignment));
4289 int argument_slots_on_stack = 4272 int argument_slots_on_stack =
4290 ArgumentStackSlotsForCFunctionCall(num_arguments); 4273 ArgumentStackSlotsForCFunctionCall(num_arguments);
4291 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize)); 4274 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
4292 and_(rsp, Immediate(-frame_alignment)); 4275 and_(rsp, Immediate(-frame_alignment));
4293 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister); 4276 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
4294 } 4277 }
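The arithmetic reserves the argument slots plus one extra slot, rounds rsp down to the frame alignment, and parks the old rsp in that extra (highest) slot so CallCFunction can restore it. A sketch of the address computation, assuming 8-byte slots:

#include <cstdint>

// Illustrative model of the rsp adjustment above; the store of the old rsp
// into the highest reserved slot is shown as a comment since this sketch
// only computes addresses.
uintptr_t PrepareStack(uintptr_t rsp, int argument_slots,
                       int frame_alignment) {
  uintptr_t new_rsp = rsp - (argument_slots + 1) * 8;       // subq(rsp, ...)
  new_rsp &= ~static_cast<uintptr_t>(frame_alignment - 1);  // and_(rsp, ...)
  // memory[new_rsp + argument_slots * 8] = rsp;  // saved for CallCFunction
  return new_rsp;
}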
4295 4278
4296 4279
4297 void MacroAssembler::CallCFunction(ExternalReference function, 4280 void MacroAssembler::CallCFunction(ExternalReference function,
4298 int num_arguments) { 4281 int num_arguments) {
4299 LoadAddress(rax, function); 4282 LoadAddress(rax, function);
4300 CallCFunction(rax, num_arguments); 4283 CallCFunction(rax, num_arguments);
4301 } 4284 }
4302 4285
4303 4286
4304 void MacroAssembler::CallCFunction(Register function, int num_arguments) { 4287 void MacroAssembler::CallCFunction(Register function, int num_arguments) {
4305 ASSERT(has_frame()); 4288 ASSERT(has_frame());
4306 // Check stack alignment. 4289 // Check stack alignment.
4307 if (emit_debug_code()) { 4290 if (emit_debug_code()) {
4308 CheckStackAlignment(); 4291 CheckStackAlignment();
4309 } 4292 }
4310 4293
4311 call(function); 4294 call(function);
4312 ASSERT(OS::ActivationFrameAlignment() != 0); 4295 ASSERT(OS::ActivationFrameAlignment() != 0);
4313 ASSERT(num_arguments >= 0); 4296 ASSERT(num_arguments >= 0);
4314 int argument_slots_on_stack = 4297 int argument_slots_on_stack =
4315 ArgumentStackSlotsForCFunctionCall(num_arguments); 4298 ArgumentStackSlotsForCFunctionCall(num_arguments);
4316 movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize)); 4299 movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
4317 } 4300 }
4318 4301
4319 4302
4320 bool AreAliased(Register r1, Register r2, Register r3, Register r4) { 4303 bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
4321 if (r1.is(r2)) return true; 4304 if (r1.is(r2)) return true;
4322 if (r1.is(r3)) return true; 4305 if (r1.is(r3)) return true;
4323 if (r1.is(r4)) return true; 4306 if (r1.is(r4)) return true;
4324 if (r2.is(r3)) return true; 4307 if (r2.is(r3)) return true;
4325 if (r2.is(r4)) return true; 4308 if (r2.is(r4)) return true;
4326 if (r3.is(r4)) return true; 4309 if (r3.is(r4)) return true;
4327 return false; 4310 return false;
4328 } 4311 }
4329 4312
4330 4313
4331 CodePatcher::CodePatcher(byte* address, int size) 4314 CodePatcher::CodePatcher(byte* address, int size)
4332 : address_(address), 4315 : address_(address),
4333 size_(size), 4316 size_(size),
4334 masm_(NULL, address, size + Assembler::kGap) { 4317 masm_(NULL, address, size + Assembler::kGap) {
4335 // Create a new macro assembler pointing to the address of the code to patch. 4318 // Create a new macro assembler pointing to the address of the code to patch.
4336 // The size is adjusted with kGap in order for the assembler to generate size 4319 // The size is adjusted with kGap in order for the assembler to generate size
4337 // bytes of instructions without failing with buffer size constraints. 4320 // bytes of instructions without failing with buffer size constraints.
4338 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); 4321 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
4339 } 4322 }
4340 4323
4341 4324
4342 CodePatcher::~CodePatcher() { 4325 CodePatcher::~CodePatcher() {
4343 // Indicate that code has changed. 4326 // Indicate that code has changed.
4344 CPU::FlushICache(address_, size_); 4327 CPU::FlushICache(address_, size_);
4345 4328
4346 // Check that the code was patched as expected. 4329 // Check that the code was patched as expected.
4347 ASSERT(masm_.pc_ == address_ + size_); 4330 ASSERT(masm_.pc_ == address_ + size_);
4348 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); 4331 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
4349 } 4332 }
4350 4333
4351 4334
4352 void MacroAssembler::CheckPageFlag( 4335 void MacroAssembler::CheckPageFlag(
4353 Register object, 4336 Register object,
4354 Register scratch, 4337 Register scratch,
4355 int mask, 4338 int mask,
4356 Condition cc, 4339 Condition cc,
4357 Label* condition_met, 4340 Label* condition_met,
4358 Label::Distance condition_met_distance) { 4341 Label::Distance condition_met_distance) {
4359 ASSERT(cc == zero || cc == not_zero); 4342 ASSERT(cc == zero || cc == not_zero);
4360 if (scratch.is(object)) { 4343 if (scratch.is(object)) {
4361 and_(scratch, Immediate(~Page::kPageAlignmentMask)); 4344 and_(scratch, Immediate(~Page::kPageAlignmentMask));
4362 } else { 4345 } else {
4363 movq(scratch, Immediate(~Page::kPageAlignmentMask)); 4346 movq(scratch, Immediate(~Page::kPageAlignmentMask));
4364 and_(scratch, object); 4347 and_(scratch, object);
4365 } 4348 }
4366 if (mask < (1 << kBitsPerByte)) { 4349 if (mask < (1 << kBitsPerByte)) {
4367 testb(Operand(scratch, MemoryChunk::kFlagsOffset), 4350 testb(Operand(scratch, MemoryChunk::kFlagsOffset),
4368 Immediate(static_cast<uint8_t>(mask))); 4351 Immediate(static_cast<uint8_t>(mask)));
4369 } else { 4352 } else {
4370 testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask)); 4353 testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
4371 } 4354 }
4372 j(cc, condition_met, condition_met_distance); 4355 j(cc, condition_met, condition_met_distance);
4373 } 4356 }
4374 4357
4375 4358
4376 void MacroAssembler::JumpIfBlack(Register object, 4359 void MacroAssembler::JumpIfBlack(Register object,
4377 Register bitmap_scratch, 4360 Register bitmap_scratch,
4378 Register mask_scratch, 4361 Register mask_scratch,
4379 Label* on_black, 4362 Label* on_black,
4380 Label::Distance on_black_distance) { 4363 Label::Distance on_black_distance) {
4381 ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, rcx)); 4364 ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, rcx));
4382 GetMarkBits(object, bitmap_scratch, mask_scratch); 4365 GetMarkBits(object, bitmap_scratch, mask_scratch);
4383 4366
4384 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0); 4367 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
4385 // The mask_scratch register contains a 1 at the position of the first bit 4368 // The mask_scratch register contains a 1 at the position of the first bit
4386 // and a 0 at all other positions, including the position of the second bit. 4369 // and a 0 at all other positions, including the position of the second bit.
4387 movq(rcx, mask_scratch); 4370 movq(rcx, mask_scratch);
4388 // Make rcx into a mask that covers both marking bits using the operation 4371 // Make rcx into a mask that covers both marking bits using the operation
4389 // rcx = mask | (mask << 1). 4372 // rcx = mask | (mask << 1).
4390 lea(rcx, Operand(mask_scratch, mask_scratch, times_2, 0)); 4373 lea(rcx, Operand(mask_scratch, mask_scratch, times_2, 0));
4391 // Note that we are using a 4-byte aligned 8-byte load. 4374 // Note that we are using a 4-byte aligned 8-byte load.
4392 and_(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize)); 4375 and_(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
4393 cmpq(mask_scratch, rcx); 4376 cmpq(mask_scratch, rcx);
4394 j(equal, on_black, on_black_distance); 4377 j(equal, on_black, on_black_distance);
4395 } 4378 }
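Since mask_scratch has exactly one bit set, mask + mask*2 equals mask | (mask << 1) and covers both mark bits; black is then the "10" pattern, first bit set and second clear. As a sketch:

#include <cstdint>

// Illustrative model of the black check above.
bool IsBlack(uint64_t bitmap_cell, uint64_t first_bit_mask) {
  uint64_t both_bits = first_bit_mask * 3;             // lea ... times_2
  return (bitmap_cell & both_bits) == first_bit_mask;  // "10" pattern
}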
4396 4379
4397 4380
4398 // Detect some, but not all, common pointer-free objects. This is used by the 4381 // Detect some, but not all, common pointer-free objects. This is used by the
4399 // incremental write barrier which doesn't care about oddballs (they are always 4382 // incremental write barrier which doesn't care about oddballs (they are always
4400 // marked black immediately so this code is not hit). 4383 // marked black immediately so this code is not hit).
4401 void MacroAssembler::JumpIfDataObject( 4384 void MacroAssembler::JumpIfDataObject(
4402 Register value, 4385 Register value,
4403 Register scratch, 4386 Register scratch,
4404 Label* not_data_object, 4387 Label* not_data_object,
4405 Label::Distance not_data_object_distance) { 4388 Label::Distance not_data_object_distance) {
4406 Label is_data_object; 4389 Label is_data_object;
4407 movq(scratch, FieldOperand(value, HeapObject::kMapOffset)); 4390 movq(scratch, FieldOperand(value, HeapObject::kMapOffset));
4408 CompareRoot(scratch, Heap::kHeapNumberMapRootIndex); 4391 CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
4409 j(equal, &is_data_object, Label::kNear); 4392 j(equal, &is_data_object, Label::kNear);
4410 ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1); 4393 ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
4411 ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80); 4394 ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
4412 // If it's a string and it's not a cons string then it's an object containing 4395 // If it's a string and it's not a cons string then it's an object containing
4413 // no GC pointers. 4396 // no GC pointers.
4414 testb(FieldOperand(scratch, Map::kInstanceTypeOffset), 4397 testb(FieldOperand(scratch, Map::kInstanceTypeOffset),
4415 Immediate(kIsIndirectStringMask | kIsNotStringMask)); 4398 Immediate(kIsIndirectStringMask | kIsNotStringMask));
4416 j(not_zero, not_data_object, not_data_object_distance); 4399 j(not_zero, not_data_object, not_data_object_distance);
4417 bind(&is_data_object); 4400 bind(&is_data_object);
4418 } 4401 }
4419 4402
4420 4403
4421 void MacroAssembler::GetMarkBits(Register addr_reg, 4404 void MacroAssembler::GetMarkBits(Register addr_reg,
4422 Register bitmap_reg, 4405 Register bitmap_reg,
4423 Register mask_reg) { 4406 Register mask_reg) {
4424 ASSERT(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx)); 4407 ASSERT(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx));
4425 movq(bitmap_reg, addr_reg); 4408 movq(bitmap_reg, addr_reg);
4426 // Sign-extended 32-bit immediate. 4409 // Sign-extended 32-bit immediate.
4427 and_(bitmap_reg, Immediate(~Page::kPageAlignmentMask)); 4410 and_(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
4428 movq(rcx, addr_reg); 4411 movq(rcx, addr_reg);
4429 int shift = 4412 int shift =
4430 Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2; 4413 Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
4431 shrl(rcx, Immediate(shift)); 4414 shrl(rcx, Immediate(shift));
4432 and_(rcx, 4415 and_(rcx,
4433 Immediate((Page::kPageAlignmentMask >> shift) & 4416 Immediate((Page::kPageAlignmentMask >> shift) &
4434 ~(Bitmap::kBytesPerCell - 1))); 4417 ~(Bitmap::kBytesPerCell - 1)));
4435 4418
4436 addq(bitmap_reg, rcx); 4419 addq(bitmap_reg, rcx);
4437 movq(rcx, addr_reg); 4420 movq(rcx, addr_reg);
4438 shrl(rcx, Immediate(kPointerSizeLog2)); 4421 shrl(rcx, Immediate(kPointerSizeLog2));
4439 and_(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1)); 4422 and_(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
4440 movl(mask_reg, Immediate(1)); 4423 movl(mask_reg, Immediate(1));
4441 shl_cl(mask_reg); 4424 shl_cl(mask_reg);
4442 } 4425 }
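The index arithmetic can be restated in C++. A sketch assuming 32-bit bitmap cells (kBitsPerCellLog2 == 5, kBytesPerCellLog2 == 2) and 8-byte pointers (kPointerSizeLog2 == 3), so each bitmap byte maps 1 << 6 heap bytes; the bitmap itself starts at a fixed header offset from the chunk start:

#include <cstdint>

// Illustrative model of the mark-bit addressing above.
void ComputeMarkBits(uintptr_t addr, uintptr_t page_alignment_mask,
                     uintptr_t* cell_address, uint32_t* mask) {
  uintptr_t chunk = addr & ~page_alignment_mask;  // chunk (page) start
  const int shift = 5 + 3 - 2;                    // heap bytes per cell byte
  uintptr_t cell_offset =
      (addr >> shift) & ((page_alignment_mask >> shift) & ~uintptr_t{3});
  *cell_address = chunk + cell_offset;            // + MemoryChunk::kHeaderSize
  *mask = uint32_t{1} << ((addr >> 3) & 31);      // bit within the cell
}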
4443 4426
4444 4427
4445 void MacroAssembler::EnsureNotWhite( 4428 void MacroAssembler::EnsureNotWhite(
4446 Register value, 4429 Register value,
4447 Register bitmap_scratch, 4430 Register bitmap_scratch,
4448 Register mask_scratch, 4431 Register mask_scratch,
4449 Label* value_is_white_and_not_data, 4432 Label* value_is_white_and_not_data,
4450 Label::Distance distance) { 4433 Label::Distance distance) {
4451 ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, rcx)); 4434 ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, rcx));
4452 GetMarkBits(value, bitmap_scratch, mask_scratch); 4435 GetMarkBits(value, bitmap_scratch, mask_scratch);
4453 4436
4454 // If the value is black or grey we don't need to do anything. 4437 // If the value is black or grey we don't need to do anything.
4455 ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0); 4438 ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
4456 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0); 4439 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
4457 ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0); 4440 ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
4458 ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0); 4441 ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
4459 4442
4460 Label done; 4443 Label done;
4461 4444
4462 // Since both black and grey have a 1 in the first position and white does 4445 // Since both black and grey have a 1 in the first position and white does
4463 // not have a 1 there we only need to check one bit. 4446 // not have a 1 there we only need to check one bit.
4464 testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch); 4447 testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
4465 j(not_zero, &done, Label::kNear); 4448 j(not_zero, &done, Label::kNear);
4466 4449
4467 if (emit_debug_code()) { 4450 if (emit_debug_code()) {
4468 // Check for impossible bit pattern. 4451 // Check for impossible bit pattern.
4469 Label ok; 4452 Label ok;
4470 push(mask_scratch); 4453 push(mask_scratch);
4471 // shl. May overflow making the check conservative. 4454 // shl. May overflow making the check conservative.
4472 addq(mask_scratch, mask_scratch); 4455 addq(mask_scratch, mask_scratch);
4473 testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch); 4456 testq(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
4474 j(zero, &ok, Label::kNear); 4457 j(zero, &ok, Label::kNear);
4475 int3(); 4458 int3();
4476 bind(&ok); 4459 bind(&ok);
4477 pop(mask_scratch); 4460 pop(mask_scratch);
4478 } 4461 }

  // Value is white.  We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = rcx;  // Holds map while checking type.
  Register length = rcx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number.
  movq(map, FieldOperand(value, HeapObject::kMapOffset));
  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  j(not_equal, &not_heap_number, Label::kNear);
  movq(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not an indirect (cons or sliced) string, then
  // it's an object containing no GC pointers.
  Register instance_type = rcx;
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  testb(instance_type, Immediate(kIsIndirectStringMask | kIsNotStringMask));
  j(not_zero, value_is_white_and_not_data);
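  // The combined mask works because, per the ASSERTs above, kIsNotStringMask
  // (0x80) is set exactly for non-strings and kIsIndirectStringMask (0x01)
  // exactly for cons and sliced strings, so one testb rejects both at once.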
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  testb(instance_type, Immediate(kExternalStringTag));
  j(zero, &not_external, Label::kNear);
  movq(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kAsciiStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  addq(length, Immediate(0x04));
  // Value now either 4 (if ASCII) or 8 (if UC16), i.e. char-size shifted by 2.
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, Immediate(2 + kSmiTagSize + kSmiShiftSize));
  addq(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));
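  // Worked example (values assumed for illustration): for a 13-character
  // ASCII string with kSmiTagSize + kSmiShiftSize == 32, length is 4, imul
  // by the smi 13 (i.e. 13 << 32) gives 52 << 32, and shr by 34 leaves 13
  // payload bytes.  Assuming SeqString::kHeaderSize == 16 and
  // kObjectAlignmentMask == 7, (13 + 16 + 7) & ~7 == 32 live bytes.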

  bind(&is_data_object);
  // Value is a data object, and it is white.  Mark it black.  Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  addl(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), length);

  bind(&done);
}
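
Reviewer note: the single or_ at the end is the whole white-to-black transition. A minimal standalone C++ model of that step, assuming a flat uint32_t bitmap cell and a mask with only the first bit of the object's pair set; IsWhite and MarkBlackFromWhite are illustrative helpers, not V8's Bitmap API:

#include <cassert>
#include <cstdint>

// Each object owns two adjacent mark bits: white = 00, black = 10, grey = 11.
// 'mask' has exactly one bit set, at the first bit of the object's pair.
static bool IsWhite(uint32_t cell, uint32_t mask) {
  return (cell & mask) == 0;  // first bit clear => white ("01" cannot occur)
}

static void MarkBlackFromWhite(uint32_t* cell, uint32_t mask) {
  assert(IsWhite(*cell, mask));
  *cell |= mask;  // 00 -> 10: flipping one bit suffices, as in the or_ above
}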


void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
  Label next, start;
  Register empty_fixed_array_value = r8;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  movq(rcx, rax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));

  EnumLength(rdx, rbx);
  Cmp(rdx, Smi::FromInt(Map::kInvalidEnumCache));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);

  movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(rdx, rbx);
  Cmp(rdx, Smi::FromInt(0));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements.  Register rcx contains the current JS
  // object we've reached through the prototype chain.
  cmpq(empty_fixed_array_value,
       FieldOperand(rcx, JSObject::kElementsOffset));
  j(not_equal, call_runtime);

  movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
  cmpq(rcx, null_value);
  j(not_equal, &next);
}
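
Reviewer note: the label-based control flow above is easier to follow in straight-line form. A hedged C++ sketch of the same prototype-chain walk; Object, Map, and the kInvalidEnumCache sentinel here are illustrative stand-ins, not V8's real classes or constants:

// Illustrative stand-ins for the heap objects the assembly traverses.
struct Map;
struct Object { Map* map; void* elements; };
struct Map { int enum_length; Object* prototype; };

// Returns true when a for-in can use the enum cache; returning false mirrors
// a jump to call_runtime.  'null_value' and 'empty_fixed_array' stand in for
// the sentinel objects the assembly keeps in registers.
bool CheckEnumCacheSketch(Object* receiver, Object* null_value,
                          void* empty_fixed_array) {
  const int kInvalidEnumCache = -1;  // assumed sentinel value
  // The receiver's enum length must be initialized (anything but invalid).
  if (receiver->map->enum_length == kInvalidEnumCache) return false;
  Object* current = receiver;
  for (;;) {
    // No object on the chain may have elements (label 'start').
    if (current->elements != empty_fixed_array) return false;
    Object* proto = current->map->prototype;
    if (proto == null_value) return true;  // reached the end of the chain
    current = proto;
    // Prototypes, unlike the receiver, must have an empty cache (label 'next').
    if (current->map->enum_length != 0) return false;
  }
}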


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64